diff --git a/.github/workflows/format_check.yml b/.github/workflows/format_check.yml
new file mode 100644
index 000000000..9225af459
--- /dev/null
+++ b/.github/workflows/format_check.yml
@@ -0,0 +1,25 @@
+name: Format Check
+
+on: [push, pull_request]
+
+jobs:
+ format-check:
+ runs-on: ubuntu-latest
+
+ steps:
+ - name: Checkout code
+ uses: actions/checkout@v2
+
+ - name: Set up Python
+ uses: actions/setup-python@v2
+ with:
+ python-version: '3.10'
+
+ - name: Install dependencies
+ run: |
+ python -m pip install --upgrade pip
+ pip install ruff
+
+ - name: Check code formatting
+ run: |
+ ruff check . --diff
diff --git a/.github/workflows/lint.yml b/.github/workflows/lint.yml
new file mode 100644
index 000000000..c4fd6475b
--- /dev/null
+++ b/.github/workflows/lint.yml
@@ -0,0 +1,25 @@
+name: Lint
+
+on: [push, pull_request]
+
+jobs:
+ lint:
+ runs-on: ubuntu-latest
+
+ steps:
+ - name: Checkout code
+ uses: actions/checkout@v2
+
+ - name: Set up Python
+ uses: actions/setup-python@v2
+ with:
+ python-version: '3.10'
+
+ - name: Install dependencies
+ run: |
+ python -m pip install --upgrade pip
+ pip install ruff
+
+ - name: Run linter
+ run: |
+ ruff check .
diff --git a/.github/workflows/spell_check.yml b/.github/workflows/spell_check.yml
new file mode 100644
index 000000000..bf7cdece5
--- /dev/null
+++ b/.github/workflows/spell_check.yml
@@ -0,0 +1,25 @@
+name: Spell Check
+
+on: [push, pull_request]
+
+jobs:
+ spell-check:
+ runs-on: ubuntu-latest
+
+ steps:
+ - name: Checkout code
+ uses: actions/checkout@v2
+
+ - name: Set up Python
+ uses: actions/setup-python@v2
+ with:
+ python-version: '3.10'
+
+ - name: Install codespell
+ run: |
+ python -m pip install --upgrade pip
+ pip install codespell
+
+ - name: Run codespell
+ run: |
+ codespell .
diff --git a/build.py b/build.py
index 6aa7fac0d..aff73e332 100644
--- a/build.py
+++ b/build.py
@@ -1,8 +1,9 @@
-#!/usr/bin/env python
-# -*- coding: utf-8 -*-
+#!/usr/bin/env python # noqa: EXE001, D100, CPY001
from bincrafters import build_template_default
-if __name__ == "__main__":
- builder = build_template_default.get_builder(build_types=["Release"], archs=["x86_64"])
+if __name__ == '__main__':
+ builder = build_template_default.get_builder(
+ build_types=['Release'], archs=['x86_64']
+ )
builder.run()
diff --git a/cmake/SimCenterFunctions.cmake b/cmake/SimCenterFunctions.cmake
index b97b3d599..6b0956416 100644
--- a/cmake/SimCenterFunctions.cmake
+++ b/cmake/SimCenterFunctions.cmake
@@ -23,7 +23,7 @@ set(SIMCENTER_GLOB_FILES *.cpp
# Careful: when file was added or deleted, cmake has to be run again
# EXCLUDE [optional] Files that should not be included in the module (but are located in module directory).
# This makes only sense if FILES was not specified, and all files have been added automatically.
-# BUILD_ONLY_IF_FOUND Before building the module test if all libraries specified here are availbable.
+# BUILD_ONLY_IF_FOUND Before building the module test if all libraries specified here are available.
# [optional] This is done using the ${arg}_FOUND variable.
# Example: simcenter_add_module(DEPENDS someModule BUILD_ONLY_IF_FOUND something)
# This module is only built if SOMETHING_FOUND is true.
diff --git a/cmake/SimCenterModuleDependencies.cmake b/cmake/SimCenterModuleDependencies.cmake
index 24676a623..8644aef57 100644
--- a/cmake/SimCenterModuleDependencies.cmake
+++ b/cmake/SimCenterModuleDependencies.cmake
@@ -218,7 +218,7 @@ endfunction(target_link_modules)
#######################################################################################################################
# If path contains a CMakeLists.txt, path is returned
-# otherwise recursivly the parent directories are checked and returned if they contain a CMakeLists.txt
+# otherwise recursively the parent directories are checked and returned if they contain a CMakeLists.txt
#######################################################################################################################
function(get_parent_with_cmakelists result dir)
diff --git a/conanfile.py b/conanfile.py
index 7af57d071..98c1d6706 100644
--- a/conanfile.py
+++ b/conanfile.py
@@ -1,57 +1,69 @@
-from conans import ConanFile, CMake, tools
-import os
-
-class simCenterBackendApps(ConanFile):
- name = "SimCenterBackendApplications"
- version = "1.2.2"
- description = "Backend applications for SimCenter software"
- license = "BSD 3-Clause"
- author = "Michael Gardner mhgardner@berkeley.edu"
- url = "https://github.com/NHERI-SimCenter/SimCenterBackendApplications"
- settings = {"os": None, "build_type": None, "compiler": None, "arch": ["x86_64","armv8"]}
- options = {"shared": [True, False]}
- default_options = {"mkl-static:threaded": False, "ipp-static:simcenter_backend": True, "libcurl:with_ssl":"openssl"}
- generators = "cmake"
- build_policy = "missing"
- requires = ["jansson/2.13.1",
- "zlib/1.2.11",
- "libcurl/8.1.1",
- "eigen/3.3.7",
- "clara/1.1.5",
- "jsonformoderncpp/3.7.0",
- "nanoflann/1.3.2",
- "nlopt/2.7.1"]
-
+import os # noqa: D100, CPY001
+
+from conans import CMake, ConanFile
+
+
+class simCenterBackendApps(ConanFile): # noqa: D101
+ name = 'SimCenterBackendApplications'
+ version = '1.2.2'
+ description = 'Backend applications for SimCenter software'
+ license = 'BSD 3-Clause'
+ author = 'Michael Gardner mhgardner@berkeley.edu'
+ url = 'https://github.com/NHERI-SimCenter/SimCenterBackendApplications'
+ settings = { # noqa: RUF012
+ 'os': None,
+ 'build_type': None,
+ 'compiler': None,
+ 'arch': ['x86_64', 'armv8'],
+ }
+ options = {'shared': [True, False]} # noqa: RUF012
+ default_options = { # noqa: RUF012
+ 'mkl-static:threaded': False,
+ 'ipp-static:simcenter_backend': True,
+ 'libcurl:with_ssl': 'openssl',
+ }
+ generators = 'cmake'
+ build_policy = 'missing'
+ requires = [ # noqa: RUF012
+ 'jansson/2.13.1',
+ 'zlib/1.2.11',
+ 'libcurl/8.1.1',
+ 'eigen/3.3.7',
+ 'clara/1.1.5',
+ 'jsonformoderncpp/3.7.0',
+ 'nanoflann/1.3.2',
+ 'nlopt/2.7.1',
+ ]
+
# Custom attributes for Bincrafters recipe conventions
- _source_subfolder = "source_subfolder"
- _build_subfolder = "build_subfolder"
+ _source_subfolder = 'source_subfolder'
+ _build_subfolder = 'build_subfolder'
# Set short paths for Windows
- short_paths = True
- scm = {
- "type": "git", # Use "type": "svn", if local repo is managed using SVN
- "subfolder": _source_subfolder,
- "url": "auto",
- "revision": "auto"
+ short_paths = True
+ scm = { # noqa: RUF012
+ 'type': 'git', # Use "type": "svn", if local repo is managed using SVN
+ 'subfolder': _source_subfolder,
+ 'url': 'auto',
+ 'revision': 'auto',
}
-
- def configure(self):
+ def configure(self): # noqa: D102
self.options.shared = False
- def configure_cmake(self):
+ def configure_cmake(self): # noqa: D102
cmake = CMake(self)
cmake.configure(source_folder=self._source_subfolder)
return cmake
-
- def build(self):
+
+ def build(self): # noqa: D102
cmake = self.configure_cmake()
cmake.build()
- def package(self):
- self.copy(pattern="LICENSE", dst="licenses", src=self._source_subfolder)
+ def package(self): # noqa: D102
+ self.copy(pattern='LICENSE', dst='licenses', src=self._source_subfolder)
cmake = self.configure_cmake()
cmake.install()
- self.copy("*", dst="bin", src=self._source_subfolder + "/applications")
+ self.copy('*', dst='bin', src=self._source_subfolder + '/applications')
- def package_info(self):
- self.env_info.PATH.append(os.path.join(self.package_folder, "bin"))
+ def package_info(self): # noqa: D102
+ self.env_info.PATH.append(os.path.join(self.package_folder, 'bin')) # noqa: PTH118
diff --git a/ignore_words.txt b/ignore_words.txt
new file mode 100644
index 000000000..3b80f1084
--- /dev/null
+++ b/ignore_words.txt
@@ -0,0 +1,23 @@
+theRes
+nd
+ned
+pres
+writeT
+sOrD
+responseD
+WORKD
+workd
+oT
+WriteT
+Suh
+WLL
+VIE
+Momento
+lamda
+dum
+numer
+reord
+fro
+Equil
+ot
+repID
diff --git a/meta/backends.cache.json b/meta/backends.cache.json
index 8c3671107..44c024219 100644
--- a/meta/backends.cache.json
+++ b/meta/backends.cache.json
@@ -111,7 +111,7 @@
"name": "buildingFile",
"type": "FileName(W)",
"default": "buildingsFile.json",
- "help": "name of JSON file containing an array of id,filename pairs. For each building id the aplication creates a BIM file with name filename",
+ "help": "name of JSON file containing an array of id,filename pairs. For each building id the application creates a BIM file with name filename",
"app-specific": true
},
{
@@ -649,7 +649,7 @@
{
"name": "pathSW4results",
"type": "path",
- "help": "Path to directory containig output point files",
+ "help": "Path to directory containing output point files",
"app-specific": true
}
],
diff --git a/modules/ErrorCodes.txt b/modules/ErrorCodes.txt
index 914028bf6..15791949c 100644
--- a/modules/ErrorCodes.txt
+++ b/modules/ErrorCodes.txt
@@ -1,6 +1,6 @@
100, Workflow Application Failed
101, Workflow Application failed to open input file
-102, Workflow Application Failed to find approprate application
+102, Workflow Application Failed to find appropriate application
103, Workflow Application failed to launch a workflow application
200, BIM application Failed
300, EVENT application failed
diff --git a/modules/Workflow/AggregateResults.py b/modules/Workflow/AggregateResults.py
index c6383d06c..a075984a4 100644
--- a/modules/Workflow/AggregateResults.py
+++ b/modules/Workflow/AggregateResults.py
@@ -1,5 +1,4 @@
-# -*- coding: utf-8 -*-
-#
+# # noqa: INP001, D100
# Copyright (c) 2018 Leland Stanford Junior University
# Copyright (c) 2018 The Regents of the University of California
#
@@ -39,36 +38,40 @@
# Wael Elhaddad
#
+import argparse
import glob
+from datetime import datetime
+
import numpy as np
import pandas as pd
-import argparse
-from datetime import datetime
-from time import strftime
-
-def log_msg(msg):
- print('{} {}'.format(datetime.utcnow().strftime('%Y-%m-%dT%H:%M:%S:%fZ')[:-4], msg))
+def log_msg(msg): # noqa: D103
+ print( # noqa: T201
+ '{} {}'.format(datetime.utcnow().strftime('%Y-%m-%dT%H:%M:%S:%fZ')[:-4], msg) # noqa: DTZ003
+ )
-def main(threads = 1):
- headers = dict(
- IM = [0, 1, 2, 3],
- BIM = [0, ],
- EDP = [0, 1, 2, 3],
- DM = [0, 1, 2],
- DV = [0, 1, 2, 3])
+def main(threads=1): # noqa: C901, D103
+ headers = dict( # noqa: C408
+ IM=[0, 1, 2, 3],
+ BIM=[
+ 0,
+ ],
+ EDP=[0, 1, 2, 3],
+ DM=[0, 1, 2],
+ DV=[0, 1, 2, 3],
+ )
use_dask = threads > 1
if use_dask:
+ log_msg(f'{threads} threads requested. Using DASK.')
- log_msg('{} threads requested. Using DASK.'.format(threads))
+ import math # noqa: PLC0415
- from dask.distributed import Client, LocalCluster
- from dask import delayed
- import math
+ from dask import delayed # noqa: PLC0415
+ from dask.distributed import Client, LocalCluster # noqa: PLC0415
@delayed
def read_csv_files(file_list, header):
@@ -77,22 +80,23 @@ def read_csv_files(file_list, header):
def read_csv_np(file, header):
res = np.loadtxt(file, delimiter=',', dtype=str)
- first_row = header[-1]+1
+ first_row = header[-1] + 1
data = res[first_row:].T[1:].T
data[data == ''] = np.nan
tuples = [tuple(h) for h in res[:first_row].T[1:]]
- MI = pd.MultiIndex.from_tuples(tuples, names=res[:first_row].T[0])
+ MI = pd.MultiIndex.from_tuples(tuples, names=res[:first_row].T[0]) # noqa: N806
- df = pd.DataFrame(data, columns=MI, index=res[first_row:].T[0], dtype=float)
+ df = pd.DataFrame( # noqa: PD901
+ data, columns=MI, index=res[first_row:].T[0], dtype=float
+ )
- return df
+ return df # noqa: RET504
@delayed
def read_csv_files_np(file_list, header):
return [read_csv_np(fn, header=header) for fn in file_list]
-
cluster = LocalCluster()
client = Client(cluster)
@@ -100,36 +104,34 @@ def read_csv_files_np(file_list, header):
log_msg(client)
for res_type in ['IM', 'BIM', 'EDP', 'DM', 'DV']:
+ log_msg(f'Loading {res_type} files...')
- log_msg('Loading {} files...'.format(res_type))
-
- files = glob.glob('./results/{}/*/{}_*.csv'.format(res_type, res_type))
- #files = files[:1000]
+ files = glob.glob(f'./results/{res_type}/*/{res_type}_*.csv') # noqa: PTH207
+ # files = files[:1000]
if len(files) > 0:
-
if use_dask:
-
file_count = len(files)
- chunk = math.ceil(file_count/threads)
+ chunk = math.ceil(file_count / threads)
df_list = []
- print('Creating threads for {} files...'.format(file_count))
+ print(f'Creating threads for {file_count} files...') # noqa: T201
for t_i in range(threads):
+ # print(t_i)
- #print(t_i)
-
- if t_i*chunk < file_count-1:
-
- df_list_i = delayed(read_csv_files)(files[t_i*chunk:(t_i+1)*chunk], headers[res_type])
+ if t_i * chunk < file_count - 1:
+ df_list_i = delayed(read_csv_files)(
+ files[t_i * chunk : (t_i + 1) * chunk], headers[res_type]
+ )
df_i = delayed(pd.concat)(df_list_i, axis=0, sort=False)
df_list.append(df_i)
- elif t_i*chunk == file_count-1:
-
- df_i = delayed(read_csv_files)(files[t_i*chunk:(t_i+1)*chunk], headers[res_type])
+ elif t_i * chunk == file_count - 1:
+ df_i = delayed(read_csv_files)(
+ files[t_i * chunk : (t_i + 1) * chunk], headers[res_type]
+ )
df_i = df_i[0]
df_list.append(df_i)
@@ -142,25 +144,33 @@ def read_csv_files_np(file_list, header):
else:
log_msg('Loading all files')
- df_list = [pd.read_csv(resFileName, header=headers[res_type], index_col=0) for resFileName in files]
+ df_list = [
+ pd.read_csv(resFileName, header=headers[res_type], index_col=0)
+ for resFileName in files
+ ]
log_msg('Concatenating all files')
df_all = pd.concat(df_list, axis=0, sort=False)
- df_all.sort_index(axis=0, inplace=True)
+ df_all.sort_index(axis=0, inplace=True) # noqa: PD002
# save the results
log_msg('Saving results')
df_all.index = df_all.index.astype(np.int32)
- df_all.to_hdf('{}.hdf'.format(res_type), 'data', mode='w', format='fixed', complevel=1, complib='blosc:blosclz')
- #df_all.to_csv('{}.csv'.format(res_type))
+ df_all.to_hdf(
+ f'{res_type}.hdf',
+ 'data',
+ mode='w',
+ format='fixed',
+ complevel=1,
+ complib='blosc:blosclz',
+ )
+ # df_all.to_csv('{}.csv'.format(res_type))
else:
-
- print('No {} files found'.format(res_type))
+ print(f'No {res_type} files found') # noqa: T201
if use_dask:
-
log_msg('Closing cluster...')
cluster.close()
client.close()
@@ -168,9 +178,11 @@ def read_csv_files_np(file_list, header):
# aggregate the realizations files
log_msg('Aggregating individual realizations...')
- files = glob.glob('./results/{}/*/{}_*.hdf'.format('realizations','realizations'))
+ files = glob.glob( # noqa: PTH207
+ './results/{}/*/{}_*.hdf'.format('realizations', 'realizations')
+ )
- log_msg('Number of files: {}'.format(len(files)))
+ log_msg(f'Number of files: {len(files)}')
# get the keys from the first file
if len(files) > 0:
@@ -179,7 +191,7 @@ def read_csv_files_np(file_list, header):
first_file.close()
for key in keys:
- log_msg('Processing realizations for key {key}'.format(key=key))
+ log_msg(f'Processing realizations for key {key}')
df_list = [pd.read_hdf(resFileName, key) for resFileName in files]
log_msg('\t\tConcatenating files')
@@ -187,31 +199,46 @@ def read_csv_files_np(file_list, header):
df_all.index = df_all.index.astype(np.int32)
- df_all.sort_index(axis=0, inplace=True)
+ df_all.sort_index(axis=0, inplace=True) # noqa: PD002
try:
- df_all.astype(np.float32).to_hdf('realizations.hdf', key, mode='a', format='fixed', complevel=1, complib='blosc:blosclz')
- except:
- df_all.to_hdf('realizations.hdf', key, mode='a', format='fixed', complevel=1, complib='blosc:blosclz')
-
- log_msg('\t\tResults saved for {key}.'.format(key=key))
+ df_all.astype(np.float32).to_hdf(
+ 'realizations.hdf',
+ key,
+ mode='a',
+ format='fixed',
+ complevel=1,
+ complib='blosc:blosclz',
+ )
+ except: # noqa: E722
+ df_all.to_hdf(
+ 'realizations.hdf',
+ key,
+ mode='a',
+ format='fixed',
+ complevel=1,
+ complib='blosc:blosclz',
+ )
+
+ log_msg(f'\t\tResults saved for {key}.')
log_msg('End of script')
-if __name__ == "__main__":
+if __name__ == '__main__':
+ # Defining the command line arguments
- #Defining the command line arguments
+ workflowArgParser = argparse.ArgumentParser('Aggregate the results from rWHALE.') # noqa: N816
- workflowArgParser = argparse.ArgumentParser(
- "Aggregate the results from rWHALE.")
+ workflowArgParser.add_argument(
+ '-threads',
+ '-t',
+ type=int,
+ default=48,
+ help='Number of threads to use to aggregate the files.',
+ )
- workflowArgParser.add_argument("-threads", "-t",
- type=int, default=48,
- help="Number of threads to use to aggregate the files.")
-
- #Parsing the command line arguments
+ # Parsing the command line arguments
line_args = workflowArgParser.parse_args()
-
- main(line_args.threads)
\ No newline at end of file
+ main(line_args.threads)
diff --git a/modules/Workflow/CreateWorkflowJobs.py b/modules/Workflow/CreateWorkflowJobs.py
index 3c56eb980..72ad3324f 100644
--- a/modules/Workflow/CreateWorkflowJobs.py
+++ b/modules/Workflow/CreateWorkflowJobs.py
@@ -1,5 +1,4 @@
-# -*- coding: utf-8 -*-
-#
+# # noqa: INP001, D100
# Copyright (c) 2018 Leland Stanford Junior University
# Copyright (c) 2018 The Regents of the University of California
#
@@ -39,114 +38,136 @@
# Wael Elhaddad
#
-import os
-import math
-import json
import argparse
+import json
+import math
+import os
+
import numpy as np
-def generate_workflow_tasks(bldg_filter, config_file, out_dir, task_size,
- rWHALE_dir):
- jobId = os.getenv('SLURM_JOB_ID') # We might need this later
+def generate_workflow_tasks( # noqa: C901, D103
+ bldg_filter,
+ config_file,
+ out_dir,
+ task_size,
+ rWHALE_dir, # noqa: N803
+):
+ jobId = os.getenv('SLURM_JOB_ID') # We might need this later # noqa: N806, F841
# get the type of outputs requested
- with open(f'{rWHALE_dir}/{config_file}', 'r') as f:
+ with open(f'{rWHALE_dir}/{config_file}') as f: # noqa: PLW1514, PTH123
settings = json.load(f)
- output_types = [out_type for out_type, val in settings['outputs'].items()
- if val == True]
+ output_types = [
+ out_type
+ for out_type, val in settings['outputs'].items()
+ if val == True # noqa: E712
+ ]
# KZ@220324: check if regional site response is requested
- run_regional_event = settings['Applications'].get('RegionalEvent',None)
- if run_regional_event and run_regional_event.get('Application',None) == 'RegionalSiteResponse':
- generate_workflow_tasks_regionalsiteresponse("", config_file, out_dir, task_size, rWHALE_dir)
+ run_regional_event = settings['Applications'].get('RegionalEvent', None)
+ if (
+ run_regional_event
+ and run_regional_event.get('Application', None) == 'RegionalSiteResponse'
+ ):
+ generate_workflow_tasks_regionalsiteresponse(
+ '', config_file, out_dir, task_size, rWHALE_dir
+ )
# get the list of buildings requested to run
- if bldg_filter == "":
+ if bldg_filter == '': # noqa: PLC1901
# we pull the bldg_filter from the config file
- bldg_filter = settings['Applications']['Building']['ApplicationData'].get('filter', "")
+ bldg_filter = settings['Applications']['Building']['ApplicationData'].get(
+ 'filter', ''
+ )
- if bldg_filter == "":
- raise ValueError(
- "Running a regional simulation on DesignSafe requires either "
+ if bldg_filter == '': # noqa: PLC1901
+ raise ValueError( # noqa: TRY003
+ 'Running a regional simulation on DesignSafe requires either ' # noqa: EM101
"the 'buildingFilter' parameter to be set for the workflow "
"application or the 'filter' parameter set for the Building "
- "application in the workflow configuration file. Neither was "
- "provided in the current job. If you want to run every building "
+ 'application in the workflow configuration file. Neither was '
+ 'provided in the current job. If you want to run every building '
"in the input file, provide the filter like '#min-#max' where "
- "#min is the id of the first building and #max is the id of the "
- "last building in the inventory."
+ '#min is the id of the first building and #max is the id of the '
+ 'last building in the inventory.'
)
# note: we assume that there are no gaps in the indexes
bldgs_requested = []
for bldgs in bldg_filter.split(','):
- if "-" in bldgs:
- bldg_low, bldg_high = bldgs.split("-")
- bldgs_requested += list(
- range(int(bldg_low), int(bldg_high) + 1))
+ if '-' in bldgs:
+ bldg_low, bldg_high = bldgs.split('-')
+ bldgs_requested += list(range(int(bldg_low), int(bldg_high) + 1))
else:
bldgs_requested.append(int(bldgs))
count = len(bldgs_requested)
- tasksCount = int(math.ceil(count/task_size))
+ tasksCount = int(math.ceil(count / task_size)) # noqa: N806
- workflowScript = f"/tmp/{rWHALE_dir}/applications/Workflow/rWHALE.py"
+ workflowScript = f'/tmp/{rWHALE_dir}/applications/Workflow/rWHALE.py' # noqa: S108, N806
subfolder = 0
- for i in range(0, tasksCount):
-
- bldg_list = np.array(bldgs_requested[i*task_size : (i+1)*task_size])
+ for i in range(tasksCount):
+ bldg_list = np.array(bldgs_requested[i * task_size : (i + 1) * task_size])
# do not try to run sims if there are no bldgs to run
if len(bldg_list) > 0:
-
- min_ID = bldg_list[0]
- max_ID = bldg_list[-1]
+ min_ID = bldg_list[0] # noqa: N806
+ max_ID = bldg_list[-1] # noqa: N806
max_ids = np.where(np.diff(bldg_list) != 1)[0]
- max_ids = np.append(max_ids, [len(bldg_list) - 1, ]).astype(int)
+ max_ids = np.append(
+ max_ids,
+ [
+ len(bldg_list) - 1,
+ ],
+ ).astype(int)
min_ids = np.zeros(max_ids.shape, dtype=int)
min_ids[1:] = max_ids[:-1] + 1
- filter = ""
+ filter = '' # noqa: A001
for i_min, i_max in zip(min_ids, max_ids):
if i_min == i_max:
- filter += f",{bldg_list[i_min]}"
+ filter += f',{bldg_list[i_min]}' # noqa: A001
else:
- filter += f",{bldg_list[i_min]}-{bldg_list[i_max]}"
- filter = filter[1:] # to remove the initial comma
+ filter += f',{bldg_list[i_min]}-{bldg_list[i_max]}' # noqa: A001
+ filter = filter[1:] # to remove the initial comma # noqa: A001
- if (i%500) == 0:
- subfolder = subfolder + 1
+ if (i % 500) == 0:
+ subfolder = subfolder + 1 # noqa: PLR6104
- run_dir = (f"/tmp/{rWHALE_dir}"
- f"/applications/Workflow/RunDir{min_ID}-{max_ID}")
+ run_dir = (
+ f'/tmp/{rWHALE_dir}' # noqa: S108
+ f'/applications/Workflow/RunDir{min_ID}-{max_ID}'
+ )
- log_path = (f"{out_dir}/logs/{subfolder}"
- f"/log{min_ID:07d}-{max_ID:07d}.txt")
+ log_path = (
+ f'{out_dir}/logs/{subfolder}' f'/log{min_ID:07d}-{max_ID:07d}.txt'
+ )
- task_list = ""
+ task_list = ''
# create the subfolder to store log files
task_list += f'mkdir -p {out_dir}/logs/{subfolder}/ && '
# run the simulation
- task_list += (f'python3 {workflowScript} '
- f'/tmp/{rWHALE_dir}/{config_file} '
- f'-d /tmp/{rWHALE_dir}/input_data '
- f'-w {run_dir} -l {log_path} '
- f'--filter {filter} '
- f'-s parallel && ')
+ task_list += (
+ f'python3 {workflowScript} '
+ f'/tmp/{rWHALE_dir}/{config_file} ' # noqa: S108
+ f'-d /tmp/{rWHALE_dir}/input_data '
+ f'-w {run_dir} -l {log_path} '
+ f'--filter {filter} '
+ f'-s parallel && '
+ )
# copy the results from the task for aggregation
for out_type in output_types:
-
res_type = None
- if out_type in ['BIM', 'EDP', 'DM', 'DV']:
+ if out_type in ['BIM', 'EDP', 'DM', 'DV']: # noqa: PLR6201
res_type = out_type
file_name = f'{res_type}*.csv'
elif out_type == 'every_realization':
@@ -154,223 +175,266 @@ def generate_workflow_tasks(bldg_filter, config_file, out_dir, task_size,
file_name = f'{res_type}*.hdf'
if res_type is not None:
- task_list += (f'mkdir -p {out_dir}/results/{res_type}'
- f'/{subfolder}/ && ')
+ task_list += (
+ f'mkdir -p {out_dir}/results/{res_type}' f'/{subfolder}/ && '
+ )
- task_list += (f'cp -f {run_dir}/{file_name} {out_dir}/results'
- f'/{res_type}/{subfolder}/ && ')
+ task_list += (
+ f'cp -f {run_dir}/{file_name} {out_dir}/results'
+ f'/{res_type}/{subfolder}/ && '
+ )
# remove the results after the simulation is completed
- task_list += f"rm -rf {run_dir} \n"
+ task_list += f'rm -rf {run_dir} \n'
# write the tasks to the output file
- with open('WorkflowJobs.txt', 'a+') as tasksFile:
+ with open('WorkflowJobs.txt', 'a+') as tasksFile: # noqa: N806, PLW1514, PTH123
tasksFile.write(task_list)
-def generate_workflow_tasks_siteresponse(bldg_filter, config_file, out_dir, task_size,
- rWHALE_dir):
- jobId = os.getenv('SLURM_JOB_ID') # We might need this later
+def generate_workflow_tasks_siteresponse( # noqa: D103
+ bldg_filter,
+ config_file,
+ out_dir,
+ task_size,
+ rWHALE_dir, # noqa: N803
+):
+ jobId = os.getenv('SLURM_JOB_ID') # We might need this later # noqa: N806, F841
# get the type of outputs requested
- with open(f'{rWHALE_dir}/{config_file}', 'r') as f:
+ with open(f'{rWHALE_dir}/{config_file}') as f: # noqa: PLW1514, PTH123
settings = json.load(f)
- output_types = [out_type for out_type, val in settings['outputs'].items()
- if val == True]
+ output_types = [ # noqa: F841
+ out_type
+ for out_type, val in settings['outputs'].items()
+ if val == True # noqa: E712
+ ]
# get the list of buildings requested to run
- if bldg_filter == "":
+ if bldg_filter == '': # noqa: PLC1901
# we pull the bldg_filter from the config file
- bldg_filter = settings['Applications']['Building']['ApplicationData'].get('filter', "")
+ bldg_filter = settings['Applications']['Building']['ApplicationData'].get(
+ 'filter', ''
+ )
- if bldg_filter == "":
- raise ValueError(
- "Running a regional simulation on DesignSafe requires either "
+ if bldg_filter == '': # noqa: PLC1901
+ raise ValueError( # noqa: TRY003
+ 'Running a regional simulation on DesignSafe requires either ' # noqa: EM101
"the 'buildingFilter' parameter to be set for the workflow "
"application or the 'filter' parameter set for the Building "
- "application in the workflow configuration file. Neither was "
- "provided in the current job. If you want to run every building "
+ 'application in the workflow configuration file. Neither was '
+ 'provided in the current job. If you want to run every building '
"in the input file, provide the filter like '#min-#max' where "
- "#min is the id of the first building and #max is the id of the "
- "last building in the inventory."
+ '#min is the id of the first building and #max is the id of the '
+ 'last building in the inventory.'
)
# note: we assume that there are no gaps in the indexes
bldgs_requested = []
for bldgs in bldg_filter.split(','):
- if "-" in bldgs:
- bldg_low, bldg_high = bldgs.split("-")
- bldgs_requested += list(
- range(int(bldg_low), int(bldg_high) + 1))
+ if '-' in bldgs:
+ bldg_low, bldg_high = bldgs.split('-')
+ bldgs_requested += list(range(int(bldg_low), int(bldg_high) + 1))
else:
bldgs_requested.append(int(bldgs))
count = len(bldgs_requested)
- tasksCount = int(math.ceil(count/task_size))
+ tasksCount = int(math.ceil(count / task_size)) # noqa: N806
- print(f"tasksCount = {tasksCount}")
+ print(f'tasksCount = {tasksCount}') # noqa: T201
- workflowScript = f"/tmp/{rWHALE_dir}/applications/Workflow/SiteResponse_workflow.py"
+ workflowScript = ( # noqa: N806
+ f'/tmp/{rWHALE_dir}/applications/Workflow/SiteResponse_workflow.py' # noqa: S108
+ )
subfolder = 0
- for i in range(0, tasksCount):
-
- bldg_list = np.array(bldgs_requested[i*task_size : (i+1)*task_size])
+ for i in range(tasksCount):
+ bldg_list = np.array(bldgs_requested[i * task_size : (i + 1) * task_size])
# do not try to run sims if there are no bldgs to run
if len(bldg_list) > 0:
-
- min_ID = bldg_list[0]
- max_ID = bldg_list[-1]
+ min_ID = bldg_list[0] # noqa: N806
+ max_ID = bldg_list[-1] # noqa: N806
max_ids = np.where(np.diff(bldg_list) != 1)[0]
- max_ids = np.append(max_ids, [len(bldg_list) - 1, ]).astype(int)
+ max_ids = np.append(
+ max_ids,
+ [
+ len(bldg_list) - 1,
+ ],
+ ).astype(int)
min_ids = np.zeros(max_ids.shape, dtype=int)
min_ids[1:] = max_ids[:-1] + 1
- filter = ""
+ filter = '' # noqa: A001
for i_min, i_max in zip(min_ids, max_ids):
if i_min == i_max:
- filter += f",{bldg_list[i_min]}"
+ filter += f',{bldg_list[i_min]}' # noqa: A001
else:
- filter += f",{bldg_list[i_min]}-{bldg_list[i_max]}"
- filter = filter[1:] # to remove the initial comma
+ filter += f',{bldg_list[i_min]}-{bldg_list[i_max]}' # noqa: A001
+ filter = filter[1:] # to remove the initial comma # noqa: A001
- if (i%500) == 0:
- subfolder = subfolder + 1
+ if (i % 500) == 0:
+ subfolder = subfolder + 1 # noqa: PLR6104
- run_dir = (f"/tmp/{rWHALE_dir}"
- f"/applications/Workflow/RunDir{min_ID}-{max_ID}")
+ run_dir = (
+ f'/tmp/{rWHALE_dir}' # noqa: S108
+ f'/applications/Workflow/RunDir{min_ID}-{max_ID}'
+ )
- log_path = (f"{out_dir}/logs/{subfolder}"
- f"/log{min_ID:07d}-{max_ID:07d}.txt")
+ log_path = (
+ f'{out_dir}/logs/{subfolder}' f'/log{min_ID:07d}-{max_ID:07d}.txt'
+ )
- task_list = ""
+ task_list = ''
# create the subfolder to store log files
task_list += f'mkdir -p {out_dir}/logs/{subfolder}/ && '
# run the simulation
- task_list += (f'python3 {workflowScript} '
- f'/tmp/{rWHALE_dir}/{config_file} '
- f'-d /tmp/{rWHALE_dir}/input_data '
- f'-w {run_dir} -l {log_path} '
- f'--filter {filter} && ')
+ task_list += (
+ f'python3 {workflowScript} '
+ f'/tmp/{rWHALE_dir}/{config_file} ' # noqa: S108
+ f'-d /tmp/{rWHALE_dir}/input_data '
+ f'-w {run_dir} -l {log_path} '
+ f'--filter {filter} && '
+ )
# copy the results from the task for aggregation
- file_name = f"surface_motions/*"
- task_list += (f'mkdir -p {out_dir}/results/surface_motions'
- f'/{subfolder}/ && ')
- task_list += (f'cp -Rf {run_dir}/{file_name} {out_dir}/results'
- f'/surface_motions/{subfolder}/ && ')
+ file_name = 'surface_motions/*'
+ task_list += (
+ f'mkdir -p {out_dir}/results/surface_motions' f'/{subfolder}/ && '
+ )
+ task_list += (
+ f'cp -Rf {run_dir}/{file_name} {out_dir}/results'
+ f'/surface_motions/{subfolder}/ && '
+ )
- task_list += f"echo 'cmd generated. Currend dir: '$PWD \n"
+ task_list += "echo 'cmd generated. Current dir: '$PWD \n"
# write the tasks to the output file
- with open('WorkflowJobs_siteResponse.txt', 'a+') as tasksFile:
+ with open('WorkflowJobs_siteResponse.txt', 'a+') as tasksFile: # noqa: N806, PLW1514, PTH123
tasksFile.write(task_list)
-def generate_workflow_tasks_regionalsiteresponse(site_filter, config_file, out_dir, task_size, rWHALE_dir):
- jobId = os.getenv('SLURM_JOB_ID') # We might need this later
+def generate_workflow_tasks_regionalsiteresponse( # noqa: C901, D103
+ site_filter,
+ config_file,
+ out_dir,
+ task_size,
+ rWHALE_dir, # noqa: N803
+):
+ jobId = os.getenv('SLURM_JOB_ID') # We might need this later # noqa: N806, F841
- # KZ@220324: currently only EDP is valid output as it's jsut soil column response in this step
+ # KZ@220324: currently only EDP is valid output as it's just soil column response in this step
output_valid = ['IM']
# get the type of outputs requested
- with open(f'{rWHALE_dir}/{config_file}', 'r') as f:
+ with open(f'{rWHALE_dir}/{config_file}') as f: # noqa: PLW1514, PTH123
settings = json.load(f)
- output_types = [out_type for out_type, val in settings['outputs'].items()
- if (val == True and out_type in output_valid)]
+ output_types = [
+ out_type
+ for out_type, val in settings['outputs'].items()
+ if (val == True and out_type in output_valid) # noqa: E712
+ ]
# get the list of sites requested to run
- if site_filter == "":
+ if site_filter == '': # noqa: PLC1901
# we pull the site_filter from the config file
- site_filter = settings['Applications']['RegionalEvent']['ApplicationData'].get('filter', "")
+ site_filter = settings['Applications']['RegionalEvent'][
+ 'ApplicationData'
+ ].get('filter', '')
- if site_filter == "":
- raise ValueError(
- "Running a regional simulation on DesignSafe requires either "
+ if site_filter == '': # noqa: PLC1901
+ raise ValueError( # noqa: TRY003
+ 'Running a regional simulation on DesignSafe requires either ' # noqa: EM101
"the 'buildingFilter' parameter to be set for the workflow "
"application or the 'filter' parameter set for the Building "
- "application in the workflow configuration file. Neither was "
- "provided in the current job. If you want to run every building "
+ 'application in the workflow configuration file. Neither was '
+ 'provided in the current job. If you want to run every building '
"in the input file, provide the filter like '#min-#max' where "
- "#min is the id of the first building and #max is the id of the "
- "last building in the inventory."
+ '#min is the id of the first building and #max is the id of the '
+ 'last building in the inventory.'
)
# note: we assume that there are no gaps in the indexes
sites_requested = []
for sites in site_filter.split(','):
- if "-" in sites:
- site_low, site_high = sites.split("-")
- sites_requested += list(
- range(int(site_low), int(site_high) + 1))
+ if '-' in sites:
+ site_low, site_high = sites.split('-')
+ sites_requested += list(range(int(site_low), int(site_high) + 1))
else:
sites_requested.append(int(sites))
count = len(sites_requested)
- tasksCount = int(math.ceil(count/task_size))
+ tasksCount = int(math.ceil(count / task_size)) # noqa: N806
- workflowScript = f"/tmp/{rWHALE_dir}/applications/Workflow/siteResponseWHALE.py"
+ workflowScript = f'/tmp/{rWHALE_dir}/applications/Workflow/siteResponseWHALE.py' # noqa: S108, N806
subfolder = 0
- for i in range(0, tasksCount):
-
- site_list = np.array(sites_requested[i*task_size : (i+1)*task_size])
+ for i in range(tasksCount):
+ site_list = np.array(sites_requested[i * task_size : (i + 1) * task_size])
# do not try to run sims if there are no bldgs to run
if len(site_list) > 0:
-
- min_ID = site_list[0]
- max_ID = site_list[-1]
+ min_ID = site_list[0] # noqa: N806
+ max_ID = site_list[-1] # noqa: N806
max_ids = np.where(np.diff(site_list) != 1)[0]
- max_ids = np.append(max_ids, [len(site_list) - 1, ]).astype(int)
+ max_ids = np.append(
+ max_ids,
+ [
+ len(site_list) - 1,
+ ],
+ ).astype(int)
min_ids = np.zeros(max_ids.shape, dtype=int)
min_ids[1:] = max_ids[:-1] + 1
- filter = ""
+ filter = '' # noqa: A001
for i_min, i_max in zip(min_ids, max_ids):
if i_min == i_max:
- filter += f",{site_list[i_min]}"
+ filter += f',{site_list[i_min]}' # noqa: A001
else:
- filter += f",{site_list[i_min]}-{site_list[i_max]}"
- filter = filter[1:] # to remove the initial comma
+ filter += f',{site_list[i_min]}-{site_list[i_max]}' # noqa: A001
+ filter = filter[1:] # to remove the initial comma # noqa: A001
- if (i%500) == 0:
- subfolder = subfolder + 1
+ if (i % 500) == 0:
+ subfolder = subfolder + 1 # noqa: PLR6104
- run_dir = (f"/tmp/{rWHALE_dir}"
- f"/applications/Workflow/RunDirSite{min_ID}-{max_ID}")
+ run_dir = (
+ f'/tmp/{rWHALE_dir}' # noqa: S108
+ f'/applications/Workflow/RunDirSite{min_ID}-{max_ID}'
+ )
- log_path = (f"{out_dir}/logs/{subfolder}"
- f"/logSite{min_ID:07d}-{max_ID:07d}.txt")
+ log_path = (
+ f'{out_dir}/logs/{subfolder}'
+ f'/logSite{min_ID:07d}-{max_ID:07d}.txt'
+ )
- task_list = ""
+ task_list = ''
# create the subfolder to store log files
task_list += f'mkdir -p {out_dir}/logs/{subfolder}/ && '
# run the simulation
- task_list += (f'python3 {workflowScript} '
- f'-i /tmp/{rWHALE_dir}/{config_file} '
- f'-d /tmp/{rWHALE_dir}/input_data '
- f'-w {run_dir} -l {log_path} '
- f'-o /tmp/{rWHALE_dir}/input_data/siteResponseOutputMotions '
- f'--filter {filter} && ')
+ task_list += (
+ f'python3 {workflowScript} '
+ f'-i /tmp/{rWHALE_dir}/{config_file} '
+ f'-d /tmp/{rWHALE_dir}/input_data '
+ f'-w {run_dir} -l {log_path} '
+ f'-o /tmp/{rWHALE_dir}/input_data/siteResponseOutputMotions '
+ f'--filter {filter} && '
+ )
# copy the results from the task for aggregation
for out_type in output_types:
-
res_type = None
- if out_type in ['IM', 'BIM', 'EDP', 'DM', 'DV']:
+ if out_type in ['IM', 'BIM', 'EDP', 'DM', 'DV']: # noqa: PLR6201
res_type = out_type
file_name = f'{res_type}*.csv'
elif out_type == 'every_realization':
@@ -378,55 +442,92 @@ def generate_workflow_tasks_regionalsiteresponse(site_filter, config_file, out_d
file_name = f'{res_type}*.hdf'
if res_type is not None:
- task_list += (f'mkdir -p {out_dir}/results/{res_type}'
- f'/{subfolder}/ && ')
+ task_list += (
+ f'mkdir -p {out_dir}/results/{res_type}' f'/{subfolder}/ && '
+ )
- task_list += (f'cp -f {run_dir}/{file_name} {out_dir}/results'
- f'/{res_type}/{subfolder}/ && ')
+ task_list += (
+ f'cp -f {run_dir}/{file_name} {out_dir}/results'
+ f'/{res_type}/{subfolder}/ && '
+ )
# remove the results after the simulation is completed
- task_list += f"rm -rf {run_dir} \n"
+ task_list += f'rm -rf {run_dir} \n'
# write the tasks to the output file
- with open('WorkflowJobs_SiteResponse.txt', 'a+') as tasksFile:
+ with open('WorkflowJobs_SiteResponse.txt', 'a+') as tasksFile: # noqa: N806, PLW1514, PTH123
tasksFile.write(task_list)
-if __name__ == "__main__":
-
- #Defining the command line arguments
-
- workflowArgParser = argparse.ArgumentParser(
- "Create the workflow tasks for rWHALE.")
-
- workflowArgParser.add_argument("-buildingFilter", "-F", type=str,
- default="", nargs='?', const="",
- help="Filter a subset of the buildings to run")
- workflowArgParser.add_argument("-configFile", "-c", type=str,
- help="The file used to configure the simulation.")
- workflowArgParser.add_argument("-outputDir", "-o", type=str,
- help="The directory to save the final results to.")
- workflowArgParser.add_argument("-buildingsPerTask", "-b", type=int,
- help="Number of buildings to run in each task.")
- workflowArgParser.add_argument("-rWHALE_dir", "-W", type=str,
+if __name__ == '__main__':
+ # Defining the command line arguments
+
+ workflowArgParser = argparse.ArgumentParser( # noqa: N816
+ 'Create the workflow tasks for rWHALE.'
+ )
+
+ workflowArgParser.add_argument(
+ '-buildingFilter',
+ '-F',
+ type=str,
+ default='',
+ nargs='?',
+ const='',
+ help='Filter a subset of the buildings to run',
+ )
+ workflowArgParser.add_argument(
+ '-configFile',
+ '-c',
+ type=str,
+ help='The file used to configure the simulation.',
+ )
+ workflowArgParser.add_argument(
+ '-outputDir',
+ '-o',
+ type=str,
+ help='The directory to save the final results to.',
+ )
+ workflowArgParser.add_argument(
+ '-buildingsPerTask',
+ '-b',
+ type=int,
+ help='Number of buildings to run in each task.',
+ )
+ workflowArgParser.add_argument(
+ '-rWHALE_dir',
+ '-W',
+ type=str,
default='rWHALE',
- help="The path to the rWHALE files on the compute nodes")
- workflowArgParser.add_argument("-workflowName", "-N", type=str,
+ help='The path to the rWHALE files on the compute nodes',
+ )
+ workflowArgParser.add_argument(
+ '-workflowName',
+ '-N',
+ type=str,
default='building',
- help="building or siteResponse")
+ help='building or siteResponse',
+ )
- #Parsing the command line arguments
+ # Parsing the command line arguments
line_args = workflowArgParser.parse_args()
- if line_args.workflowName=='building':
- generate_workflow_tasks(line_args.buildingFilter, line_args.configFile,
- line_args.outputDir, line_args.buildingsPerTask,
- line_args.rWHALE_dir)
-
- elif line_args.workflowName=='siteResponse':
- generate_workflow_tasks_siteresponse(line_args.buildingFilter, line_args.configFile,
- line_args.outputDir, line_args.buildingsPerTask,
- line_args.rWHALE_dir)
+ if line_args.workflowName == 'building':
+ generate_workflow_tasks(
+ line_args.buildingFilter,
+ line_args.configFile,
+ line_args.outputDir,
+ line_args.buildingsPerTask,
+ line_args.rWHALE_dir,
+ )
+
+ elif line_args.workflowName == 'siteResponse':
+ generate_workflow_tasks_siteresponse(
+ line_args.buildingFilter,
+ line_args.configFile,
+ line_args.outputDir,
+ line_args.buildingsPerTask,
+ line_args.rWHALE_dir,
+ )
else:
# currently supporting building and siteresponse
- print('-workflowName has to be building or siteResponse')
\ No newline at end of file
+ print('-workflowName has to be building or siteResponse') # noqa: T201
diff --git a/modules/Workflow/EE-UQ workflow.py b/modules/Workflow/EE-UQ workflow.py
index 2681871bf..c14af0beb 100644
--- a/modules/Workflow/EE-UQ workflow.py
+++ b/modules/Workflow/EE-UQ workflow.py
@@ -1,37 +1,36 @@
-# -*- coding: utf-8 -*-
-#
+# # noqa: INP001, D100
# Copyright (c) 2019 The Regents of the University of California
# Copyright (c) 2019 Leland Stanford Junior University
#
# This file is part of pelicun.
-#
-# Redistribution and use in source and binary forms, with or without
+#
+# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
#
-# 1. Redistributions of source code must retain the above copyright notice,
+# 1. Redistributions of source code must retain the above copyright notice,
# this list of conditions and the following disclaimer.
#
-# 2. Redistributions in binary form must reproduce the above copyright notice,
-# this list of conditions and the following disclaimer in the documentation
+# 2. Redistributions in binary form must reproduce the above copyright notice,
+# this list of conditions and the following disclaimer in the documentation
# and/or other materials provided with the distribution.
#
-# 3. Neither the name of the copyright holder nor the names of its contributors
-# may be used to endorse or promote products derived from this software without
+# 3. Neither the name of the copyright holder nor the names of its contributors
+# may be used to endorse or promote products derived from this software without
# specific prior written permission.
#
-# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
-# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
-# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
-# ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE
-# LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
-# CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
-# SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
-# INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
-# CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
-# ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
+# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
+# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
+# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
+# ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE
+# LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
+# CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
+# SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
+# INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
+# CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
+# ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
# POSSIBILITY OF SUCH DAMAGE.
-#
-# You should have received a copy of the BSD 3-Clause License along with
+#
+# You should have received a copy of the BSD 3-Clause License along with
# pelicun. If not, see .
#
# Contributors:
@@ -42,58 +41,65 @@
# Chaofeng Wang
# import functions for Python 2.X support
-from __future__ import division, print_function
-import sys, os, json
-if sys.version.startswith('2'):
- range=xrange
- string_types = basestring
+import json
+import os
+import sys
+
+if sys.version.startswith('2'):
+ range = xrange # noqa: A001, F821
+ string_types = basestring # noqa: F821
else:
string_types = str
-sys.path.insert(0, os.path.dirname(os.path.realpath(__file__)))
+sys.path.insert(0, os.path.dirname(os.path.realpath(__file__))) # noqa: PTH120
import whale.main as whale
-from whale.main import log_msg, log_div
+from whale.main import log_div, log_msg
-def main(run_type, input_file, app_registry):
+def main(run_type, input_file, app_registry): # noqa: D103
# initialize the log file
- with open(input_file, 'r') as f:
+ with open(input_file) as f: # noqa: PLW1514, PTH123
inputs = json.load(f)
- runDir = inputs['runDir']
+ runDir = inputs['runDir'] # noqa: N806
whale.log_file = runDir + '/log.txt'
- with open(whale.log_file, 'w') as f:
- f.write('EE-UQ workflow\n')
+ with open(whale.log_file, 'w') as f: # noqa: FURB103, PLW1514, PTH123
+ f.write('EE-UQ workflow\n')
# echo the inputs
log_msg(log_div)
log_msg('Started running the workflow script')
- log_msg(log_div)
+ log_msg(log_div)
- WF = whale.Workflow(run_type, input_file, app_registry,
- app_type_list = ['Event', 'Modeling', 'EDP', 'Simulation', 'UQ'])
+ WF = whale.Workflow( # noqa: N806
+ run_type,
+ input_file,
+ app_registry,
+ app_type_list=['Event', 'Modeling', 'EDP', 'Simulation', 'UQ'],
+ )
# initialize the working directory
WF.init_simdir()
# prepare the input files for the simulation
- WF.create_RV_files(
- app_sequence = ['Event', 'Modeling', 'EDP', 'Simulation'])
+ WF.create_RV_files(app_sequence=['Event', 'Modeling', 'EDP', 'Simulation'])
# create the workflow driver file
- WF.create_driver_file(
- app_sequence = ['Event', 'Modeling', 'EDP', 'Simulation'])
+ WF.create_driver_file(app_sequence=['Event', 'Modeling', 'EDP', 'Simulation'])
# run uq engine to simulate response
WF.simulate_response()
-if __name__ == '__main__':
- if len(sys.argv) != 4:
- print('\nNeed three arguments, e.g.:\n')
- print(' python %s action workflowinputfile.json workflowapplications.json' % sys.argv[0])
- print('\nwhere: action is either check or run\n')
- exit(1)
+if __name__ == '__main__':
+ if len(sys.argv) != 4: # noqa: PLR2004
+ print('\nNeed three arguments, e.g.:\n') # noqa: T201
+ print( # noqa: T201
+ ' python %s action workflowinputfile.json workflowapplications.json' # noqa: UP031
+ % sys.argv[0]
+ )
+ print('\nwhere: action is either check or run\n') # noqa: T201
+ exit(1) # noqa: PLR1722
- main(run_type=sys.argv[1], input_file=sys.argv[2], app_registry=sys.argv[3])
\ No newline at end of file
+ main(run_type=sys.argv[1], input_file=sys.argv[2], app_registry=sys.argv[3])
diff --git a/modules/Workflow/EE-UQ.py b/modules/Workflow/EE-UQ.py
index fe9a3e279..a513e584f 100755
--- a/modules/Workflow/EE-UQ.py
+++ b/modules/Workflow/EE-UQ.py
@@ -1,102 +1,98 @@
-# written: fmk, adamzs
+# written: fmk, adamzs # noqa: CPY001, D100, EXE002, INP001
# import functions for Python 2.X support
-from __future__ import division, print_function
import sys
-if sys.version.startswith('2'):
- range=xrange
- string_types = basestring
+
+if sys.version.startswith('2'):
+ range = xrange # noqa: A001, F821
+ string_types = basestring # noqa: F821
else:
string_types = str
import json
import os
-import subprocess
-from time import gmtime, strftime
import posixpath
+from time import gmtime, strftime
divider = '#' * 80
log_output = []
-from WorkflowUtils import *
+from WorkflowUtils import * # noqa: E402, F403
+
-def main(run_type, inputFile, applicationsRegistry):
+def main(run_type, inputFile, applicationsRegistry): # noqa: C901, D103, N803, PLR0912, PLR0914, PLR0915
# the whole workflow is wrapped within a 'try' block.
# a number of exceptions (files missing, explicit application failures, etc.) are
# handled explicitly to aid the user.
# But unhandled exceptions cause the workflow to stop with an error, handled in the
# exception block way at the bottom of this main() function
- try:
-
- workflow_log(divider)
- workflow_log('Start of run')
- workflow_log(divider)
- workflow_log('workflow input file: %s' % inputFile)
- workflow_log('application registry file: %s' % applicationsRegistry)
- workflow_log('runtype: %s' % run_type)
- workflow_log(divider)
-
+ try: # noqa: PLR1702
+ workflow_log(divider) # noqa: F405
+ workflow_log('Start of run') # noqa: F405
+ workflow_log(divider) # noqa: F405
+ workflow_log('workflow input file: %s' % inputFile) # noqa: F405, UP031
+ workflow_log('application registry file: %s' % applicationsRegistry) # noqa: F405, UP031
+ workflow_log('runtype: %s' % run_type) # noqa: F405, UP031
+ workflow_log(divider) # noqa: F405
#
# first we parse the applications registry to load all possible applications
# - for each application type we place in a dictionary key being name, value containing path to executable
#
- with open(applicationsRegistry, 'r') as data_file:
- registryData = json.load(data_file)
+ with open(applicationsRegistry) as data_file: # noqa: PLW1514, PTH123
+ registryData = json.load(data_file) # noqa: N806
# convert all relative paths to full paths
- A = 'Applications'
- Applications = dict()
- appList = 'Event Modeling EDP Simulation UQ'.split(' ')
- appList = [a + A for a in appList]
+ A = 'Applications' # noqa: N806
+ Applications = dict() # noqa: C408, N806
+ appList = 'Event Modeling EDP Simulation UQ'.split(' ') # noqa: N806
+ appList = [a + A for a in appList] # noqa: N806
for app_type in appList:
-
if app_type in registryData:
- xApplicationData = registryData[app_type]
- applicationsData = xApplicationData['Applications']
+ xApplicationData = registryData[app_type] # noqa: N806
+ applicationsData = xApplicationData['Applications'] # noqa: N806
for app in applicationsData:
- appName = app['Name']
- appExe = app['ExecutablePath']
- if not app_type in Applications:
- Applications[app_type] = dict()
+ appName = app['Name'] # noqa: N806
+ appExe = app['ExecutablePath'] # noqa: N806
+ if app_type not in Applications:
+ Applications[app_type] = dict() # noqa: C408
Applications[app_type][appName] = appExe
#
# open input file, and parse json into data
#
- with open(inputFile, 'r') as data_file:
+ with open(inputFile) as data_file: # noqa: PLW1514, PTH123
data = json.load(data_file)
# convert all relative paths to full paths
# relative2fullpath(data)
if 'runDir' in data:
- runDIR = data['runDir']
+ runDIR = data['runDir'] # noqa: N806
else:
- raise WorkFlowInputError('Need a runDir Entry')
+ raise WorkFlowInputError('Need a runDir Entry') # noqa: EM101, F405, TRY003, TRY301
if 'remoteAppDir' in data:
- remoteAppDir = data['remoteAppDir']
+ remoteAppDir = data['remoteAppDir'] # noqa: N806
else:
- raise WorkFlowInputError('Need a remoteAppDir Entry')
+ raise WorkFlowInputError('Need a remoteAppDir Entry') # noqa: EM101, F405, TRY003, TRY301
if 'localAppDir' in data:
- localAppDir = data['localAppDir']
+ localAppDir = data['localAppDir'] # noqa: N806
else:
- raise WorkFlowInputError('Need a localAppDir Entry')
+ raise WorkFlowInputError('Need a localAppDir Entry') # noqa: EM101, F405, TRY003, TRY301
#
# before running chdir to templatedir
#
- workflow_log('run Directory: %s' % runDIR)
+ workflow_log('run Directory: %s' % runDIR) # noqa: F405, UP031
os.chdir(runDIR)
os.chdir('templatedir')
-
#
# now we parse for the applications & app specific data in workflow
#
@@ -104,7 +100,7 @@ def main(run_type, inputFile, applicationsRegistry):
if 'Applications' in data:
available_apps = data['Applications']
else:
- raise WorkFlowInputError('Need an Applications Entry')
+ raise WorkFlowInputError('Need an Applications Entry') # noqa: EM101, F405, TRY003, TRY301
#
# get events, for each the application and its data .. FOR NOW 1 EVENT
@@ -114,140 +110,176 @@ def main(run_type, inputFile, applicationsRegistry):
events = available_apps['Events']
for event in events:
-
if 'EventClassification' in event:
- eventClassification = event['EventClassification']
+ eventClassification = event['EventClassification'] # noqa: N806
if eventClassification == 'Earthquake':
if 'Application' in event:
- eventApplication = event['Application']
- eventAppData = event['ApplicationData']
- eventData = event['ApplicationData']
-
- if eventApplication in Applications['EventApplications'].keys():
- eventAppExe = Applications['EventApplications'].get(eventApplication)
- workflow_log(remoteAppDir)
- workflow_log(eventAppExe)
- eventAppExeLocal = posixpath.join(localAppDir,eventAppExe)
- eventAppExeRemote = posixpath.join(remoteAppDir,eventAppExe)
- workflow_log(eventAppExeRemote)
+ eventApplication = event['Application'] # noqa: N806
+ eventAppData = event['ApplicationData'] # noqa: N806
+ eventData = event['ApplicationData'] # noqa: N806, F841
+
+ if (
+ eventApplication # noqa: SIM118
+ in Applications['EventApplications'].keys()
+ ):
+ eventAppExe = Applications['EventApplications'].get( # noqa: N806
+ eventApplication
+ )
+ workflow_log(remoteAppDir) # noqa: F405
+ workflow_log(eventAppExe) # noqa: F405
+ eventAppExeLocal = posixpath.join( # noqa: N806
+ localAppDir, eventAppExe
+ )
+ eventAppExeRemote = posixpath.join( # noqa: N806
+ remoteAppDir, eventAppExe
+ )
+ workflow_log(eventAppExeRemote) # noqa: F405
else:
- raise WorkFlowInputError('Event application %s not in registry' % eventApplication)
+ raise WorkFlowInputError( # noqa: F405, TRY301
+ 'Event application %s not in registry' # noqa: UP031
+ % eventApplication
+ )
else:
- raise WorkFlowInputError('Need an EventApplication section')
-
+ raise WorkFlowInputError( # noqa: F405, TRY003, TRY301
+ 'Need an EventApplication section' # noqa: EM101
+ )
else:
- raise WorkFlowInputError('Event classification must be Earthquake, not %s' % eventClassification)
+ raise WorkFlowInputError( # noqa: F405, TRY301
+ 'Event classification must be Earthquake, not %s' # noqa: UP031
+ % eventClassification
+ )
else:
- raise WorkFlowInputError('Need Event Classification')
+ raise WorkFlowInputError('Need Event Classification') # noqa: EM101, F405, TRY003, TRY301
else:
- raise WorkFlowInputError('Need an Events Entry in Applications')
+ raise WorkFlowInputError('Need an Events Entry in Applications') # noqa: EM101, F405, TRY003, TRY301
#
# get modeling application and its data
#
if 'Modeling' in available_apps:
- modelingApp = available_apps['Modeling']
+ modelingApp = available_apps['Modeling'] # noqa: N806
if 'Application' in modelingApp:
- modelingApplication = modelingApp['Application']
+ modelingApplication = modelingApp['Application'] # noqa: N806
# check modeling app in registry, if so get full executable path
- modelingAppData = modelingApp['ApplicationData']
- if modelingApplication in Applications['ModelingApplications'].keys():
- modelingAppExe = Applications['ModelingApplications'].get(modelingApplication)
- modelingAppExeLocal = posixpath.join(localAppDir,modelingAppExe)
- modelingAppExeRemote = posixpath.join(remoteAppDir,modelingAppExe)
+ modelingAppData = modelingApp['ApplicationData'] # noqa: N806
+ if (
+ modelingApplication # noqa: SIM118
+ in Applications['ModelingApplications'].keys()
+ ):
+ modelingAppExe = Applications['ModelingApplications'].get( # noqa: N806
+ modelingApplication
+ )
+ modelingAppExeLocal = posixpath.join(localAppDir, modelingAppExe) # noqa: N806
+ modelingAppExeRemote = posixpath.join( # noqa: N806
+ remoteAppDir, modelingAppExe
+ )
else:
- raise WorkFlowInputError('Modeling application %s not in registry' % modelingApplication)
+ raise WorkFlowInputError( # noqa: F405, TRY301
+ 'Modeling application %s not in registry' # noqa: UP031
+ % modelingApplication
+ )
else:
- raise WorkFlowInputError('Need a ModelingApplication in Modeling data')
-
+ raise WorkFlowInputError( # noqa: F405, TRY003, TRY301
+ 'Need a ModelingApplication in Modeling data' # noqa: EM101
+ )
else:
- raise WorkFlowInputError('Need a Modeling Entry in Applications')
+ raise WorkFlowInputError('Need a Modeling Entry in Applications') # noqa: EM101, F405, TRY003, TRY301
#
# get edp application and its data .. CURRENTLY MODELING APP MUST CREATE EDP
#
if 'EDP' in available_apps:
- edpApp = available_apps['EDP']
-
+ edpApp = available_apps['EDP'] # noqa: N806
+
if 'Application' in edpApp:
- edpApplication = edpApp['Application']
-
+ edpApplication = edpApp['Application'] # noqa: N806
+
# check modeling app in registry, if so get full executable path
- edpAppData = edpApp['ApplicationData']
- if edpApplication in Applications['EDPApplications'].keys():
- edpAppExe = Applications['EDPApplications'].get(edpApplication)
- edpAppExeLocal = posixpath.join(localAppDir,edpAppExe)
- edpAppExeRemote = posixpath.join(remoteAppDir,edpAppExe)
+ edpAppData = edpApp['ApplicationData'] # noqa: N806
+ if edpApplication in Applications['EDPApplications'].keys(): # noqa: SIM118
+ edpAppExe = Applications['EDPApplications'].get(edpApplication) # noqa: N806
+ edpAppExeLocal = posixpath.join(localAppDir, edpAppExe) # noqa: N806
+ edpAppExeRemote = posixpath.join(remoteAppDir, edpAppExe) # noqa: N806
else:
- raise WorkFlowInputError('EDP application {} not in registry'.format(edpApplication))
-
+ raise WorkFlowInputError( # noqa: F405, TRY003, TRY301
+ f'EDP application {edpApplication} not in registry' # noqa: EM102
+ )
+
else:
- raise WorkFlowInputError('Need an EDPApplication in EDP data')
-
+ raise WorkFlowInputError('Need an EDPApplication in EDP data') # noqa: EM101, F405, TRY003, TRY301
+
else:
- raise WorkFlowInputError('Need an EDP Entry in Applications')
+ raise WorkFlowInputError('Need an EDP Entry in Applications') # noqa: EM101, F405, TRY003, TRY301
#
- # get simulation application and its data
+ # get simulation application and its data
#
if 'Simulation' in available_apps:
- simulationApp = available_apps['Simulation']
+ simulationApp = available_apps['Simulation'] # noqa: N806
if 'Application' in simulationApp:
- simulationApplication = simulationApp['Application']
+ simulationApplication = simulationApp['Application'] # noqa: N806
# check modeling app in registry, if so get full executable path
- simAppData = simulationApp['ApplicationData']
- if simulationApplication in Applications['SimulationApplications'].keys():
- simAppExe = Applications['SimulationApplications'].get(simulationApplication)
- simAppExeLocal = posixpath.join(localAppDir,simAppExe)
- simAppExeRemote = posixpath.join(remoteAppDir,simAppExe)
+ simAppData = simulationApp['ApplicationData'] # noqa: N806
+ if (
+ simulationApplication # noqa: SIM118
+ in Applications['SimulationApplications'].keys()
+ ):
+ simAppExe = Applications['SimulationApplications'].get( # noqa: N806
+ simulationApplication
+ )
+ simAppExeLocal = posixpath.join(localAppDir, simAppExe) # noqa: N806
+ simAppExeRemote = posixpath.join(remoteAppDir, simAppExe) # noqa: N806
else:
- raise WorkFlowInputError('Simulation application {} not in registry'.format(simulationApplication))
+ raise WorkFlowInputError( # noqa: F405, TRY003, TRY301
+ f'Simulation application {simulationApplication} not in registry' # noqa: EM102
+ )
else:
- raise WorkFlowInputError('Need an SimulationApplication in Simulation data')
-
+ raise WorkFlowInputError( # noqa: F405, TRY003, TRY301
+                    'Need a SimulationApplication in Simulation data'  # noqa: EM101
+ )
else:
- raise WorkFlowInputError('Need a Simulation Entry in Applications')
+ raise WorkFlowInputError('Need a Simulation Entry in Applications') # noqa: EM101, F405, TRY003, TRY301
if 'UQ' in available_apps:
- uqApp = available_apps['UQ']
+ uqApp = available_apps['UQ'] # noqa: N806
if 'Application' in uqApp:
- uqApplication = uqApp['Application']
+ uqApplication = uqApp['Application'] # noqa: N806
# check modeling app in registry, if so get full executable path
- uqAppData = uqApp['ApplicationData']
- if uqApplication in Applications['UQApplications'].keys():
- uqAppExe = Applications['UQApplications'].get(uqApplication)
- uqAppExeLocal = posixpath.join(localAppDir,uqAppExe)
- uqAppExeRemote = posixpath.join(localAppDir,uqAppExe)
+ uqAppData = uqApp['ApplicationData'] # noqa: N806
+ if uqApplication in Applications['UQApplications'].keys(): # noqa: SIM118
+ uqAppExe = Applications['UQApplications'].get(uqApplication) # noqa: N806
+ uqAppExeLocal = posixpath.join(localAppDir, uqAppExe) # noqa: N806
+ uqAppExeRemote = posixpath.join(localAppDir, uqAppExe) # noqa: N806, F841
else:
- raise WorkFlowInputError('UQ application {} not in registry'.format(uqApplication))
+ raise WorkFlowInputError( # noqa: F405, TRY003, TRY301
+ f'UQ application {uqApplication} not in registry' # noqa: EM102
+ )
else:
- raise WorkFlowInputError('Need a UQApplication in UQ data')
-
+ raise WorkFlowInputError('Need a UQApplication in UQ data') # noqa: EM101, F405, TRY003, TRY301
else:
- raise WorkFlowInputError('Need a Simulation Entry in Applications')
-
+            raise WorkFlowInputError('Need a UQ Entry in Applications')  # noqa: EM101, F405, TRY003, TRY301
- workflow_log('SUCCESS: Parsed workflow input')
- workflow_log(divider)
+ workflow_log('SUCCESS: Parsed workflow input') # noqa: F405
+ workflow_log(divider) # noqa: F405
#
# now invoke the applications
@@ -260,173 +292,223 @@ def main(run_type, inputFile, applicationsRegistry):
# - perform Simulation
# - getDL
- bimFILE = 'dakota.json'
- eventFILE = 'EVENT.json'
- samFILE = 'SAM.json'
- edpFILE = 'EDP.json'
- simFILE = 'SIM.json'
- driverFile = 'driver'
+ bimFILE = 'dakota.json' # noqa: N806
+ eventFILE = 'EVENT.json' # noqa: N806
+ samFILE = 'SAM.json' # noqa: N806
+ edpFILE = 'EDP.json' # noqa: N806
+ simFILE = 'SIM.json' # noqa: N806
+ driverFile = 'driver' # noqa: N806
# open driver file & write building app (minus the --getRV) to it
- driverFILE = open(driverFile, 'w')
+ driverFILE = open(driverFile, 'w') # noqa: N806, PLW1514, PTH123, SIM115
# get RV for event
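+    # each application's command is assembled with its remote executable path and
+    # written (without --getRV) into the driver script; the same command is then
+    # run locally with --getRV appended so the application can report its random variables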
- eventAppDataList = ['"{}"'.format(eventAppExeRemote), '--filenameBIM', bimFILE, '--filenameEVENT', eventFILE]
- if (eventAppExe.endswith('.py')):
+ eventAppDataList = [ # noqa: N806
+ f'"{eventAppExeRemote}"',
+ '--filenameBIM',
+ bimFILE,
+ '--filenameEVENT',
+ eventFILE,
+ ]
+ if eventAppExe.endswith('.py'):
eventAppDataList.insert(0, 'python')
- for key in eventAppData.keys():
- eventAppDataList.append(u"--" + key)
+ for key in eventAppData.keys(): # noqa: SIM118
+ eventAppDataList.append('--' + key)
value = eventAppData.get(key)
- eventAppDataList.append(u"" + str(value))
-
+ eventAppDataList.append('' + str(value))
+
for item in eventAppDataList:
- driverFILE.write('%s ' % item)
+ driverFILE.write('%s ' % item) # noqa: UP031
driverFILE.write('\n')
eventAppDataList.append('--getRV')
- if (eventAppExe.endswith('.py')):
- eventAppDataList[1] = u""+eventAppExeLocal
+ if eventAppExe.endswith('.py'):
+ eventAppDataList[1] = '' + eventAppExeLocal
else:
- eventAppDataList[0] = u""+eventAppExeLocal
+ eventAppDataList[0] = '' + eventAppExeLocal
- command, result, returncode = runApplication(eventAppDataList)
+ command, result, returncode = runApplication(eventAppDataList) # noqa: F405
log_output.append([command, result, returncode])
# get RV for building model
- modelAppDataList = ['"{}"'.format(modelingAppExeRemote), '--filenameBIM', bimFILE, '--filenameEVENT', eventFILE, '--filenameSAM',
- samFILE]
-
- if (modelingAppExe.endswith('.py')):
+ modelAppDataList = [ # noqa: N806
+ f'"{modelingAppExeRemote}"',
+ '--filenameBIM',
+ bimFILE,
+ '--filenameEVENT',
+ eventFILE,
+ '--filenameSAM',
+ samFILE,
+ ]
+
+ if modelingAppExe.endswith('.py'):
modelAppDataList.insert(0, 'python')
- for key in modelingAppData.keys():
- modelAppDataList.append(u'--' + key)
- modelAppDataList.append(u'' + modelingAppData.get(key))
+ for key in modelingAppData.keys(): # noqa: SIM118
+ modelAppDataList.append('--' + key) # noqa: FURB113
+ modelAppDataList.append('' + modelingAppData.get(key))
for item in modelAppDataList:
- driverFILE.write('%s ' % item)
+ driverFILE.write('%s ' % item) # noqa: UP031
driverFILE.write('\n')
modelAppDataList.append('--getRV')
- if (modelingAppExe.endswith('.py')):
+ if modelingAppExe.endswith('.py'):
modelAppDataList[1] = modelingAppExeLocal
else:
modelAppDataList[0] = modelingAppExeLocal
- command, result, returncode = runApplication(modelAppDataList)
+ command, result, returncode = runApplication(modelAppDataList) # noqa: F405
log_output.append([command, result, returncode])
-
# get RV for EDP!
- edpAppDataList = ['"{}"'.format(edpAppExeRemote), '--filenameBIM', bimFILE, '--filenameEVENT', eventFILE, '--filenameSAM', samFILE,
- '--filenameEDP', edpFILE]
-
- if (edpAppExe.endswith('.py')):
+ edpAppDataList = [ # noqa: N806
+ f'"{edpAppExeRemote}"',
+ '--filenameBIM',
+ bimFILE,
+ '--filenameEVENT',
+ eventFILE,
+ '--filenameSAM',
+ samFILE,
+ '--filenameEDP',
+ edpFILE,
+ ]
+
+ if edpAppExe.endswith('.py'):
edpAppDataList.insert(0, 'python')
- for key in edpAppData.keys():
- edpAppDataList.append(u'--' + key)
- edpAppDataList.append(u'' + edpAppData.get(key))
+ for key in edpAppData.keys(): # noqa: SIM118
+ edpAppDataList.append('--' + key) # noqa: FURB113
+ edpAppDataList.append('' + edpAppData.get(key))
for item in edpAppDataList:
- driverFILE.write('%s ' % item)
+ driverFILE.write('%s ' % item) # noqa: UP031
driverFILE.write('\n')
- if (edpAppExe.endswith('.py')):
+ if edpAppExe.endswith('.py'):
edpAppDataList[1] = edpAppExeLocal
else:
edpAppDataList[0] = edpAppExeLocal
edpAppDataList.append('--getRV')
- command, result, returncode = runApplication(edpAppDataList)
+ command, result, returncode = runApplication(edpAppDataList) # noqa: F405
log_output.append([command, result, returncode])
# get RV for Simulation
- simAppDataList = ['"{}"'.format(simAppExeRemote), '--filenameBIM', bimFILE, '--filenameSAM', samFILE, '--filenameEVENT', eventFILE,
- '--filenameEDP', edpFILE, '--filenameSIM', simFILE]
-
- if (simAppExe.endswith('.py')):
+ simAppDataList = [ # noqa: N806
+ f'"{simAppExeRemote}"',
+ '--filenameBIM',
+ bimFILE,
+ '--filenameSAM',
+ samFILE,
+ '--filenameEVENT',
+ eventFILE,
+ '--filenameEDP',
+ edpFILE,
+ '--filenameSIM',
+ simFILE,
+ ]
+
+ if simAppExe.endswith('.py'):
simAppDataList.insert(0, 'python')
- for key in simAppData.keys():
- simAppDataList.append(u'--' + key)
- simAppDataList.append(u'' + simAppData.get(key))
+ for key in simAppData.keys(): # noqa: SIM118
+ simAppDataList.append('--' + key) # noqa: FURB113
+ simAppDataList.append('' + simAppData.get(key))
for item in simAppDataList:
- driverFILE.write('%s ' % item)
+ driverFILE.write('%s ' % item) # noqa: UP031
driverFILE.write('\n')
simAppDataList.append('--getRV')
- if (simAppExe.endswith('.py')):
+ if simAppExe.endswith('.py'):
simAppDataList[1] = simAppExeLocal
else:
simAppDataList[0] = simAppExeLocal
- command, result, returncode = runApplication(simAppDataList)
+ command, result, returncode = runApplication(simAppDataList) # noqa: F405
log_output.append([command, result, returncode])
# perform the simulation
driverFILE.close()
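+    # the UQ application is invoked with its local executable and is handed the
+    # driver script written above via --driverFile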
- uqAppDataList = ['"{}"'.format(uqAppExeLocal), '--filenameBIM', bimFILE, '--filenameSAM', samFILE, '--filenameEVENT', eventFILE,
- '--filenameEDP', edpFILE, '--filenameSIM', simFILE, '--driverFile', driverFile]
-
- if (uqAppExe.endswith('.py')):
+ uqAppDataList = [ # noqa: N806
+ f'"{uqAppExeLocal}"',
+ '--filenameBIM',
+ bimFILE,
+ '--filenameSAM',
+ samFILE,
+ '--filenameEVENT',
+ eventFILE,
+ '--filenameEDP',
+ edpFILE,
+ '--filenameSIM',
+ simFILE,
+ '--driverFile',
+ driverFile,
+ ]
+
+ if uqAppExe.endswith('.py'):
uqAppDataList.insert(0, 'python')
uqAppDataList[1] = uqAppExeLocal
- uqAppDataList.append('--runType')
+ uqAppDataList.append('--runType') # noqa: FURB113
uqAppDataList.append(run_type)
- for key in uqAppData.keys():
- uqAppDataList.append(u'--' + key)
- value = uqAppData.get(key)
+ for key in uqAppData.keys(): # noqa: SIM118
+ uqAppDataList.append('--' + key)
+ value = uqAppData.get(key)
if isinstance(value, string_types):
- uqAppDataList.append(u'' + value)
+ uqAppDataList.append('' + value)
else:
- uqAppDataList.append(u'' + str(value))
+ uqAppDataList.append('' + str(value))
- if run_type == 'run' or run_type == 'set_up' or run_type == 'runningRemote':
- workflow_log('Running Simulation...')
- workflow_log(' '.join(uqAppDataList))
- command, result, returncode = runApplication(uqAppDataList)
+ if run_type == 'run' or run_type == 'set_up' or run_type == 'runningRemote': # noqa: PLR1714
+ workflow_log('Running Simulation...') # noqa: F405
+ workflow_log(' '.join(uqAppDataList)) # noqa: F405
+ command, result, returncode = runApplication(uqAppDataList) # noqa: F405
log_output.append([command, result, returncode])
- workflow_log('Simulation ended...')
+ workflow_log('Simulation ended...') # noqa: F405
else:
- workflow_log('Setup run only. No simulation performed.')
+ workflow_log('Setup run only. No simulation performed.') # noqa: F405
- except WorkFlowInputError as e:
- print('workflow error: %s' % e.value)
- workflow_log('workflow error: %s' % e.value)
- workflow_log(divider)
- exit(1)
+ except WorkFlowInputError as e: # noqa: F405
+ print('workflow error: %s' % e.value) # noqa: T201, UP031
+ workflow_log('workflow error: %s' % e.value) # noqa: F405, UP031
+ workflow_log(divider) # noqa: F405
+ exit(1) # noqa: PLR1722
# unhandled exceptions are handled here
- except Exception as e:
- print('workflow error: ',sys.exc_info()[0])
- workflow_log('unhandled exception... exiting')
+ except Exception:
+ print('workflow error: ', sys.exc_info()[0]) # noqa: T201
+ workflow_log('unhandled exception... exiting') # noqa: F405
raise
-if __name__ == '__main__':
- if len(sys.argv) != 4:
- print('\nNeed three arguments, e.g.:\n')
- print(' python %s action workflowinputfile.json workflowapplications.json' % sys.argv[0])
- print('\nwhere: action is either check or run\n')
- exit(1)
+if __name__ == '__main__':
+ if len(sys.argv) != 4: # noqa: PLR2004
+ print('\nNeed three arguments, e.g.:\n') # noqa: T201
+ print( # noqa: T201
+ ' python %s action workflowinputfile.json workflowapplications.json' # noqa: UP031
+ % sys.argv[0]
+ )
+ print('\nwhere: action is either check or run\n') # noqa: T201
+ exit(1) # noqa: PLR1722
run_type = sys.argv[1]
- inputFile = sys.argv[2]
- applicationsRegistry = sys.argv[3]
+ inputFile = sys.argv[2] # noqa: N816
+ applicationsRegistry = sys.argv[3] # noqa: N816
main(run_type, inputFile, applicationsRegistry)
- workflow_log_file = 'workflow-log-%s.txt' % (strftime('%Y-%m-%d-%H-%M-%S-utc', gmtime()))
- log_filehandle = open(workflow_log_file, 'w')
+ workflow_log_file = 'workflow-log-%s.txt' % ( # noqa: UP031
+ strftime('%Y-%m-%d-%H-%M-%S-utc', gmtime())
+ )
+ log_filehandle = open(workflow_log_file, 'w') # noqa: PLW1514, PTH123, SIM115
- print(type(log_filehandle))
+ print(type(log_filehandle)) # noqa: T201
print(divider, file=log_filehandle)
print('Start of Log', file=log_filehandle)
print(divider, file=log_filehandle)
@@ -434,14 +516,13 @@ def main(run_type, inputFile, applicationsRegistry):
# nb: log_output is a global variable, defined at the top of this script.
for result in log_output:
print(divider, file=log_filehandle)
- print('command line:\n%s\n' % result[0], file=log_filehandle)
+ print('command line:\n%s\n' % result[0], file=log_filehandle) # noqa: UP031
print(divider, file=log_filehandle)
- print('output from process:\n%s\n' % result[1], file=log_filehandle)
+ print('output from process:\n%s\n' % result[1], file=log_filehandle) # noqa: UP031
print(divider, file=log_filehandle)
print('End of Log', file=log_filehandle)
print(divider, file=log_filehandle)
- workflow_log('Log file: %s' % workflow_log_file)
- workflow_log('End of run.')
-
+ workflow_log('Log file: %s' % workflow_log_file) # noqa: F405, UP031
+ workflow_log('End of run.') # noqa: F405
diff --git a/modules/Workflow/GMT.py b/modules/Workflow/GMT.py
index a603434a2..9faa6bada 100755
--- a/modules/Workflow/GMT.py
+++ b/modules/Workflow/GMT.py
@@ -1,102 +1,98 @@
-# written: fmk, adamzs
+# written: fmk, adamzs # noqa: CPY001, D100, EXE002, INP001
# import functions for Python 2.X support
-from __future__ import division, print_function
import sys
-if sys.version.startswith('2'):
- range=xrange
- string_types = basestring
+
+if sys.version.startswith('2'):
+ range = xrange # noqa: A001, F821
+ string_types = basestring # noqa: F821
else:
string_types = str
import json
import os
-import subprocess
-from time import gmtime, strftime
import posixpath
+from time import gmtime, strftime
divider = '#' * 80
log_output = []
-from WorkflowUtils import *
+from WorkflowUtils import * # noqa: E402, F403
+
-def main(run_type, inputFile, applicationsRegistry):
+def main(run_type, inputFile, applicationsRegistry): # noqa: C901, D103, N803, PLR0912, PLR0914, PLR0915
# the whole workflow is wrapped within a 'try' block.
# a number of exceptions (files missing, explicit application failures, etc.) are
# handled explicitly to aid the user.
# But unhandled exceptions case the workflow to stop with an error, handled in the
# exception block way at the bottom of this main() function
- try:
-
- workflow_log(divider)
- workflow_log('Start of run')
- workflow_log(divider)
- workflow_log('workflow input file: %s' % inputFile)
- workflow_log('application registry file: %s' % applicationsRegistry)
- workflow_log('runtype: %s' % run_type)
- workflow_log(divider)
-
+ try: # noqa: PLR1702
+ workflow_log(divider) # noqa: F405
+ workflow_log('Start of run') # noqa: F405
+ workflow_log(divider) # noqa: F405
+ workflow_log('workflow input file: %s' % inputFile) # noqa: F405, UP031
+ workflow_log('application registry file: %s' % applicationsRegistry) # noqa: F405, UP031
+ workflow_log('runtype: %s' % run_type) # noqa: F405, UP031
+ workflow_log(divider) # noqa: F405
#
# first we parse the applications registry to load all possible applications
# - for each application type we place in a dictionary key being name, value containing path to executable
#
- with open(applicationsRegistry, 'r') as data_file:
- registryData = json.load(data_file)
+ with open(applicationsRegistry) as data_file: # noqa: PLW1514, PTH123
+ registryData = json.load(data_file) # noqa: N806
# convert all relative paths to full paths
- A = 'Applications'
- Applications = dict()
- appList = 'Event Modeling EDP Simulation UQ'.split(' ')
- appList = [a + A for a in appList]
+ A = 'Applications' # noqa: N806
+ Applications = dict() # noqa: C408, N806
+ appList = 'Event Modeling EDP Simulation UQ'.split(' ') # noqa: N806
+ appList = [a + A for a in appList] # noqa: N806
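+        # appList now holds the registry section names, e.g. 'EventApplications',
+        # 'ModelingApplications', ..., 'UQApplications'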
for app_type in appList:
-
if app_type in registryData:
- xApplicationData = registryData[app_type]
- applicationsData = xApplicationData['Applications']
+ xApplicationData = registryData[app_type] # noqa: N806
+ applicationsData = xApplicationData['Applications'] # noqa: N806
for app in applicationsData:
- appName = app['Name']
- appExe = app['ExecutablePath']
- if not app_type in Applications:
- Applications[app_type] = dict()
+ appName = app['Name'] # noqa: N806
+ appExe = app['ExecutablePath'] # noqa: N806
+ if app_type not in Applications:
+ Applications[app_type] = dict() # noqa: C408
Applications[app_type][appName] = appExe
#
# open input file, and parse json into data
#
- with open(inputFile, 'r') as data_file:
+ with open(inputFile) as data_file: # noqa: PLW1514, PTH123
data = json.load(data_file)
# convert all relative paths to full paths
# relative2fullpath(data)
if 'runDir' in data:
- runDIR = data['runDir']
+ runDIR = data['runDir'] # noqa: N806
else:
- raise WorkFlowInputError('Need a runDir Entry')
+ raise WorkFlowInputError('Need a runDir Entry') # noqa: EM101, F405, TRY003, TRY301
if 'remoteAppDir' in data:
- remoteAppDir = data['remoteAppDir']
+ remoteAppDir = data['remoteAppDir'] # noqa: N806
else:
- raise WorkFlowInputError('Need a remoteAppDir Entry')
+ raise WorkFlowInputError('Need a remoteAppDir Entry') # noqa: EM101, F405, TRY003, TRY301
if 'localAppDir' in data:
- localAppDir = data['localAppDir']
+ localAppDir = data['localAppDir'] # noqa: N806
else:
- raise WorkFlowInputError('Need a localAppDir Entry')
+ raise WorkFlowInputError('Need a localAppDir Entry') # noqa: EM101, F405, TRY003, TRY301
#
# before running chdir to templatedir
#
- workflow_log('run Directory: %s' % runDIR)
+ workflow_log('run Directory: %s' % runDIR) # noqa: F405, UP031
os.chdir(runDIR)
os.chdir('templatedir')
-
#
# now we parse for the applications & app specific data in workflow
#
@@ -104,7 +100,7 @@ def main(run_type, inputFile, applicationsRegistry):
if 'Applications' in data:
available_apps = data['Applications']
else:
- raise WorkFlowInputError('Need an Applications Entry')
+ raise WorkFlowInputError('Need an Applications Entry') # noqa: EM101, F405, TRY003, TRY301
#
# get events, for each the application and its data .. FOR NOW 1 EVENT
@@ -114,108 +110,131 @@ def main(run_type, inputFile, applicationsRegistry):
events = available_apps['Events']
for event in events:
-
if 'EventClassification' in event:
- eventClassification = event['EventClassification']
+ eventClassification = event['EventClassification'] # noqa: N806
if eventClassification == 'Earthquake':
if 'Application' in event:
- eventApplication = event['Application']
- eventAppData = event['ApplicationData']
- eventData = event['ApplicationData']
-
- if eventApplication in Applications['EventApplications'].keys():
- eventAppExe = Applications['EventApplications'].get(eventApplication)
- workflow_log(remoteAppDir)
- workflow_log(eventAppExe)
- eventAppExeLocal = posixpath.join(localAppDir,eventAppExe)
- eventAppExeRemote = posixpath.join(remoteAppDir,eventAppExe)
- workflow_log(eventAppExeRemote)
+ eventApplication = event['Application'] # noqa: N806
+ eventAppData = event['ApplicationData'] # noqa: N806
+ eventData = event['ApplicationData'] # noqa: N806, F841
+
+ if (
+ eventApplication # noqa: SIM118
+ in Applications['EventApplications'].keys()
+ ):
+ eventAppExe = Applications['EventApplications'].get( # noqa: N806
+ eventApplication
+ )
+ workflow_log(remoteAppDir) # noqa: F405
+ workflow_log(eventAppExe) # noqa: F405
+ eventAppExeLocal = posixpath.join( # noqa: N806
+ localAppDir, eventAppExe
+ )
+ eventAppExeRemote = posixpath.join( # noqa: N806
+ remoteAppDir, eventAppExe
+ )
+ workflow_log(eventAppExeRemote) # noqa: F405
else:
- raise WorkFlowInputError('Event application %s not in registry' % eventApplication)
+ raise WorkFlowInputError( # noqa: F405, TRY301
+ 'Event application %s not in registry' # noqa: UP031
+ % eventApplication
+ )
else:
- raise WorkFlowInputError('Need an EventApplication section')
-
+ raise WorkFlowInputError( # noqa: F405, TRY003, TRY301
+ 'Need an EventApplication section' # noqa: EM101
+ )
else:
- raise WorkFlowInputError('Event classification must be Earthquake, not %s' % eventClassification)
+ raise WorkFlowInputError( # noqa: F405, TRY301
+ 'Event classification must be Earthquake, not %s' # noqa: UP031
+ % eventClassification
+ )
else:
- raise WorkFlowInputError('Need Event Classification')
+ raise WorkFlowInputError('Need Event Classification') # noqa: EM101, F405, TRY003, TRY301
else:
- raise WorkFlowInputError('Need an Events Entry in Applications')
-
+ raise WorkFlowInputError('Need an Events Entry in Applications') # noqa: EM101, F405, TRY003, TRY301
if 'EDP' in available_apps:
- edpApp = available_apps['EDP']
-
+ edpApp = available_apps['EDP'] # noqa: N806
+
if 'Application' in edpApp:
- edpApplication = edpApp['Application']
-
+ edpApplication = edpApp['Application'] # noqa: N806
+
# check modeling app in registry, if so get full executable path
- edpAppData = edpApp['ApplicationData']
- if edpApplication in Applications['EDPApplications'].keys():
- edpAppExe = Applications['EDPApplications'].get(edpApplication)
- edpAppExeLocal = posixpath.join(localAppDir,edpAppExe)
- edpAppExeRemote = posixpath.join(remoteAppDir,edpAppExe)
+ edpAppData = edpApp['ApplicationData'] # noqa: N806
+ if edpApplication in Applications['EDPApplications'].keys(): # noqa: SIM118
+ edpAppExe = Applications['EDPApplications'].get(edpApplication) # noqa: N806
+ edpAppExeLocal = posixpath.join(localAppDir, edpAppExe) # noqa: N806
+ edpAppExeRemote = posixpath.join(remoteAppDir, edpAppExe) # noqa: N806
else:
- raise WorkFlowInputError('EDP application {} not in registry'.format(edpApplication))
-
+ raise WorkFlowInputError( # noqa: F405, TRY003, TRY301
+ f'EDP application {edpApplication} not in registry' # noqa: EM102
+ )
+
else:
- raise WorkFlowInputError('Need an EDPApplication in EDP data')
-
+ raise WorkFlowInputError('Need an EDPApplication in EDP data') # noqa: EM101, F405, TRY003, TRY301
+
else:
- raise WorkFlowInputError('Need an EDP Entry in Applications')
+ raise WorkFlowInputError('Need an EDP Entry in Applications') # noqa: EM101, F405, TRY003, TRY301
if 'Simulation' in available_apps:
- simulationApp = available_apps['Simulation']
+ simulationApp = available_apps['Simulation'] # noqa: N806
if 'Application' in simulationApp:
- simulationApplication = simulationApp['Application']
+ simulationApplication = simulationApp['Application'] # noqa: N806
# check modeling app in registry, if so get full executable path
- simAppData = simulationApp['ApplicationData']
- if simulationApplication in Applications['SimulationApplications'].keys():
- simAppExe = Applications['SimulationApplications'].get(simulationApplication)
- simAppExeLocal = posixpath.join(localAppDir,simAppExe)
- simAppExeRemote = posixpath.join(remoteAppDir,simAppExe)
+ simAppData = simulationApp['ApplicationData'] # noqa: N806
+ if (
+ simulationApplication # noqa: SIM118
+ in Applications['SimulationApplications'].keys()
+ ):
+ simAppExe = Applications['SimulationApplications'].get( # noqa: N806
+ simulationApplication
+ )
+ simAppExeLocal = posixpath.join(localAppDir, simAppExe) # noqa: N806
+ simAppExeRemote = posixpath.join(remoteAppDir, simAppExe) # noqa: N806
else:
- raise WorkFlowInputError('Simulation application {} not in registry'.format(simulationApplication))
+ raise WorkFlowInputError( # noqa: F405, TRY003, TRY301
+ f'Simulation application {simulationApplication} not in registry' # noqa: EM102
+ )
else:
- raise WorkFlowInputError('Need an SimulationApplication in Simulation data')
-
+ raise WorkFlowInputError( # noqa: F405, TRY003, TRY301
+                    'Need a SimulationApplication in Simulation data'  # noqa: EM101
+ )
else:
- raise WorkFlowInputError('Need a Simulation Entry in Applications')
-
+ raise WorkFlowInputError('Need a Simulation Entry in Applications') # noqa: EM101, F405, TRY003, TRY301
if 'UQ' in available_apps:
- uqApp = available_apps['UQ']
+ uqApp = available_apps['UQ'] # noqa: N806
if 'Application' in uqApp:
- uqApplication = uqApp['Application']
+ uqApplication = uqApp['Application'] # noqa: N806
# check modeling app in registry, if so get full executable path
- uqAppData = uqApp['ApplicationData']
- if uqApplication in Applications['UQApplications'].keys():
- uqAppExe = Applications['UQApplications'].get(uqApplication)
- uqAppExeLocal = posixpath.join(localAppDir,uqAppExe)
- uqAppExeRemote = posixpath.join(localAppDir,uqAppExe)
+ uqAppData = uqApp['ApplicationData'] # noqa: N806
+ if uqApplication in Applications['UQApplications'].keys(): # noqa: SIM118
+ uqAppExe = Applications['UQApplications'].get(uqApplication) # noqa: N806
+ uqAppExeLocal = posixpath.join(localAppDir, uqAppExe) # noqa: N806
+ uqAppExeRemote = posixpath.join(localAppDir, uqAppExe) # noqa: N806, F841
else:
- raise WorkFlowInputError('UQ application {} not in registry'.format(uqApplication))
+ raise WorkFlowInputError( # noqa: F405, TRY003, TRY301
+ f'UQ application {uqApplication} not in registry' # noqa: EM102
+ )
else:
- raise WorkFlowInputError('Need a UQApplication in UQ data')
-
+ raise WorkFlowInputError('Need a UQApplication in UQ data') # noqa: EM101, F405, TRY003, TRY301
else:
- raise WorkFlowInputError('Need a Simulation Entry in Applications')
-
+            raise WorkFlowInputError('Need a UQ Entry in Applications')  # noqa: EM101, F405, TRY003, TRY301
- workflow_log('SUCCESS: Parsed workflow input')
- workflow_log(divider)
+ workflow_log('SUCCESS: Parsed workflow input') # noqa: F405
+ workflow_log(divider) # noqa: F405
#
# now invoke the applications
@@ -228,150 +247,195 @@ def main(run_type, inputFile, applicationsRegistry):
# - perform Simulation
# - getDL
- inputFILE = 'dakota.json'
- eventFILE = 'EVENT.json'
- edpFILE = 'EDP.json'
- driverFile = 'driver'
+ inputFILE = 'dakota.json' # noqa: N806
+ eventFILE = 'EVENT.json' # noqa: N806
+ edpFILE = 'EDP.json' # noqa: N806
+ driverFile = 'driver' # noqa: N806
# open driver file & write building app (minus the --getRV) to it
- driverFILE = open(driverFile, 'w')
+ driverFILE = open(driverFile, 'w') # noqa: N806, PLW1514, PTH123, SIM115
# get RV for event
- eventAppDataList = ['"{}"'.format(eventAppExeRemote), '--filenameBIM', inputFILE, '--filenameEVENT', eventFILE]
- if (eventAppExe.endswith('.py')):
+ eventAppDataList = [ # noqa: N806
+ f'"{eventAppExeRemote}"',
+ '--filenameBIM',
+ inputFILE,
+ '--filenameEVENT',
+ eventFILE,
+ ]
+ if eventAppExe.endswith('.py'):
eventAppDataList.insert(0, 'python')
- for key in eventAppData.keys():
- eventAppDataList.append(u"--" + key)
+ for key in eventAppData.keys(): # noqa: SIM118
+ eventAppDataList.append('--' + key)
value = eventAppData.get(key)
- eventAppDataList.append(u"" + value)
-
+ eventAppDataList.append('' + value)
+
for item in eventAppDataList:
- driverFILE.write('%s ' % item)
+ driverFILE.write('%s ' % item) # noqa: UP031
driverFILE.write('\n')
eventAppDataList.append('--getRV')
- if (eventAppExe.endswith('.py')):
- eventAppDataList[1] = u""+eventAppExeLocal
+ if eventAppExe.endswith('.py'):
+ eventAppDataList[1] = '' + eventAppExeLocal
else:
- eventAppDataList[0] = u""+eventAppExeLocal
+ eventAppDataList[0] = '' + eventAppExeLocal
- command, result, returncode = runApplication(eventAppDataList)
+ command, result, returncode = runApplication(eventAppDataList) # noqa: F405
log_output.append([command, result, returncode])
- edpAppDataList = ['"{}"'.format(edpAppExeRemote), '--filenameBIM', inputFILE, '--filenameEVENT', eventFILE, '--filenameSAM', 'drivel',
- '--filenameEDP', edpFILE]
-
- if (edpAppExe.endswith('.py')):
+ edpAppDataList = [ # noqa: N806
+ f'"{edpAppExeRemote}"',
+ '--filenameBIM',
+ inputFILE,
+ '--filenameEVENT',
+ eventFILE,
+ '--filenameSAM',
+ 'drivel',
+ '--filenameEDP',
+ edpFILE,
+ ]
+
+ if edpAppExe.endswith('.py'):
edpAppDataList.insert(0, 'python')
- for key in edpAppData.keys():
- edpAppDataList.append(u'--' + key)
- edpAppDataList.append(u'' + edpAppData.get(key))
+ for key in edpAppData.keys(): # noqa: SIM118
+ edpAppDataList.append('--' + key) # noqa: FURB113
+ edpAppDataList.append('' + edpAppData.get(key))
for item in edpAppDataList:
- driverFILE.write('%s ' % item)
+ driverFILE.write('%s ' % item) # noqa: UP031
driverFILE.write('\n')
- if (edpAppExe.endswith('.py')):
+ if edpAppExe.endswith('.py'):
edpAppDataList[1] = edpAppExeLocal
else:
edpAppDataList[0] = edpAppExeLocal
edpAppDataList.append('--getRV')
- command, result, returncode = runApplication(edpAppDataList)
+ command, result, returncode = runApplication(edpAppDataList) # noqa: F405
log_output.append([command, result, returncode])
# get RV for Simulation
- simAppDataList = ['"{}"'.format(simAppExeRemote), '--filenameBIM', inputFILE, '--filenameSAM', 'drivel', '--filenameEVENT', eventFILE,
- '--filenameEDP', edpFILE, '--filenameSIM', 'drivel']
-
- if (simAppExe.endswith('.py')):
+ simAppDataList = [ # noqa: N806
+ f'"{simAppExeRemote}"',
+ '--filenameBIM',
+ inputFILE,
+ '--filenameSAM',
+ 'drivel',
+ '--filenameEVENT',
+ eventFILE,
+ '--filenameEDP',
+ edpFILE,
+ '--filenameSIM',
+ 'drivel',
+ ]
+
+ if simAppExe.endswith('.py'):
simAppDataList.insert(0, 'python')
- for key in simAppData.keys():
- simAppDataList.append(u'--' + key)
- simAppDataList.append(u'' + simAppData.get(key))
+ for key in simAppData.keys(): # noqa: SIM118
+ simAppDataList.append('--' + key) # noqa: FURB113
+ simAppDataList.append('' + simAppData.get(key))
for item in simAppDataList:
- driverFILE.write('%s ' % item)
+ driverFILE.write('%s ' % item) # noqa: UP031
driverFILE.write('\n')
simAppDataList.append('--getRV')
- if (simAppExe.endswith('.py')):
+ if simAppExe.endswith('.py'):
simAppDataList[1] = simAppExeLocal
else:
simAppDataList[0] = simAppExeLocal
- command, result, returncode = runApplication(simAppDataList)
+ command, result, returncode = runApplication(simAppDataList) # noqa: F405
log_output.append([command, result, returncode])
# perform the simulation
driverFILE.close()
- print("HELLO")
-
- uqAppDataList = ['"{}"'.format(uqAppExeLocal), '--filenameBIM', inputFILE, '--filenameSAM', 'drivel', '--filenameEVENT', eventFILE, '--filenameEDP', edpFILE, '--filenameSIM', 'drivel', '--driverFile', driverFile]
-
-# uqAppDataList = ['"{}"'.format(uqAppExeLocal), '--filenameBIM', inputFILE, '--filenameEVENT', eventFILE, '--driverFile', driverFile]
-
- print(uqAppDataList)
-
- if (uqAppExe.endswith('.py')):
+ print('HELLO') # noqa: T201
+
+ uqAppDataList = [ # noqa: N806
+ f'"{uqAppExeLocal}"',
+ '--filenameBIM',
+ inputFILE,
+ '--filenameSAM',
+ 'drivel',
+ '--filenameEVENT',
+ eventFILE,
+ '--filenameEDP',
+ edpFILE,
+ '--filenameSIM',
+ 'drivel',
+ '--driverFile',
+ driverFile,
+ ]
+
+ # uqAppDataList = ['"{}"'.format(uqAppExeLocal), '--filenameBIM', inputFILE, '--filenameEVENT', eventFILE, '--driverFile', driverFile]
+
+ print(uqAppDataList) # noqa: T201
+
+ if uqAppExe.endswith('.py'):
uqAppDataList.insert(0, 'python')
uqAppDataList[1] = uqAppExeLocal
- uqAppDataList.append('--runType')
+ uqAppDataList.append('--runType') # noqa: FURB113
uqAppDataList.append(run_type)
- print(uqAppDataList)
+ print(uqAppDataList) # noqa: T201
- for key in uqAppData.keys():
- uqAppDataList.append(u'--' + key)
- value = uqAppData.get(key)
+ for key in uqAppData.keys(): # noqa: SIM118
+ uqAppDataList.append('--' + key)
+ value = uqAppData.get(key)
if isinstance(value, string_types):
- uqAppDataList.append(u'' + value)
+ uqAppDataList.append('' + value)
else:
- uqAppDataList.append(u'' + str(value))
+ uqAppDataList.append('' + str(value))
- if run_type == 'run' or run_type == 'set_up':
- workflow_log('Running Simulation...')
- workflow_log(' '.join(uqAppDataList))
- command, result, returncode = runApplication(uqAppDataList)
+ if run_type == 'run' or run_type == 'set_up': # noqa: PLR1714
+ workflow_log('Running Simulation...') # noqa: F405
+ workflow_log(' '.join(uqAppDataList)) # noqa: F405
+ command, result, returncode = runApplication(uqAppDataList) # noqa: F405
log_output.append([command, result, returncode])
- workflow_log('Simulation ended...')
+ workflow_log('Simulation ended...') # noqa: F405
else:
- workflow_log('Setup run only. No simulation performed.')
+ workflow_log('Setup run only. No simulation performed.') # noqa: F405
- except WorkFlowInputError as e:
- print('workflow error: %s' % e.value)
- workflow_log('workflow error: %s' % e.value)
- workflow_log(divider)
- exit(1)
+ except WorkFlowInputError as e: # noqa: F405
+ print('workflow error: %s' % e.value) # noqa: T201, UP031
+ workflow_log('workflow error: %s' % e.value) # noqa: F405, UP031
+ workflow_log(divider) # noqa: F405
+ exit(1) # noqa: PLR1722
# unhandled exceptions are handled here
- except Exception as e:
+ except Exception:
# print('workflow error: %s' % e.value)
- workflow_log('unhandled exception... exiting')
+ workflow_log('unhandled exception... exiting') # noqa: F405
raise
-if __name__ == '__main__':
- if len(sys.argv) != 4:
- print('\nNeed three arguments, e.g.:\n')
- print(' python %s action workflowinputfile.json workflowapplications.json' % sys.argv[0])
- print('\nwhere: action is either check or run\n')
- exit(1)
+if __name__ == '__main__':
+ if len(sys.argv) != 4: # noqa: PLR2004
+ print('\nNeed three arguments, e.g.:\n') # noqa: T201
+ print( # noqa: T201
+ ' python %s action workflowinputfile.json workflowapplications.json' # noqa: UP031
+ % sys.argv[0]
+ )
+ print('\nwhere: action is either check or run\n') # noqa: T201
+ exit(1) # noqa: PLR1722
run_type = sys.argv[1]
- inputFile = sys.argv[2]
- applicationsRegistry = sys.argv[3]
+ inputFile = sys.argv[2] # noqa: N816
+ applicationsRegistry = sys.argv[3] # noqa: N816
main(run_type, inputFile, applicationsRegistry)
- workflow_log_file = 'workflow-log-%s.txt' % (strftime('%Y-%m-%d-%H-%M-%S-utc', gmtime()))
- log_filehandle = open(workflow_log_file, 'w')
+ workflow_log_file = 'workflow-log-%s.txt' % ( # noqa: UP031
+ strftime('%Y-%m-%d-%H-%M-%S-utc', gmtime())
+ )
+ log_filehandle = open(workflow_log_file, 'w') # noqa: PLW1514, PTH123, SIM115
- print(type(log_filehandle))
+ print(type(log_filehandle)) # noqa: T201
print(divider, file=log_filehandle)
print('Start of Log', file=log_filehandle)
print(divider, file=log_filehandle)
@@ -379,14 +443,13 @@ def main(run_type, inputFile, applicationsRegistry):
# nb: log_output is a global variable, defined at the top of this script.
for result in log_output:
print(divider, file=log_filehandle)
- print('command line:\n%s\n' % result[0], file=log_filehandle)
+ print('command line:\n%s\n' % result[0], file=log_filehandle) # noqa: UP031
print(divider, file=log_filehandle)
- print('output from process:\n%s\n' % result[1], file=log_filehandle)
+ print('output from process:\n%s\n' % result[1], file=log_filehandle) # noqa: UP031
print(divider, file=log_filehandle)
print('End of Log', file=log_filehandle)
print(divider, file=log_filehandle)
- workflow_log('Log file: %s' % workflow_log_file)
- workflow_log('End of run.')
-
+ workflow_log('Log file: %s' % workflow_log_file) # noqa: F405, UP031
+ workflow_log('End of run.') # noqa: F405
diff --git a/modules/Workflow/MultiModelApplication.py b/modules/Workflow/MultiModelApplication.py
index 970372322..8eec0c5be 100755
--- a/modules/Workflow/MultiModelApplication.py
+++ b/modules/Workflow/MultiModelApplication.py
@@ -1,5 +1,4 @@
-# -*- coding: utf-8 -*-
-#
+# # noqa: EXE002, INP001, D100
# Copyright (c) 2019 The Regents of the University of California
#
# This file is part of the SimCenter Backend Applications.
@@ -36,155 +35,162 @@
# Contributors:
# Frank McKenna
-import sys, os, json
import argparse
-from pathlib import Path
-
-sys.path.insert(0, os.path.dirname(os.path.realpath(__file__)))
-
-import whale.main as whale
-from whale.main import log_msg, log_div, _parse_app_registry, create_command, run_command
-
-def main(inputFile,
- appKey,
- getRV,
- samFile,
- evtFile,
- edpFile,
- simFile,
- registryFile,
- appDir) :
-
+import json
+import os
+import sys
+
+sys.path.insert(0, os.path.dirname(os.path.realpath(__file__))) # noqa: PTH120
+
+from whale.main import (
+ _parse_app_registry, # noqa: PLC2701
+ create_command,
+ run_command,
+)
+
+
+def main( # noqa: C901, D103
+ inputFile, # noqa: N803
+ appKey, # noqa: N803
+ getRV, # noqa: N803
+ samFile, # noqa: ARG001, N803
+ evtFile, # noqa: ARG001, N803
+ edpFile, # noqa: ARG001, N803
+ simFile, # noqa: ARG001, N803
+ registryFile, # noqa: N803
+ appDir, # noqa: N803
+):
#
- # get some dir paths, load input file and get data for app, appKey
+ # get some dir paths, load input file and get data for app, appKey
#
- inputDir = os.path.dirname(inputFile)
- inputFileName = os.path.basename(inputFile)
- if inputDir != "":
+ inputDir = os.path.dirname(inputFile) # noqa: PTH120, N806
+ inputFileName = os.path.basename(inputFile) # noqa: PTH119, N806
+ if inputDir != '': # noqa: PLC1901
os.chdir(inputDir)
- with open(inputFileName, 'r') as f:
+ with open(inputFileName) as f: # noqa: PLW1514, PTH123
inputs = json.load(f)
- if 'referenceDir' in inputs:
+ if 'referenceDir' in inputs: # noqa: SIM401
reference_dir = inputs['referenceDir']
else:
reference_dir = inputDir
- appData={}
+ appData = {} # noqa: N806
if appKey in inputs:
- appData = inputs[appKey]
+ appData = inputs[appKey] # noqa: N806
else:
- raise KeyError(f'No data for "{appKey}" application in the input file "{inputFile}"')
-
- eventApp = False;
+ raise KeyError( # noqa: TRY003
+ f'No data for "{appKey}" application in the input file "{inputFile}"' # noqa: EM102
+ )
- if appKey == "Events":
- eventApp = True;
- appData = appData[0]
+ eventApp = False # noqa: N806
+ if appKey == 'Events':
+ eventApp = True # noqa: N806, F841
+ appData = appData[0] # noqa: N806
+ print('appKEY: ', appKey) # noqa: T201
+ print('appDATA: ', appData) # noqa: T201
+ print('HELLO ') # noqa: T201
- print('appKEY: ', appKey)
- print('appDATA: ', appData)
- print('HELLO ')
-
if 'models' not in appData:
- print('NO models in: ', appData)
- raise KeyError(f'"models" not defined in data for "{appKey}" application in the input file "{inputFile}')
-
- if len(appData['models']) < 2:
- raise RuntimeError(f"At least two models must be provided if the multimodel {appKey} application is used")
+ print('NO models in: ', appData) # noqa: T201
+ raise KeyError( # noqa: TRY003
+            f'"models" not defined in data for "{appKey}" application in the input file "{inputFile}"'  # noqa: EM102
+ )
+ if len(appData['models']) < 2: # noqa: PLR2004
+ raise RuntimeError( # noqa: TRY003
+ f'At least two models must be provided if the multimodel {appKey} application is used' # noqa: EM102
+ )
models = appData['models']
- modelToRun = appData['modelToRun']
+ modelToRun = appData['modelToRun'] # noqa: N806
if not getRV:
-
#
# make sure not still a string, if so try reading from params.in
- #
-
+ #
+
if isinstance(modelToRun, str):
- rvName = "MultiModel-"+appKey
+ rvName = 'MultiModel-' + appKey # noqa: N806
# if not here, try opening params.in and getting var from there
- with open("params.in", 'r') as params:
+ with open('params.in') as params: # noqa: PLW1514, PTH123
# Read the file line by line
for line in params:
values = line.strip().split()
- print(values)
+ print(values) # noqa: T201
if values[0] == rvName:
- modelToRun = values[1]
-
- modelToRun = int(float(modelToRun))
-
- appsInMultiModel=[]
- appDataInMultiModel=[]
- appRunDataInMultiModel=[]
- beliefs=[]
- sumBeliefs = 0
-
- numModels = 0
-
+ modelToRun = values[1] # noqa: N806
+
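+        # coerce the model index to an integer (values read back from params.in are strings)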
+ modelToRun = int(float(modelToRun)) # noqa: N806
+
+ appsInMultiModel = [] # noqa: N806
+ appDataInMultiModel = [] # noqa: N806
+ appRunDataInMultiModel = [] # noqa: N806
+ beliefs = []
+ sumBeliefs = 0 # noqa: N806
+
+ numModels = 0 # noqa: N806
+
for model in models:
belief = model['belief']
- appName = model['Application']
- appData = model['ApplicationData']
- appRunData = model['data']
+ appName = model['Application'] # noqa: N806
+ appData = model['ApplicationData'] # noqa: N806
+ appRunData = model['data'] # noqa: N806
beliefs.append(belief)
- sumBeliefs = sumBeliefs + belief
+ sumBeliefs = sumBeliefs + belief # noqa: N806, PLR6104
appsInMultiModel.append(appName)
appDataInMultiModel.append(appData)
appRunDataInMultiModel.append(appRunData)
- numModels = numModels + 1
+ numModels = numModels + 1 # noqa: N806, PLR6104
- for i in range(0,numModels):
- beliefs[i] = beliefs[i]/sumBeliefs
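+    # normalize the beliefs so they sum to one; they become the Weights of the
+    # discrete model-selection RV added in the --getRV pass below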
+ for i in range(numModels):
+ beliefs[i] = beliefs[i] / sumBeliefs # noqa: PLR6104
#
# parse WorkflowApplications to get possible applications
# need the 2 ifs, as appKey needs to be Events, but switch in WorkflowApplications needs to be Event!
#
-
- if appKey == "Events":
- appTypes=["Event"]
+
+ if appKey == 'Events':
+ appTypes = ['Event'] # noqa: N806
else:
- appTypes=[appKey]
+ appTypes = [appKey] # noqa: N806
+
+ parsedRegistry = _parse_app_registry(registryFile, appTypes) # noqa: N806
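+    # _parse_app_registry returns the parsed registry; element 0 maps each requested
+    # app type to its registered applications (name -> application object)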
- parsedRegistry = (_parse_app_registry(registryFile, appTypes))
-
- if appKey == "Events":
- appsRegistry = parsedRegistry[0]["Event"]
+ if appKey == 'Events':
+ appsRegistry = parsedRegistry[0]['Event'] # noqa: N806
else:
- appsRegistry = parsedRegistry[0][appKey]
+ appsRegistry = parsedRegistry[0][appKey] # noqa: N806
#
# now we run the application
# if getRV we have to run each & collect the RVs
# if !getRV we run the single application chosen
#
-
+
if getRV:
-
- print("MultiModel - getRV")
+ print('MultiModel - getRV') # noqa: T201
#
# launch each application with getRV and add any new RandomVariable
# add randomvariable for MultiModel itself, to launch application
- # need to create temp inputfile for just that application,
+        # need to create a temp input file for just that application
#
- for i in range(0, numModels):
- appName = appsInMultiModel[i]
- print('appsRegistry:', appsRegistry)
+ for i in range(numModels):
+ appName = appsInMultiModel[i] # noqa: N806
+ print('appsRegistry:', appsRegistry) # noqa: T201
application = appsRegistry[appName]
- application.set_pref(appDataInMultiModel[i], reference_dir)
+ application.set_pref(appDataInMultiModel[i], reference_dir)
asset_command_list = application.get_command_list(appDir)
- asset_command_list.append(u'--getRV')
+ asset_command_list.append('--getRV')
command = create_command(asset_command_list)
- # thinking to store aplications commands in a file so don't have to repeat this!
+            # consider storing the application commands in a file so we don't have to repeat this
#
# update input file
@@ -194,21 +200,21 @@ def main(inputFile,
# for NOW, add RV to input file
#
- randomVariables = inputs['randomVariables']
- rvName = "MultiModel-"+appKey
- rvValue="RV.MultiModel-"+appKey
+ randomVariables = inputs['randomVariables'] # noqa: N806
+ rvName = 'MultiModel-' + appKey # noqa: N806
+ rvValue = 'RV.MultiModel-' + appKey # noqa: N806
# nrv = len(randomVariables)
-
- thisRV = {
- "distribution": "Discrete",
- "inputType": "Parameters",
- "name": rvName,
- "refCount": 1,
- "value": rvValue,
- "createdRun": True,
- "variableClass": "Uncertain",
- "Weights":beliefs,
- "Values":[i+1 for i in range(0,numModels)]
+
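+        # a discrete RV whose Values are the 1-based model indices and whose Weights
+        # are the normalized beliefs; its sampled value (read back from params.in)
+        # selects which model is run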
+ thisRV = { # noqa: N806
+ 'distribution': 'Discrete',
+ 'inputType': 'Parameters',
+ 'name': rvName,
+ 'refCount': 1,
+ 'value': rvValue,
+ 'createdRun': True,
+ 'variableClass': 'Uncertain',
+ 'Weights': beliefs,
+ 'Values': [i + 1 for i in range(numModels)],
}
randomVariables.append(thisRV)
@@ -223,76 +229,74 @@ def main(inputFile,
# newCorrMat[0:nrv,0:nrv] = corrMat
# inputs['correlationMatrix'] = newCorrMat.flatten().tolist()
+ with open(inputFile, 'w') as outfile: # noqa: PLW1514, PTH123
+ json.dump(inputs, outfile)
- with open(inputFile, "w") as outfile:
- json.dump(inputs, outfile)
+ print('UPDATING INPUT FILE:', inputFile) # noqa: T201
- print('UPDATING INPUT FILE:', inputFile)
-
#
# for now just run the last model (works in sWHALE for all apps that don't create RV, i.e. events)
#
# create input file for application
-
- tmpFile = "MultiModel." + appKey + ".json"
- inputs[appKey] = appRunDataInMultiModel[numModels-1]
-
- with open(tmpFile, "w") as outfile:
+
+ tmpFile = 'MultiModel.' + appKey + '.json' # noqa: N806
+ inputs[appKey] = appRunDataInMultiModel[numModels - 1]
+
+ with open(tmpFile, 'w') as outfile: # noqa: PLW1514, PTH123
json.dump(inputs, outfile)
# run the application
asset_command_list = application.get_command_list(appDir)
- indexInputFile = asset_command_list.index('--filenameAIM') + 1
+ indexInputFile = asset_command_list.index('--filenameAIM') + 1 # noqa: N806
asset_command_list[indexInputFile] = tmpFile
- asset_command_list.append(u'--getRV')
+ asset_command_list.append('--getRV')
command = create_command(asset_command_list)
run_command(command)
- print('RUNNING --getRV:', command)
-
+ print('RUNNING --getRV:', command) # noqa: T201
+
else:
-
- print("MultiModel - run")
- modelToRun = modelToRun - 1
+ print('MultiModel - run') # noqa: T201
+ modelToRun = modelToRun - 1 # noqa: N806, PLR6104
# get app data given model
- appName = appsInMultiModel[modelToRun]
+ appName = appsInMultiModel[modelToRun] # noqa: N806
application = appsRegistry[appName]
- application.set_pref(appDataInMultiModel[modelToRun], reference_dir)
+ application.set_pref(appDataInMultiModel[modelToRun], reference_dir)
# create modified input file for app
- tmpFile = "MultiModel." + appKey + ".json"
+ tmpFile = 'MultiModel.' + appKey + '.json' # noqa: N806
- #if appKey == "Events":
+ # if appKey == "Events":
# inputs["Events"][0]=appRunDataInMultiModel[modelToRun]
- #else:
+ # else:
# inputs[appKey] = appRunDataInMultiModel[modelToRun]
- inputs[appKey] = appRunDataInMultiModel[modelToRun]
+ inputs[appKey] = appRunDataInMultiModel[modelToRun]
- print('model to run:', modelToRun)
+ print('model to run:', modelToRun) # noqa: T201
- with open(tmpFile, "w") as outfile:
+ with open(tmpFile, 'w') as outfile: # noqa: PLW1514, PTH123
json.dump(inputs, outfile)
- print("INPUTS", inputs)
-
+ print('INPUTS', inputs) # noqa: T201
+
# run application
asset_command_list = application.get_command_list(appDir)
- indexInputFile = asset_command_list.index('--filenameAIM') + 1
- asset_command_list[indexInputFile] = tmpFile
- command = create_command(asset_command_list)
+ indexInputFile = asset_command_list.index('--filenameAIM') + 1 # noqa: N806
+ asset_command_list[indexInputFile] = tmpFile
+ command = create_command(asset_command_list)
run_command(command)
- print('RUNNING:', command)
+ print('RUNNING:', command) # noqa: T201
- print("Finished MultiModelApplication")
+ print('Finished MultiModelApplication') # noqa: T201
-if __name__ == '__main__':
- #Defining the command line arguments
+if __name__ == '__main__':
+ # Defining the command line arguments
parser = argparse.ArgumentParser(
- "Run the MultiModel application.",
- allow_abbrev=False)
-
+ 'Run the MultiModel application.', allow_abbrev=False
+ )
+
parser = argparse.ArgumentParser()
parser.add_argument('--filenameAIM', default=None)
parser.add_argument('--filenameSAM', default='NA')
@@ -300,29 +304,40 @@ def main(inputFile,
parser.add_argument('--filenameEDP', default='NA')
parser.add_argument('--filenameSIM', default='NA')
parser.add_argument('--getRV', nargs='?', const=True, default=False)
- parser.add_argument("--appKey", default=None)
- parser.add_argument("--registry",
- default=os.path.join(os.path.dirname(os.path.abspath(__file__)),
- "WorkflowApplications.json"),
- help="Path to file containing registered workflow applications")
- parser.add_argument("-a", "--appDir",
- default=os.path.dirname(os.path.dirname(os.path.dirname(os.path.abspath(__file__)))),
- help="Absolute path to the local application directory.")
- parser.add_argument("-l", "--logFile",
- default='log.txt',
- help="Path where the log file will be saved.")
-
- args, unknown = parser.parse_known_args()
-
-
- main(inputFile = args.filenameAIM,
- appKey = args.appKey,
- getRV = args.getRV,
- samFile = args.filenameSAM,
- evtFile = args.filenameEVENT,
- edpFile = args.filenameEDP,
- simFile = args.filenameSIM,
- registryFile = args.registry,
- appDir = args.appDir)
-
-
+ parser.add_argument('--appKey', default=None)
+ parser.add_argument(
+ '--registry',
+ default=os.path.join( # noqa: PTH118
+ os.path.dirname(os.path.abspath(__file__)), # noqa: PTH100, PTH120
+ 'WorkflowApplications.json',
+ ),
+ help='Path to file containing registered workflow applications',
+ )
+ parser.add_argument(
+ '-a',
+ '--appDir',
+ default=os.path.dirname( # noqa: PTH120
+ os.path.dirname(os.path.dirname(os.path.abspath(__file__))) # noqa: PTH100, PTH120
+ ),
+ help='Absolute path to the local application directory.',
+ )
+ parser.add_argument(
+ '-l',
+ '--logFile',
+ default='log.txt',
+ help='Path where the log file will be saved.',
+ )
+
+ args, unknown = parser.parse_known_args()
+
+ main(
+ inputFile=args.filenameAIM,
+ appKey=args.appKey,
+ getRV=args.getRV,
+ samFile=args.filenameSAM,
+ evtFile=args.filenameEVENT,
+ edpFile=args.filenameEDP,
+ simFile=args.filenameSIM,
+ registryFile=args.registry,
+ appDir=args.appDir,
+ )
diff --git a/modules/Workflow/MultiModelDriver.py b/modules/Workflow/MultiModelDriver.py
index 84ea8df61..d53c39269 100644
--- a/modules/Workflow/MultiModelDriver.py
+++ b/modules/Workflow/MultiModelDriver.py
@@ -1,5 +1,4 @@
-# -*- coding: utf-8 -*-
-#
+# # noqa: INP001, D100
# Copyright (c) 2019 The Regents of the University of California
#
# This file is part of the SimCenter Backend Applications.
@@ -36,216 +35,248 @@
# Contributors:
# Frank McKenna
-import sys, os, json
import argparse
+import json
+import os
+import sys
from copy import deepcopy
-import numpy as np
-sys.path.insert(0, os.path.dirname(os.path.realpath(__file__)))
+sys.path.insert(0, os.path.dirname(os.path.realpath(__file__))) # noqa: PTH120
+
+from whale.main import (
+ _parse_app_registry, # noqa: PLC2701
+ create_command,
+ run_command,
+)
-from whale.main import _parse_app_registry, create_command, run_command
-def main(inputFile,
- driverFile,
- appKey,
- registryFile,
- appDir,
- runType,
- osType):
-
+def main(inputFile, driverFile, appKey, registryFile, appDir, runType, osType): # noqa: C901, N803, D103
#
- # get some dir paths, load input file and get data for app, appKey
+ # get some dir paths, load input file and get data for app, appKey
#
- inputDir = os.path.dirname(inputFile)
- inputFileName = os.path.basename(inputFile)
- if inputDir != "":
+ inputDir = os.path.dirname(inputFile) # noqa: PTH120, N806
+ inputFileName = os.path.basename(inputFile) # noqa: PTH119, N806
+ if inputDir != '': # noqa: PLC1901
os.chdir(inputDir)
- with open(inputFileName, 'r') as f:
+ with open(inputFileName) as f: # noqa: PLW1514, PTH123
inputs = json.load(f)
-
- localAppDir = inputs["localAppDir"]
- remoteAppDir = inputs["remoteAppDir"]
- appDir = localAppDir
- if runType == "runningRemote":
- appDir = remoteAppDir
+ localAppDir = inputs['localAppDir'] # noqa: N806
+ remoteAppDir = inputs['remoteAppDir'] # noqa: N806
+
+ appDir = localAppDir # noqa: N806
+ if runType == 'runningRemote':
+ appDir = remoteAppDir # noqa: N806
- if 'referenceDir' in inputs:
+ if 'referenceDir' in inputs: # noqa: SIM401
reference_dir = inputs['referenceDir']
else:
reference_dir = inputDir
- appData={}
+ appData = {} # noqa: N806
if appKey in inputs:
- appData = inputs[appKey]
+ appData = inputs[appKey] # noqa: N806
if 'models' not in appData:
- print('NO models in: ', appData)
- raise KeyError(f'"models" not defined in data for "{appKey}" application in the input file "{inputFile}')
-
- if len(appData['models']) < 2:
- raise RuntimeError(f"At least two models must be provided if the multimodel {appKey} application is used")
+ print('NO models in: ', appData) # noqa: T201
+ raise KeyError( # noqa: TRY003
+            f'"models" not defined in data for "{appKey}" application in the input file "{inputFile}"'  # noqa: EM102
+ )
+
+ if len(appData['models']) < 2: # noqa: PLR2004
+ raise RuntimeError( # noqa: TRY003
+ f'At least two models must be provided if the multimodel {appKey} application is used' # noqa: EM102
+ )
models = appData['models']
- modelToRun = appData['modelToRun']
-
- appsInMultiModel=[]
- appDataInMultiModel=[]
- appRunDataInMultiModel=[]
- beliefs=[]
- sumBeliefs = 0
-
- numModels = 0
-
+ modelToRun = appData['modelToRun'] # noqa: N806
+
+ appsInMultiModel = [] # noqa: N806
+ appDataInMultiModel = [] # noqa: N806
+ appRunDataInMultiModel = [] # noqa: N806
+ beliefs = []
+ sumBeliefs = 0 # noqa: N806
+
+ numModels = 0 # noqa: N806
+
for model in models:
belief = model['belief']
- appName = model['Application']
- appData = model['ApplicationData']
- appRunData = model['data']
+ appName = model['Application'] # noqa: N806
+ appData = model['ApplicationData'] # noqa: N806
+ appRunData = model['data'] # noqa: N806
beliefs.append(belief)
- sumBeliefs = sumBeliefs + belief
+ sumBeliefs = sumBeliefs + belief # noqa: N806, PLR6104
appsInMultiModel.append(appName)
appDataInMultiModel.append(appData)
appRunDataInMultiModel.append(appRunData)
- numModels = numModels + 1
+ numModels = numModels + 1 # noqa: N806, PLR6104
for i in range(numModels):
- beliefs[i] = beliefs[i]/sumBeliefs
-
- appTypes=[appKey]
-
- parsedRegistry = (_parse_app_registry(registryFile, appTypes))
- appsRegistry = parsedRegistry[0][appKey]
+ beliefs[i] = beliefs[i] / sumBeliefs # noqa: PLR6104
+
+ appTypes = [appKey] # noqa: N806
+
+ parsedRegistry = _parse_app_registry(registryFile, appTypes) # noqa: N806
+ appsRegistry = parsedRegistry[0][appKey] # noqa: N806
#
# add RV to input file
#
- randomVariables = inputs['randomVariables']
- rvName = "MultiModel-"+appKey
- rvValue="RV.MultiModel-"+appKey
-
- thisRV = {
- "distribution": "Discrete",
- "inputType": "Parameters",
- "name": rvName,
- "refCount": 1,
- "value": rvValue,
- "createdRun": True,
- "variableClass": "Uncertain",
- "Weights":beliefs,
- "Values":[i+1 for i in range(numModels)]
+ randomVariables = inputs['randomVariables'] # noqa: N806
+ rvName = 'MultiModel-' + appKey # noqa: N806
+ rvValue = 'RV.MultiModel-' + appKey # noqa: N806
+
+ thisRV = { # noqa: N806
+ 'distribution': 'Discrete',
+ 'inputType': 'Parameters',
+ 'name': rvName,
+ 'refCount': 1,
+ 'value': rvValue,
+ 'createdRun': True,
+ 'variableClass': 'Uncertain',
+ 'Weights': beliefs,
+ 'Values': [i + 1 for i in range(numModels)],
}
randomVariables.append(thisRV)
- with open(inputFile, "w") as outfile:
- json.dump(inputs, outfile)
+ with open(inputFile, 'w') as outfile: # noqa: PLW1514, PTH123
+ json.dump(inputs, outfile)
#
# create driver file that runs the right driver
#
- paramsFileName = "params.in"
- multiModelString = "MultiModel"
- exeFileName = "runMultiModelDriver"
- if osType == "Windows" and runType == "runningLocal":
- driverFileBat = driverFile + ".bat"
- exeFileName = exeFileName + ".exe"
- with open(driverFileBat, "wb") as f:
- f.write(bytes(os.path.join(appDir, "applications", "Workflow", exeFileName) + f" {paramsFileName} {driverFileBat} {multiModelString}", "UTF-8"))
- elif osType == "Windows" and runType == "runningRemote":
- with open(driverFile, "wb") as f:
- f.write(appDir+ "/applications/Workflow/"+ exeFileName + f" {paramsFileName} {driverFile} {multiModelString}", "UTF-8")
+ paramsFileName = 'params.in' # noqa: N806
+ multiModelString = 'MultiModel' # noqa: N806
+ exeFileName = 'runMultiModelDriver' # noqa: N806
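+    # the main driver becomes a one-line wrapper that invokes runMultiModelDriver
+    # with the params file, the real driver name, and the 'MultiModel' RV prefix;
+    # per-model drivers are generated below as MultiModel_<n>_<driverFile>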
+ if osType == 'Windows' and runType == 'runningLocal':
+ driverFileBat = driverFile + '.bat' # noqa: N806
+ exeFileName = exeFileName + '.exe' # noqa: N806, PLR6104
+ with open(driverFileBat, 'wb') as f: # noqa: FURB103, PTH123
+ f.write(
+ bytes(
+ os.path.join(appDir, 'applications', 'Workflow', exeFileName) # noqa: PTH118
+ + f' {paramsFileName} {driverFileBat} {multiModelString}',
+ 'UTF-8',
+ )
+ )
+ elif osType == 'Windows' and runType == 'runningRemote':
+        with open(driverFile, 'wb') as f:  # noqa: FURB103, PTH123
+            f.write(
+                bytes(
+                    appDir
+                    + '/applications/Workflow/'
+                    + exeFileName
+                    + f' {paramsFileName} {driverFile} {multiModelString}',
+                    'UTF-8',
+                )
+            )
else:
- with open(driverFile, "wb") as f:
- f.write(bytes(os.path.join(appDir, "applications", "Workflow", exeFileName) + f" {paramsFileName} {driverFile} {multiModelString}", "UTF-8"))
-
- for modelToRun in range(numModels):
+ with open(driverFile, 'wb') as f: # noqa: FURB103, PTH123
+ f.write(
+ bytes(
+ os.path.join(appDir, 'applications', 'Workflow', exeFileName) # noqa: PTH118
+ + f' {paramsFileName} {driverFile} {multiModelString}',
+ 'UTF-8',
+ )
+ )
+ for modelToRun in range(numModels): # noqa: N806
#
# run the app to create the driver file for each model
#
-
- appName = appsInMultiModel[modelToRun]
+
+ appName = appsInMultiModel[modelToRun] # noqa: N806
application = appsRegistry[appName]
- application.set_pref(appDataInMultiModel[modelToRun], reference_dir)
+ application.set_pref(appDataInMultiModel[modelToRun], reference_dir)
#
# create input file for application
#
- modelInputFile = f"MultiModel_{modelToRun+1}_" + inputFile
- modelDriverFile = f"MultiModel_{modelToRun+1}_" + driverFile
-
- inputsTmp = deepcopy(inputs)
- inputsTmp[appKey] = appRunDataInMultiModel[modelToRun]
- inputsTmp['Applications'][appKey] = {
- "Application":appsInMultiModel[modelToRun],
- "ApplicationData":appDataInMultiModel[modelToRun]
+ modelInputFile = f'MultiModel_{modelToRun + 1}_' + inputFile # noqa: N806
+ modelDriverFile = f'MultiModel_{modelToRun + 1}_' + driverFile # noqa: N806
+
+ inputsTmp = deepcopy(inputs) # noqa: N806
+ inputsTmp[appKey] = appRunDataInMultiModel[modelToRun]
+ inputsTmp['Applications'][appKey] = {
+ 'Application': appsInMultiModel[modelToRun],
+ 'ApplicationData': appDataInMultiModel[modelToRun],
}
-
- with open(modelInputFile, "w") as outfile:
+
+ with open(modelInputFile, 'w') as outfile: # noqa: PLW1514, PTH123
json.dump(inputsTmp, outfile)
#
# run the application to create driver file
#
-
+
asset_command_list = application.get_command_list(localAppDir)
- indexInputFile = asset_command_list.index('--workflowInput') + 1
+ indexInputFile = asset_command_list.index('--workflowInput') + 1 # noqa: N806
asset_command_list[indexInputFile] = modelInputFile
- indexInputFile = asset_command_list.index('--driverFile') + 1
- asset_command_list[indexInputFile] = modelDriverFile
- asset_command_list.append(u'--osType')
+ indexInputFile = asset_command_list.index('--driverFile') + 1 # noqa: N806
+ asset_command_list[indexInputFile] = modelDriverFile
+ asset_command_list.append('--osType')
asset_command_list.append(osType)
- asset_command_list.append(u'--runType')
- asset_command_list.append(runType)
- asset_command_list.append(u'--modelIndex')
- asset_command_list.append(modelToRun+1)
+ asset_command_list.append('--runType')
+ asset_command_list.append(runType)
+ asset_command_list.append('--modelIndex')
+ asset_command_list.append(modelToRun + 1)
command = create_command(asset_command_list)
run_command(command)
if __name__ == '__main__':
-
#
# Defining the command line arguments
#
-
+
parser = argparse.ArgumentParser(
- "Run the MultiModel application.",
- allow_abbrev=False)
-
+ 'Run the MultiModel application.', allow_abbrev=False
+ )
+
parser = argparse.ArgumentParser()
parser.add_argument('--driverFile', default=None)
parser.add_argument('--workflowInput', default=None)
- parser.add_argument("--appKey", default=None)
+ parser.add_argument('--appKey', default=None)
parser.add_argument('--runType', default=None)
- parser.add_argument('--osType', default=None)
- parser.add_argument("--registry",
- default=os.path.join(os.path.dirname(os.path.abspath(__file__)),
- "WorkflowApplications.json"),
- help="Path to file containing registered workflow applications")
- # parser.add_argument('--runDriver', default="False")
- parser.add_argument("-a", "--appDir",
- default=os.path.dirname(os.path.dirname(os.path.dirname(os.path.abspath(__file__)))),
- help="Absolute path to the local application directory.")
- parser.add_argument("-l", "--logFile",
- default='log.txt',
- help="Path where the log file will be saved.")
-
- args = parser.parse_args()
+ parser.add_argument('--osType', default=None)
+ parser.add_argument(
+ '--registry',
+ default=os.path.join( # noqa: PTH118
+ os.path.dirname(os.path.abspath(__file__)), # noqa: PTH100, PTH120
+ 'WorkflowApplications.json',
+ ),
+ help='Path to file containing registered workflow applications',
+ )
+ # parser.add_argument('--runDriver', default="False")
+ parser.add_argument(
+ '-a',
+ '--appDir',
+ default=os.path.dirname( # noqa: PTH120
+ os.path.dirname(os.path.dirname(os.path.abspath(__file__))) # noqa: PTH100, PTH120
+ ),
+ help='Absolute path to the local application directory.',
+ )
+ parser.add_argument(
+ '-l',
+ '--logFile',
+ default='log.txt',
+ help='Path where the log file will be saved.',
+ )
+
+ args = parser.parse_args()
#
# run the app
#
-
- main(inputFile = args.workflowInput,
- driverFile = args.driverFile,
- appKey = args.appKey,
- registryFile = args.registry,
- appDir = args.appDir,
- runType = args.runType,
- osType = args.osType)
-
+
+ main(
+ inputFile=args.workflowInput,
+ driverFile=args.driverFile,
+ appKey=args.appKey,
+ registryFile=args.registry,
+ appDir=args.appDir,
+ runType=args.runType,
+ osType=args.osType,
+ )
diff --git a/modules/Workflow/PBE.py b/modules/Workflow/PBE.py
index eb8665492..6552a576c 100644
--- a/modules/Workflow/PBE.py
+++ b/modules/Workflow/PBE.py
@@ -1,102 +1,98 @@
-# written: fmk, adamzs
+# written: fmk, adamzs # noqa: CPY001, D100, INP001
# import functions for Python 2.X support
-from __future__ import division, print_function
import sys
-if sys.version.startswith('2'):
- range=xrange
- string_types = basestring
+
+if sys.version.startswith('2'):
+ range = xrange # noqa: A001, F821
+ string_types = basestring # noqa: F821
else:
string_types = str
import json
import os
-import subprocess
-from time import gmtime, strftime
import posixpath
+from time import gmtime, strftime
divider = '#' * 80
log_output = []
-from WorkflowUtils import *
+from WorkflowUtils import * # noqa: E402, F403
+
-def main(run_type, inputFile, applicationsRegistry):
+def main(run_type, inputFile, applicationsRegistry): # noqa: C901, D103, N803, PLR0912, PLR0914, PLR0915
# the whole workflow is wrapped within a 'try' block.
# a number of exceptions (files missing, explicit application failures, etc.) are
# handled explicitly to aid the user.
 # But unhandled exceptions cause the workflow to stop with an error, handled in the
# exception block way at the bottom of this main() function
- try:
-
- workflow_log(divider)
- workflow_log('Start of run')
- workflow_log(divider)
- workflow_log('workflow input file: %s' % inputFile)
- workflow_log('application registry file: %s' % applicationsRegistry)
- workflow_log('runtype: %s' % run_type)
- workflow_log(divider)
-
+ try: # noqa: PLR1702
+ workflow_log(divider) # noqa: F405
+ workflow_log('Start of run') # noqa: F405
+ workflow_log(divider) # noqa: F405
+ workflow_log('workflow input file: %s' % inputFile) # noqa: F405, UP031
+ workflow_log('application registry file: %s' % applicationsRegistry) # noqa: F405, UP031
+ workflow_log('runtype: %s' % run_type) # noqa: F405, UP031
+ workflow_log(divider) # noqa: F405
#
# first we parse the applications registry to load all possible applications
# - for each application type we place in a dictionary key being name, value containing path to executable
#
- with open(applicationsRegistry, 'r') as data_file:
- registryData = json.load(data_file)
- # convert all relative paths to full paths
+ with open(applicationsRegistry) as data_file: # noqa: PLW1514, PTH123
+ registryData = json.load(data_file) # noqa: N806
+ # convert all relative paths to full paths
- A = 'Applications'
- Applications = dict()
- appList = 'Event Modeling EDP Simulation UQ'.split(' ')
- appList = [a + A for a in appList]
+ A = 'Applications' # noqa: N806
+ Applications = dict() # noqa: C408, N806
+ appList = 'Event Modeling EDP Simulation UQ'.split(' ') # noqa: N806
+ appList = [a + A for a in appList] # noqa: N806
for app_type in appList:
-
if app_type in registryData:
- xApplicationData = registryData[app_type]
- applicationsData = xApplicationData['Applications']
+ xApplicationData = registryData[app_type] # noqa: N806
+ applicationsData = xApplicationData['Applications'] # noqa: N806
for app in applicationsData:
- appName = app['Name']
- appExe = app['ExecutablePath']
- if not app_type in Applications:
- Applications[app_type] = dict()
+ appName = app['Name'] # noqa: N806
+ appExe = app['ExecutablePath'] # noqa: N806
+ if app_type not in Applications:
+ Applications[app_type] = dict() # noqa: C408
Applications[app_type][appName] = appExe
#
# open input file, and parse json into data
#
- with open(inputFile, 'r') as data_file:
+ with open(inputFile) as data_file: # noqa: PLW1514, PTH123
data = json.load(data_file)
# convert all relative paths to full paths
# relative2fullpath(data)
if 'runDir' in data:
- runDIR = data['runDir']
+ runDIR = data['runDir'] # noqa: N806
else:
- raise WorkFlowInputError('Need a runDir Entry')
+ raise WorkFlowInputError('Need a runDir Entry') # noqa: EM101, F405, TRY003, TRY301
if 'remoteAppDir' in data:
- remoteAppDir = data['remoteAppDir']
+ remoteAppDir = data['remoteAppDir'] # noqa: N806
else:
- raise WorkFlowInputError('Need a remoteAppDir Entry')
+ raise WorkFlowInputError('Need a remoteAppDir Entry') # noqa: EM101, F405, TRY003, TRY301
if 'localAppDir' in data:
- localAppDir = data['localAppDir']
+ localAppDir = data['localAppDir'] # noqa: N806
else:
- raise WorkFlowInputError('Need a localAppDir Entry')
+ raise WorkFlowInputError('Need a localAppDir Entry') # noqa: EM101, F405, TRY003, TRY301
#
# before running chdir to templatedir
#
- workflow_log('run Directory: %s' % runDIR)
+ workflow_log('run Directory: %s' % runDIR) # noqa: F405, UP031
os.chdir(runDIR)
os.chdir('templatedir')
-
#
# now we parse for the applications & app specific data in workflow
#
@@ -104,7 +100,7 @@ def main(run_type, inputFile, applicationsRegistry):
if 'Applications' in data:
available_apps = data['Applications']
else:
- raise WorkFlowInputError('Need an Applications Entry')
+ raise WorkFlowInputError('Need an Applications Entry') # noqa: EM101, F405, TRY003, TRY301
#
# get events, for each the application and its data .. FOR NOW 1 EVENT
@@ -114,161 +110,221 @@ def main(run_type, inputFile, applicationsRegistry):
events = available_apps['Events']
for event in events:
-
if 'EventClassification' in event:
- eventClassification = event['EventClassification']
+ eventClassification = event['EventClassification'] # noqa: N806
if eventClassification == 'Earthquake':
if 'Application' in event:
- eventApplication = event['Application']
- eventAppData = event['ApplicationData']
- eventData = event['ApplicationData']
-
- if eventApplication in Applications['EventApplications'].keys():
- eventAppExe = Applications['EventApplications'].get(eventApplication)
- workflow_log(remoteAppDir)
- workflow_log(eventAppExe)
- eventAppExeLocal = posixpath.join(localAppDir,eventAppExe)
- eventAppExeRemote = posixpath.join(remoteAppDir,eventAppExe)
- workflow_log(eventAppExeRemote)
+ eventApplication = event['Application'] # noqa: N806
+ eventAppData = event['ApplicationData'] # noqa: N806
+ eventData = event['ApplicationData'] # noqa: N806
+
+ if (
+ eventApplication # noqa: SIM118
+ in Applications['EventApplications'].keys()
+ ):
+ eventAppExe = Applications['EventApplications'].get( # noqa: N806
+ eventApplication
+ )
+ workflow_log(remoteAppDir) # noqa: F405
+ workflow_log(eventAppExe) # noqa: F405
+ eventAppExeLocal = posixpath.join( # noqa: N806
+ localAppDir, eventAppExe
+ )
+ eventAppExeRemote = posixpath.join( # noqa: N806
+ remoteAppDir, eventAppExe
+ )
+ workflow_log(eventAppExeRemote) # noqa: F405
else:
- raise WorkFlowInputError('Event application %s not in registry' % eventApplication)
+ raise WorkFlowInputError( # noqa: F405, TRY301
+ 'Event application %s not in registry' # noqa: UP031
+ % eventApplication
+ )
else:
- raise WorkFlowInputError('Need an EventApplication section')
- # TODO: Fully implement HydroUQ's waterborne events into PBE
- elif eventClassification == 'Tsunami' or eventClassification == 'Surge' or eventClassification == 'StormSurge' or eventClassification == 'Hydro':
- is_hydrouq_implemented = False # To be set to True when HydroUQ is in PBE
- if (is_hydrouq_implemented):
+ raise WorkFlowInputError( # noqa: F405, TRY003, TRY301
+ 'Need an EventApplication section' # noqa: EM101
+ )
+ # TODO: Fully implement HydroUQ's waterborne events into PBE # noqa: TD002
+ elif (
+ eventClassification == 'Tsunami' # noqa: PLR1714
+ or eventClassification == 'Surge'
+ or eventClassification == 'StormSurge'
+ or eventClassification == 'Hydro'
+ ):
+ is_hydrouq_implemented = (
+ False # To be set to True when HydroUQ is in PBE
+ )
+ if is_hydrouq_implemented:
if 'Application' in event:
- eventApplication = event['Application']
- eventAppData = event['ApplicationData']
- eventData = event['ApplicationData']
-
- if eventApplication in Applications['EventApplications'].keys():
- eventAppExe = Applications['EventApplications'].get(eventApplication)
- workflow_log(remoteAppDir)
- workflow_log(eventAppExe)
- eventAppExeLocal = posixpath.join(localAppDir,eventAppExe)
- eventAppExeRemote = posixpath.join(remoteAppDir,eventAppExe)
- workflow_log(eventAppExeRemote)
+ eventApplication = event['Application'] # noqa: N806
+ eventAppData = event['ApplicationData'] # noqa: N806
+ eventData = event['ApplicationData'] # noqa: N806, F841
+
+ if (
+ eventApplication # noqa: SIM118
+ in Applications['EventApplications'].keys()
+ ):
+ eventAppExe = Applications[ # noqa: N806
+ 'EventApplications'
+ ].get(eventApplication)
+ workflow_log(remoteAppDir) # noqa: F405
+ workflow_log(eventAppExe) # noqa: F405
+ eventAppExeLocal = posixpath.join( # noqa: N806
+ localAppDir, eventAppExe
+ )
+ eventAppExeRemote = posixpath.join( # noqa: N806
+ remoteAppDir, eventAppExe
+ )
+ workflow_log(eventAppExeRemote) # noqa: F405
else:
- raise WorkFlowInputError('Event application %s not in registry' % eventApplication)
+ raise WorkFlowInputError( # noqa: F405, TRY301
+ 'Event application %s not in registry' # noqa: UP031
+ % eventApplication
+ )
else:
- raise WorkFlowInputError('Need an EventApplication section')
+ raise WorkFlowInputError( # noqa: F405, TRY003, TRY301
+ 'Need an EventApplication section' # noqa: EM101
+ )
else:
- raise WorkFlowInputError('HydroUQ waterborne events are not implemented in PBE yet. Please use different workflow for now...')
+ raise WorkFlowInputError( # noqa: F405, TRY003, TRY301
+ 'HydroUQ waterborne events are not implemented in PBE yet. Please use a different workflow for now...' # noqa: EM101
+ )
else:
- raise WorkFlowInputError('Event classification must be Earthquake, not %s' % eventClassification)
+ raise WorkFlowInputError( # noqa: F405, TRY301
+ 'Event classification must be Earthquake, not %s' # noqa: UP031
+ % eventClassification
+ )
else:
- raise WorkFlowInputError('Need Event Classification')
+ raise WorkFlowInputError('Need Event Classification') # noqa: EM101, F405, TRY003, TRY301
else:
- raise WorkFlowInputError('Need an Events Entry in Applications')
+ raise WorkFlowInputError('Need an Events Entry in Applications') # noqa: EM101, F405, TRY003, TRY301
#
# get modeling application and its data
#
if 'Modeling' in available_apps:
- modelingApp = available_apps['Modeling']
+ modelingApp = available_apps['Modeling'] # noqa: N806
if 'Application' in modelingApp:
- modelingApplication = modelingApp['Application']
+ modelingApplication = modelingApp['Application'] # noqa: N806
# check modeling app in registry, if so get full executable path
- modelingAppData = modelingApp['ApplicationData']
- if modelingApplication in Applications['ModelingApplications'].keys():
- modelingAppExe = Applications['ModelingApplications'].get(modelingApplication)
- modelingAppExeLocal = posixpath.join(localAppDir,modelingAppExe)
- modelingAppExeRemote = posixpath.join(remoteAppDir,modelingAppExe)
+ modelingAppData = modelingApp['ApplicationData'] # noqa: N806
+ if (
+ modelingApplication # noqa: SIM118
+ in Applications['ModelingApplications'].keys()
+ ):
+ modelingAppExe = Applications['ModelingApplications'].get( # noqa: N806
+ modelingApplication
+ )
+ modelingAppExeLocal = posixpath.join(localAppDir, modelingAppExe) # noqa: N806
+ modelingAppExeRemote = posixpath.join( # noqa: N806
+ remoteAppDir, modelingAppExe
+ )
else:
- raise WorkFlowInputError('Modeling application %s not in registry' % modelingApplication)
+ raise WorkFlowInputError( # noqa: F405, TRY301
+ 'Modeling application %s not in registry' # noqa: UP031
+ % modelingApplication
+ )
else:
- raise WorkFlowInputError('Need a ModelingApplication in Modeling data')
-
+ raise WorkFlowInputError( # noqa: F405, TRY003, TRY301
+ 'Need a ModelingApplication in Modeling data' # noqa: EM101
+ )
else:
- raise WorkFlowInputError('Need a Modeling Entry in Applications')
+ raise WorkFlowInputError('Need a Modeling Entry in Applications') # noqa: EM101, F405, TRY003, TRY301
#
# get edp application and its data .. CURRENTLY MODELING APP MUST CREATE EDP
#
if 'EDP' in available_apps:
- edpApp = available_apps['EDP']
-
+ edpApp = available_apps['EDP'] # noqa: N806
+
if 'Application' in edpApp:
- edpApplication = edpApp['Application']
-
+ edpApplication = edpApp['Application'] # noqa: N806
+
# check modeling app in registry, if so get full executable path
- edpAppData = edpApp['ApplicationData']
- if edpApplication in Applications['EDPApplications'].keys():
- edpAppExe = Applications['EDPApplications'].get(edpApplication)
- edpAppExeLocal = posixpath.join(localAppDir,edpAppExe)
- edpAppExeRemote = posixpath.join(remoteAppDir,edpAppExe)
+ edpAppData = edpApp['ApplicationData'] # noqa: N806
+ if edpApplication in Applications['EDPApplications'].keys(): # noqa: SIM118
+ edpAppExe = Applications['EDPApplications'].get(edpApplication) # noqa: N806
+ edpAppExeLocal = posixpath.join(localAppDir, edpAppExe) # noqa: N806
+ edpAppExeRemote = posixpath.join(remoteAppDir, edpAppExe) # noqa: N806
else:
- raise WorkFlowInputError('EDP application {} not in registry'.format(edpApplication))
-
+ raise WorkFlowInputError( # noqa: F405, TRY003, TRY301
+ f'EDP application {edpApplication} not in registry' # noqa: EM102
+ )
+
else:
- raise WorkFlowInputError('Need an EDPApplication in EDP data')
-
+ raise WorkFlowInputError('Need an EDPApplication in EDP data') # noqa: EM101, F405, TRY003, TRY301
+
else:
- raise WorkFlowInputError('Need an EDP Entry in Applications')
+ raise WorkFlowInputError('Need an EDP Entry in Applications') # noqa: EM101, F405, TRY003, TRY301
#
- # get simulation application and its data
+ # get simulation application and its data
#
if 'Simulation' in available_apps:
- simulationApp = available_apps['Simulation']
+ simulationApp = available_apps['Simulation'] # noqa: N806
if 'Application' in simulationApp:
- simulationApplication = simulationApp['Application']
+ simulationApplication = simulationApp['Application'] # noqa: N806
# check modeling app in registry, if so get full executable path
- simAppData = simulationApp['ApplicationData']
- if simulationApplication in Applications['SimulationApplications'].keys():
- simAppExe = Applications['SimulationApplications'].get(simulationApplication)
- simAppExeLocal = posixpath.join(localAppDir,simAppExe)
- simAppExeRemote = posixpath.join(remoteAppDir,simAppExe)
+ simAppData = simulationApp['ApplicationData'] # noqa: N806
+ if (
+ simulationApplication # noqa: SIM118
+ in Applications['SimulationApplications'].keys()
+ ):
+ simAppExe = Applications['SimulationApplications'].get( # noqa: N806
+ simulationApplication
+ )
+ simAppExeLocal = posixpath.join(localAppDir, simAppExe) # noqa: N806
+ simAppExeRemote = posixpath.join(remoteAppDir, simAppExe) # noqa: N806
else:
- raise WorkFlowInputError('Simulation application {} not in registry'.format(simulationApplication))
+ raise WorkFlowInputError( # noqa: F405, TRY003, TRY301
+ f'Simulation application {simulationApplication} not in registry' # noqa: EM102
+ )
else:
- raise WorkFlowInputError('Need an SimulationApplication in Simulation data')
-
+ raise WorkFlowInputError( # noqa: F405, TRY003, TRY301
+ 'Need a SimulationApplication in Simulation data' # noqa: EM101
+ )
else:
- raise WorkFlowInputError('Need a Simulation Entry in Applications')
+ raise WorkFlowInputError('Need a Simulation Entry in Applications') # noqa: EM101, F405, TRY003, TRY301
if 'UQ' in available_apps:
- uqApp = available_apps['UQ']
+ uqApp = available_apps['UQ'] # noqa: N806
if 'Application' in uqApp:
- uqApplication = uqApp['Application']
+ uqApplication = uqApp['Application'] # noqa: N806
# check modeling app in registry, if so get full executable path
- uqAppData = uqApp['ApplicationData']
- if uqApplication in Applications['UQApplications'].keys():
- uqAppExe = Applications['UQApplications'].get(uqApplication)
- uqAppExeLocal = posixpath.join(localAppDir,uqAppExe)
- uqAppExeRemote = posixpath.join(localAppDir,uqAppExe)
+ uqAppData = uqApp['ApplicationData'] # noqa: N806
+ if uqApplication in Applications['UQApplications'].keys(): # noqa: SIM118
+ uqAppExe = Applications['UQApplications'].get(uqApplication) # noqa: N806
+ uqAppExeLocal = posixpath.join(localAppDir, uqAppExe) # noqa: N806
+ uqAppExeRemote = posixpath.join(localAppDir, uqAppExe) # noqa: N806, F841
else:
- raise WorkFlowInputError('UQ application {} not in registry'.format(uqApplication))
+ raise WorkFlowInputError( # noqa: F405, TRY003, TRY301
+ f'UQ application {uqApplication} not in registry' # noqa: EM102
+ )
else:
- raise WorkFlowInputError('Need a UQApplication in UQ data')
-
+ raise WorkFlowInputError('Need a UQApplication in UQ data') # noqa: EM101, F405, TRY003, TRY301
else:
- raise WorkFlowInputError('Need a Simulation Entry in Applications')
-
+ raise WorkFlowInputError('Need a UQ Entry in Applications') # noqa: EM101, F405, TRY003, TRY301
- workflow_log('SUCCESS: Parsed workflow input')
- workflow_log(divider)
+ workflow_log('SUCCESS: Parsed workflow input') # noqa: F405
+ workflow_log(divider) # noqa: F405
#
# now invoke the applications
@@ -281,174 +337,223 @@ def main(run_type, inputFile, applicationsRegistry):
# - perform Simulation
# - getDL
- bimFILE = 'dakota.json'
- eventFILE = 'EVENT.json'
- samFILE = 'SAM.json'
- edpFILE = 'EDP.json'
- simFILE = 'SIM.json'
- driverFile = 'driver'
+ bimFILE = 'dakota.json' # noqa: N806
+ eventFILE = 'EVENT.json' # noqa: N806
+ samFILE = 'SAM.json' # noqa: N806
+ edpFILE = 'EDP.json' # noqa: N806
+ simFILE = 'SIM.json' # noqa: N806
+ driverFile = 'driver' # noqa: N806
# open driver file & write building app (minus the --getRV) to it
- driverFILE = open(driverFile, 'w')
+ driverFILE = open(driverFile, 'w') # noqa: N806, PLW1514, PTH123, SIM115
# get RV for event
- eventAppDataList = ['"{}"'.format(eventAppExeRemote), '--filenameBIM', bimFILE, '--filenameEVENT', eventFILE]
- if (eventAppExe.endswith('.py')):
+ eventAppDataList = [ # noqa: N806
+ f'"{eventAppExeRemote}"',
+ '--filenameBIM',
+ bimFILE,
+ '--filenameEVENT',
+ eventFILE,
+ ]
+ if eventAppExe.endswith('.py'):
eventAppDataList.insert(0, 'python')
- for key in eventAppData.keys():
- eventAppDataList.append(u"--" + key)
+ for key in eventAppData.keys(): # noqa: SIM118
+ eventAppDataList.append('--' + key)
value = eventAppData.get(key)
- eventAppDataList.append(u"" + value)
-
+ eventAppDataList.append('' + value)
+
for item in eventAppDataList:
- driverFILE.write('%s ' % item)
+ driverFILE.write('%s ' % item) # noqa: UP031
driverFILE.write('\n')
eventAppDataList.append('--getRV')
- if (eventAppExe.endswith('.py')):
- eventAppDataList[1] = u""+eventAppExeLocal
+ if eventAppExe.endswith('.py'):
+ eventAppDataList[1] = '' + eventAppExeLocal
else:
- eventAppDataList[0] = u""+eventAppExeLocal
+ eventAppDataList[0] = '' + eventAppExeLocal
- command, result, returncode = runApplication(eventAppDataList)
+ command, result, returncode = runApplication(eventAppDataList) # noqa: F405
log_output.append([command, result, returncode])
# get RV for building model
- modelAppDataList = ['"{}"'.format(modelingAppExeRemote), '--filenameBIM', bimFILE, '--filenameEVENT', eventFILE, '--filenameSAM',
- samFILE]
-
- if (modelingAppExe.endswith('.py')):
+ modelAppDataList = [ # noqa: N806
+ f'"{modelingAppExeRemote}"',
+ '--filenameBIM',
+ bimFILE,
+ '--filenameEVENT',
+ eventFILE,
+ '--filenameSAM',
+ samFILE,
+ ]
+
+ if modelingAppExe.endswith('.py'):
modelAppDataList.insert(0, 'python')
- for key in modelingAppData.keys():
- modelAppDataList.append(u'--' + key)
- modelAppDataList.append(u'' + modelingAppData.get(key))
+ for key in modelingAppData.keys(): # noqa: SIM118
+ modelAppDataList.append('--' + key) # noqa: FURB113
+ modelAppDataList.append('' + modelingAppData.get(key))
for item in modelAppDataList:
- driverFILE.write('%s ' % item)
+ driverFILE.write('%s ' % item) # noqa: UP031
driverFILE.write('\n')
modelAppDataList.append('--getRV')
- if (modelingAppExe.endswith('.py')):
+ if modelingAppExe.endswith('.py'):
modelAppDataList[1] = modelingAppExeLocal
else:
modelAppDataList[0] = modelingAppExeLocal
- command, result, returncode = runApplication(modelAppDataList)
+ command, result, returncode = runApplication(modelAppDataList) # noqa: F405
log_output.append([command, result, returncode])
-
# get RV for EDP!
- edpAppDataList = ['"{}"'.format(edpAppExeRemote), '--filenameBIM', bimFILE, '--filenameEVENT', eventFILE, '--filenameSAM', samFILE,
- '--filenameEDP', edpFILE]
-
- if (edpAppExe.endswith('.py')):
+ edpAppDataList = [ # noqa: N806
+ f'"{edpAppExeRemote}"',
+ '--filenameBIM',
+ bimFILE,
+ '--filenameEVENT',
+ eventFILE,
+ '--filenameSAM',
+ samFILE,
+ '--filenameEDP',
+ edpFILE,
+ ]
+
+ if edpAppExe.endswith('.py'):
edpAppDataList.insert(0, 'python')
- for key in edpAppData.keys():
- edpAppDataList.append(u'--' + key)
- edpAppDataList.append(u'' + edpAppData.get(key))
+ for key in edpAppData.keys(): # noqa: SIM118
+ edpAppDataList.append('--' + key) # noqa: FURB113
+ edpAppDataList.append('' + edpAppData.get(key))
for item in edpAppDataList:
- driverFILE.write('%s ' % item)
+ driverFILE.write('%s ' % item) # noqa: UP031
driverFILE.write('\n')
- if (edpAppExe.endswith('.py')):
+ if edpAppExe.endswith('.py'):
edpAppDataList[1] = edpAppExeLocal
else:
edpAppDataList[0] = edpAppExeLocal
edpAppDataList.append('--getRV')
- command, result, returncode = runApplication(edpAppDataList)
+ command, result, returncode = runApplication(edpAppDataList) # noqa: F405
log_output.append([command, result, returncode])
# get RV for Simulation
- simAppDataList = ['"{}"'.format(simAppExeRemote), '--filenameBIM', bimFILE, '--filenameSAM', samFILE, '--filenameEVENT', eventFILE,
- '--filenameEDP', edpFILE, '--filenameSIM', simFILE]
-
- if (simAppExe.endswith('.py')):
+ simAppDataList = [ # noqa: N806
+ f'"{simAppExeRemote}"',
+ '--filenameBIM',
+ bimFILE,
+ '--filenameSAM',
+ samFILE,
+ '--filenameEVENT',
+ eventFILE,
+ '--filenameEDP',
+ edpFILE,
+ '--filenameSIM',
+ simFILE,
+ ]
+
+ if simAppExe.endswith('.py'):
simAppDataList.insert(0, 'python')
- for key in simAppData.keys():
- simAppDataList.append(u'--' + key)
- simAppDataList.append(u'' + simAppData.get(key))
+ for key in simAppData.keys(): # noqa: SIM118
+ simAppDataList.append('--' + key) # noqa: FURB113
+ simAppDataList.append('' + simAppData.get(key))
for item in simAppDataList:
- driverFILE.write('%s ' % item)
+ driverFILE.write('%s ' % item) # noqa: UP031
driverFILE.write('\n')
simAppDataList.append('--getRV')
- if (simAppExe.endswith('.py')):
+ if simAppExe.endswith('.py'):
simAppDataList[1] = simAppExeLocal
else:
simAppDataList[0] = simAppExeLocal
- command, result, returncode = runApplication(simAppDataList)
+ command, result, returncode = runApplication(simAppDataList) # noqa: F405
log_output.append([command, result, returncode])
-
# perform the simulation
driverFILE.close()
- uqAppDataList = ['"{}"'.format(uqAppExeLocal), '--filenameBIM', bimFILE, '--filenameSAM', samFILE, '--filenameEVENT', eventFILE,
- '--filenameEDP', edpFILE, '--filenameSIM', simFILE, '--driverFile', driverFile]
-
- if (uqAppExe.endswith('.py')):
+ uqAppDataList = [ # noqa: N806
+ f'"{uqAppExeLocal}"',
+ '--filenameBIM',
+ bimFILE,
+ '--filenameSAM',
+ samFILE,
+ '--filenameEVENT',
+ eventFILE,
+ '--filenameEDP',
+ edpFILE,
+ '--filenameSIM',
+ simFILE,
+ '--driverFile',
+ driverFile,
+ ]
+
+ if uqAppExe.endswith('.py'):
uqAppDataList.insert(0, 'python')
uqAppDataList[1] = uqAppExeLocal
- uqAppDataList.append('--runType')
+ uqAppDataList.append('--runType') # noqa: FURB113
uqAppDataList.append(run_type)
- for key in uqAppData.keys():
- uqAppDataList.append(u'--' + key)
+ for key in uqAppData.keys(): # noqa: SIM118
+ uqAppDataList.append('--' + key)
value = uqAppData.get(key)
if isinstance(value, string_types):
- uqAppDataList.append(u'' + value)
+ uqAppDataList.append('' + value)
else:
- uqAppDataList.append(u'' + str(value))
+ uqAppDataList.append('' + str(value))
- if run_type == 'run' or run_type == 'set_up':
- workflow_log('Running Simulation...')
- workflow_log(' '.join(uqAppDataList))
- command, result, returncode = runApplication(uqAppDataList)
+ if run_type == 'run' or run_type == 'set_up': # noqa: PLR1714
+ workflow_log('Running Simulation...') # noqa: F405
+ workflow_log(' '.join(uqAppDataList)) # noqa: F405
+ command, result, returncode = runApplication(uqAppDataList) # noqa: F405
log_output.append([command, result, returncode])
- workflow_log('Simulation ended...')
+ workflow_log('Simulation ended...') # noqa: F405
else:
- workflow_log('Setup run only. No simulation performed.')
+ workflow_log('Setup run only. No simulation performed.') # noqa: F405
- except WorkFlowInputError as e:
- print('workflow error: %s' % e.value)
- workflow_log('workflow error: %s' % e.value)
- workflow_log(divider)
- exit(1)
+ except WorkFlowInputError as e: # noqa: F405
+ print('workflow error: %s' % e.value) # noqa: T201, UP031
+ workflow_log('workflow error: %s' % e.value) # noqa: F405, UP031
+ workflow_log(divider) # noqa: F405
+ exit(1) # noqa: PLR1722
# unhandled exceptions are handled here
except Exception as e:
- print('workflow error: %s' % e.value)
- workflow_log('unhandled exception... exiting')
+ print('workflow error: %s' % e.value) # noqa: T201, UP031
+ workflow_log('unhandled exception... exiting') # noqa: F405
raise
-if __name__ == '__main__':
- if len(sys.argv) != 4:
- print('\nNeed three arguments, e.g.:\n')
- print(' python %s action workflowinputfile.json workflowapplications.json' % sys.argv[0])
- print('\nwhere: action is either check or run\n')
- exit(1)
+if __name__ == '__main__':
+ if len(sys.argv) != 4: # noqa: PLR2004
+ print('\nNeed three arguments, e.g.:\n') # noqa: T201
+ print( # noqa: T201
+ ' python %s action workflowinputfile.json workflowapplications.json' # noqa: UP031
+ % sys.argv[0]
+ )
+ print('\nwhere: action is either check or run\n') # noqa: T201
+ exit(1) # noqa: PLR1722
run_type = sys.argv[1]
- inputFile = sys.argv[2]
- applicationsRegistry = sys.argv[3]
+ inputFile = sys.argv[2] # noqa: N816
+ applicationsRegistry = sys.argv[3] # noqa: N816
main(run_type, inputFile, applicationsRegistry)
- workflow_log_file = 'workflow-log-%s.txt' % (strftime('%Y-%m-%d-%H-%M-%S-utc', gmtime()))
- log_filehandle = open(workflow_log_file, 'w')
+ workflow_log_file = 'workflow-log-%s.txt' % ( # noqa: UP031
+ strftime('%Y-%m-%d-%H-%M-%S-utc', gmtime())
+ )
+ log_filehandle = open(workflow_log_file, 'w') # noqa: PLW1514, PTH123, SIM115
- print(type(log_filehandle))
+ print(type(log_filehandle)) # noqa: T201
print(divider, file=log_filehandle)
print('Start of Log', file=log_filehandle)
print(divider, file=log_filehandle)
@@ -456,14 +561,13 @@ def main(run_type, inputFile, applicationsRegistry):
# nb: log_output is a global variable, defined at the top of this script.
for result in log_output:
print(divider, file=log_filehandle)
- print('command line:\n%s\n' % result[0], file=log_filehandle)
+ print('command line:\n%s\n' % result[0], file=log_filehandle) # noqa: UP031
print(divider, file=log_filehandle)
- print('output from process:\n%s\n' % result[1], file=log_filehandle)
+ print('output from process:\n%s\n' % result[1], file=log_filehandle) # noqa: UP031
print(divider, file=log_filehandle)
print('End of Log', file=log_filehandle)
print(divider, file=log_filehandle)
- workflow_log('Log file: %s' % workflow_log_file)
- workflow_log('End of run.')
-
+ workflow_log('Log file: %s' % workflow_log_file) # noqa: F405, UP031
+ workflow_log('End of run.') # noqa: F405
diff --git a/modules/Workflow/PBE_workflow.py b/modules/Workflow/PBE_workflow.py
index 9794395eb..90551865d 100644
--- a/modules/Workflow/PBE_workflow.py
+++ b/modules/Workflow/PBE_workflow.py
@@ -1,36 +1,35 @@
-# -*- coding: utf-8 -*-
-#
+# # noqa: INP001, D100
# Copyright (c) 2019 The Regents of the University of California
# Copyright (c) 2019 Leland Stanford Junior University
#
# This file is part of the PBE Application.
-#
-# Redistribution and use in source and binary forms, with or without
+#
+# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
#
-# 1. Redistributions of source code must retain the above copyright notice,
+# 1. Redistributions of source code must retain the above copyright notice,
# this list of conditions and the following disclaimer.
#
-# 2. Redistributions in binary form must reproduce the above copyright notice,
-# this list of conditions and the following disclaimer in the documentation
+# 2. Redistributions in binary form must reproduce the above copyright notice,
+# this list of conditions and the following disclaimer in the documentation
# and/or other materials provided with the distribution.
#
-# 3. Neither the name of the copyright holder nor the names of its contributors
-# may be used to endorse or promote products derived from this software without
+# 3. Neither the name of the copyright holder nor the names of its contributors
+# may be used to endorse or promote products derived from this software without
# specific prior written permission.
#
-# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
-# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
-# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
-# ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE
-# LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
-# CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
-# SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
-# INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
-# CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
-# ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
+# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
+# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
+# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
+# ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE
+# LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
+# CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
+# SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
+# INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
+# CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
+# ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
# POSSIBILITY OF SUCH DAMAGE.
-#
+#
# You should have received a copy of the BSD 3-Clause License along with the
# PBE Application. If not, see .
#
@@ -42,71 +41,84 @@
# Chaofeng Wang
# import functions for Python 2.X support
-from __future__ import division, print_function
-import sys, os, json
-if sys.version.startswith('2'):
- range=xrange
- string_types = basestring
+import json
+import os
+import sys
+
+if sys.version.startswith('2'):
+ range = xrange # noqa: A001, F821
+ string_types = basestring # noqa: F821
else:
string_types = str
-sys.path.insert(0, os.path.dirname(os.path.realpath(__file__)))
+sys.path.insert(0, os.path.dirname(os.path.realpath(__file__))) # noqa: PTH120
import whale.main as whale
-from whale.main import log_msg, log_div
+from whale.main import log_div, log_msg
-def main(run_type, input_file, app_registry):
+def main(run_type, input_file, app_registry): # noqa: D103
# initialize the log file
- with open(input_file, 'r') as f:
+ with open(input_file) as f: # noqa: PLW1514, PTH123
inputs = json.load(f)
- runDir = inputs['runDir']
+ runDir = inputs['runDir'] # noqa: N806
whale.log_file = runDir + '/log.txt'
- with open(whale.log_file, 'w') as f:
- f.write('PBE workflow\n')
+ with open(whale.log_file, 'w') as f: # noqa: FURB103, PLW1514, PTH123
+ f.write('PBE workflow\n')
# echo the inputs
log_msg(log_div)
log_msg('Started running the workflow script')
- log_msg(log_div)
+ log_msg(log_div)
# If there is an external EDP file provided, change the run_type to loss_only
try:
- if inputs['DamageAndLoss']['ResponseModel']['ResponseDescription']['EDPDataFile'] is not None:
+ if (
+ inputs['DamageAndLoss']['ResponseModel']['ResponseDescription'][
+ 'EDPDataFile'
+ ]
+ is not None
+ ):
run_type = 'loss_only'
- except:
+ except: # noqa: S110, E722
pass
- WF = whale.Workflow(run_type, input_file, app_registry,
- app_type_list = ['Event', 'Modeling', 'EDP', 'Simulation', 'UQ', 'DL'])
+ WF = whale.Workflow( # noqa: N806
+ run_type,
+ input_file,
+ app_registry,
+ app_type_list=['Event', 'Modeling', 'EDP', 'Simulation', 'UQ', 'DL'],
+ )
if WF.run_type != 'loss_only':
-
# initialize the working directory
WF.init_simdir()
# prepare the input files for the simulation
- WF.create_RV_files(
- app_sequence = ['Event', 'Modeling', 'EDP', 'Simulation'])
+ WF.create_RV_files(app_sequence=['Event', 'Modeling', 'EDP', 'Simulation'])
# create the workflow driver file
WF.create_driver_file(
- app_sequence = ['Event', 'Modeling', 'EDP', 'Simulation'])
+ app_sequence=['Event', 'Modeling', 'EDP', 'Simulation']
+ )
# run uq engine to simulate response
WF.simulate_response()
if WF.run_type != 'set_up':
# run dl engine to estimate losses
- WF.estimate_losses(input_file = input_file)
-
-if __name__ == '__main__':
+ WF.estimate_losses(input_file=input_file)
- if len(sys.argv) != 4:
- print('\nNeed three arguments, e.g.:\n')
- print(' python %s action workflowinputfile.json workflowapplications.json' % sys.argv[0])
- print('\nwhere: action is either check or run\n')
- exit(1)
- main(run_type=sys.argv[1], input_file=sys.argv[2], app_registry=sys.argv[3])
\ No newline at end of file
+if __name__ == '__main__':
+ if len(sys.argv) != 4: # noqa: PLR2004
+ print('\nNeed three arguments, e.g.:\n') # noqa: T201
+ print( # noqa: T201
+ ' python %s action workflowinputfile.json workflowapplications.json' # noqa: UP031
+ % sys.argv[0]
+ )
+ print('\nwhere: action is either check or run\n') # noqa: T201
+ exit(1) # noqa: PLR1722
+
+ main(run_type=sys.argv[1], input_file=sys.argv[2], app_registry=sys.argv[3])
diff --git a/modules/Workflow/R2DTool_workflow.py b/modules/Workflow/R2DTool_workflow.py
index c0a3c3909..793e3ac52 100644
--- a/modules/Workflow/R2DTool_workflow.py
+++ b/modules/Workflow/R2DTool_workflow.py
@@ -1,5 +1,4 @@
-# -*- coding: utf-8 -*-
-#
+# # noqa: INP001, D100
# Copyright (c) 2019 The Regents of the University of California
# Copyright (c) 2019 Leland Stanford Junior University
#
@@ -41,35 +40,44 @@
# Michael Gardner
# Chaofeng Wang
-import sys, os, json
import argparse
import json
+import os
+import sys
from pathlib import Path
-sys.path.insert(0, os.path.dirname(os.path.realpath(__file__)))
+sys.path.insert(0, os.path.dirname(os.path.realpath(__file__))) # noqa: PTH120
import whale.main as whale
-from whale.main import log_msg, log_div
-
-def main(run_type, input_file, app_registry,
- force_cleanup, bldg_id_filter, reference_dir,
- working_dir, app_dir, log_file):
-
+from whale.main import log_div, log_msg
+
+
+def main( # noqa: D103
+ run_type,
+ input_file,
+ app_registry,
+ force_cleanup,
+ bldg_id_filter,
+ reference_dir,
+ working_dir,
+ app_dir,
+ log_file,
+):
# initialize the log file
- with open(input_file, 'r') as f:
+ with open(input_file) as f: # noqa: PLW1514, PTH123
inputs = json.load(f)
if working_dir is not None:
- runDir = working_dir
+ runDir = working_dir # noqa: N806
else:
- runDir = inputs['runDir']
+ runDir = inputs['runDir'] # noqa: N806
- if not os.path.exists(runDir):
- os.mkdir(runDir)
+ if not os.path.exists(runDir): # noqa: PTH110
+ os.mkdir(runDir) # noqa: PTH102
if log_file == 'log.txt':
whale.log_file = runDir + '/log.txt'
else:
whale.log_file = log_file
- with open(whale.log_file, 'w') as f:
+ with open(whale.log_file, 'w') as f: # noqa: FURB103, PLW1514, PTH123
f.write('RDT workflow\n')
whale.print_system_info()
@@ -81,23 +89,35 @@ def main(run_type, input_file, app_registry,
if force_cleanup:
log_msg('Forced cleanup turned on.')
- WF = whale.Workflow(run_type, input_file, app_registry,
- app_type_list = ['Building', 'RegionalEvent', 'RegionalMapping',
- 'Event', 'Modeling', 'EDP', 'Simulation', 'UQ', 'DL'],
- reference_dir = reference_dir,
- working_dir = working_dir,
- app_dir = app_dir,
- units = inputs.get('units', None),
- outputs=inputs.get('outputs', None))
+ WF = whale.Workflow( # noqa: N806
+ run_type,
+ input_file,
+ app_registry,
+ app_type_list=[
+ 'Building',
+ 'RegionalEvent',
+ 'RegionalMapping',
+ 'Event',
+ 'Modeling',
+ 'EDP',
+ 'Simulation',
+ 'UQ',
+ 'DL',
+ ],
+ reference_dir=reference_dir,
+ working_dir=working_dir,
+ app_dir=app_dir,
+ units=inputs.get('units', None),
+ outputs=inputs.get('outputs', None),
+ )
if bldg_id_filter is not None:
- print(bldg_id_filter)
- log_msg(
- f'Overriding simulation scope; running buildings {bldg_id_filter}')
+ print(bldg_id_filter) # noqa: T201
+ log_msg(f'Overriding simulation scope; running buildings {bldg_id_filter}')
# If a Min or Max attribute is used when calling the script, we need to
# update the min and max values in the input file.
- WF.workflow_apps['Building'].pref["filter"] = bldg_id_filter
+ WF.workflow_apps['Building'].pref['filter'] = bldg_id_filter
# initialize the working directory
WF.init_workdir()
@@ -106,11 +126,11 @@ def main(run_type, input_file, app_registry,
building_file = WF.create_building_files()
WF.perform_regional_mapping(building_file)
- # TODO: not elegant code, fix later
- with open(WF.building_file_path, 'r') as f:
+ # TODO: not elegant code, fix later # noqa: TD002
+ with open(WF.building_file_path) as f: # noqa: PLW1514, PTH123
bldg_data = json.load(f)
- for bldg in bldg_data: #[:1]:
+ for bldg in bldg_data: # [:1]:
log_msg(bldg)
# initialize the simulation directory
@@ -118,73 +138,101 @@ def main(run_type, input_file, app_registry,
# prepare the input files for the simulation
WF.create_RV_files(
- app_sequence = ['Event', 'Modeling', 'EDP', 'Simulation'],
- BIM_file = bldg['file'], bldg_id=bldg['id'])
+ app_sequence=['Event', 'Modeling', 'EDP', 'Simulation'],
+ BIM_file=bldg['file'],
+ bldg_id=bldg['id'],
+ )
# create the workflow driver file
WF.create_driver_file(
- app_sequence = ['Building', 'Event', 'Modeling', 'EDP', 'Simulation'],
- bldg_id=bldg['id'])
+ app_sequence=['Building', 'Event', 'Modeling', 'EDP', 'Simulation'],
+ bldg_id=bldg['id'],
+ )
# run uq engine to simulate response
- WF.simulate_response(BIM_file = bldg['file'], bldg_id=bldg['id'])
+ WF.simulate_response(BIM_file=bldg['file'], bldg_id=bldg['id'])
# run dl engine to estimate losses
- WF.estimate_losses(BIM_file = bldg['file'], bldg_id = bldg['id'])
+ WF.estimate_losses(BIM_file=bldg['file'], bldg_id=bldg['id'])
if force_cleanup:
- #clean up intermediate files from the simulation
+ # clean up intermediate files from the simulation
WF.cleanup_simdir(bldg['id'])
# aggregate results
- WF.aggregate_results(bldg_data = bldg_data)
+ WF.aggregate_results(bldg_data=bldg_data)
if force_cleanup:
# clean up intermediate files from the working directory
WF.cleanup_workdir()
-if __name__ == '__main__':
- #Defining the command line arguments
-
- workflowArgParser = argparse.ArgumentParser(
- "Run the NHERI SimCenter workflow for a set of assets.",
- allow_abbrev=False)
-
- workflowArgParser.add_argument("configuration",
- help="Configuration file specifying the applications and data to be "
- "used")
- workflowArgParser.add_argument("-F", "--filter",
+if __name__ == '__main__':
+ # Defining the command line arguments
+
+ workflowArgParser = argparse.ArgumentParser( # noqa: N816
+ 'Run the NHERI SimCenter workflow for a set of assets.', allow_abbrev=False
+ )
+
+ workflowArgParser.add_argument(
+ 'configuration',
+ help='Configuration file specifying the applications and data to be used',
+ )
+ workflowArgParser.add_argument(
+ '-F',
+ '--filter',
default=None,
- help="Provide a subset of building ids to run")
- workflowArgParser.add_argument("-c", "--check",
- help="Check the configuration file")
- workflowArgParser.add_argument("-r", "--registry",
- default=os.path.join(os.path.dirname(os.path.abspath(__file__)),
- "WorkflowApplications.json"),
- help="Path to file containing registered workflow applications")
- workflowArgParser.add_argument("-f", "--forceCleanup",
- action="store_true",
- help="Remove working directories after the simulation is completed.")
- workflowArgParser.add_argument("-d", "--referenceDir",
- default=os.path.join(os.getcwd(), 'input_data'),
- help="Relative paths in the config file are referenced to this directory.")
- workflowArgParser.add_argument("-w", "--workDir",
- default=os.path.join(os.getcwd(), 'results'),
- help="Absolute path to the working directory.")
- workflowArgParser.add_argument("-a", "--appDir",
+ help='Provide a subset of building ids to run',
+ )
+ workflowArgParser.add_argument(
+ '-c', '--check', help='Check the configuration file'
+ )
+ workflowArgParser.add_argument(
+ '-r',
+ '--registry',
+ default=os.path.join( # noqa: PTH118
+ os.path.dirname(os.path.abspath(__file__)), # noqa: PTH100, PTH120
+ 'WorkflowApplications.json',
+ ),
+ help='Path to file containing registered workflow applications',
+ )
+ workflowArgParser.add_argument(
+ '-f',
+ '--forceCleanup',
+ action='store_true',
+ help='Remove working directories after the simulation is completed.',
+ )
+ workflowArgParser.add_argument(
+ '-d',
+ '--referenceDir',
+ default=os.path.join(os.getcwd(), 'input_data'), # noqa: PTH109, PTH118
+ help='Relative paths in the config file are referenced to this directory.',
+ )
+ workflowArgParser.add_argument(
+ '-w',
+ '--workDir',
+ default=os.path.join(os.getcwd(), 'results'), # noqa: PTH109, PTH118
+ help='Absolute path to the working directory.',
+ )
+ workflowArgParser.add_argument(
+ '-a',
+ '--appDir',
default=None,
- help="Absolute path to the local application directory.")
- workflowArgParser.add_argument("-l", "--logFile",
+ help='Absolute path to the local application directory.',
+ )
+ workflowArgParser.add_argument(
+ '-l',
+ '--logFile',
default='log.txt',
- help="Path where the log file will be saved.")
+ help='Path where the log file will be saved.',
+ )
- #Parsing the command line arguments
- wfArgs = workflowArgParser.parse_args()
+ # Parsing the command line arguments
+ wfArgs = workflowArgParser.parse_args() # noqa: N816
# update the local app dir with the default - if needed
if wfArgs.appDir is None:
- workflow_dir = Path(os.path.dirname(os.path.abspath(__file__))).resolve()
+ workflow_dir = Path(os.path.dirname(os.path.abspath(__file__))).resolve() # noqa: PTH100, PTH120
wfArgs.appDir = workflow_dir.parents[1]
if wfArgs.check:
@@ -192,13 +240,15 @@ def main(run_type, input_file, app_registry,
else:
run_type = 'run'
- #Calling the main workflow method and passing the parsed arguments
- main(run_type = run_type,
- input_file = wfArgs.configuration,
- app_registry = wfArgs.registry,
- force_cleanup = wfArgs.forceCleanup,
- bldg_id_filter = wfArgs.filter,
- reference_dir = wfArgs.referenceDir,
- working_dir = wfArgs.workDir,
- app_dir = wfArgs.appDir,
- log_file = wfArgs.logFile)
\ No newline at end of file
+ # Calling the main workflow method and passing the parsed arguments
+ main(
+ run_type=run_type,
+ input_file=wfArgs.configuration,
+ app_registry=wfArgs.registry,
+ force_cleanup=wfArgs.forceCleanup,
+ bldg_id_filter=wfArgs.filter,
+ reference_dir=wfArgs.referenceDir,
+ working_dir=wfArgs.workDir,
+ app_dir=wfArgs.appDir,
+ log_file=wfArgs.logFile,
+ )
diff --git a/modules/Workflow/RegionalEarthquakeSimulation.py b/modules/Workflow/RegionalEarthquakeSimulation.py
index 67203cf03..269cbafca 100755
--- a/modules/Workflow/RegionalEarthquakeSimulation.py
+++ b/modules/Workflow/RegionalEarthquakeSimulation.py
@@ -1,77 +1,76 @@
-# written: fmk
+# written: fmk # noqa: CPY001, D100, EXE002, INP001
import json
import os
import sys
-import subprocess
from time import gmtime, strftime
divider = '#' * 80
log_output = []
-from WorkflowUtils import *
+from WorkflowUtils import * # noqa: E402, F403
-def main(run_type, inputFile, applicationsRegistry):
+
+def main(run_type, inputFile, applicationsRegistry): # noqa: C901, D103, N803, PLR0912, PLR0914, PLR0915
# the whole workflow is wrapped within a 'try' block.
# a number of exceptions (files missing, explicit application failures, etc.) are
# handled explicitly to aid the user.
 # But unhandled exceptions cause the workflow to stop with an error, handled in the
# exception block way at the bottom of this main() function
- try:
-
- workflow_log(divider)
- workflow_log('Start of run')
- workflow_log(divider)
- workflow_log('workflow input file: %s' % inputFile)
- workflow_log('application registry file: %s' % applicationsRegistry)
- workflow_log('runtype: %s' % run_type)
- workflow_log(divider)
-
+ try: # noqa: PLR1702
+ workflow_log(divider) # noqa: F405
+ workflow_log('Start of run') # noqa: F405
+ workflow_log(divider) # noqa: F405
+ workflow_log('workflow input file: %s' % inputFile) # noqa: F405, UP031
+ workflow_log('application registry file: %s' % applicationsRegistry) # noqa: F405, UP031
+ workflow_log('runtype: %s' % run_type) # noqa: F405, UP031
+ workflow_log(divider) # noqa: F405
#
# first we parse the applications registry to load all possible applications
# - for each application type we place in a dictionary key being name, value containing path to executable
#
- with open(applicationsRegistry, 'r') as data_file:
- registryData = json.load(data_file)
+ with open(applicationsRegistry) as data_file: # noqa: PLW1514, PTH123
+ registryData = json.load(data_file) # noqa: N806
# convert all relative paths to full paths
- relative2fullpath(registryData)
+ relative2fullpath(registryData) # noqa: F405
- A = 'Applications'
- Applications = dict()
- appList = 'Building Event Modeling EDP Simulation UQ DamageAndLoss'.split(' ')
- appList = [a + A for a in appList]
+ A = 'Applications' # noqa: N806
+ Applications = dict() # noqa: C408, N806
+ appList = 'Building Event Modeling EDP Simulation UQ DamageAndLoss'.split( # noqa: N806
+ ' '
+ )
+ appList = [a + A for a in appList] # noqa: N806
for app_type in appList:
-
if app_type in registryData:
- xApplicationData = registryData[app_type]
- applicationsData = xApplicationData['Applications']
+ xApplicationData = registryData[app_type] # noqa: N806
+ applicationsData = xApplicationData['Applications'] # noqa: N806
for app in applicationsData:
- appName = app['Name']
- appExe = app['ExecutablePath']
- if not app_type in Applications:
- Applications[app_type] = dict()
+ appName = app['Name'] # noqa: N806
+ appExe = app['ExecutablePath'] # noqa: N806
+ if app_type not in Applications:
+ Applications[app_type] = dict() # noqa: C408
Applications[app_type][appName] = appExe
#
# open input file, and parse json into data
#
- with open(inputFile, 'r') as data_file:
+ with open(inputFile) as data_file: # noqa: PLW1514, PTH123
data = json.load(data_file)
# convert all relative paths to full paths
- relative2fullpath(data)
+ relative2fullpath(data) # noqa: F405
#
# get all application data, quit if error
#
if 'WorkflowType' in data:
- typeWorkflow = data['WorkflowType']
+ typeWorkflow = data['WorkflowType'] # noqa: N806, F841
else:
- raise WorkFlowInputError('Need a Workflow Type')
+ raise WorkFlowInputError('Need a Workflow Type') # noqa: EM101, F405, TRY003, TRY301
# check correct workflow type
@@ -82,30 +81,40 @@ def main(run_type, inputFile, applicationsRegistry):
if 'Applications' in data:
available_apps = data['Applications']
else:
- raise WorkFlowInputError('Need an Applications Entry')
+ raise WorkFlowInputError('Need an Applications Entry') # noqa: EM101, F405, TRY003, TRY301
#
# get building application and its data
#
if 'Buildings' in available_apps:
- buildingApp = available_apps['Buildings']
+ buildingApp = available_apps['Buildings'] # noqa: N806
if 'BuildingApplication' in buildingApp:
- buildingApplication = buildingApp['BuildingApplication']
+ buildingApplication = buildingApp['BuildingApplication'] # noqa: N806
# check building app in registry, if so get full executable path
- buildingAppData = buildingApp['ApplicationData']
- if buildingApplication in Applications['BuildingApplications'].keys():
- buildingAppExe = Applications['BuildingApplications'].get(buildingApplication)
+ buildingAppData = buildingApp['ApplicationData'] # noqa: N806
+ if (
+ buildingApplication # noqa: SIM118
+ in Applications['BuildingApplications'].keys()
+ ):
+ buildingAppExe = Applications['BuildingApplications'].get( # noqa: N806
+ buildingApplication
+ )
else:
- raise WorkFlowInputError('Building application %s not in registry' % buildingApplication)
+ raise WorkFlowInputError( # noqa: F405, TRY301
+ 'Building application %s not in registry' # noqa: UP031
+ % buildingApplication
+ )
else:
- raise WorkFlowInputError('Need a Building Generator Application in Buildings')
+ raise WorkFlowInputError( # noqa: F405, TRY003, TRY301
+ 'Need a Building Generator Application in Buildings' # noqa: EM101
+ )
else:
- raise WorkFlowInputError('Need a Buildings Entry in Applications')
+ raise WorkFlowInputError('Need a Buildings Entry in Applications') # noqa: EM101, F405, TRY003, TRY301
#
# get events, for each the application and its data .. FOR NOW 1 EVENT
@@ -116,139 +125,179 @@ def main(run_type, inputFile, applicationsRegistry):
for event in events:
if 'EventClassification' in event:
- eventClassification = event['EventClassification']
+ eventClassification = event['EventClassification'] # noqa: N806
if eventClassification == 'Earthquake':
if 'EventApplication' in event:
- eventApplication = event['EventApplication']
- eventAppData = event['ApplicationData']
- eventData = event['ApplicationData']
- if eventApplication in Applications['EventApplications'].keys():
- eventAppExe = Applications['EventApplications'].get(eventApplication)
+ eventApplication = event['EventApplication'] # noqa: N806
+ eventAppData = event['ApplicationData'] # noqa: N806
+ eventData = event['ApplicationData'] # noqa: N806, F841
+ if (
+ eventApplication # noqa: SIM118
+ in Applications['EventApplications'].keys()
+ ):
+ eventAppExe = Applications['EventApplications'].get( # noqa: N806
+ eventApplication
+ )
else:
- raise WorkFlowInputError('Event application %s not in registry' % eventApplication)
+ raise WorkFlowInputError( # noqa: F405, TRY301
+ 'Event application %s not in registry' # noqa: UP031
+ % eventApplication
+ )
else:
- raise WorkFlowInputError('Need an EventApplication section')
-
+ raise WorkFlowInputError( # noqa: F405, TRY003, TRY301
+ 'Need an EventApplication section' # noqa: EM101
+ )
else:
- raise WorkFlowInputError('Event classification must be Earthquake, not %s' % eventClassification)
+ raise WorkFlowInputError( # noqa: F405, TRY301
+ 'Event classification must be Earthquake, not %s' # noqa: UP031
+ % eventClassification
+ )
else:
- raise WorkFlowInputError('Need Event Classification')
+ raise WorkFlowInputError('Need Event Classification') # noqa: EM101, F405, TRY003, TRY301
else:
- raise WorkFlowInputError('Need an Events Entry in Applications')
+ raise WorkFlowInputError('Need an Events Entry in Applications') # noqa: EM101, F405, TRY003, TRY301
#
# get modeling application and its data
#
if 'Modeling' in available_apps:
- modelingApp = available_apps['Modeling']
+ modelingApp = available_apps['Modeling'] # noqa: N806
if 'ModelingApplication' in modelingApp:
- modelingApplication = modelingApp['ModelingApplication']
+ modelingApplication = modelingApp['ModelingApplication'] # noqa: N806
# check modeling app in registry, if so get full executable path
- modelingAppData = modelingApp['ApplicationData']
- if modelingApplication in Applications['ModelingApplications'].keys():
- modelingAppExe = Applications['ModelingApplications'].get(modelingApplication)
+ modelingAppData = modelingApp['ApplicationData'] # noqa: N806
+ if (
+ modelingApplication # noqa: SIM118
+ in Applications['ModelingApplications'].keys()
+ ):
+ modelingAppExe = Applications['ModelingApplications'].get( # noqa: N806
+ modelingApplication
+ )
else:
- raise WorkFlowInputError('Modeling application %s not in registry' % modelingApplication)
+ raise WorkFlowInputError( # noqa: F405, TRY301
+ 'Modeling application %s not in registry' # noqa: UP031
+ % modelingApplication
+ )
else:
- raise WorkFlowInputError('Need a ModelingApplication in Modeling data')
-
+ raise WorkFlowInputError( # noqa: F405, TRY003, TRY301
+ 'Need a ModelingApplication in Modeling data' # noqa: EM101
+ )
else:
- raise WorkFlowInputError('Need a Modeling Entry in Applications')
+ raise WorkFlowInputError('Need a Modeling Entry in Applications') # noqa: EM101, F405, TRY003, TRY301
#
# get edp application and its data
#
if 'EDP' in available_apps:
- edpApp = available_apps['EDP']
+ edpApp = available_apps['EDP'] # noqa: N806
if 'EDPApplication' in edpApp:
- edpApplication = edpApp['EDPApplication']
+ edpApplication = edpApp['EDPApplication'] # noqa: N806
# check modeling app in registry, if so get full executable path
- edpAppData = edpApp['ApplicationData']
- if edpApplication in Applications['EDPApplications'].keys():
- edpAppExe = Applications['EDPApplications'].get(edpApplication)
+ edpAppData = edpApp['ApplicationData'] # noqa: N806
+ if edpApplication in Applications['EDPApplications'].keys(): # noqa: SIM118
+ edpAppExe = Applications['EDPApplications'].get(edpApplication) # noqa: N806
else:
- raise WorkFlowInputError('EDP application %s not in registry', edpApplication)
+                    raise WorkFlowInputError(  # noqa: F405, TRY301
+                        'EDP application %s not in registry'  # noqa: UP031
+                        % edpApplication
+                    )
else:
- raise WorkFlowInputError('Need an EDPApplication in EDP data')
-
+ raise WorkFlowInputError('Need an EDPApplication in EDP data') # noqa: EM101, F405, TRY003, TRY301
else:
- raise WorkFlowInputError('Need an EDP Entry in Applications')
+ raise WorkFlowInputError('Need an EDP Entry in Applications') # noqa: EM101, F405, TRY003, TRY301
if 'Simulation' in available_apps:
- simulationApp = available_apps['Simulation']
+ simulationApp = available_apps['Simulation'] # noqa: N806
if 'SimulationApplication' in simulationApp:
- simulationApplication = simulationApp['SimulationApplication']
+ simulationApplication = simulationApp['SimulationApplication'] # noqa: N806
# check modeling app in registry, if so get full executable path
- simAppData = simulationApp['ApplicationData']
- if simulationApplication in Applications['SimulationApplications'].keys():
- simAppExe = Applications['SimulationApplications'].get(simulationApplication)
+ simAppData = simulationApp['ApplicationData'] # noqa: N806
+ if (
+ simulationApplication # noqa: SIM118
+ in Applications['SimulationApplications'].keys()
+ ):
+ simAppExe = Applications['SimulationApplications'].get( # noqa: N806
+ simulationApplication
+ )
else:
- raise WorkFlowInputError('Simulation application %s not in registry', simulationApplication)
+                    raise WorkFlowInputError(  # noqa: F405, TRY301
+                        'Simulation application %s not in registry'  # noqa: UP031
+                        % simulationApplication
+                    )
else:
- raise WorkFlowInputError('Need an SimulationApplication in Simulation data')
-
+ raise WorkFlowInputError( # noqa: F405, TRY003, TRY301
+                    'Need a SimulationApplication in Simulation data'  # noqa: EM101
+ )
else:
- raise WorkFlowInputError('Need a Simulation Entry in Applications')
+ raise WorkFlowInputError('Need a Simulation Entry in Applications') # noqa: EM101, F405, TRY003, TRY301
if 'UQ-Simulation' in available_apps:
- uqApp = available_apps['UQ-Simulation']
+ uqApp = available_apps['UQ-Simulation'] # noqa: N806
if 'UQApplication' in uqApp:
- uqApplication = uqApp['UQApplication']
+ uqApplication = uqApp['UQApplication'] # noqa: N806
# check modeling app in registry, if so get full executable path
- uqAppData = uqApp['ApplicationData']
- if uqApplication in Applications['UQApplications'].keys():
- uqAppExe = Applications['UQApplications'].get(uqApplication)
+ uqAppData = uqApp['ApplicationData'] # noqa: N806
+ if uqApplication in Applications['UQApplications'].keys(): # noqa: SIM118
+ uqAppExe = Applications['UQApplications'].get(uqApplication) # noqa: N806
else:
- raise WorkFlowInputError('UQ application %s not in registry', uqApplication)
+                    raise WorkFlowInputError(  # noqa: F405, TRY301
+                        'UQ application %s not in registry'  # noqa: UP031
+                        % uqApplication
+                    )
else:
- raise WorkFlowInputError('Need a UQApplication in UQ data')
-
+ raise WorkFlowInputError('Need a UQApplication in UQ data') # noqa: EM101, F405, TRY003, TRY301
else:
- raise WorkFlowInputError('Need a Simulation Entry in Applications')
+            raise WorkFlowInputError('Need a UQ-Simulation Entry in Applications')  # noqa: EM101, F405, TRY003, TRY301
if 'Damage&Loss' in available_apps:
- DLApp = available_apps['Damage&Loss']
+ DLApp = available_apps['Damage&Loss'] # noqa: N806
if 'Damage&LossApplication' in DLApp:
- dlApplication = DLApp['Damage&LossApplication']
+ dlApplication = DLApp['Damage&LossApplication'] # noqa: N806
# check modeling app in registry, if so get full executable path
- dlAppData = DLApp['ApplicationData']
- if dlApplication in Applications['DamageAndLossApplications'].keys():
- dlAppExe = Applications['DamageAndLossApplications'].get(dlApplication)
+ dlAppData = DLApp['ApplicationData'] # noqa: N806
+ if dlApplication in Applications['DamageAndLossApplications'].keys(): # noqa: SIM118
+ dlAppExe = Applications['DamageAndLossApplications'].get( # noqa: N806
+ dlApplication
+ )
else:
- raise WorkFlowInputError('Dmage & Loss application %s not in registry' % dlApplication)
+ raise WorkFlowInputError( # noqa: F405, TRY301
+                        'Damage & Loss application %s not in registry' % dlApplication  # noqa: UP031
+ )
else:
- raise WorkFlowInputError('Need a Damage&LossApplicationApplication in Damage & Loss data')
+ raise WorkFlowInputError( # noqa: F405, TRY003, TRY301
+                    'Need a Damage&LossApplication in Damage & Loss data'  # noqa: EM101
+ )
else:
- raise WorkFlowInputError('Need a Simulation Entry in Applications')
+            raise WorkFlowInputError('Need a Damage&Loss Entry in Applications')  # noqa: EM101, F405, TRY003, TRY301
- workflow_log('SUCCESS: Parsed workflow input')
- workflow_log(divider)
+ workflow_log('SUCCESS: Parsed workflow input') # noqa: F405
+ workflow_log(divider) # noqa: F405
#
# now invoke the applications
@@ -258,15 +307,17 @@ def main(run_type, inputFile, applicationsRegistry):
# put building generator application data into list and exe
#
- buildingsFile = 'buildings.json'
- buildingAppDataList = [buildingAppExe, buildingsFile]
+ buildingsFile = 'buildings.json' # noqa: N806
+ buildingAppDataList = [buildingAppExe, buildingsFile] # noqa: N806
- for key in buildingAppData.keys():
- buildingAppDataList.append('-' + key.encode('ascii', 'ignore'))
- buildingAppDataList.append(buildingAppData.get(key).encode('ascii', 'ignore'))
+ for key in buildingAppData.keys(): # noqa: SIM118
+ buildingAppDataList.append('-' + key.encode('ascii', 'ignore')) # noqa: FURB113
+ buildingAppDataList.append(
+ buildingAppData.get(key).encode('ascii', 'ignore')
+ )
buildingAppDataList.append('--getRV')
- command, result, returncode = runApplication(buildingAppDataList)
+ command, result, returncode = runApplication(buildingAppDataList) # noqa: F405
log_output.append([command, result, returncode])
del buildingAppDataList[-1]
@@ -280,168 +331,227 @@ def main(run_type, inputFile, applicationsRegistry):
# - perform Simulation
# - getDL
- with open(buildingsFile, 'r') as data_file:
+ with open(buildingsFile) as data_file: # noqa: PLW1514, PTH123
data = json.load(data_file)
for building in data:
- id = building['id']
- bimFILE = building['file']
- eventFILE = id + '-EVENT.json'
- samFILE = id + '-SAM.json'
- edpFILE = id + '-EDP.json'
- dlFILE = id + '-DL.json'
- simFILE = id + '-SIM.json'
- driverFile = id + '-driver'
+ id = building['id'] # noqa: A001
+ bimFILE = building['file'] # noqa: N806
+ eventFILE = id + '-EVENT.json' # noqa: N806
+ samFILE = id + '-SAM.json' # noqa: N806
+ edpFILE = id + '-EDP.json' # noqa: N806
+ dlFILE = id + '-DL.json' # noqa: N806
+ simFILE = id + '-SIM.json' # noqa: N806
+ driverFile = id + '-driver' # noqa: N806
# open driver file & write building app (minus the --getRV) to it
- driverFILE = open(driverFile, 'w')
+ driverFILE = open(driverFile, 'w') # noqa: N806, PLW1514, PTH123, SIM115
for item in buildingAppDataList:
- driverFILE.write('%s ' % item)
+ driverFILE.write('%s ' % item) # noqa: UP031
driverFILE.write('\n')
# get RV for event
- eventAppDataList = [eventAppExe, '--filenameAIM', bimFILE, '--filenameEVENT', eventFILE]
- if (eventAppExe.endswith('.py')):
+ eventAppDataList = [ # noqa: N806
+ eventAppExe,
+ '--filenameAIM',
+ bimFILE,
+ '--filenameEVENT',
+ eventFILE,
+ ]
+ if eventAppExe.endswith('.py'):
eventAppDataList.insert(0, 'python')
- for key in eventAppData.keys():
+ for key in eventAppData.keys(): # noqa: SIM118
eventAppDataList.append('-' + key.encode('ascii', 'ignore'))
value = eventAppData.get(key)
- if (os.path.exists(value) and not os.path.isabs(value)):
- value = os.path.abspath(value)
+ if os.path.exists(value) and not os.path.isabs(value): # noqa: PTH110, PTH117
+ value = os.path.abspath(value) # noqa: PTH100
eventAppDataList.append(value.encode('ascii', 'ignore'))
for item in eventAppDataList:
- driverFILE.write('%s ' % item)
+ driverFILE.write('%s ' % item) # noqa: UP031
driverFILE.write('\n')
eventAppDataList.append('--getRV')
- command, result, returncode = runApplication(eventAppDataList)
+ command, result, returncode = runApplication(eventAppDataList) # noqa: F405
log_output.append([command, result, returncode])
# get RV for building model
- modelAppDataList = [modelingAppExe, '--filenameAIM', bimFILE, '--filenameEVENT', eventFILE, '--filenameSAM',
- samFILE]
-
- for key in modelingAppData.keys():
- modelAppDataList.append('-' + key.encode('ascii', 'ignore'))
- modelAppDataList.append(modelingAppData.get(key).encode('ascii', 'ignore'))
+ modelAppDataList = [ # noqa: N806
+ modelingAppExe,
+ '--filenameAIM',
+ bimFILE,
+ '--filenameEVENT',
+ eventFILE,
+ '--filenameSAM',
+ samFILE,
+ ]
+
+ for key in modelingAppData.keys(): # noqa: SIM118
+ modelAppDataList.append('-' + key.encode('ascii', 'ignore')) # noqa: FURB113
+ modelAppDataList.append(
+ modelingAppData.get(key).encode('ascii', 'ignore')
+ )
for item in modelAppDataList:
- driverFILE.write('%s ' % item)
+ driverFILE.write('%s ' % item) # noqa: UP031
driverFILE.write('\n')
modelAppDataList.append('--getRV')
- command, result, returncode = runApplication(modelAppDataList)
+ command, result, returncode = runApplication(modelAppDataList) # noqa: F405
log_output.append([command, result, returncode])
-
# get RV for EDP!
- edpAppDataList = [edpAppExe, '--filenameAIM', bimFILE, '--filenameEVENT', eventFILE, '--filenameSAM', samFILE,
- '--filenameEDP', edpFILE]
-
- for key in edpAppData.keys():
- edpAppDataList.append('-' + key.encode('ascii', 'ignore'))
+ edpAppDataList = [ # noqa: N806
+ edpAppExe,
+ '--filenameAIM',
+ bimFILE,
+ '--filenameEVENT',
+ eventFILE,
+ '--filenameSAM',
+ samFILE,
+ '--filenameEDP',
+ edpFILE,
+ ]
+
+ for key in edpAppData.keys(): # noqa: SIM118
+ edpAppDataList.append('-' + key.encode('ascii', 'ignore')) # noqa: FURB113
edpAppDataList.append(edpAppData.get(key).encode('ascii', 'ignore'))
for item in edpAppDataList:
- driverFILE.write('%s ' % item)
+ driverFILE.write('%s ' % item) # noqa: UP031
driverFILE.write('\n')
edpAppDataList.append('--getRV')
- command, result, returncode = runApplication(edpAppDataList)
+ command, result, returncode = runApplication(edpAppDataList) # noqa: F405
log_output.append([command, result, returncode])
# get RV for Simulation
- simAppDataList = [simAppExe, '--filenameAIM', bimFILE, '--filenameSAM', samFILE, '--filenameEVENT', eventFILE,
- '--filenameEDP', edpFILE, '--filenameSIM', simFILE]
-
- for key in simAppData.keys():
- simAppDataList.append('-' + key.encode('ascii', 'ignore'))
+ simAppDataList = [ # noqa: N806
+ simAppExe,
+ '--filenameAIM',
+ bimFILE,
+ '--filenameSAM',
+ samFILE,
+ '--filenameEVENT',
+ eventFILE,
+ '--filenameEDP',
+ edpFILE,
+ '--filenameSIM',
+ simFILE,
+ ]
+
+ for key in simAppData.keys(): # noqa: SIM118
+ simAppDataList.append('-' + key.encode('ascii', 'ignore')) # noqa: FURB113
simAppDataList.append(simAppData.get(key).encode('ascii', 'ignore'))
for item in simAppDataList:
- driverFILE.write('%s ' % item)
+ driverFILE.write('%s ' % item) # noqa: UP031
driverFILE.write('\n')
simAppDataList.append('--getRV')
- command, result, returncode = runApplication(simAppDataList)
+ command, result, returncode = runApplication(simAppDataList) # noqa: F405
log_output.append([command, result, returncode])
# Adding CreateLoss to Dakota Driver
- dlAppDataList = [dlAppExe, '--filenameAIM', bimFILE, '--filenameEDP', edpFILE, '--filenameLOSS', dlFILE]
-
- for key in dlAppData.keys():
- dlAppDataList.append('-' + key.encode('ascii', 'ignore'))
+ dlAppDataList = [ # noqa: N806
+ dlAppExe,
+ '--filenameAIM',
+ bimFILE,
+ '--filenameEDP',
+ edpFILE,
+ '--filenameLOSS',
+ dlFILE,
+ ]
+
+ for key in dlAppData.keys(): # noqa: SIM118
+ dlAppDataList.append('-' + key.encode('ascii', 'ignore')) # noqa: FURB113
dlAppDataList.append(dlAppData.get(key).encode('ascii', 'ignore'))
for item in dlAppDataList:
- driverFILE.write('%s ' % item)
+ driverFILE.write('%s ' % item) # noqa: UP031
# perform the simulation
driverFILE.close()
- uqAppDataList = [uqAppExe, '--filenameAIM', bimFILE, '--filenameSAM', samFILE, '--filenameEVENT', eventFILE,
- '--filenameEDP', edpFILE, '--filenameLOSS', dlFILE, '--filenameSIM', simFILE, 'driverFile',
- driverFile]
-
- for key in uqAppData.keys():
- uqAppDataList.append('-' + key.encode('ascii', 'ignore'))
+ uqAppDataList = [ # noqa: N806
+ uqAppExe,
+ '--filenameAIM',
+ bimFILE,
+ '--filenameSAM',
+ samFILE,
+ '--filenameEVENT',
+ eventFILE,
+ '--filenameEDP',
+ edpFILE,
+ '--filenameLOSS',
+ dlFILE,
+ '--filenameSIM',
+ simFILE,
+ 'driverFile',
+ driverFile,
+ ]
+
+ for key in uqAppData.keys(): # noqa: SIM118
+ uqAppDataList.append('-' + key.encode('ascii', 'ignore')) # noqa: FURB113
uqAppDataList.append(simAppData.get(key).encode('ascii', 'ignore'))
if run_type == 'run':
- workflow_log('Running Simulation...')
- workflow_log(' '.join(uqAppDataList))
- command, result, returncode = runApplication(uqAppDataList)
+ workflow_log('Running Simulation...') # noqa: F405
+ workflow_log(' '.join(uqAppDataList)) # noqa: F405
+ command, result, returncode = runApplication(uqAppDataList) # noqa: F405
log_output.append([command, result, returncode])
- workflow_log('Simulation ended...')
+ workflow_log('Simulation ended...') # noqa: F405
else:
- workflow_log('Check run only. No simulation performed.')
+ workflow_log('Check run only. No simulation performed.') # noqa: F405
- except WorkFlowInputError as e:
- workflow_log('workflow error: %s' % e.value)
- workflow_log(divider)
- exit(1)
+ except WorkFlowInputError as e: # noqa: F405
+ workflow_log('workflow error: %s' % e.value) # noqa: F405, UP031
+ workflow_log(divider) # noqa: F405
+ exit(1) # noqa: PLR1722
# unhandled exceptions are handled here
except:
raise
- workflow_log('unhandled exception... exiting')
- exit(1)
+ workflow_log('unhandled exception... exiting') # noqa: F405
+ exit(1) # noqa: PLR1722
if __name__ == '__main__':
-
- if len(sys.argv) != 4:
- print('\nNeed three arguments, e.g.:\n')
- print(' python %s action workflowinputfile.json workflowapplications.json' % sys.argv[0])
- print('\nwhere: action is either check or run\n')
- exit(1)
+ if len(sys.argv) != 4: # noqa: PLR2004
+ print('\nNeed three arguments, e.g.:\n') # noqa: T201
+ print( # noqa: T201
+ ' python %s action workflowinputfile.json workflowapplications.json' # noqa: UP031
+ % sys.argv[0]
+ )
+ print('\nwhere: action is either check or run\n') # noqa: T201
+ exit(1) # noqa: PLR1722
run_type = sys.argv[1]
- inputFile = sys.argv[2]
- applicationsRegistry = sys.argv[3]
+ inputFile = sys.argv[2] # noqa: N816
+ applicationsRegistry = sys.argv[3] # noqa: N816
main(run_type, inputFile, applicationsRegistry)
- workflow_log_file = 'workflow-log-%s.txt' % (strftime('%Y-%m-%d-%H-%M-%S-utc', gmtime()))
- log_filehandle = open(workflow_log_file, 'wb')
+ workflow_log_file = 'workflow-log-%s.txt' % ( # noqa: UP031
+ strftime('%Y-%m-%d-%H-%M-%S-utc', gmtime())
+ )
+ log_filehandle = open(workflow_log_file, 'wb') # noqa: SIM115, PTH123
- print >>log_filehandle, divider
- print >>log_filehandle, 'Start of Log'
- print >>log_filehandle, divider
- print >>log_filehandle, workflow_log_file
+ print >> log_filehandle, divider # noqa: F633
+ print >> log_filehandle, 'Start of Log' # noqa: F633
+ print >> log_filehandle, divider # noqa: F633
+ print >> log_filehandle, workflow_log_file # noqa: F633
# nb: log_output is a global variable, defined at the top of this script.
for result in log_output:
- print >>log_filehandle, divider
- print >>log_filehandle, 'command line:\n%s\n' % result[0]
- print >>log_filehandle, divider
- print >>log_filehandle, 'output from process:\n%s\n' % result[1]
-
- print >>log_filehandle, divider
- print >>log_filehandle, 'End of Log'
- print >>log_filehandle, divider
+ print >> log_filehandle, divider # noqa: F633
+ print >> log_filehandle, 'command line:\n%s\n' % result[0] # noqa: F633, UP031
+ print >> log_filehandle, divider # noqa: F633
+ print >> log_filehandle, 'output from process:\n%s\n' % result[1] # noqa: F633, UP031
- workflow_log('Log file: %s' % workflow_log_file)
- workflow_log('End of run.')
+ print >> log_filehandle, divider # noqa: F633
+ print >> log_filehandle, 'End of Log' # noqa: F633
+ print >> log_filehandle, divider # noqa: F633
+ workflow_log('Log file: %s' % workflow_log_file) # noqa: F405, UP031
+ workflow_log('End of run.') # noqa: F405
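
For context, a minimal sketch of the registry-lookup-and-run pattern this script repeats for each application category; the helper name get_app_exe and the commented sample arguments are illustrative only, not part of the patch.

from WorkflowUtils import WorkFlowInputError, runApplication, workflow_log


def get_app_exe(registry, section, app_name):
    # Return the executable registered for app_name, mirroring the checks above.
    apps = registry.get(section, {})
    if app_name not in apps:
        raise WorkFlowInputError('%s not found in %s' % (app_name, section))
    return apps[app_name]


# exe = get_app_exe(Applications, 'EventApplications', eventApplication)
# args = [exe, '--filenameAIM', bimFILE, '--filenameEVENT', eventFILE]
# command, result, returncode = runApplication(args + ['--getRV'])
# workflow_log(command)
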
diff --git a/modules/Workflow/SiteResponse_workflow.py b/modules/Workflow/SiteResponse_workflow.py
index 1eec743cc..7ee4b1c4e 100644
--- a/modules/Workflow/SiteResponse_workflow.py
+++ b/modules/Workflow/SiteResponse_workflow.py
@@ -1,37 +1,44 @@
-# Site response workflow
+# Site response workflow # noqa: CPY001, D100, INP001
-import sys, os, json
import argparse
import json
-from pathlib import Path
-from glob import glob
-import shutil
import os
+import sys
+from glob import glob
+from pathlib import Path
-sys.path.insert(0, os.path.dirname(os.path.realpath(__file__)))
+sys.path.insert(0, os.path.dirname(os.path.realpath(__file__))) # noqa: PTH120
import whale.main as whale
-from whale.main import log_msg, log_div
-
-def main(run_type, input_file, app_registry,
- force_cleanup, bldg_id_filter, reference_dir,
- working_dir, app_dir, log_file):
-
+from whale.main import log_div, log_msg
+
+
+def main( # noqa: D103
+ run_type,
+ input_file,
+ app_registry,
+ force_cleanup,
+ bldg_id_filter,
+ reference_dir,
+ working_dir,
+ app_dir,
+ log_file,
+):
# initialize the log file
- with open(input_file, 'r') as f:
+ with open(input_file) as f: # noqa: PLW1514, PTH123
inputs = json.load(f)
if working_dir is not None:
- runDir = working_dir
+ runDir = working_dir # noqa: N806
else:
- runDir = inputs['runDir']
+ runDir = inputs['runDir'] # noqa: N806
- if not os.path.exists(runDir):
- os.mkdir(runDir)
+ if not os.path.exists(runDir): # noqa: PTH110
+ os.mkdir(runDir) # noqa: PTH102
if log_file == 'log.txt':
whale.log_file = runDir + '/log.txt'
else:
whale.log_file = log_file
- with open(whale.log_file, 'w') as f:
+ with open(whale.log_file, 'w') as f: # noqa: FURB103, PLW1514, PTH123
f.write('RDT workflow\n')
whale.print_system_info()
@@ -43,23 +50,35 @@ def main(run_type, input_file, app_registry,
if force_cleanup:
log_msg('Forced cleanup turned on.')
- WF = whale.Workflow(run_type, input_file, app_registry,
- app_type_list = ['Building', 'RegionalEvent', 'RegionalMapping',
- 'Event', 'Modeling', 'EDP', 'Simulation', 'UQ', 'DL'],
- reference_dir = reference_dir,
- working_dir = working_dir,
- app_dir = app_dir,
- units = inputs.get('units', None),
- outputs=inputs.get('outputs', None))
+ WF = whale.Workflow( # noqa: N806
+ run_type,
+ input_file,
+ app_registry,
+ app_type_list=[
+ 'Building',
+ 'RegionalEvent',
+ 'RegionalMapping',
+ 'Event',
+ 'Modeling',
+ 'EDP',
+ 'Simulation',
+ 'UQ',
+ 'DL',
+ ],
+ reference_dir=reference_dir,
+ working_dir=working_dir,
+ app_dir=app_dir,
+ units=inputs.get('units', None),
+ outputs=inputs.get('outputs', None),
+ )
if bldg_id_filter is not None:
- print(bldg_id_filter)
- log_msg(
- f'Overriding simulation scope; running buildings {bldg_id_filter}')
+ print(bldg_id_filter) # noqa: T201
+ log_msg(f'Overriding simulation scope; running buildings {bldg_id_filter}')
# If a Min or Max attribute is used when calling the script, we need to
# update the min and max values in the input file.
- WF.workflow_apps['Building'].pref["filter"] = bldg_id_filter
+ WF.workflow_apps['Building'].pref['filter'] = bldg_id_filter
# initialize the working directory
WF.init_workdir()
@@ -68,11 +87,11 @@ def main(run_type, input_file, app_registry,
building_file = WF.create_building_files()
WF.perform_regional_mapping(building_file)
- # TODO: not elegant code, fix later
- with open(WF.building_file_path, 'r') as f:
+ # TODO: not elegant code, fix later # noqa: TD002
+ with open(WF.building_file_path) as f: # noqa: PLW1514, PTH123
bldg_data = json.load(f)
- for bldg in bldg_data: #[:1]:
+ for bldg in bldg_data: # [:1]:
log_msg(bldg)
# initialize the simulation directory
@@ -80,120 +99,155 @@ def main(run_type, input_file, app_registry,
# prepare the input files for the simulation
WF.create_RV_files(
- app_sequence = ['Event', 'Modeling', 'EDP', 'Simulation'],
- BIM_file = bldg['file'], bldg_id=bldg['id'])
+ app_sequence=['Event', 'Modeling', 'EDP', 'Simulation'],
+ BIM_file=bldg['file'],
+ bldg_id=bldg['id'],
+ )
# create the workflow driver file
WF.create_driver_file(
- app_sequence = ['Building', 'Event', 'Modeling', 'EDP', 'Simulation'],
- bldg_id=bldg['id'])
+ app_sequence=['Building', 'Event', 'Modeling', 'EDP', 'Simulation'],
+ bldg_id=bldg['id'],
+ )
# run uq engine to simulate response
- WF.simulate_response(BIM_file = bldg['file'], bldg_id=bldg['id'])
+ WF.simulate_response(BIM_file=bldg['file'], bldg_id=bldg['id'])
# run dl engine to estimate losses
- #WF.estimate_losses(BIM_file = bldg['file'], bldg_id = bldg['id'])
+ # WF.estimate_losses(BIM_file = bldg['file'], bldg_id = bldg['id'])
if force_cleanup:
- #clean up intermediate files from the simulation
+ # clean up intermediate files from the simulation
WF.cleanup_simdir(bldg['id'])
# aggregate results
- #WF.aggregate_results(bldg_data = bldg_data)
+ # WF.aggregate_results(bldg_data = bldg_data)
if force_cleanup:
# clean up intermediate files from the working directory
WF.cleanup_workdir()
- surfaceMoDir = collect_surface_motion(WF.run_dir,bldg_data)
+ surfaceMoDir = collect_surface_motion(WF.run_dir, bldg_data) # noqa: N806, F841
-def collect_surface_motion(runDir, bldg_data, surfaceMoDir=''):
- if surfaceMoDir == '': surfaceMoDir = f"{runDir}/surface_motions/"
+def collect_surface_motion(runDir, bldg_data, surfaceMoDir=''): # noqa: N803, D103
+ if surfaceMoDir == '': # noqa: PLC1901
+ surfaceMoDir = f'{runDir}/surface_motions/' # noqa: N806
-
- for bldg in bldg_data: #[:1]:
+ for bldg in bldg_data: # [:1]:
log_msg(bldg)
- bldg_id = bldg['id']
+ bldg_id = bldg['id']
if bldg_id is not None:
+ mPaths = glob(f'{runDir}/{bldg_id}/workdir.*/EVENT.json') # noqa: PTH207, N806
- mPaths = glob(f"{runDir}/{bldg_id}/workdir.*/EVENT.json")
-
+ surfMoTmpDir = f'{surfaceMoDir}/{bldg_id}/' # noqa: N806
- surfMoTmpDir = f"{surfaceMoDir}/{bldg_id}/"
-
- if not os.path.exists(surfMoTmpDir): os.makedirs(surfMoTmpDir)
+ if not os.path.exists(surfMoTmpDir): # noqa: PTH110
+ os.makedirs(surfMoTmpDir) # noqa: PTH103
for p in mPaths:
- simID = p.split('/')[-2].split('.')[-1]
- #shutil.copyfile(p, f"{surfMoTmpDir}/EVENT-{simID}.json")
- newEVENT = {}
+ simID = p.split('/')[-2].split('.')[-1] # noqa: N806, F841
+ # shutil.copyfile(p, f"{surfMoTmpDir}/EVENT-{simID}.json")
+ newEVENT = {} # noqa: N806
# load the event file
- with open(p, 'r') as f:
- EVENT_in_All = json.load(f)
-
- newEVENT['name'] = EVENT_in_All['Events'][0]['event_id'].replace('x','-')
+ with open(p) as f: # noqa: PLW1514, PTH123
+ EVENT_in_All = json.load(f) # noqa: N806
+
+ newEVENT['name'] = EVENT_in_All['Events'][0]['event_id'].replace(
+ 'x', '-'
+ )
newEVENT['location'] = EVENT_in_All['Events'][0]['location']
newEVENT['dT'] = EVENT_in_All['Events'][0]['dT']
-
- newEVENT['data_x'] = EVENT_in_All['Events'][0]['timeSeries'][0]['data']
+
+ newEVENT['data_x'] = EVENT_in_All['Events'][0]['timeSeries'][0][
+ 'data'
+ ]
newEVENT['PGA_x'] = max(newEVENT['data_x'])
- if len(EVENT_in_All['Events'][0]['timeSeries'])>0: # two-way shaking
- newEVENT['data_y'] = EVENT_in_All['Events'][0]['timeSeries'][1]['data']
+                if (
+                    len(EVENT_in_All['Events'][0]['timeSeries']) > 1
+                ):  # two-way shaking
+ newEVENT['data_y'] = EVENT_in_All['Events'][0]['timeSeries'][
+ 1
+ ]['data']
newEVENT['PGA_y'] = max(newEVENT['data_y'])
-
- with open(f"{surfMoTmpDir}/EVENT-{newEVENT['name']}.json", "w") as outfile:
- json.dump(newEVENT, outfile)
+ with open( # noqa: PLW1514, PTH123
+ f"{surfMoTmpDir}/EVENT-{newEVENT['name']}.json", 'w'
+ ) as outfile:
+ json.dump(newEVENT, outfile)
return surfaceMoDir
if __name__ == '__main__':
-
- #Defining the command line arguments
-
- workflowArgParser = argparse.ArgumentParser(
- "Run the NHERI SimCenter workflow for a set of assets.",
- allow_abbrev=False)
-
- workflowArgParser.add_argument("configuration",
- help="Configuration file specifying the applications and data to be "
- "used")
- workflowArgParser.add_argument("-F", "--filter",
+ # Defining the command line arguments
+
+ workflowArgParser = argparse.ArgumentParser( # noqa: N816
+ 'Run the NHERI SimCenter workflow for a set of assets.', allow_abbrev=False
+ )
+
+ workflowArgParser.add_argument(
+ 'configuration',
+        help='Configuration file specifying the applications and data to be used',
+ )
+ workflowArgParser.add_argument(
+ '-F',
+ '--filter',
default=None,
- help="Provide a subset of building ids to run")
- workflowArgParser.add_argument("-c", "--check",
- help="Check the configuration file")
- workflowArgParser.add_argument("-r", "--registry",
- default=os.path.join(os.path.dirname(os.path.abspath(__file__)),
- "WorkflowApplications.json"),
- help="Path to file containing registered workflow applications")
- workflowArgParser.add_argument("-f", "--forceCleanup",
- action="store_true",
- help="Remove working directories after the simulation is completed.")
- workflowArgParser.add_argument("-d", "--referenceDir",
- default=os.path.join(os.getcwd(), 'input_data'),
- help="Relative paths in the config file are referenced to this directory.")
- workflowArgParser.add_argument("-w", "--workDir",
- default=os.path.join(os.getcwd(), 'results'),
- help="Absolute path to the working directory.")
- workflowArgParser.add_argument("-a", "--appDir",
+ help='Provide a subset of building ids to run',
+ )
+ workflowArgParser.add_argument(
+ '-c', '--check', help='Check the configuration file'
+ )
+ workflowArgParser.add_argument(
+ '-r',
+ '--registry',
+ default=os.path.join( # noqa: PTH118
+ os.path.dirname(os.path.abspath(__file__)), # noqa: PTH100, PTH120
+ 'WorkflowApplications.json',
+ ),
+ help='Path to file containing registered workflow applications',
+ )
+ workflowArgParser.add_argument(
+ '-f',
+ '--forceCleanup',
+ action='store_true',
+ help='Remove working directories after the simulation is completed.',
+ )
+ workflowArgParser.add_argument(
+ '-d',
+ '--referenceDir',
+ default=os.path.join(os.getcwd(), 'input_data'), # noqa: PTH109, PTH118
+ help='Relative paths in the config file are referenced to this directory.',
+ )
+ workflowArgParser.add_argument(
+ '-w',
+ '--workDir',
+ default=os.path.join(os.getcwd(), 'results'), # noqa: PTH109, PTH118
+ help='Absolute path to the working directory.',
+ )
+ workflowArgParser.add_argument(
+ '-a',
+ '--appDir',
default=None,
- help="Absolute path to the local application directory.")
- workflowArgParser.add_argument("-l", "--logFile",
+ help='Absolute path to the local application directory.',
+ )
+ workflowArgParser.add_argument(
+ '-l',
+ '--logFile',
default='log.txt',
- help="Path where the log file will be saved.")
+ help='Path where the log file will be saved.',
+ )
- #Parsing the command line arguments
- wfArgs = workflowArgParser.parse_args()
+ # Parsing the command line arguments
+ wfArgs = workflowArgParser.parse_args() # noqa: N816
# update the local app dir with the default - if needed
if wfArgs.appDir is None:
- workflow_dir = Path(os.path.dirname(os.path.abspath(__file__))).resolve()
+ workflow_dir = Path(os.path.dirname(os.path.abspath(__file__))).resolve() # noqa: PTH100, PTH120
wfArgs.appDir = workflow_dir.parents[1]
if wfArgs.check:
@@ -201,13 +255,15 @@ def collect_surface_motion(runDir, bldg_data, surfaceMoDir=''):
else:
run_type = 'run'
- #Calling the main workflow method and passing the parsed arguments
- main(run_type = run_type,
- input_file = wfArgs.configuration,
- app_registry = wfArgs.registry,
- force_cleanup = wfArgs.forceCleanup,
- bldg_id_filter = wfArgs.filter,
- reference_dir = wfArgs.referenceDir,
- working_dir = wfArgs.workDir,
- app_dir = wfArgs.appDir,
- log_file = wfArgs.logFile)
\ No newline at end of file
+ # Calling the main workflow method and passing the parsed arguments
+ main(
+ run_type=run_type,
+ input_file=wfArgs.configuration,
+ app_registry=wfArgs.registry,
+ force_cleanup=wfArgs.forceCleanup,
+ bldg_id_filter=wfArgs.filter,
+ reference_dir=wfArgs.referenceDir,
+ working_dir=wfArgs.workDir,
+ app_dir=wfArgs.appDir,
+ log_file=wfArgs.logFile,
+ )
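
To illustrate the output of collect_surface_motion above, a sketch of one surface-motion record it writes; all values are invented, and the exact shape of 'location' is an assumption since it is copied verbatim from the source EVENT file.

# Hypothetical contents of surface_motions/<bldg_id>/EVENT-0-0.json
example_event = {
    'name': '0-0',                     # event_id with 'x' replaced by '-'
    'location': {'latitude': 37.87, 'longitude': -122.27},  # assumed shape
    'dT': 0.01,
    'data_x': [0.001, 0.004, -0.002],  # direction-1 surface acceleration
    'PGA_x': 0.004,                    # max of data_x
    'data_y': [0.002, -0.001, 0.003],  # written only for two-way shaking
    'PGA_y': 0.003,
}
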
diff --git a/modules/Workflow/WorkflowApplications.json b/modules/Workflow/WorkflowApplications.json
index 10fa04fa9..8748fc167 100644
--- a/modules/Workflow/WorkflowApplications.json
+++ b/modules/Workflow/WorkflowApplications.json
@@ -19,7 +19,7 @@
{
"id": "assetFile",
"type": "string",
- "description": "name of JSON file containing an array of id,filename pairs. For each asset id the aplication creates a AIM file with name filename",
+        "description": "name of JSON file containing an array of id,filename pairs. For each asset id the application creates an AIM file with name filename",
"default": "assetsFile.json"
}
]
@@ -317,7 +317,7 @@
{
"id": "assetFile",
"type": "path",
- "description": "Name of JSON file containing an array of id,filename pairs. For each asset id the aplication creates a AIM file with name filename",
+        "description": "Name of JSON file containing an array of id,filename pairs. For each asset id the application creates an AIM file with name filename",
"default": "assets.json"
}
],
@@ -581,7 +581,7 @@
{
"id": "pathSW4results",
"type": "path",
- "description": "Path to directory containig output point files"
+ "description": "Path to directory containing output point files"
}
]
},
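
As an illustration of the assetFile described here: it is a JSON array of id/filename pairs whose 'id' and 'file' keys match how buildings.json is consumed earlier in this patch; the ids and file names below are invented.

import json

# Write an illustrative assetsFile.json with two assets.
assets = [
    {'id': '1', 'file': '1-AIM.json'},
    {'id': '2', 'file': '2-AIM.json'},
]
with open('assetsFile.json', 'w') as f:
    json.dump(assets, f, indent=2)
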
diff --git a/modules/Workflow/WorkflowUtils.py b/modules/Workflow/WorkflowUtils.py
index 412f8a2af..44de3622a 100644
--- a/modules/Workflow/WorkflowUtils.py
+++ b/modules/Workflow/WorkflowUtils.py
@@ -1,44 +1,53 @@
-# written: fmk, adamzs
+# written: fmk, adamzs # noqa: CPY001, D100, INP001
# import functions for Python 2.X support
-from __future__ import division, print_function
import sys
-if sys.version.startswith('2'):
- range=xrange
- string_types = basestring
+
+if sys.version.startswith('2'):
+ range = xrange # noqa: A001, F821
+ string_types = basestring # noqa: F821
else:
string_types = str
import os
-import subprocess
+import subprocess # noqa: S404
from time import gmtime, strftime
-class WorkFlowInputError(Exception):
+
+class WorkFlowInputError(Exception): # noqa: D101
def __init__(self, value):
self.value = value
- def __str__(self):
+ def __str__(self): # noqa: D105
return repr(self.value)
+
try:
- basestring
+ basestring # noqa: B018
except NameError:
basestring = str
-def workflow_log(msg):
+
+def workflow_log(msg): # noqa: D103
# ISO-8601 format, e.g. 2018-06-16T20:24:04Z
- print('%s %s' % (strftime('%Y-%m-%dT%H:%M:%SZ', gmtime()), msg))
+ print('%s %s' % (strftime('%Y-%m-%dT%H:%M:%SZ', gmtime()), msg)) # noqa: T201, UP031
# function to return result of invoking an application
-def runApplication(application_plus_args):
+def runApplication(application_plus_args): # noqa: N802, D103
if application_plus_args[0] == 'python':
- command = 'python "{}" '.format(application_plus_args[1])+' '.join(application_plus_args[2:])
+ command = f'python "{application_plus_args[1]}" ' + ' '.join(
+ application_plus_args[2:]
+ )
else:
- command = '"{}" '.format(application_plus_args[0])+' '.join(application_plus_args[1:])
+ command = f'"{application_plus_args[0]}" ' + ' '.join(
+ application_plus_args[1:]
+ )
try:
- result = subprocess.check_output(command, stderr=subprocess.STDOUT, shell=True)
+ result = subprocess.check_output( # noqa: S602
+ command, stderr=subprocess.STDOUT, shell=True
+ )
# for line in result.split('\n'):
# pass
# print(line)
@@ -48,23 +57,23 @@ def runApplication(application_plus_args):
returncode = e.returncode
if returncode != 0:
- workflow_log('NON-ZERO RETURN CODE: %s' % returncode)
+ workflow_log('NON-ZERO RETURN CODE: %s' % returncode) # noqa: UP031
return command, result, returncode
-def add_full_path(possible_filename):
+def add_full_path(possible_filename): # noqa: D103
if not isinstance(possible_filename, basestring):
return possible_filename
- if (os.path.exists(possible_filename)):
- if os.path.isdir(possible_filename):
- return os.path.abspath(possible_filename) + '/'
- else:
- return os.path.abspath(possible_filename)
+ if os.path.exists(possible_filename): # noqa: PTH110
+ if os.path.isdir(possible_filename): # noqa: PTH112
+ return os.path.abspath(possible_filename) + '/' # noqa: PTH100
+ else: # noqa: RET505
+ return os.path.abspath(possible_filename) # noqa: PTH100
else:
return possible_filename
-def recursive_iter(obj):
+def recursive_iter(obj): # noqa: D103
if isinstance(obj, dict):
for k, v in obj.items():
if isinstance(v, basestring):
@@ -79,5 +88,5 @@ def recursive_iter(obj):
recursive_iter(item)
-def relative2fullpath(json_object):
+def relative2fullpath(json_object): # noqa: D103
recursive_iter(json_object)
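
A short usage sketch for the helpers defined in this module; the script name and arguments passed to runApplication are illustrative.

# runApplication composes the command line, captures its output, and returns
# the command string, the output, and the process return code.
command, result, returncode = runApplication(
    ['python', 'createEVENT.py', '--filenameEVENT', 'EVENT.json']
)
workflow_log('command: %s, return code: %s' % (command, returncode))
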
diff --git a/modules/Workflow/changeJSON.py b/modules/Workflow/changeJSON.py
index e8f116007..95e4e930e 100755
--- a/modules/Workflow/changeJSON.py
+++ b/modules/Workflow/changeJSON.py
@@ -1,20 +1,20 @@
-import sys, os, json
+import json # noqa: CPY001, D100, EXE002, INP001
+import sys
-def main(inputFile, outputFile):
- extraArgs = sys.argv[3:]
-
+def main(inputFile, outputFile): # noqa: N803, D103
+ extraArgs = sys.argv[3:] # noqa: N806
+
# initialize the log file
- with open(inputFile, 'r') as f:
+ with open(inputFile) as f: # noqa: PLW1514, PTH123
data = json.load(f)
- for k,val in zip(extraArgs[0::2],extraArgs[1::2]):
- data[k]=val
+ for k, val in zip(extraArgs[0::2], extraArgs[1::2]):
+ data[k] = val
- with open(outputFile, 'w') as outfile:
+ with open(outputFile, 'w') as outfile: # noqa: PLW1514, PTH123
json.dump(data, outfile)
-if __name__ == "__main__":
+if __name__ == '__main__':
main(inputFile=sys.argv[1], outputFile=sys.argv[2])
-
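
A usage sketch for changeJSON.py: after the input and output file names, the remaining arguments are consumed as key/value pairs and written as top-level overrides (values stay strings); the file names and keys below are invented.

# Command line:
#   python changeJSON.py dakota.json dakota_mod.json runType run numSamples 10
# which is equivalent to:
import json

with open('dakota.json') as f:
    data = json.load(f)
data.update({'runType': 'run', 'numSamples': '10'})
with open('dakota_mod.json', 'w') as f:
    json.dump(data, f)
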
diff --git a/modules/Workflow/computeResponseSpectrum.py b/modules/Workflow/computeResponseSpectrum.py
index 04bb913b7..66c4296ec 100644
--- a/modules/Workflow/computeResponseSpectrum.py
+++ b/modules/Workflow/computeResponseSpectrum.py
@@ -1,17 +1,15 @@
-"""
-Simple Python Script to integrate a strong motion record using
+"""Simple Python Script to integrate a strong motion record using
the Newmark-Beta method
-"""
+""" # noqa: CPY001, D205, D400, INP001
import numpy as np
-from math import sqrt
-from scipy.integrate import cumtrapz
from scipy.constants import g
+from scipy.integrate import cumtrapz
from scipy.interpolate import interp1d
-def convert_accel_units(acceleration, from_, to_='cm/s/s'):
- """
- Converts acceleration from/to different units
+
+def convert_accel_units(acceleration, from_, to_='cm/s/s'): # noqa: C901
+ """Converts acceleration from/to different units
:param acceleration: the acceleration (numeric or numpy array)
:param from_: unit of `acceleration`: string in "g", "m/s/s", "m/s**2",
"m/s^2", "cm/s/s", "cm/s**2" or "cm/s^2"
@@ -19,9 +17,9 @@ def convert_accel_units(acceleration, from_, to_='cm/s/s'):
"m/s^2", "cm/s/s", "cm/s**2" or "cm/s^2". When missing, it defaults
to "cm/s/s"
:return: acceleration converted to the given units (by default, 'cm/s/s')
- """
- m_sec_square = ("m/s/s", "m/s**2", "m/s^2")
- cm_sec_square = ("cm/s/s", "cm/s**2", "cm/s^2")
+ """ # noqa: D205, D400, D401
+ m_sec_square = ('m/s/s', 'm/s**2', 'm/s^2')
+ cm_sec_square = ('cm/s/s', 'cm/s**2', 'cm/s^2')
acceleration = np.asarray(acceleration)
if from_ == 'g':
if to_ == 'g':
@@ -45,13 +43,20 @@ def convert_accel_units(acceleration, from_, to_='cm/s/s'):
if to_ in cm_sec_square:
return acceleration
- raise ValueError("Unrecognised time history units. "
- "Should take either ''g'', ''m/s/s'' or ''cm/s/s''")
+ raise ValueError( # noqa: DOC501, TRY003
+ 'Unrecognised time history units. ' # noqa: EM101
+ "Should take either ''g'', ''m/s/s'' or ''cm/s/s''"
+ )
-def get_velocity_displacement(time_step, acceleration, units="cm/s/s",
- velocity=None, displacement=None):
- """
- Returns the velocity and displacment time series using simple integration
+
+def get_velocity_displacement(
+ time_step,
+ acceleration,
+ units='cm/s/s',
+ velocity=None,
+ displacement=None,
+):
+ """Returns the velocity and displacement time series using simple integration
:param float time_step:
Time-series time-step (s)
:param numpy.ndarray acceleration:
@@ -59,23 +64,28 @@ def get_velocity_displacement(time_step, acceleration, units="cm/s/s",
:returns:
velocity - Velocity Time series (cm/s)
displacement - Displacement Time series (cm)
- """
+ """ # noqa: D205, D400, D401
acceleration = convert_accel_units(acceleration, units)
if velocity is None:
- velocity = time_step * cumtrapz(acceleration, initial=0.)
+ velocity = time_step * cumtrapz(acceleration, initial=0.0)
if displacement is None:
- displacement = time_step * cumtrapz(velocity, initial=0.)
+ displacement = time_step * cumtrapz(velocity, initial=0.0)
return velocity, displacement
+
class NewmarkBeta:
- """
- Evaluates the response spectrum using the Newmark-Beta methodology
- """
+ """Evaluates the response spectrum using the Newmark-Beta methodology""" # noqa: D400
- def __init__(self, acceleration, time_step, periods, damping=0.05, dt_disc = 0.002,
- units="g"):
- """
- Setup the response spectrum calculator
+ def __init__(
+ self,
+ acceleration,
+ time_step,
+ periods,
+ damping=0.05,
+ dt_disc=0.002,
+ units='g',
+ ):
+ """Setup the response spectrum calculator
:param numpy.ndarray time_hist:
Acceleration time history
:param numpy.ndarray periods:
@@ -83,25 +93,25 @@ def __init__(self, acceleration, time_step, periods, damping=0.05, dt_disc = 0.0
:param float damping:
Fractional coefficient of damping
:param float dt_disc:
- Sampling rate of the acceleartion
+ Sampling rate of the acceleration
:param str units:
Units of the acceleration time history {"g", "m/s", "cm/s/s"}
- """
+ """ # noqa: D205, D400, D401
self.periods = periods
self.num_per = len(periods)
self.acceleration = convert_accel_units(acceleration, units)
self.damping = damping
self.d_t = time_step
self.velocity, self.displacement = get_velocity_displacement(
- self.d_t, self.acceleration)
+ self.d_t, self.acceleration
+ )
self.num_steps = len(self.acceleration)
- self.omega = (2. * np.pi) / self.periods
+ self.omega = (2.0 * np.pi) / self.periods
self.response_spectrum = None
self.dt_disc = dt_disc
def run(self):
- """
- Evaluates the response spectrum
+ """Evaluates the response spectrum
:returns:
Response Spectrum - Dictionary containing all response spectrum
data
@@ -123,34 +133,37 @@ def run(self):
accel - Acceleration response of Single Degree of Freedom Oscillator
vel - Velocity response of Single Degree of Freedom Oscillator
disp - Displacement response of Single Degree of Freedom Oscillator
- """
- omega = (2. * np.pi) / self.periods
- cval = self.damping * 2. * omega
- kval = ((2. * np.pi) / self.periods) ** 2.
+ """ # noqa: D205, D400, D401
+ omega = (2.0 * np.pi) / self.periods
+ cval = self.damping * 2.0 * omega
+ kval = ((2.0 * np.pi) / self.periods) ** 2.0
# Perform Newmark - Beta integration
accel, vel, disp, a_t = self._newmark_beta(omega, cval, kval)
self.response_spectrum = {
'Period': self.periods,
'Acceleration': np.max(np.fabs(a_t), axis=0),
'Velocity': np.max(np.fabs(vel), axis=0),
- 'Displacement': np.max(np.fabs(disp), axis=0)}
- self.response_spectrum['Pseudo-Velocity'] = omega * \
- self.response_spectrum['Displacement']
- self.response_spectrum['Pseudo-Acceleration'] = (omega ** 2.) * \
- self.response_spectrum['Displacement'] / g / 100.0
+ 'Displacement': np.max(np.fabs(disp), axis=0),
+ }
+ self.response_spectrum['Pseudo-Velocity'] = (
+ omega * self.response_spectrum['Displacement']
+ )
+ self.response_spectrum['Pseudo-Acceleration'] = (
+ (omega**2.0) * self.response_spectrum['Displacement'] / g / 100.0
+ )
time_series = {
'Time-Step': self.d_t,
'Acceleration': self.acceleration,
'Velocity': self.velocity,
'Displacement': self.displacement,
- 'PGA': np.max(np.fabs(self.acceleration))/g/100.0,
+ 'PGA': np.max(np.fabs(self.acceleration)) / g / 100.0,
'PGV': np.max(np.fabs(self.velocity)),
- 'PGD': np.max(np.fabs(self.displacement))}
+ 'PGD': np.max(np.fabs(self.displacement)),
+ }
return self.response_spectrum, time_series, accel, vel, disp
- def _newmark_beta(self, omega, cval, kval):
- """
- Newmark-beta integral
+ def _newmark_beta(self, omega, cval, kval): # noqa: ARG002
+ """Newmark-beta integral
:param numpy.ndarray omega:
Angular period - (2 * pi) / T
:param numpy.ndarray cval:
@@ -162,16 +175,21 @@ def _newmark_beta(self, omega, cval, kval):
vel - Velocity response of a SDOF oscillator
disp - Displacement response of a SDOF oscillator
a_t - Acceleration response of a SDOF oscillator
- """
+ """ # noqa: D205, D400
# Parameters
dt = self.d_t
ground_acc = self.acceleration
num_steps = self.num_steps
dt_disc = self.dt_disc
# discritize
- num_steps_disc = int(np.floor(num_steps*dt/dt_disc))
- f = interp1d([dt*x for x in range(num_steps)], ground_acc, bounds_error=False, fill_value=(ground_acc[0], ground_acc[-1]))
- tmp_time = [dt_disc*x for x in range(num_steps_disc)]
+ num_steps_disc = int(np.floor(num_steps * dt / dt_disc))
+ f = interp1d(
+ [dt * x for x in range(num_steps)],
+ ground_acc,
+ bounds_error=False,
+ fill_value=(ground_acc[0], ground_acc[-1]),
+ )
+ tmp_time = [dt_disc * x for x in range(num_steps_disc)]
ground_acc = f(tmp_time)
# Pre-allocate arrays
accel = np.zeros([num_steps_disc, self.num_per], dtype=float)
@@ -181,14 +199,21 @@ def _newmark_beta(self, omega, cval, kval):
# Initial line
accel[0, :] = (-ground_acc[0] - (cval * vel[0, :])) - (kval * disp[0, :])
for j in range(1, num_steps_disc):
- delta_acc = ground_acc[j]-ground_acc[j-1]
- delta_d2u = (-delta_acc-dt_disc*cval*accel[j-1,:]-dt_disc*kval*(vel[j-1,:]+0.5*dt_disc*accel[j-1,:]))/ \
- (1.0+0.5*dt_disc*cval+0.25*dt_disc**2*kval)
- delta_du = dt_disc*accel[j-1,:]+0.5*dt_disc*delta_d2u
- delta_u = dt_disc*vel[j-1,:]+0.5*dt_disc**2*accel[j-1,:]+0.25*dt_disc**2*delta_d2u
- accel[j,:] = delta_d2u+accel[j-1,:]
- vel[j,:] = delta_du+vel[j-1,:]
- disp[j,:] = delta_u+disp[j-1,:]
+ delta_acc = ground_acc[j] - ground_acc[j - 1]
+ delta_d2u = (
+ -delta_acc
+ - dt_disc * cval * accel[j - 1, :]
+ - dt_disc * kval * (vel[j - 1, :] + 0.5 * dt_disc * accel[j - 1, :])
+ ) / (1.0 + 0.5 * dt_disc * cval + 0.25 * dt_disc**2 * kval)
+ delta_du = dt_disc * accel[j - 1, :] + 0.5 * dt_disc * delta_d2u
+ delta_u = (
+ dt_disc * vel[j - 1, :]
+ + 0.5 * dt_disc**2 * accel[j - 1, :]
+ + 0.25 * dt_disc**2 * delta_d2u
+ )
+ accel[j, :] = delta_d2u + accel[j - 1, :]
+ vel[j, :] = delta_du + vel[j - 1, :]
+ disp[j, :] = delta_u + disp[j - 1, :]
a_t[j, :] = ground_acc[j] + accel[j, :]
return accel, vel, disp, a_t
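
A minimal usage sketch for NewmarkBeta with a synthetic 5 Hz sine acceleration history in g; the shortened period grid is illustrative (createGM4BIM.py below uses a longer one).

import numpy as np

from computeResponseSpectrum import NewmarkBeta

dt = 0.01
t = np.arange(0.0, 10.0, dt)
acc = 0.2 * np.sin(2.0 * np.pi * 5.0 * t)  # synthetic acceleration history (g)
periods = np.array([0.1, 0.2, 0.5, 1.0, 2.0])

calc = NewmarkBeta(acc, dt, periods, damping=0.05, units='g')
spectrum, time_series, accel, vel, disp = calc.run()
print(spectrum['Pseudo-Acceleration'])  # one spectral ordinate per period
print(time_series['PGA'])               # peak ground acceleration in g
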
diff --git a/modules/Workflow/createGM4BIM.py b/modules/Workflow/createGM4BIM.py
index f6765da37..3bc5f2297 100644
--- a/modules/Workflow/createGM4BIM.py
+++ b/modules/Workflow/createGM4BIM.py
@@ -1,5 +1,4 @@
-# -*- coding: utf-8 -*-
-#
+# # noqa: INP001, D100
# Copyright (c) 2019 The Regents of the University of California
#
# This file is part of the RDT Application.
@@ -37,66 +36,61 @@
# Chaofeng Wang
# fmk
-import numpy as np
+import argparse
import json
-import os, sys
+import os
import shutil
+import sys
from glob import glob
-import argparse
-import pandas as pd
-from computeResponseSpectrum import *
# import the common constants and methods
from pathlib import Path
-this_dir = Path(os.path.dirname(os.path.abspath(__file__))).resolve()
+
+import numpy as np
+import pandas as pd
+from computeResponseSpectrum import * # noqa: F403
+
+this_dir = Path(os.path.dirname(os.path.abspath(__file__))).resolve() # noqa: PTH100, PTH120
main_dir = this_dir.parents[0]
sys.path.insert(0, str(main_dir / 'common'))
-from simcenter_common import *
+from simcenter_common import * # noqa: E402, F403
-def get_scale_factors(input_units, output_units):
- """
- Determine the scale factor to convert input event to internal event data
-
- """
+def get_scale_factors(input_units, output_units): # noqa: C901
+ """Determine the scale factor to convert input event to internal event data""" # noqa: D400
# special case: if the input unit is not specified then do not do any scaling
if input_units is None:
-
scale_factors = {'ALL': 1.0}
else:
-
# parse output units:
# if no length unit is specified, 'inch' is assumed
unit_length = output_units.get('length', 'inch')
f_length = globals().get(unit_length, None)
if f_length is None:
- raise ValueError(
- f"Specified length unit not recognized: {unit_length}")
+ raise ValueError(f'Specified length unit not recognized: {unit_length}') # noqa: DOC501, EM102, TRY003
# if no time unit is specified, 'sec' is assumed
unit_time = output_units.get('time', 'sec')
f_time = globals().get(unit_time, None)
if f_time is None:
- raise ValueError(
- f"Specified time unit not recognized: {unit_time}")
+ raise ValueError(f'Specified time unit not recognized: {unit_time}') # noqa: DOC501, EM102, TRY003
scale_factors = {}
for input_name, input_unit in input_units.items():
-
# exceptions
- if input_name in ['factor', ]:
+ if input_name == 'factor':
f_scale = 1.0
else:
-
# get the scale factor to standard units
f_in = globals().get(input_unit, None)
if f_in is None:
- raise ValueError(
- f"Input unit for event files not recognized: {input_unit}")
+ raise ValueError( # noqa: DOC501, TRY003
+ f'Input unit for event files not recognized: {input_unit}' # noqa: EM102
+ )
unit_type = None
for base_unit_type, unit_set in globals()['unit_types'].items():
@@ -104,11 +98,11 @@ def get_scale_factors(input_units, output_units):
unit_type = base_unit_type
if unit_type is None:
- raise ValueError(f"Failed to identify unit type: {input_unit}")
+ raise ValueError(f'Failed to identify unit type: {input_unit}') # noqa: DOC501, EM102, TRY003
# the output unit depends on the unit type
if unit_type == 'acceleration':
- f_out = f_time ** 2.0 / f_length
+ f_out = f_time**2.0 / f_length
elif unit_type == 'speed':
f_out = f_time / f_length
@@ -117,7 +111,9 @@ def get_scale_factors(input_units, output_units):
f_out = 1.0 / f_length
else:
- raise ValueError(f"Unexpected unit type in workflow: {unit_type}")
+ raise ValueError( # noqa: DOC501, TRY003
+ f'Unexpected unit type in workflow: {unit_type}' # noqa: EM102
+ )
# the scale factor is the product of input and output scaling
f_scale = f_in * f_out
@@ -126,103 +122,144 @@ def get_scale_factors(input_units, output_units):
return scale_factors
-def createFilesForEventGrid(inputDir, outputDir, removeInputDir):
- if not os.path.isdir(inputDir):
- print(f"input dir: {inputDir} does not exist")
+def createFilesForEventGrid(inputDir, outputDir, removeInputDir): # noqa: C901, D103, N802, N803, PLR0914, PLR0915
+ if not os.path.isdir(inputDir): # noqa: PTH112
+ print(f'input dir: {inputDir} does not exist') # noqa: T201
return 0
-
- if not os.path.exists(outputDir):
- os.mkdir(outputDir)
-
+ if not os.path.exists(outputDir): # noqa: PTH110
+ os.mkdir(outputDir) # noqa: PTH102
#
# FMK bug fix - have to copy AIM files back to the inputDir dir as code below assumes they are there
#
-
- extension ="AIM.json"
- the_dir = os.path.abspath(inputDir)
+
+ extension = 'AIM.json'
+ the_dir = os.path.abspath(inputDir) # noqa: PTH100
for item in os.listdir(the_dir):
- item_path = os.path.join(the_dir, item)
- if os.path.isdir(item_path):
- template_dir = os.path.join(item_path, 'templatedir')
+ item_path = os.path.join(the_dir, item) # noqa: PTH118
+ if os.path.isdir(item_path): # noqa: PTH112
+ template_dir = os.path.join(item_path, 'templatedir') # noqa: PTH118
for the_file in os.listdir(template_dir):
if the_file.endswith(extension):
- bim_path = os.path.join(template_dir, the_file)
+ bim_path = os.path.join(template_dir, the_file) # noqa: PTH118
shutil.copy(bim_path, the_dir)
-
-
- #siteFiles = glob(f"{inputDir}/*BIM.json")
+
+ # siteFiles = glob(f"{inputDir}/*BIM.json")
# KZ: changing BIM to AIM
- siteFiles = glob(f"{inputDir}/*AIM.json")
+ siteFiles = glob(f'{inputDir}/*AIM.json') # noqa: PTH207, N806
- GP_file = []
- Longitude = []
- Latitude = []
- id = []
+ GP_file = [] # noqa: N806, F841
+ Longitude = [] # noqa: N806
+ Latitude = [] # noqa: N806
+ id = [] # noqa: A001
sites = []
# site im dictionary
- periods = np.array([0.01,0.02,0.03,0.04,0.05,0.075,0.1,0.2,0.3,0.4,0.5,0.75,1,2,3,4,5,7.5,10])
- dict_im_all = {('type','loc','dir','stat'):[],
- ('PGA',0,1,'median'):[],
- ('PGA',0,1,'beta'):[],
- ('PGA',0,2,'median'):[],
- ('PGA',0,2,'beta'):[],
- ('PGV',0,1,'median'):[],
- ('PGV',0,1,'beta'):[],
- ('PGV',0,2,'median'):[],
- ('PGV',0,2,'beta'):[],
- ('PGD',0,1,'median'):[],
- ('PGD',0,1,'beta'):[],
- ('PGD',0,2,'median'):[],
- ('PGD',0,2,'beta'):[]}
- dict_im_site = {'1-PGA-0-1':[], '1-PGA-0-2':[],'1-PGV-0-1':[],'1-PGV-0-2':[],'1-PGD-0-1':[],'1-PGD-0-2':[]}
- for Ti in periods:
- dict_im_all.update({('SA({}s)'.format(Ti),0,1,'median'):[],
- ('SA({}s)'.format(Ti),0,1,'beta'):[],
- ('SA({}s)'.format(Ti),0,2,'median'):[],
- ('SA({}s)'.format(Ti),0,2,'beta'):[]})
- dict_im_site.update({'1-SA({}s)-0-1'.format(Ti):[],
- '1-SA({}s)-0-2'.format(Ti):[]})
-
- for site in siteFiles:
-
- dict_im = {('type','loc','dir','stat'):[],
- ('PGA',0,1,'median'):[],
- ('PGA',0,1,'beta'):[],
- ('PGA',0,2,'median'):[],
- ('PGA',0,2,'beta'):[],
- ('PGV',0,1,'median'):[],
- ('PGV',0,1,'beta'):[],
- ('PGV',0,2,'median'):[],
- ('PGV',0,2,'beta'):[],
- ('PGD',0,1,'median'):[],
- ('PGD',0,1,'beta'):[],
- ('PGD',0,2,'median'):[],
- ('PGD',0,2,'beta'):[]}
- dict_im_site = {'1-PGA-0-1':[], '1-PGA-0-2':[],'1-PGV-0-1':[],'1-PGV-0-2':[],'1-PGD-0-1':[],'1-PGD-0-2':[]}
- for Ti in periods:
- dict_im.update({('SA({}s)'.format(Ti),0,1,'median'):[],
- ('SA({}s)'.format(Ti),0,1,'beta'):[],
- ('SA({}s)'.format(Ti),0,2,'median'):[],
- ('SA({}s)'.format(Ti),0,2,'beta'):[]})
- dict_im_site.update({'1-SA({}s)-0-1'.format(Ti):[],
- '1-SA({}s)-0-2'.format(Ti):[]})
-
- with open(site, 'r') as f:
-
- All_json = json.load(f)
- generalInfo = All_json['GeneralInformation']
+ periods = np.array(
+ [
+ 0.01,
+ 0.02,
+ 0.03,
+ 0.04,
+ 0.05,
+ 0.075,
+ 0.1,
+ 0.2,
+ 0.3,
+ 0.4,
+ 0.5,
+ 0.75,
+ 1,
+ 2,
+ 3,
+ 4,
+ 5,
+ 7.5,
+ 10,
+ ]
+ )
+ dict_im_all = {
+ ('type', 'loc', 'dir', 'stat'): [],
+ ('PGA', 0, 1, 'median'): [],
+ ('PGA', 0, 1, 'beta'): [],
+ ('PGA', 0, 2, 'median'): [],
+ ('PGA', 0, 2, 'beta'): [],
+ ('PGV', 0, 1, 'median'): [],
+ ('PGV', 0, 1, 'beta'): [],
+ ('PGV', 0, 2, 'median'): [],
+ ('PGV', 0, 2, 'beta'): [],
+ ('PGD', 0, 1, 'median'): [],
+ ('PGD', 0, 1, 'beta'): [],
+ ('PGD', 0, 2, 'median'): [],
+ ('PGD', 0, 2, 'beta'): [],
+ }
+ dict_im_site = {
+ '1-PGA-0-1': [],
+ '1-PGA-0-2': [],
+ '1-PGV-0-1': [],
+ '1-PGV-0-2': [],
+ '1-PGD-0-1': [],
+ '1-PGD-0-2': [],
+ }
+ for Ti in periods: # noqa: N806
+ dict_im_all.update(
+ {
+ (f'SA({Ti}s)', 0, 1, 'median'): [],
+ (f'SA({Ti}s)', 0, 1, 'beta'): [],
+ (f'SA({Ti}s)', 0, 2, 'median'): [],
+ (f'SA({Ti}s)', 0, 2, 'beta'): [],
+ }
+ )
+ dict_im_site.update({f'1-SA({Ti}s)-0-1': [], f'1-SA({Ti}s)-0-2': []})
+
+ for site in siteFiles: # noqa: PLR1702
+ dict_im = {
+ ('type', 'loc', 'dir', 'stat'): [],
+ ('PGA', 0, 1, 'median'): [],
+ ('PGA', 0, 1, 'beta'): [],
+ ('PGA', 0, 2, 'median'): [],
+ ('PGA', 0, 2, 'beta'): [],
+ ('PGV', 0, 1, 'median'): [],
+ ('PGV', 0, 1, 'beta'): [],
+ ('PGV', 0, 2, 'median'): [],
+ ('PGV', 0, 2, 'beta'): [],
+ ('PGD', 0, 1, 'median'): [],
+ ('PGD', 0, 1, 'beta'): [],
+ ('PGD', 0, 2, 'median'): [],
+ ('PGD', 0, 2, 'beta'): [],
+ }
+ dict_im_site = {
+ '1-PGA-0-1': [],
+ '1-PGA-0-2': [],
+ '1-PGV-0-1': [],
+ '1-PGV-0-2': [],
+ '1-PGD-0-1': [],
+ '1-PGD-0-2': [],
+ }
+ for Ti in periods: # noqa: N806
+ dict_im.update(
+ {
+ (f'SA({Ti}s)', 0, 1, 'median'): [],
+ (f'SA({Ti}s)', 0, 1, 'beta'): [],
+ (f'SA({Ti}s)', 0, 2, 'median'): [],
+ (f'SA({Ti}s)', 0, 2, 'beta'): [],
+ }
+ )
+ dict_im_site.update({f'1-SA({Ti}s)-0-1': [], f'1-SA({Ti}s)-0-2': []})
+
+ with open(site) as f: # noqa: PLW1514, PTH123
+ All_json = json.load(f) # noqa: N806
+ generalInfo = All_json['GeneralInformation'] # noqa: N806
Longitude.append(generalInfo['Longitude'])
Latitude.append(generalInfo['Latitude'])
- #siteID = generalInfo['BIM_id']
+ # siteID = generalInfo['BIM_id']
# KZ: changing BIM to AIM
- siteID = generalInfo['AIM_id']
+ siteID = generalInfo['AIM_id'] # noqa: N806
# get unit info (needed for determining the simulated acc unit)
- unitInfo = All_json['units']
+ unitInfo = All_json['units'] # noqa: N806
# get scaling factor for surface acceleration
- acc_unit = {"AccelerationEvent": "g"}
+ acc_unit = {'AccelerationEvent': 'g'}
f_scale_units = get_scale_factors(acc_unit, unitInfo)
# if f_scale_units is None
@@ -239,13 +276,13 @@ def createFilesForEventGrid(inputDir, outputDir, removeInputDir):
f_scale = f_scale_units.get(cur_var)
id.append(int(siteID))
-
- siteFileName = f"Site_{siteID}.csv"
+
+ siteFileName = f'Site_{siteID}.csv' # noqa: N806
sites.append(siteFileName)
-
- workdirs = glob(f"{inputDir}/{siteID}/workdir.*")
- siteEventFiles = []
- siteEventFactors = []
+
+ workdirs = glob(f'{inputDir}/{siteID}/workdir.*') # noqa: PTH207
+ siteEventFiles = [] # noqa: N806
+ siteEventFactors = [] # noqa: N806
# initialization
psa_x = []
@@ -256,36 +293,39 @@ def createFilesForEventGrid(inputDir, outputDir, removeInputDir):
pgv_y = []
pgd_x = []
pgd_y = []
-
- for workdir in workdirs:
- head, sep, sampleID = workdir.partition('workdir.')
- #print(sampleID)
+ for workdir in workdirs:
+ head, sep, sampleID = workdir.partition('workdir.') # noqa: F841, N806
+ # print(sampleID)
- eventName = f"Event_{siteID}_{sampleID}"
- #print(eventName)
- shutil.copy(f"{workdir}/fmkEVENT", f"{outputDir}/{eventName}.json")
+ eventName = f'Event_{siteID}_{sampleID}' # noqa: N806
+ # print(eventName)
+ shutil.copy(f'{workdir}/fmkEVENT', f'{outputDir}/{eventName}.json')
siteEventFiles.append(eventName)
siteEventFactors.append(1.0)
# compute ground motion intensity measures
- with open(f"{outputDir}/{eventName}.json", 'r') as f:
+ with open(f'{outputDir}/{eventName}.json') as f: # noqa: PLW1514, PLW2901, PTH123
cur_gm = json.load(f)
cur_seismograms = cur_gm['Events'][0]['timeSeries']
- num_seismograms = len(cur_seismograms)
+ num_seismograms = len(cur_seismograms) # noqa: F841
# im_X and im_Y
for cur_time_series in cur_seismograms:
dt = cur_time_series.get('dT')
acc = [x / f_scale for x in cur_time_series.get('data')]
- acc_hist = np.array([[dt*x for x in range(len(acc))],acc])
+ acc_hist = np.array([[dt * x for x in range(len(acc))], acc]) # noqa: F841
# get intensity measure
- my_response_spectrum_calc = NewmarkBeta(acc, dt, periods, damping=0.05, units='g')
- tmp, time_series, accel, vel, disp = my_response_spectrum_calc.run()
+ my_response_spectrum_calc = NewmarkBeta( # noqa: F405
+ acc, dt, periods, damping=0.05, units='g'
+ )
+ tmp, time_series, accel, vel, disp = ( # noqa: F841
+ my_response_spectrum_calc.run()
+ )
psa = tmp.get('Pseudo-Acceleration')
- pga = time_series.get('PGA',0.0)
- pgv = time_series.get('PGV',0.0)
- pgd = time_series.get('PGD',0.0)
+ pga = time_series.get('PGA', 0.0)
+ pgv = time_series.get('PGV', 0.0)
+ pgd = time_series.get('PGD', 0.0)
# append
if cur_time_series.get('name') == 'accel_X':
psa_x.append(psa)
@@ -305,160 +345,179 @@ def createFilesForEventGrid(inputDir, outputDir, removeInputDir):
dict_im_site['1-PGV-0-2'] = pgv_y
dict_im_site['1-PGD-0-1'] = pgd_x
dict_im_site['1-PGD-0-2'] = pgd_y
- for jj, Ti in enumerate(periods):
- cur_sa = '1-SA({}s)-0-1'.format(Ti)
- dict_im_site[cur_sa]=[tmp[jj] for tmp in psa_x]
- cur_sa = '1-SA({}s)-0-2'.format(Ti)
- dict_im_site[cur_sa]=[tmp[jj] for tmp in psa_y]
+ for jj, Ti in enumerate(periods): # noqa: N806
+ cur_sa = f'1-SA({Ti}s)-0-1'
+ dict_im_site[cur_sa] = [tmp[jj] for tmp in psa_x]
+ cur_sa = f'1-SA({Ti}s)-0-2'
+ dict_im_site[cur_sa] = [tmp[jj] for tmp in psa_y]
# dump dict_im_site
df_im_site = pd.DataFrame.from_dict(dict_im_site)
- site_im_file = f"{inputDir}/{siteID}/IM_realization.csv"
+ site_im_file = f'{inputDir}/{siteID}/IM_realization.csv'
df_im_site.to_csv(site_im_file, index=False)
# median and dispersion
# psa
if len(psa_x) > 0:
- m_psa_x = np.exp(np.mean(np.log(psa_x),axis=0))
- s_psa_x = np.std(np.log(psa_x),axis=0)
+ m_psa_x = np.exp(np.mean(np.log(psa_x), axis=0))
+ s_psa_x = np.std(np.log(psa_x), axis=0)
else:
m_psa_x = np.zeros((len(periods),))
s_psa_x = np.zeros((len(periods),))
if len(psa_y) > 0:
- m_psa_y = np.exp(np.mean(np.log(psa_y),axis=0))
- s_psa_y = np.std(np.log(psa_y),axis=0)
+ m_psa_y = np.exp(np.mean(np.log(psa_y), axis=0))
+ s_psa_y = np.std(np.log(psa_y), axis=0)
else:
m_psa_y = np.zeros((len(periods),))
s_psa_y = np.zeros((len(periods),))
# pga
if len(pga_x) > 0:
- m_pga_x = np.exp(np.mean(np.log(pga_x)))
- s_pga_x = np.std(np.log(pga_x))
+ m_pga_x = np.exp(np.mean(np.log(pga_x)))
+ s_pga_x = np.std(np.log(pga_x))
else:
-            m_psa_x = 0.0
+            m_pga_x = 0.0
s_pga_x = 0.0
if len(pga_y) > 0:
- m_pga_y = np.exp(np.mean(np.log(pga_y)))
- s_pga_y = np.std(np.log(pga_y))
+ m_pga_y = np.exp(np.mean(np.log(pga_y)))
+ s_pga_y = np.std(np.log(pga_y))
else:
-            m_psa_y = 0.0
+            m_pga_y = 0.0
s_pga_y = 0.0
# pgv
if len(pgv_x) > 0:
- m_pgv_x = np.exp(np.mean(np.log(pgv_x)))
- s_pgv_x = np.std(np.log(pgv_x))
+ m_pgv_x = np.exp(np.mean(np.log(pgv_x)))
+ s_pgv_x = np.std(np.log(pgv_x))
else:
m_pgv_x = 0.0
s_pgv_x = 0.0
if len(pgv_y) > 0:
- m_pgv_y = np.exp(np.mean(np.log(pgv_y)))
- s_pgv_y = np.std(np.log(pgv_y))
+ m_pgv_y = np.exp(np.mean(np.log(pgv_y)))
+ s_pgv_y = np.std(np.log(pgv_y))
else:
m_pgv_y = 0.0
s_pgv_y = 0.0
# pgd
if len(pgd_x) > 0:
- m_pgd_x = np.exp(np.mean(np.log(pgd_x)))
- s_pgd_x = np.std(np.log(pgd_x))
+ m_pgd_x = np.exp(np.mean(np.log(pgd_x)))
+ s_pgd_x = np.std(np.log(pgd_x))
else:
m_pgd_x = 0.0
s_pgd_x = 0.0
if len(pgd_y) > 0:
- m_pgd_y = np.exp(np.mean(np.log(pgd_y)))
- s_pgd_y = np.std(np.log(pgd_y))
+ m_pgd_y = np.exp(np.mean(np.log(pgd_y)))
+ s_pgd_y = np.std(np.log(pgd_y))
else:
m_pgd_y = 0.0
s_pgd_y = 0.0
# add to dictionary
- dict_im[('type','loc','dir','stat')].append(int(siteID))
+ dict_im[('type', 'loc', 'dir', 'stat')].append(int(siteID))
# pga
- dict_im[('PGA',0,1,'median')].append(m_pga_x)
- dict_im[('PGA',0,1,'beta')].append(s_pga_x)
- dict_im[('PGA',0,2,'median')].append(m_pga_y)
- dict_im[('PGA',0,2,'beta')].append(s_pga_y)
+ dict_im[('PGA', 0, 1, 'median')].append(m_pga_x)
+ dict_im[('PGA', 0, 1, 'beta')].append(s_pga_x)
+ dict_im[('PGA', 0, 2, 'median')].append(m_pga_y)
+ dict_im[('PGA', 0, 2, 'beta')].append(s_pga_y)
# pgv
- dict_im[('PGV',0,1,'median')].append(m_pgv_x)
- dict_im[('PGV',0,1,'beta')].append(s_pgv_x)
- dict_im[('PGV',0,2,'median')].append(m_pgv_y)
- dict_im[('PGV',0,2,'beta')].append(s_pgv_y)
+ dict_im[('PGV', 0, 1, 'median')].append(m_pgv_x)
+ dict_im[('PGV', 0, 1, 'beta')].append(s_pgv_x)
+ dict_im[('PGV', 0, 2, 'median')].append(m_pgv_y)
+ dict_im[('PGV', 0, 2, 'beta')].append(s_pgv_y)
# pgd
- dict_im[('PGD',0,1,'median')].append(m_pgd_x)
- dict_im[('PGD',0,1,'beta')].append(s_pgd_x)
- dict_im[('PGD',0,2,'median')].append(m_pgd_y)
- dict_im[('PGD',0,2,'beta')].append(s_pgd_y)
- for jj, Ti in enumerate(periods):
- cur_sa = 'SA({}s)'.format(Ti)
- dict_im[(cur_sa,0,1,'median')].append(m_psa_x[jj])
- dict_im[(cur_sa,0,1,'beta')].append(s_psa_x[jj])
- dict_im[(cur_sa,0,2,'median')].append(m_psa_y[jj])
- dict_im[(cur_sa,0,2,'beta')].append(s_psa_y[jj])
+ dict_im[('PGD', 0, 1, 'median')].append(m_pgd_x)
+ dict_im[('PGD', 0, 1, 'beta')].append(s_pgd_x)
+ dict_im[('PGD', 0, 2, 'median')].append(m_pgd_y)
+ dict_im[('PGD', 0, 2, 'beta')].append(s_pgd_y)
+ for jj, Ti in enumerate(periods): # noqa: N806
+ cur_sa = f'SA({Ti}s)'
+ dict_im[(cur_sa, 0, 1, 'median')].append(m_psa_x[jj])
+ dict_im[(cur_sa, 0, 1, 'beta')].append(s_psa_x[jj])
+ dict_im[(cur_sa, 0, 2, 'median')].append(m_psa_y[jj])
+ dict_im[(cur_sa, 0, 2, 'beta')].append(s_psa_y[jj])
# aggregate
for cur_key, cur_value in dict_im.items():
- if isinstance(cur_value,list):
+ if isinstance(cur_value, list):
dict_im_all[cur_key].append(cur_value[0])
else:
dict_im_all[cur_key].append(cur_value)
# save median and standard deviation to IM.csv
df_im = pd.DataFrame.from_dict(dict_im)
- df_im.to_csv(f"{inputDir}/{siteID}/IM.csv", index=False)
+ df_im.to_csv(f'{inputDir}/{siteID}/IM.csv', index=False)
# create site csv
- siteDF = pd.DataFrame(list(zip(siteEventFiles, siteEventFactors)), columns =['TH_file', 'factor'])
- siteDF.to_csv(f"{outputDir}/{siteFileName}", index=False)
+ siteDF = pd.DataFrame( # noqa: N806
+ list(zip(siteEventFiles, siteEventFactors)),
+ columns=['TH_file', 'factor'],
+ )
+ siteDF.to_csv(f'{outputDir}/{siteFileName}', index=False)
# create the EventFile
- gridDF = pd.DataFrame(list(zip(sites, Longitude, Latitude)), columns =['GP_file', 'Longitude', 'Latitude'])
+ gridDF = pd.DataFrame( # noqa: N806
+ list(zip(sites, Longitude, Latitude)),
+ columns=['GP_file', 'Longitude', 'Latitude'],
+ )
# change the writing mode to append for paralleling workflow
- if os.path.exists(f"{outputDir}/EventGrid.csv"):
+ if os.path.exists(f'{outputDir}/EventGrid.csv'): # noqa: PTH110
# EventGrid.csv has been created
- gridDF.to_csv(f"{outputDir}/EventGrid.csv", mode='a', index=False, header=False)
+ gridDF.to_csv(
+ f'{outputDir}/EventGrid.csv', mode='a', index=False, header=False
+ )
else:
# EventGrid.csv to be created
- gridDF.to_csv(f"{outputDir}/EventGrid.csv", index=False)
- #gridDF.to_csv(f"{outputDir}/EventGrid.csv", index=False)
- print(f"EventGrid.csv saved to {outputDir}")
+ gridDF.to_csv(f'{outputDir}/EventGrid.csv', index=False)
+ # gridDF.to_csv(f"{outputDir}/EventGrid.csv", index=False)
+ print(f'EventGrid.csv saved to {outputDir}') # noqa: T201
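
EventGrid.csv is written in append mode when it already exists so that parallel site batches can add their rows to a single grid file. A minimal sketch of that pattern (column names and the path are illustrative, and truly concurrent writers would still need external coordination):

from pathlib import Path

import pandas as pd


def append_or_create(df: pd.DataFrame, csv_path: str) -> None:
    path = Path(csv_path)
    if path.exists():
        # the header is already in the file; append rows only
        df.to_csv(path, mode='a', index=False, header=False)
    else:
        # first writer creates the file, including the header
        df.to_csv(path, index=False)


grid = pd.DataFrame(
    {'GP_file': ['Site_0.csv'], 'Longitude': [-122.26], 'Latitude': [37.87]}
)
append_or_create(grid, 'EventGrid.csv')
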
# create pandas
- im_csv_path = os.path.dirname(os.path.dirname(outputDir))
+ im_csv_path = os.path.dirname(os.path.dirname(outputDir)) # noqa: PTH120
df_im_all = pd.DataFrame.from_dict(dict_im_all)
try:
- os.mkdir(os.path.join(im_csv_path,'Results'))
- except:
- print(f"Results folder already exists")
+ os.mkdir(os.path.join(im_csv_path, 'Results')) # noqa: PTH102, PTH118
+ except: # noqa: E722
+ print('Results folder already exists') # noqa: T201
# KZ: 10/19/2022, minor patch for Buildings
- df_im_all.to_csv(os.path.join(im_csv_path,'Results','Buildings','IM_{}-{}.csv'.format(min(id),max(id))),index=False)
- df_im_all.to_csv(os.path.join(im_csv_path,'IM_{}-{}.csv'.format(min(id),max(id))),index=False)
+ df_im_all.to_csv(
+ os.path.join( # noqa: PTH118
+ im_csv_path,
+ 'Results',
+ 'Buildings',
+ f'IM_{min(id)}-{max(id)}.csv',
+ ),
+ index=False,
+ )
+ df_im_all.to_csv(
+ os.path.join(im_csv_path, f'IM_{min(id)}-{max(id)}.csv'), # noqa: PTH118
+ index=False,
+ )
# remove original files
- if removeInputDir:
+ if removeInputDir:
shutil.rmtree(inputDir)
-
+
return 0
-if __name__ == "__main__":
- #Defining the command line arguments
+if __name__ == '__main__':
+ # Defining the command line arguments
- workflowArgParser = argparse.ArgumentParser(
- "Create ground motions for BIM.",
- allow_abbrev=False)
+ workflowArgParser = argparse.ArgumentParser( # noqa: N816
+ 'Create ground motions for BIM.', allow_abbrev=False
+ )
- workflowArgParser.add_argument("-i", "--inputDir",
- help="Dir containing results of siteResponseWhale.")
+ workflowArgParser.add_argument(
+ '-i', '--inputDir', help='Dir containing results of siteResponseWhale.'
+ )
- workflowArgParser.add_argument("-o", "--outputDir",
- help="Dir where results to be stored.")
+ workflowArgParser.add_argument(
+        '-o', '--outputDir', help='Dir where results are to be stored.'
+ )
- workflowArgParser.add_argument("--removeInput", action='store_true')
+ workflowArgParser.add_argument('--removeInput', action='store_true')
- #Parsing the command line arguments
- wfArgs = workflowArgParser.parse_args()
+ # Parsing the command line arguments
+ wfArgs = workflowArgParser.parse_args() # noqa: N816
- print(wfArgs)
- #Calling the main function
+ print(wfArgs) # noqa: T201
+ # Calling the main function
createFilesForEventGrid(wfArgs.inputDir, wfArgs.outputDir, wfArgs.removeInput)
-
-
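
The IM aggregation above summarizes each intensity measure across realizations with a lognormal median, exp(mean(ln x)), and a dispersion (beta), std(ln x), falling back to zeros when no realizations are available. A minimal numpy sketch of that computation, assuming strictly positive samples:

import numpy as np


def lognormal_stats(samples):
    """Return (median, beta) for a set of positive IM realizations."""
    samples = np.asarray(samples, dtype=float)
    if samples.size == 0:
        return 0.0, 0.0  # mirrors the zero fallback used for empty sample sets
    log_samples = np.log(samples)
    return float(np.exp(log_samples.mean())), float(log_samples.std())


median_pga, beta_pga = lognormal_stats([0.21, 0.35, 0.27, 0.31])  # PGA realizations in g
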
diff --git a/modules/Workflow/femUQ-OLD.py b/modules/Workflow/femUQ-OLD.py
index 2de71605f..cd269d7dd 100755
--- a/modules/Workflow/femUQ-OLD.py
+++ b/modules/Workflow/femUQ-OLD.py
@@ -1,102 +1,98 @@
-# written: fmk, adamzs
+# written: fmk, adamzs # noqa: CPY001, D100, EXE002, INP001
# import functions for Python 2.X support
-from __future__ import division, print_function
import sys
-if sys.version.startswith('2'):
- range=xrange
- string_types = basestring
+
+if sys.version.startswith('2'):
+ range = xrange # noqa: A001, F821
+ string_types = basestring # noqa: F821
else:
string_types = str
import json
import os
-import subprocess
-from time import gmtime, strftime
import posixpath
+from time import gmtime, strftime
divider = '#' * 80
log_output = []
-from WorkflowUtils import *
+from WorkflowUtils import * # noqa: E402, F403
+
-def main(run_type, inputFile, applicationsRegistry):
+def main(run_type, inputFile, applicationsRegistry): # noqa: C901, D103, N803, PLR0912, PLR0914, PLR0915
# the whole workflow is wrapped within a 'try' block.
# a number of exceptions (files missing, explicit application failures, etc.) are
# handled explicitly to aid the user.
     # But unhandled exceptions cause the workflow to stop with an error, handled in the
     # exception block at the very bottom of this main() function
- try:
-
- workflow_log(divider)
- workflow_log('Start of run')
- workflow_log(divider)
- workflow_log('workflow input file: %s' % inputFile)
- workflow_log('application registry file: %s' % applicationsRegistry)
- workflow_log('runtype: %s' % run_type)
- workflow_log(divider)
-
+ try: # noqa: PLR1702
+ workflow_log(divider) # noqa: F405
+ workflow_log('Start of run') # noqa: F405
+ workflow_log(divider) # noqa: F405
+ workflow_log('workflow input file: %s' % inputFile) # noqa: F405, UP031
+ workflow_log('application registry file: %s' % applicationsRegistry) # noqa: F405, UP031
+ workflow_log('runtype: %s' % run_type) # noqa: F405, UP031
+ workflow_log(divider) # noqa: F405
#
# first we parse the applications registry to load all possible applications
# - for each application type we place in a dictionary key being name, value containing path to executable
#
- with open(applicationsRegistry, 'r') as data_file:
- registryData = json.load(data_file)
+ with open(applicationsRegistry) as data_file: # noqa: PLW1514, PTH123
+ registryData = json.load(data_file) # noqa: N806
# convert all relative paths to full paths
- A = 'Applications'
- Applications = dict()
- appList = 'Event Modeling EDP Simulation UQ'.split(' ')
- appList = [a + A for a in appList]
+ A = 'Applications' # noqa: N806
+ Applications = dict() # noqa: C408, N806
+ appList = 'Event Modeling EDP Simulation UQ'.split(' ') # noqa: N806
+ appList = [a + A for a in appList] # noqa: N806
for app_type in appList:
-
if app_type in registryData:
- xApplicationData = registryData[app_type]
- applicationsData = xApplicationData['Applications']
+ xApplicationData = registryData[app_type] # noqa: N806
+ applicationsData = xApplicationData['Applications'] # noqa: N806
for app in applicationsData:
- appName = app['Name']
- appExe = app['ExecutablePath']
- if not app_type in Applications:
- Applications[app_type] = dict()
+ appName = app['Name'] # noqa: N806
+ appExe = app['ExecutablePath'] # noqa: N806
+ if app_type not in Applications:
+ Applications[app_type] = dict() # noqa: C408
Applications[app_type][appName] = appExe
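
The registry parsing above builds a nested dictionary that maps each application type to a map of application names and executable paths. A minimal sketch of that parse, assuming the same registry JSON layout (each type entry holds an 'Applications' list of objects with 'Name' and 'ExecutablePath'):

import json


def load_registry(registry_path, app_types):
    """Map each application type to {application name: executable path}."""
    with open(registry_path, encoding='utf-8') as f:
        registry = json.load(f)
    applications = {}
    for app_type in app_types:
        for app in registry.get(app_type, {}).get('Applications', []):
            applications.setdefault(app_type, {})[app['Name']] = app['ExecutablePath']
    return applications


# apps = load_registry('WorkflowApplications.json',
#                      ['EventApplications', 'ModelingApplications', 'UQApplications'])
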
#
# open input file, and parse json into data
#
- with open(inputFile, 'r') as data_file:
+ with open(inputFile) as data_file: # noqa: PLW1514, PTH123
data = json.load(data_file)
# convert all relative paths to full paths
# relative2fullpath(data)
if 'runDir' in data:
- runDIR = data['runDir']
+ runDIR = data['runDir'] # noqa: N806
else:
- raise WorkFlowInputError('Need a runDir Entry')
+ raise WorkFlowInputError('Need a runDir Entry') # noqa: EM101, F405, TRY003, TRY301
if 'remoteAppDir' in data:
- remoteAppDir = data['remoteAppDir']
+ remoteAppDir = data['remoteAppDir'] # noqa: N806
else:
- raise WorkFlowInputError('Need a remoteAppDir Entry')
+ raise WorkFlowInputError('Need a remoteAppDir Entry') # noqa: EM101, F405, TRY003, TRY301
if 'localAppDir' in data:
- localAppDir = data['localAppDir']
+ localAppDir = data['localAppDir'] # noqa: N806
else:
- raise WorkFlowInputError('Need a localAppDir Entry')
+ raise WorkFlowInputError('Need a localAppDir Entry') # noqa: EM101, F405, TRY003, TRY301
#
# before running chdir to templatedir
#
- workflow_log('run Directory: %s' % runDIR)
+ workflow_log('run Directory: %s' % runDIR) # noqa: F405, UP031
os.chdir(runDIR)
os.chdir('templatedir')
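
The two chdir calls above move the whole process into runDir/templatedir for the rest of the run, which is a global side effect. As a design note, a scoped alternative (not what this script does) wraps the directory change in a context manager so the previous directory is always restored:

import contextlib
import os


@contextlib.contextmanager
def working_directory(path):
    """Temporarily chdir into path, restoring the previous directory afterwards."""
    previous = os.getcwd()
    os.chdir(path)
    try:
        yield
    finally:
        os.chdir(previous)


# with working_directory('templatedir'):
#     ...  # stage the driver file here
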
-
#
# now we parse for the applications & app specific data in workflow
#
@@ -104,7 +100,7 @@ def main(run_type, inputFile, applicationsRegistry):
if 'Applications' in data:
available_apps = data['Applications']
else:
- raise WorkFlowInputError('Need an Applications Entry')
+ raise WorkFlowInputError('Need an Applications Entry') # noqa: EM101, F405, TRY003, TRY301
#
# get events, for each the application and its data .. FOR NOW 1 EVENT
@@ -114,140 +110,179 @@ def main(run_type, inputFile, applicationsRegistry):
events = available_apps['Events']
for event in events:
-
if 'EventClassification' in event:
- eventClassification = event['EventClassification']
- if eventClassification == 'Earthquake' or eventClassification == 'Wind':
+ eventClassification = event['EventClassification'] # noqa: N806
+ if (
+ eventClassification == 'Earthquake' # noqa: PLR1714
+ or eventClassification == 'Wind'
+ ):
if 'Application' in event:
- eventApplication = event['Application']
- eventAppData = event['ApplicationData']
- eventData = event['ApplicationData']
-
- if eventApplication in Applications['EventApplications'].keys():
- eventAppExe = Applications['EventApplications'].get(eventApplication)
- workflow_log(remoteAppDir)
- workflow_log(eventAppExe)
- eventAppExeLocal = posixpath.join(localAppDir,eventAppExe)
- eventAppExeRemote = posixpath.join(remoteAppDir,eventAppExe)
- workflow_log(eventAppExeRemote)
+ eventApplication = event['Application'] # noqa: N806
+ eventAppData = event['ApplicationData'] # noqa: N806
+ eventData = event['ApplicationData'] # noqa: N806, F841
+
+ if (
+ eventApplication # noqa: SIM118
+ in Applications['EventApplications'].keys()
+ ):
+ eventAppExe = Applications['EventApplications'].get( # noqa: N806
+ eventApplication
+ )
+ workflow_log(remoteAppDir) # noqa: F405
+ workflow_log(eventAppExe) # noqa: F405
+ eventAppExeLocal = posixpath.join( # noqa: N806
+ localAppDir, eventAppExe
+ )
+ eventAppExeRemote = posixpath.join( # noqa: N806
+ remoteAppDir, eventAppExe
+ )
+ workflow_log(eventAppExeRemote) # noqa: F405
else:
- raise WorkFlowInputError('Event application %s not in registry' % eventApplication)
+ raise WorkFlowInputError( # noqa: F405, TRY301
+ 'Event application %s not in registry' # noqa: UP031
+ % eventApplication
+ )
else:
- raise WorkFlowInputError('Need an EventApplication section')
-
+ raise WorkFlowInputError( # noqa: F405, TRY003, TRY301
+ 'Need an EventApplication section' # noqa: EM101
+ )
else:
- raise WorkFlowInputError('Event classification must be Earthquake, not %s' % eventClassification)
+ raise WorkFlowInputError( # noqa: F405, TRY301
+                        'Event classification must be Earthquake or Wind, not %s'  # noqa: UP031
+ % eventClassification
+ )
else:
- raise WorkFlowInputError('Need Event Classification')
+ raise WorkFlowInputError('Need Event Classification') # noqa: EM101, F405, TRY003, TRY301
else:
- raise WorkFlowInputError('Need an Events Entry in Applications')
+ raise WorkFlowInputError('Need an Events Entry in Applications') # noqa: EM101, F405, TRY003, TRY301
#
# get modeling application and its data
#
if 'Modeling' in available_apps:
- modelingApp = available_apps['Modeling']
+ modelingApp = available_apps['Modeling'] # noqa: N806
if 'Application' in modelingApp:
- modelingApplication = modelingApp['Application']
+ modelingApplication = modelingApp['Application'] # noqa: N806
# check modeling app in registry, if so get full executable path
- modelingAppData = modelingApp['ApplicationData']
- if modelingApplication in Applications['ModelingApplications'].keys():
- modelingAppExe = Applications['ModelingApplications'].get(modelingApplication)
- modelingAppExeLocal = posixpath.join(localAppDir,modelingAppExe)
- modelingAppExeRemote = posixpath.join(remoteAppDir,modelingAppExe)
+ modelingAppData = modelingApp['ApplicationData'] # noqa: N806
+ if (
+ modelingApplication # noqa: SIM118
+ in Applications['ModelingApplications'].keys()
+ ):
+ modelingAppExe = Applications['ModelingApplications'].get( # noqa: N806
+ modelingApplication
+ )
+ modelingAppExeLocal = posixpath.join(localAppDir, modelingAppExe) # noqa: N806
+ modelingAppExeRemote = posixpath.join( # noqa: N806
+ remoteAppDir, modelingAppExe
+ )
else:
- raise WorkFlowInputError('Modeling application %s not in registry' % modelingApplication)
+ raise WorkFlowInputError( # noqa: F405, TRY301
+ 'Modeling application %s not in registry' # noqa: UP031
+ % modelingApplication
+ )
else:
- raise WorkFlowInputError('Need a ModelingApplication in Modeling data')
-
+ raise WorkFlowInputError( # noqa: F405, TRY003, TRY301
+ 'Need a ModelingApplication in Modeling data' # noqa: EM101
+ )
else:
- raise WorkFlowInputError('Need a Modeling Entry in Applications')
+ raise WorkFlowInputError('Need a Modeling Entry in Applications') # noqa: EM101, F405, TRY003, TRY301
#
# get edp application and its data .. CURRENTLY MODELING APP MUST CREATE EDP
#
if 'EDP' in available_apps:
- edpApp = available_apps['EDP']
-
+ edpApp = available_apps['EDP'] # noqa: N806
+
if 'Application' in edpApp:
- edpApplication = edpApp['Application']
-
+ edpApplication = edpApp['Application'] # noqa: N806
+
# check modeling app in registry, if so get full executable path
- edpAppData = edpApp['ApplicationData']
- if edpApplication in Applications['EDPApplications'].keys():
- edpAppExe = Applications['EDPApplications'].get(edpApplication)
- edpAppExeLocal = posixpath.join(localAppDir,edpAppExe)
- edpAppExeRemote = posixpath.join(remoteAppDir,edpAppExe)
+ edpAppData = edpApp['ApplicationData'] # noqa: N806
+ if edpApplication in Applications['EDPApplications'].keys(): # noqa: SIM118
+ edpAppExe = Applications['EDPApplications'].get(edpApplication) # noqa: N806
+ edpAppExeLocal = posixpath.join(localAppDir, edpAppExe) # noqa: N806
+ edpAppExeRemote = posixpath.join(remoteAppDir, edpAppExe) # noqa: N806
else:
- raise WorkFlowInputError('EDP application {} not in registry'.format(edpApplication))
-
+ raise WorkFlowInputError( # noqa: F405, TRY003, TRY301
+ f'EDP application {edpApplication} not in registry' # noqa: EM102
+ )
+
else:
- raise WorkFlowInputError('Need an EDPApplication in EDP data')
-
+ raise WorkFlowInputError('Need an EDPApplication in EDP data') # noqa: EM101, F405, TRY003, TRY301
+
else:
- raise WorkFlowInputError('Need an EDP Entry in Applications')
+ raise WorkFlowInputError('Need an EDP Entry in Applications') # noqa: EM101, F405, TRY003, TRY301
#
- # get simulation application and its data
+ # get simulation application and its data
#
if 'Simulation' in available_apps:
- simulationApp = available_apps['Simulation']
+ simulationApp = available_apps['Simulation'] # noqa: N806
if 'Application' in simulationApp:
- simulationApplication = simulationApp['Application']
+ simulationApplication = simulationApp['Application'] # noqa: N806
# check modeling app in registry, if so get full executable path
- simAppData = simulationApp['ApplicationData']
- if simulationApplication in Applications['SimulationApplications'].keys():
- simAppExe = Applications['SimulationApplications'].get(simulationApplication)
- simAppExeLocal = posixpath.join(localAppDir,simAppExe)
- simAppExeRemote = posixpath.join(remoteAppDir,simAppExe)
+ simAppData = simulationApp['ApplicationData'] # noqa: N806
+ if (
+ simulationApplication # noqa: SIM118
+ in Applications['SimulationApplications'].keys()
+ ):
+ simAppExe = Applications['SimulationApplications'].get( # noqa: N806
+ simulationApplication
+ )
+ simAppExeLocal = posixpath.join(localAppDir, simAppExe) # noqa: N806
+ simAppExeRemote = posixpath.join(remoteAppDir, simAppExe) # noqa: N806
else:
- raise WorkFlowInputError('Simulation application {} not in registry'.format(simulationApplication))
+ raise WorkFlowInputError( # noqa: F405, TRY003, TRY301
+ f'Simulation application {simulationApplication} not in registry' # noqa: EM102
+ )
else:
- raise WorkFlowInputError('Need an SimulationApplication in Simulation data')
-
+ raise WorkFlowInputError( # noqa: F405, TRY003, TRY301
+                    'Need a SimulationApplication in Simulation data'  # noqa: EM101
+ )
else:
- raise WorkFlowInputError('Need a Simulation Entry in Applications')
+ raise WorkFlowInputError('Need a Simulation Entry in Applications') # noqa: EM101, F405, TRY003, TRY301
if 'UQ' in available_apps:
- uqApp = available_apps['UQ']
+ uqApp = available_apps['UQ'] # noqa: N806
if 'Application' in uqApp:
- uqApplication = uqApp['Application']
+ uqApplication = uqApp['Application'] # noqa: N806
# check modeling app in registry, if so get full executable path
- uqAppData = uqApp['ApplicationData']
- if uqApplication in Applications['UQApplications'].keys():
- uqAppExe = Applications['UQApplications'].get(uqApplication)
- uqAppExeLocal = posixpath.join(localAppDir,uqAppExe)
- uqAppExeRemote = posixpath.join(localAppDir,uqAppExe)
+ uqAppData = uqApp['ApplicationData'] # noqa: N806
+ if uqApplication in Applications['UQApplications'].keys(): # noqa: SIM118
+ uqAppExe = Applications['UQApplications'].get(uqApplication) # noqa: N806
+ uqAppExeLocal = posixpath.join(localAppDir, uqAppExe) # noqa: N806
+                    uqAppExeRemote = posixpath.join(remoteAppDir, uqAppExe)  # noqa: N806, F841
else:
- raise WorkFlowInputError('UQ application {} not in registry'.format(uqApplication))
+ raise WorkFlowInputError( # noqa: F405, TRY003, TRY301
+ f'UQ application {uqApplication} not in registry' # noqa: EM102
+ )
else:
- raise WorkFlowInputError('Need a UQApplication in UQ data')
-
+ raise WorkFlowInputError('Need a UQApplication in UQ data') # noqa: EM101, F405, TRY003, TRY301
else:
- raise WorkFlowInputError('Need a Simulation Entry in Applications')
-
+            raise WorkFlowInputError('Need a UQ Entry in Applications')  # noqa: EM101, F405, TRY003, TRY301
- workflow_log('SUCCESS: Parsed workflow input')
- workflow_log(divider)
+ workflow_log('SUCCESS: Parsed workflow input') # noqa: F405
+ workflow_log(divider) # noqa: F405
#
# now invoke the applications
@@ -260,174 +295,223 @@ def main(run_type, inputFile, applicationsRegistry):
# - perform Simulation
# - getDL
- bimFILE = 'dakota.json'
- eventFILE = 'EVENT.json'
- samFILE = 'SAM.json'
- edpFILE = 'EDP.json'
- simFILE = 'SIM.json'
- driverFile = 'driver'
+ bimFILE = 'dakota.json' # noqa: N806
+ eventFILE = 'EVENT.json' # noqa: N806
+ samFILE = 'SAM.json' # noqa: N806
+ edpFILE = 'EDP.json' # noqa: N806
+ simFILE = 'SIM.json' # noqa: N806
+ driverFile = 'driver' # noqa: N806
# open driver file & write building app (minus the --getRV) to it
- driverFILE = open(driverFile, 'w')
+ driverFILE = open(driverFile, 'w') # noqa: N806, PLW1514, PTH123, SIM115
# get RV for event
- eventAppDataList = ['"{}"'.format(eventAppExeRemote), '--filenameBIM', bimFILE, '--filenameEVENT', eventFILE]
- if (eventAppExe.endswith('.py')):
+ eventAppDataList = [ # noqa: N806
+ f'"{eventAppExeRemote}"',
+ '--filenameBIM',
+ bimFILE,
+ '--filenameEVENT',
+ eventFILE,
+ ]
+ if eventAppExe.endswith('.py'):
eventAppDataList.insert(0, 'python')
- for key in eventAppData.keys():
- eventAppDataList.append(u"--" + key)
+ for key in eventAppData.keys(): # noqa: SIM118
+ eventAppDataList.append('--' + key)
value = eventAppData.get(key)
- eventAppDataList.append(u"" + value)
-
+ eventAppDataList.append('' + value)
+
for item in eventAppDataList:
- driverFILE.write('%s ' % item)
+ driverFILE.write('%s ' % item) # noqa: UP031
driverFILE.write('\n')
eventAppDataList.append('--getRV')
- if (eventAppExe.endswith('.py')):
- eventAppDataList[1] = u""+eventAppExeLocal
+ if eventAppExe.endswith('.py'):
+ eventAppDataList[1] = '' + eventAppExeLocal
else:
- eventAppDataList[0] = u""+eventAppExeLocal
+ eventAppDataList[0] = '' + eventAppExeLocal
- command, result, returncode = runApplication(eventAppDataList)
+ command, result, returncode = runApplication(eventAppDataList) # noqa: F405
log_output.append([command, result, returncode])
# get RV for building model
- modelAppDataList = ['"{}"'.format(modelingAppExeRemote), '--filenameBIM', bimFILE, '--filenameEVENT', eventFILE, '--filenameSAM',
- samFILE]
-
- if (modelingAppExe.endswith('.py')):
+ modelAppDataList = [ # noqa: N806
+ f'"{modelingAppExeRemote}"',
+ '--filenameBIM',
+ bimFILE,
+ '--filenameEVENT',
+ eventFILE,
+ '--filenameSAM',
+ samFILE,
+ ]
+
+ if modelingAppExe.endswith('.py'):
modelAppDataList.insert(0, 'python')
- for key in modelingAppData.keys():
- modelAppDataList.append(u'--' + key)
- modelAppDataList.append(u'' + modelingAppData.get(key))
+ for key in modelingAppData.keys(): # noqa: SIM118
+ modelAppDataList.append('--' + key) # noqa: FURB113
+ modelAppDataList.append('' + modelingAppData.get(key))
for item in modelAppDataList:
- driverFILE.write('%s ' % item)
+ driverFILE.write('%s ' % item) # noqa: UP031
driverFILE.write('\n')
modelAppDataList.append('--getRV')
- if (modelingAppExe.endswith('.py')):
+ if modelingAppExe.endswith('.py'):
modelAppDataList[1] = modelingAppExeLocal
else:
modelAppDataList[0] = modelingAppExeLocal
- command, result, returncode = runApplication(modelAppDataList)
+ command, result, returncode = runApplication(modelAppDataList) # noqa: F405
log_output.append([command, result, returncode])
-
# get RV for EDP!
- edpAppDataList = ['"{}"'.format(edpAppExeRemote), '--filenameBIM', bimFILE, '--filenameEVENT', eventFILE, '--filenameSAM', samFILE,
- '--filenameEDP', edpFILE]
-
- if (edpAppExe.endswith('.py')):
+ edpAppDataList = [ # noqa: N806
+ f'"{edpAppExeRemote}"',
+ '--filenameBIM',
+ bimFILE,
+ '--filenameEVENT',
+ eventFILE,
+ '--filenameSAM',
+ samFILE,
+ '--filenameEDP',
+ edpFILE,
+ ]
+
+ if edpAppExe.endswith('.py'):
edpAppDataList.insert(0, 'python')
- for key in edpAppData.keys():
- edpAppDataList.append(u'--' + key)
- edpAppDataList.append(u'' + edpAppData.get(key))
+ for key in edpAppData.keys(): # noqa: SIM118
+ edpAppDataList.append('--' + key) # noqa: FURB113
+ edpAppDataList.append('' + edpAppData.get(key))
for item in edpAppDataList:
- driverFILE.write('%s ' % item)
+ driverFILE.write('%s ' % item) # noqa: UP031
driverFILE.write('\n')
- if (edpAppExe.endswith('.py')):
+ if edpAppExe.endswith('.py'):
edpAppDataList[1] = edpAppExeLocal
else:
edpAppDataList[0] = edpAppExeLocal
edpAppDataList.append('--getRV')
- command, result, returncode = runApplication(edpAppDataList)
+ command, result, returncode = runApplication(edpAppDataList) # noqa: F405
log_output.append([command, result, returncode])
# get RV for Simulation
- simAppDataList = ['"{}"'.format(simAppExeRemote), '--filenameBIM', bimFILE, '--filenameSAM', samFILE, '--filenameEVENT', eventFILE,
- '--filenameEDP', edpFILE, '--filenameSIM', simFILE]
-
- if (simAppExe.endswith('.py')):
+ simAppDataList = [ # noqa: N806
+ f'"{simAppExeRemote}"',
+ '--filenameBIM',
+ bimFILE,
+ '--filenameSAM',
+ samFILE,
+ '--filenameEVENT',
+ eventFILE,
+ '--filenameEDP',
+ edpFILE,
+ '--filenameSIM',
+ simFILE,
+ ]
+
+ if simAppExe.endswith('.py'):
simAppDataList.insert(0, 'python')
- for key in simAppData.keys():
- simAppDataList.append(u'--' + key)
- simAppDataList.append(u'' + simAppData.get(key))
+ for key in simAppData.keys(): # noqa: SIM118
+ simAppDataList.append('--' + key) # noqa: FURB113
+ simAppDataList.append('' + simAppData.get(key))
for item in simAppDataList:
- driverFILE.write('%s ' % item)
+ driverFILE.write('%s ' % item) # noqa: UP031
driverFILE.write('\n')
simAppDataList.append('--getRV')
- if (simAppExe.endswith('.py')):
+ if simAppExe.endswith('.py'):
simAppDataList[1] = simAppExeLocal
else:
simAppDataList[0] = simAppExeLocal
- command, result, returncode = runApplication(simAppDataList)
+ command, result, returncode = runApplication(simAppDataList) # noqa: F405
log_output.append([command, result, returncode])
-
# perform the simulation
driverFILE.close()
- uqAppDataList = ['"{}"'.format(uqAppExeLocal), '--filenameBIM', bimFILE, '--filenameSAM', samFILE, '--filenameEVENT', eventFILE,
- '--filenameEDP', edpFILE, '--filenameSIM', simFILE, '--driverFile', driverFile]
-
- if (uqAppExe.endswith('.py')):
+ uqAppDataList = [ # noqa: N806
+ f'"{uqAppExeLocal}"',
+ '--filenameBIM',
+ bimFILE,
+ '--filenameSAM',
+ samFILE,
+ '--filenameEVENT',
+ eventFILE,
+ '--filenameEDP',
+ edpFILE,
+ '--filenameSIM',
+ simFILE,
+ '--driverFile',
+ driverFile,
+ ]
+
+ if uqAppExe.endswith('.py'):
uqAppDataList.insert(0, 'python')
uqAppDataList[1] = uqAppExeLocal
- uqAppDataList.append('--runType')
+ uqAppDataList.append('--runType') # noqa: FURB113
uqAppDataList.append(run_type)
- for key in uqAppData.keys():
- uqAppDataList.append(u'--' + key)
- value = uqAppData.get(key)
+ for key in uqAppData.keys(): # noqa: SIM118
+ uqAppDataList.append('--' + key)
+ value = uqAppData.get(key)
if isinstance(value, string_types):
- uqAppDataList.append(u'' + value)
+ uqAppDataList.append('' + value)
else:
- uqAppDataList.append(u'' + str(value))
+ uqAppDataList.append('' + str(value))
- if run_type == 'run' or run_type == 'set_up':
- workflow_log('Running Simulation...')
- workflow_log(' '.join(uqAppDataList))
- command, result, returncode = runApplication(uqAppDataList)
+ if run_type == 'run' or run_type == 'set_up': # noqa: PLR1714
+ workflow_log('Running Simulation...') # noqa: F405
+ workflow_log(' '.join(uqAppDataList)) # noqa: F405
+ command, result, returncode = runApplication(uqAppDataList) # noqa: F405
log_output.append([command, result, returncode])
- workflow_log('Simulation ended...')
+ workflow_log('Simulation ended...') # noqa: F405
else:
- workflow_log('Setup run only. No simulation performed.')
+ workflow_log('Setup run only. No simulation performed.') # noqa: F405
- except WorkFlowInputError as e:
- print('workflow error: %s' % e.value)
- workflow_log('workflow error: %s' % e.value)
- workflow_log(divider)
- exit(1)
+ except WorkFlowInputError as e: # noqa: F405
+ print('workflow error: %s' % e.value) # noqa: T201, UP031
+ workflow_log('workflow error: %s' % e.value) # noqa: F405, UP031
+ workflow_log(divider) # noqa: F405
+ exit(1) # noqa: PLR1722
# unhandled exceptions are handled here
except Exception as e:
- print('workflow error: %s' % e.value)
- workflow_log('unhandled exception... exiting')
+ print('workflow error: %s' % e.value) # noqa: T201, UP031
+ workflow_log('unhandled exception... exiting') # noqa: F405
raise
-if __name__ == '__main__':
- if len(sys.argv) != 4:
- print('\nNeed three arguments, e.g.:\n')
- print(' python %s action workflowinputfile.json workflowapplications.json' % sys.argv[0])
- print('\nwhere: action is either check or run\n')
- exit(1)
+if __name__ == '__main__':
+ if len(sys.argv) != 4: # noqa: PLR2004
+ print('\nNeed three arguments, e.g.:\n') # noqa: T201
+ print( # noqa: T201
+ ' python %s action workflowinputfile.json workflowapplications.json' # noqa: UP031
+ % sys.argv[0]
+ )
+ print('\nwhere: action is either check or run\n') # noqa: T201
+ exit(1) # noqa: PLR1722
run_type = sys.argv[1]
- inputFile = sys.argv[2]
- applicationsRegistry = sys.argv[3]
+ inputFile = sys.argv[2] # noqa: N816
+ applicationsRegistry = sys.argv[3] # noqa: N816
main(run_type, inputFile, applicationsRegistry)
- workflow_log_file = 'workflow-log-%s.txt' % (strftime('%Y-%m-%d-%H-%M-%S-utc', gmtime()))
- log_filehandle = open(workflow_log_file, 'w')
+ workflow_log_file = 'workflow-log-%s.txt' % ( # noqa: UP031
+ strftime('%Y-%m-%d-%H-%M-%S-utc', gmtime())
+ )
+ log_filehandle = open(workflow_log_file, 'w') # noqa: PLW1514, PTH123, SIM115
- print(type(log_filehandle))
+ print(type(log_filehandle)) # noqa: T201
print(divider, file=log_filehandle)
print('Start of Log', file=log_filehandle)
print(divider, file=log_filehandle)
@@ -435,14 +519,13 @@ def main(run_type, inputFile, applicationsRegistry):
# nb: log_output is a global variable, defined at the top of this script.
for result in log_output:
print(divider, file=log_filehandle)
- print('command line:\n%s\n' % result[0], file=log_filehandle)
+ print('command line:\n%s\n' % result[0], file=log_filehandle) # noqa: UP031
print(divider, file=log_filehandle)
- print('output from process:\n%s\n' % result[1], file=log_filehandle)
+ print('output from process:\n%s\n' % result[1], file=log_filehandle) # noqa: UP031
print(divider, file=log_filehandle)
print('End of Log', file=log_filehandle)
print(divider, file=log_filehandle)
- workflow_log('Log file: %s' % workflow_log_file)
- workflow_log('End of run.')
-
+ workflow_log('Log file: %s' % workflow_log_file) # noqa: F405, UP031
+ workflow_log('End of run.') # noqa: F405
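
femUQ-OLD.py builds one command list per application, writes the remote variant to the 'driver' file, and then runs the local variant once with --getRV appended through the workflow's own runApplication helper (from WorkflowUtils). A minimal, self-contained sketch of that staging pattern using subprocess instead; the executable paths and option names here are illustrative only:

import subprocess


def stage_application(driver_path, remote_exe, local_exe, args):
    """Append the remote command to the driver file, then run --getRV locally."""
    remote_cmd = [remote_exe, *args]
    with open(driver_path, 'a') as driver:
        driver.write(' '.join(remote_cmd) + '\n')
    local_cmd = [local_exe, *args, '--getRV']
    return subprocess.run(local_cmd, capture_output=True, text=True, check=False)


# result = stage_application(
#     'driver',
#     '/remote/apps/createEVENT',
#     '/local/apps/createEVENT',
#     ['--filenameBIM', 'dakota.json', '--filenameEVENT', 'EVENT.json'],
# )
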
diff --git a/modules/Workflow/femUQ.py b/modules/Workflow/femUQ.py
index c2b7990e4..6b6a64f41 100755
--- a/modules/Workflow/femUQ.py
+++ b/modules/Workflow/femUQ.py
@@ -1,37 +1,36 @@
-# -*- coding: utf-8 -*-
-#
+# # noqa: EXE002, INP001, D100
# Copyright (c) 2019 The Regents of the University of California
# Copyright (c) 2019 Leland Stanford Junior University
#
# This file is part of pelicun.
-#
-# Redistribution and use in source and binary forms, with or without
+#
+# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
#
-# 1. Redistributions of source code must retain the above copyright notice,
+# 1. Redistributions of source code must retain the above copyright notice,
# this list of conditions and the following disclaimer.
#
-# 2. Redistributions in binary form must reproduce the above copyright notice,
-# this list of conditions and the following disclaimer in the documentation
+# 2. Redistributions in binary form must reproduce the above copyright notice,
+# this list of conditions and the following disclaimer in the documentation
# and/or other materials provided with the distribution.
#
-# 3. Neither the name of the copyright holder nor the names of its contributors
-# may be used to endorse or promote products derived from this software without
+# 3. Neither the name of the copyright holder nor the names of its contributors
+# may be used to endorse or promote products derived from this software without
# specific prior written permission.
#
-# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
-# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
-# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
-# ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE
-# LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
-# CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
-# SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
-# INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
-# CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
-# ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
+# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
+# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
+# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
+# ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE
+# LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
+# CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
+# SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
+# INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
+# CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
+# ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
# POSSIBILITY OF SUCH DAMAGE.
-#
-# You should have received a copy of the BSD 3-Clause License along with
+#
+# You should have received a copy of the BSD 3-Clause License along with
# pelicun. If not, see .
#
# Contributors:
@@ -42,58 +41,65 @@
# Chaofeng Wang
# import functions for Python 2.X support
-from __future__ import division, print_function
-import sys, os, json
-if sys.version.startswith('2'):
- range=xrange
- string_types = basestring
+import json
+import os
+import sys
+
+if sys.version.startswith('2'):
+ range = xrange # noqa: A001, F821
+ string_types = basestring # noqa: F821
else:
string_types = str
-sys.path.insert(0, os.path.dirname(os.path.realpath(__file__)))
+sys.path.insert(0, os.path.dirname(os.path.realpath(__file__))) # noqa: PTH120
import whale.main as whale
-from whale.main import log_msg, log_div
+from whale.main import log_div, log_msg
-def main(run_type, input_file, app_registry):
+def main(run_type, input_file, app_registry): # noqa: D103
# initialize the log file
- with open(input_file, 'r') as f:
+ with open(input_file) as f: # noqa: PLW1514, PTH123
inputs = json.load(f)
- runDir = inputs['runDir']
+ runDir = inputs['runDir'] # noqa: N806
whale.log_file = runDir + '/log.txt'
- with open(whale.log_file, 'w') as f:
- f.write('femUQ workflow\n')
+ with open(whale.log_file, 'w') as f: # noqa: FURB103, PLW1514, PTH123
+ f.write('femUQ workflow\n')
# echo the inputs
log_msg(log_div)
log_msg('Started running the workflow script')
- log_msg(log_div)
+ log_msg(log_div)
- WF = whale.Workflow(run_type, input_file, app_registry,
- app_type_list = ['Event', 'Modeling', 'EDP', 'Simulation', 'UQ'])
+ WF = whale.Workflow( # noqa: N806
+ run_type,
+ input_file,
+ app_registry,
+ app_type_list=['Event', 'Modeling', 'EDP', 'Simulation', 'UQ'],
+ )
# initialize the working directory
WF.init_simdir()
# prepare the input files for the simulation
- WF.create_RV_files(
- app_sequence = ['Event', 'Modeling', 'EDP', 'Simulation'])
+ WF.create_RV_files(app_sequence=['Event', 'Modeling', 'EDP', 'Simulation'])
# create the workflow driver file
- WF.create_driver_file(
- app_sequence = ['Event', 'Modeling', 'EDP', 'Simulation'])
+ WF.create_driver_file(app_sequence=['Event', 'Modeling', 'EDP', 'Simulation'])
# run uq engine to simulate response
WF.simulate_response()
-if __name__ == '__main__':
- if len(sys.argv) != 4:
- print('\nNeed three arguments, e.g.:\n')
- print(' python %s action workflowinputfile.json workflowapplications.json' % sys.argv[0])
- print('\nwhere: action is either check or run\n')
- exit(1)
+if __name__ == '__main__':
+ if len(sys.argv) != 4: # noqa: PLR2004
+ print('\nNeed three arguments, e.g.:\n') # noqa: T201
+ print( # noqa: T201
+ ' python %s action workflowinputfile.json workflowapplications.json' # noqa: UP031
+ % sys.argv[0]
+ )
+ print('\nwhere: action is either check or run\n') # noqa: T201
+ exit(1) # noqa: PLR1722
main(run_type=sys.argv[1], input_file=sys.argv[2], app_registry=sys.argv[3])
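
femUQ.py reads runDir from the workflow input JSON and starts a fresh log file there before constructing the whale Workflow. A minimal sketch of that setup step, with an illustrative input file name and banner:

import json
from pathlib import Path


def init_log(input_file, banner='femUQ workflow\n'):
    """Read runDir from the workflow input and create a fresh log file there."""
    with open(input_file, encoding='utf-8') as f:
        inputs = json.load(f)
    log_path = Path(inputs['runDir']) / 'log.txt'
    log_path.write_text(banner, encoding='utf-8')
    return log_path


# log_file = init_log('workflow_input.json')
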
diff --git a/modules/Workflow/qWHALE.py b/modules/Workflow/qWHALE.py
index 12dd82ae6..f47d4c0ac 100755
--- a/modules/Workflow/qWHALE.py
+++ b/modules/Workflow/qWHALE.py
@@ -1,5 +1,4 @@
-# -*- coding: utf-8 -*-
-#
+# # noqa: EXE002, INP001, D100
# Copyright (c) 2019 The Regents of the University of California
# Copyright (c) 2019 Leland Stanford Junior University
#
@@ -41,36 +40,37 @@
# Michael Gardner
# Chaofeng Wang
-import sys, os, json
import argparse
+import json
+import os
+import sys
from pathlib import Path
-sys.path.insert(0, os.path.dirname(os.path.realpath(__file__)))
+sys.path.insert(0, os.path.dirname(os.path.realpath(__file__))) # noqa: PTH120
import whale.main as whale
-from whale.main import log_msg, log_div
+from whale.main import log_div, log_msg
-def main(run_type, input_file, app_registry, working_dir, app_dir, log_file):
+def main(run_type, input_file, app_registry, working_dir, app_dir, log_file): # noqa: ARG001, D103
# initialize the log file
- with open(input_file, 'r') as f:
+ with open(input_file) as f: # noqa: PLW1514, PTH123
inputs = json.load(f)
- runDir = inputs['runDir']
+ runDir = inputs['runDir'] # noqa: N806
if working_dir is not None:
- runDir = working_dir
+ runDir = working_dir # noqa: N806
else:
- runDir = inputs['runDir']
+ runDir = inputs['runDir'] # noqa: N806
whale.log_file = runDir + '/log.txt'
# initialize log file
- whale.set_options({
- "LogFile": runDir + '/log.txt',
- "LogShowMS": False,
- "PrintLog": True
- })
- log_msg('\nqWHALE workflow\n',
- prepend_timestamp=False, prepend_blank_space=False)
+ whale.set_options(
+ {'LogFile': runDir + '/log.txt', 'LogShowMS': False, 'PrintLog': True}
+ )
+ log_msg(
+ '\nqWHALE workflow\n', prepend_timestamp=False, prepend_blank_space=False
+ )
whale.print_system_info()
@@ -78,25 +78,28 @@ def main(run_type, input_file, app_registry, working_dir, app_dir, log_file):
log_div(prepend_blank_space=False)
log_div(prepend_blank_space=False)
log_msg('Started running the workflow script')
- log_div()
-
- WF = whale.Workflow(run_type, input_file, app_registry,
- app_type_list = ['FEM', 'UQ'],
- working_dir = working_dir,
- app_dir = app_dir)
+ log_div()
+ WF = whale.Workflow( # noqa: N806
+ run_type,
+ input_file,
+ app_registry,
+ app_type_list=['FEM', 'UQ'],
+ working_dir=working_dir,
+ app_dir=app_dir,
+ )
# initialize the working directory
WF.init_simdir()
-
+
# prepare the input files for the simulation
- WF.preprocess_inputs(app_sequence = ['FEM'])
+ WF.preprocess_inputs(app_sequence=['FEM'])
# run uq engine to simulate response
- WF.simulate_response()
+ WF.simulate_response()
-if __name__ == '__main__':
+if __name__ == '__main__':
"""
if len(sys.argv) != 4:
print('\nNeed three arguments, e.g.:\n')
@@ -107,45 +110,61 @@ def main(run_type, input_file, app_registry, working_dir, app_dir, log_file):
main(run_type=sys.argv[1], input_file=sys.argv[2], app_registry=sys.argv[3])
"""
- #Defining the command line arguments
-
- workflowArgParser = argparse.ArgumentParser(
- "Run the NHERI SimCenter sWHALE workflow for a single asset.",
- allow_abbrev=False)
-
- workflowArgParser.add_argument("runType",
- help="Specifies the type of run requested.")
- workflowArgParser.add_argument("inputFile",
- help="Specifies the input file for the workflow.")
- workflowArgParser.add_argument("registry",
- default=os.path.join(os.path.dirname(os.path.abspath(__file__)),
- "WorkflowApplications.json"),
- help="Path to file containing registered workflow applications")
- workflowArgParser.add_argument("-w", "--workDir",
+ # Defining the command line arguments
+
+ workflowArgParser = argparse.ArgumentParser( # noqa: N816
+ 'Run the NHERI SimCenter sWHALE workflow for a single asset.',
+ allow_abbrev=False,
+ )
+
+ workflowArgParser.add_argument(
+ 'runType', help='Specifies the type of run requested.'
+ )
+ workflowArgParser.add_argument(
+ 'inputFile', help='Specifies the input file for the workflow.'
+ )
+ workflowArgParser.add_argument(
+ 'registry',
+ default=os.path.join( # noqa: PTH118
+ os.path.dirname(os.path.abspath(__file__)), # noqa: PTH100, PTH120
+ 'WorkflowApplications.json',
+ ),
+ help='Path to file containing registered workflow applications',
+ )
+ workflowArgParser.add_argument(
+ '-w',
+ '--workDir',
default=None,
- help="Absolute path to the working directory.")
- workflowArgParser.add_argument("-a", "--appDir",
+ help='Absolute path to the working directory.',
+ )
+ workflowArgParser.add_argument(
+ '-a',
+ '--appDir',
default=None,
- help="Absolute path to the local application directory.")
- workflowArgParser.add_argument("-l", "--logFile",
+ help='Absolute path to the local application directory.',
+ )
+ workflowArgParser.add_argument(
+ '-l',
+ '--logFile',
default='log.txt',
- help="Path where the log file will be saved.")
+ help='Path where the log file will be saved.',
+ )
- #Parsing the command line arguments
- wfArgs = workflowArgParser.parse_args()
+ # Parsing the command line arguments
+ wfArgs = workflowArgParser.parse_args() # noqa: N816
# update the local app dir with the default - if needed
if wfArgs.appDir is None:
- workflow_dir = Path(os.path.dirname(os.path.abspath(__file__))).resolve()
+ workflow_dir = Path(os.path.dirname(os.path.abspath(__file__))).resolve() # noqa: PTH100, PTH120
wfArgs.appDir = workflow_dir.parents[1]
- #Calling the main workflow method and passing the parsed arguments
-
- main(run_type = wfArgs.runType,
- input_file = wfArgs.inputFile,
- app_registry = wfArgs.registry,
- working_dir = wfArgs.workDir,
- app_dir = wfArgs.appDir,
- log_file = wfArgs.logFile)
-
+ # Calling the main workflow method and passing the parsed arguments
+ main(
+ run_type=wfArgs.runType,
+ input_file=wfArgs.inputFile,
+ app_registry=wfArgs.registry,
+ working_dir=wfArgs.workDir,
+ app_dir=wfArgs.appDir,
+ log_file=wfArgs.logFile,
+ )
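
When --appDir is not supplied, qWHALE.py falls back to the directory two levels above the script, matching the repository layout the workflow assumes. A minimal sketch of that resolution:

from pathlib import Path


def resolve_app_dir(app_dir=None):
    """Use the given app directory, or default to two levels above this script."""
    if app_dir is not None:
        return Path(app_dir)
    workflow_dir = Path(__file__).resolve().parent
    return workflow_dir.parents[1]
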
diff --git a/modules/Workflow/rWHALE.py b/modules/Workflow/rWHALE.py
index 1bd766a8d..c95b73f09 100644
--- a/modules/Workflow/rWHALE.py
+++ b/modules/Workflow/rWHALE.py
@@ -1,5 +1,4 @@
-# -*- coding: utf-8 -*-
-#
+# # noqa: INP001, D100
# Copyright (c) 2019 The Regents of the University of California
# Copyright (c) 2019 Leland Stanford Junior University
#
@@ -45,22 +44,36 @@
# Jinyan Zhao
# Sina Naeimi
-import sys, os, json
import argparse
+import json
+import os
+import sys
from pathlib import Path
-sys.path.insert(0, os.path.dirname(os.path.realpath(__file__)))
+sys.path.insert(0, os.path.dirname(os.path.realpath(__file__))) # noqa: PTH120
-import whale.main as whale
-from whale.main import log_msg, log_div
-from sWHALE import runSWhale
import importlib
-
-def main(run_type, input_file, app_registry,
- force_cleanup, bldg_id_filter, reference_dir,
- working_dir, app_dir, log_file, site_response,
- parallelType, mpiExec, numPROC):
+import whale.main as whale
+from sWHALE import runSWhale
+from whale.main import log_div, log_msg
+
+
+def main( # noqa: C901, D103
+ run_type,
+ input_file,
+ app_registry,
+ force_cleanup,
+ bldg_id_filter,
+ reference_dir,
+ working_dir,
+ app_dir,
+ log_file,
+ site_response, # noqa: ARG001
+ parallelType, # noqa: N803
+ mpiExec, # noqa: N803
+ numPROC, # noqa: N803
+):
#
# check if running in a parallel mpi job
# - if so set variables:
@@ -70,49 +83,48 @@ def main(run_type, input_file, app_registry,
# - else set numP = 1, procID = 0 and doParallel = False
#
- numP = 1
- procID = 0
- doParallel = False
-
- mpi_spec = importlib.util.find_spec("mpi4py")
+ numP = 1 # noqa: N806
+ procID = 0 # noqa: N806
+ doParallel = False # noqa: N806
+
+ mpi_spec = importlib.util.find_spec('mpi4py')
found = mpi_spec is not None
if found and parallelType == 'parRUN':
-
- import mpi4py
- from mpi4py import MPI
+ from mpi4py import MPI # noqa: PLC0415
+
comm = MPI.COMM_WORLD
- numP = comm.Get_size()
- procID = comm.Get_rank();
- if numP < 2:
- doParallel = False
- numP = 1
- procID = 0
+ numP = comm.Get_size() # noqa: N806
+ procID = comm.Get_rank() # noqa: N806
+ if numP < 2: # noqa: PLR2004
+ doParallel = False # noqa: N806
+ numP = 1 # noqa: N806
+ procID = 0 # noqa: N806
else:
- doParallel = True;
+ doParallel = True # noqa: N806
# save the reference dir in the input file
- with open(input_file, 'r', encoding="utf-8") as f:
- inputs = json.load(f)
+ with open(input_file, encoding='utf-8') as f: # noqa: PTH123
+ inputs = json.load(f) # noqa: F841
- # TODO: if the ref dir is needed, do NOT save it to the input file, store it
+ # TODO: if the ref dir is needed, do NOT save it to the input file, store it # noqa: TD002
     # somewhere else in a file that is not shared among processes
- #inputs['refDir'] = reference_dir
- #with open(input_file, 'w') as f:
+ # inputs['refDir'] = reference_dir
+ # with open(input_file, 'w') as f:
# json.dump(inputs, f, indent=2)
- # TODO: remove the commented section below, I only kept it for now to make
+ # TODO: remove the commented section below, I only kept it for now to make # noqa: TD002
# sure it is not needed
- #if working_dir is not None:
+ # if working_dir is not None:
# runDir = working_dir
- #else:
+ # else:
# runDir = inputs['runDir']
- if not os.path.exists(working_dir):
- os.mkdir(working_dir)
+ if not os.path.exists(working_dir): # noqa: PTH110
+ os.mkdir(working_dir) # noqa: PTH102
# initialize log file
- if parallelType == 'parSETUP' or parallelType == 'seqRUN':
+ if parallelType == 'parSETUP' or parallelType == 'seqRUN': # noqa: PLR1714
if log_file == 'log.txt':
log_file_path = working_dir + '/log.txt'
else:
@@ -120,17 +132,17 @@ def main(run_type, input_file, app_registry,
else:
log_file_path = working_dir + '/log.txt' + '.' + str(procID)
- whale.set_options({
- "LogFile": log_file_path,
- "LogShowMS": False,
- "PrintLog": True
- })
-
- log_msg('\nrWHALE workflow\n', prepend_timestamp=False, prepend_blank_space=False)
+ whale.set_options(
+ {'LogFile': log_file_path, 'LogShowMS': False, 'PrintLog': True}
+ )
+
+ log_msg(
+ '\nrWHALE workflow\n', prepend_timestamp=False, prepend_blank_space=False
+ )
if procID == 0:
whale.print_system_info()
-
+
# echo the inputs
log_div(prepend_blank_space=False)
log_div(prepend_blank_space=False)
@@ -140,85 +152,91 @@ def main(run_type, input_file, app_registry,
if force_cleanup:
log_msg('Forced cleanup turned on.')
- WF = whale.Workflow(run_type,
- input_file,
- app_registry,
- app_type_list = ['Assets', 'RegionalEvent',
- 'RegionalMapping', 'Event',
- 'Modeling', 'EDP', 'Simulation',
- 'UQ', 'DL', 'SystemPerformance', 'Recovery'],
- reference_dir = reference_dir,
- working_dir = working_dir,
- app_dir = app_dir,
- parType = parallelType,
- mpiExec = mpiExec,
- numProc = numPROC)
+ WF = whale.Workflow( # noqa: N806
+ run_type,
+ input_file,
+ app_registry,
+ app_type_list=[
+ 'Assets',
+ 'RegionalEvent',
+ 'RegionalMapping',
+ 'Event',
+ 'Modeling',
+ 'EDP',
+ 'Simulation',
+ 'UQ',
+ 'DL',
+ 'SystemPerformance',
+ 'Recovery',
+ ],
+ reference_dir=reference_dir,
+ working_dir=working_dir,
+ app_dir=app_dir,
+ parType=parallelType,
+ mpiExec=mpiExec,
+ numProc=numPROC,
+ )
if bldg_id_filter is not None:
log_msg(f'Overriding simulation scope; running buildings {bldg_id_filter}')
# If a Min or Max attribute is used when calling the script, we need to
# update the min and max values in the input file.
- WF.workflow_apps['Building'].pref["filter"] = bldg_id_filter
+ WF.workflow_apps['Building'].pref['filter'] = bldg_id_filter
# initialize the working directory
- if parallelType == 'seqRUN' or parallelType == 'parSETUP':
+ if parallelType == 'seqRUN' or parallelType == 'parSETUP': # noqa: PLR1714
WF.init_workdir()
# prepare the basic inputs for individual assets
- if parallelType == 'seqRUN' or parallelType == 'parSETUP':
+ if parallelType == 'seqRUN' or parallelType == 'parSETUP': # noqa: PLR1714
asset_files = WF.create_asset_files()
- if parallelType != 'parSETUP':
+ if parallelType != 'parSETUP':
asset_files = WF.augment_asset_files()
- # run the regional event & do mapping
- if parallelType == 'seqRUN' or parallelType == 'parSETUP':
-
+ # run the regional event & do mapping
+ if parallelType == 'seqRUN' or parallelType == 'parSETUP': # noqa: PLR1714
# run event
WF.perform_regional_event()
# now for each asset, do regional mapping
- for asset_type, assetIt in asset_files.items() :
+ for asset_type, assetIt in asset_files.items(): # noqa: N806
WF.perform_regional_mapping(assetIt, asset_type)
-
if parallelType == 'parSETUP':
return
-
+
# now for each asset run dl workflow .. in parallel if requested
count = 0
- for asset_type, assetIt in asset_files.items() :
-
+ for asset_type, assetIt in asset_files.items(): # noqa: N806
# perform the regional mapping
# WF.perform_regional_mapping(assetIt, asset_type)
- # TODO: not elegant code, fix later
- with open(assetIt, 'r', encoding="utf-8") as f:
+ # TODO: not elegant code, fix later # noqa: TD002
+ with open(assetIt, encoding='utf-8') as f: # noqa: PTH123
asst_data = json.load(f)
-
+
# Sometimes multiple asset types need to be analyzed together, e.g., pipelines and nodes in a water network
run_asset_type = asset_type
- if asset_type == 'Buildings' or asset_type == "TransportationNetwork":
+ if asset_type == 'Buildings' or asset_type == 'TransportationNetwork': # noqa: PLR1714
pass
- elif asset_type == 'WaterNetworkNodes' :
- continue # Run the nodes with the pipelines, i.e., the water distribution network
- elif asset_type == 'WaterNetworkPipelines' :
- run_asset_type = 'WaterDistributionNetwork' # Run the pipelines with the entire water distribution network
- else :
- print("No support for asset type: ",asset_type)
-
+ elif asset_type == 'WaterNetworkNodes':
+ continue # Run the nodes with the pipelines, i.e., the water distribution network
+ elif asset_type == 'WaterNetworkPipelines':
+ run_asset_type = 'WaterDistributionNetwork' # Run the pipelines with the entire water distribution network
+ else:
+ print('No support for asset type: ', asset_type) # noqa: T201
+
# The preprocess app sequence (previously get_RV)
preprocess_app_sequence = ['Event', 'Modeling', 'EDP', 'Simulation']
-
+
# The workflow app sequence
- WF_app_sequence = ['Event', 'Modeling', 'EDP', 'Simulation']
+ WF_app_sequence = ['Event', 'Modeling', 'EDP', 'Simulation'] # noqa: N806
# For each asset
for asst in asst_data:
-
if count % numP == procID:
-
log_msg('', prepend_timestamp=False)
log_div(prepend_blank_space=False)
log_msg(f"{asset_type} id {asst['id']} in file {asst['file']}")
@@ -226,51 +244,55 @@ def main(run_type, input_file, app_registry,
# Run sWhale
runSWhale(
- inputs = None,
- WF = WF,
- assetID = asst['id'],
- assetAIM = asst['file'],
- prep_app_sequence = preprocess_app_sequence,
- WF_app_sequence = WF_app_sequence,
- asset_type = run_asset_type,
- copy_resources = True,
- force_cleanup = force_cleanup)
-
- count = count + 1
+ inputs=None,
+ WF=WF,
+ assetID=asst['id'],
+ assetAIM=asst['file'],
+ prep_app_sequence=preprocess_app_sequence,
+ WF_app_sequence=WF_app_sequence,
+ asset_type=run_asset_type,
+ copy_resources=True,
+ force_cleanup=force_cleanup,
+ )
+
+ count = count + 1 # noqa: PLR6104
# wait for every process to finish
- if doParallel == True:
+ if doParallel == True: # noqa: E712
comm.Barrier()
-
- # aggregate results
- if asset_type == 'Buildings' or asset_type == 'TransportationNetwork'\
- or asset_type == 'WaterDistributionNetwork':
- WF.aggregate_results(asst_data = asst_data, asset_type = asset_type)
-
- elif asset_type == 'WaterNetworkPipelines' :
-
+ # aggregate results
+ if (
+ asset_type == 'Buildings' # noqa: PLR1714
+ or asset_type == 'TransportationNetwork'
+ or asset_type == 'WaterDistributionNetwork'
+ ):
+ WF.aggregate_results(asst_data=asst_data, asset_type=asset_type)
+
+ elif asset_type == 'WaterNetworkPipelines':
# Provide the headers and out types
- headers = dict(DV = [0])
-
+ headers = dict(DV=[0]) # noqa: C408
+
out_types = ['DV']
- if procID == 0:
- WF.aggregate_results(asst_data = asst_data,
- asset_type = asset_type,
- out_types = out_types,
- headers = headers)
+ if procID == 0:
+ WF.aggregate_results(
+ asst_data=asst_data,
+ asset_type=asset_type,
+ out_types=out_types,
+ headers=headers,
+ )
- if doParallel == True:
+ if doParallel == True: # noqa: E712
comm.Barrier()
-
- WF.combine_assets_results(asset_files)
+
+ WF.combine_assets_results(asset_files)
#
# add system performance
#
system_performance_performed = False
- for asset_type in asset_files.keys() :
+ for asset_type in asset_files.keys(): # noqa: SIM118
performed = WF.perform_system_performance_assessment(asset_type)
if performed:
system_performance_performed = True
@@ -283,75 +305,112 @@ def main(run_type, input_file, app_registry,
# WF.perform_recovery_simulation(asset_files.keys())
WF.compile_r2d_results_geojson(asset_files)
-
-
+
if force_cleanup:
# clean up intermediate files from the working directory
- if procID == 0:
+ if procID == 0:
WF.cleanup_workdir()
- if doParallel == True:
+ if doParallel == True: # noqa: E712
comm.Barrier()
-
+
log_msg('Workflow completed.')
log_div(prepend_blank_space=False)
log_div(prepend_blank_space=False)
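
main() above spreads the per-asset simulations across MPI ranks with a simple round-robin test (count % numP == procID) and synchronizes the ranks with a barrier before aggregating results. A minimal mpi4py sketch of that partitioning, falling back to a single rank when mpi4py is unavailable:

def process_assets(assets, handle):
    """Round-robin the assets across MPI ranks, then wait at a barrier."""
    try:
        from mpi4py import MPI
        comm = MPI.COMM_WORLD
        size, rank = comm.Get_size(), comm.Get_rank()
    except ImportError:
        comm, size, rank = None, 1, 0
    for index, asset in enumerate(assets):
        if index % size == rank:
            handle(asset)  # only this rank works on the asset
    if comm is not None and size > 1:
        comm.Barrier()  # every rank waits here before results are aggregated


# process_assets(['asset_1', 'asset_2', 'asset_3'], print)
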
-if __name__ == '__main__':
-
- #Defining the command line arguments
-
- workflowArgParser = argparse.ArgumentParser(
- "Run the NHERI SimCenter rWHALE workflow for a set of assets.",
- allow_abbrev=False)
- workflowArgParser.add_argument("configuration",
- help="Configuration file specifying the applications and data to be "
- "used")
- workflowArgParser.add_argument("-F", "--filter",
+if __name__ == '__main__':
+ # Defining the command line arguments
+
+ workflowArgParser = argparse.ArgumentParser( # noqa: N816
+ 'Run the NHERI SimCenter rWHALE workflow for a set of assets.',
+ allow_abbrev=False,
+ )
+
+ workflowArgParser.add_argument(
+ 'configuration',
+        help='Configuration file specifying the applications and data to be used',
+ )
+ workflowArgParser.add_argument(
+ '-F',
+ '--filter',
default=None,
- help="Provide a subset of building ids to run")
- workflowArgParser.add_argument("-c", "--check",
- help="Check the configuration file")
- workflowArgParser.add_argument("-r", "--registry",
- default=os.path.join(os.path.dirname(os.path.abspath(__file__)),
- "WorkflowApplications.json"),
- help="Path to file containing registered workflow applications")
- workflowArgParser.add_argument("-f", "--forceCleanup",
- action="store_true",
- help="Remove working directories after the simulation is completed.")
- workflowArgParser.add_argument("-d", "--referenceDir",
- default=os.path.join(os.getcwd(), 'input_data'),
- help="Relative paths in the config file are referenced to this directory.")
- workflowArgParser.add_argument("-w", "--workDir",
- default=os.path.join(os.getcwd(), 'Results'),
- help="Absolute path to the working directory.")
- workflowArgParser.add_argument("-a", "--appDir",
+ help='Provide a subset of building ids to run',
+ )
+ workflowArgParser.add_argument(
+ '-c', '--check', help='Check the configuration file'
+ )
+ workflowArgParser.add_argument(
+ '-r',
+ '--registry',
+ default=os.path.join( # noqa: PTH118
+ os.path.dirname(os.path.abspath(__file__)), # noqa: PTH100, PTH120
+ 'WorkflowApplications.json',
+ ),
+ help='Path to file containing registered workflow applications',
+ )
+ workflowArgParser.add_argument(
+ '-f',
+ '--forceCleanup',
+ action='store_true',
+ help='Remove working directories after the simulation is completed.',
+ )
+ workflowArgParser.add_argument(
+ '-d',
+ '--referenceDir',
+ default=os.path.join(os.getcwd(), 'input_data'), # noqa: PTH109, PTH118
+ help='Relative paths in the config file are referenced to this directory.',
+ )
+ workflowArgParser.add_argument(
+ '-w',
+ '--workDir',
+ default=os.path.join(os.getcwd(), 'Results'), # noqa: PTH109, PTH118
+ help='Absolute path to the working directory.',
+ )
+ workflowArgParser.add_argument(
+ '-a',
+ '--appDir',
default=None,
- help="Absolute path to the local application directory.")
- workflowArgParser.add_argument("-l", "--logFile",
+ help='Absolute path to the local application directory.',
+ )
+ workflowArgParser.add_argument(
+ '-l',
+ '--logFile',
default='log.txt',
- help="Path where the log file will be saved.")
- workflowArgParser.add_argument("-s", "--siteResponse",
+ help='Path where the log file will be saved.',
+ )
+ workflowArgParser.add_argument(
+ '-s',
+ '--siteResponse',
default='sequential',
- help="How site response analysis runs.")
+ help='How site response analysis runs.',
+ )
- workflowArgParser.add_argument("-p", "--parallelType",
+ workflowArgParser.add_argument(
+ '-p',
+ '--parallelType',
default='seqRUN',
- help="How parallel runs: options seqRUN, parSETUP, parRUN")
- workflowArgParser.add_argument("-m", "--mpiexec",
+        help='How to run in parallel; options: seqRUN, parSETUP, parRUN',
+ )
+ workflowArgParser.add_argument(
+ '-m',
+ '--mpiexec',
default='mpiexec',
- help="How mpi runs, e.g. ibrun, mpirun, mpiexec")
- workflowArgParser.add_argument("-n", "--numP",
+        help='MPI launcher to use, e.g. ibrun, mpirun, mpiexec',
+ )
+ workflowArgParser.add_argument(
+ '-n',
+ '--numP',
default='8',
- help="If parallel, how many jobs to start with mpiexec option")
+ help='If parallel, how many jobs to start with mpiexec option',
+ )
- #Parsing the command line arguments
- wfArgs = workflowArgParser.parse_args()
+ # Parsing the command line arguments
+ wfArgs = workflowArgParser.parse_args() # noqa: N816
# update the local app dir with the default - if needed
if wfArgs.appDir is None:
- workflow_dir = Path(os.path.dirname(os.path.abspath(__file__))).resolve()
+ workflow_dir = Path(os.path.dirname(os.path.abspath(__file__))).resolve() # noqa: PTH100, PTH120
wfArgs.appDir = workflow_dir.parents[1]
if wfArgs.check:
@@ -359,19 +418,23 @@ def main(run_type, input_file, app_registry,
else:
run_type = 'runningLocal'
- #Calling the main workflow method and passing the parsed arguments
- numPROC = int(wfArgs.numP)
-
- main(run_type = run_type,
- input_file = Path(wfArgs.configuration).resolve(), # to pass the absolute path to the input file
- app_registry = wfArgs.registry,
- force_cleanup = wfArgs.forceCleanup,
- bldg_id_filter = wfArgs.filter,
- reference_dir = wfArgs.referenceDir,
- working_dir = wfArgs.workDir,
- app_dir = wfArgs.appDir,
- log_file = wfArgs.logFile,
- site_response = wfArgs.siteResponse,
- parallelType = wfArgs.parallelType,
- mpiExec = wfArgs.mpiexec,
- numPROC = numPROC)
+ # Calling the main workflow method and passing the parsed arguments
+ numPROC = int(wfArgs.numP) # noqa: N816
+
+ main(
+ run_type=run_type,
+ input_file=Path(
+ wfArgs.configuration
+ ).resolve(), # to pass the absolute path to the input file
+ app_registry=wfArgs.registry,
+ force_cleanup=wfArgs.forceCleanup,
+ bldg_id_filter=wfArgs.filter,
+ reference_dir=wfArgs.referenceDir,
+ working_dir=wfArgs.workDir,
+ app_dir=wfArgs.appDir,
+ log_file=wfArgs.logFile,
+ site_response=wfArgs.siteResponse,
+ parallelType=wfArgs.parallelType,
+ mpiExec=wfArgs.mpiexec,
+ numPROC=numPROC,
+ )
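The driver above hands assets to MPI ranks round-robin through the `count % numP == procID` test and then synchronizes the ranks with `comm.Barrier()` before results are aggregated. A minimal sketch of the same pattern, assuming mpi4py is installed; the asset list and the per-asset work are placeholders, not workflow code:

    # Round-robin distribution of work across MPI ranks (run with e.g.
    # `mpiexec -n 4 python sketch.py`).
    from mpi4py import MPI

    comm = MPI.COMM_WORLD
    num_procs = comm.Get_size()  # plays the role of numP
    proc_id = comm.Get_rank()    # plays the role of procID

    assets = [f'asset_{i}' for i in range(10)]  # placeholder asset list

    for count, asset in enumerate(assets):
        if count % num_procs == proc_id:
            # each rank picks up every num_procs-th asset
            print(f'rank {proc_id} processing {asset}')

    comm.Barrier()  # all ranks finish before any aggregation starts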
diff --git a/modules/Workflow/sWHALE.py b/modules/Workflow/sWHALE.py
index 874b85ec4..5f098a3f3 100755
--- a/modules/Workflow/sWHALE.py
+++ b/modules/Workflow/sWHALE.py
@@ -1,5 +1,4 @@
-# -*- coding: utf-8 -*-
-#
+# # noqa: EXE002, INP001, D100
# Copyright (c) 2019 The Regents of the University of California
# Copyright (c) 2019 Leland Stanford Junior University
#
@@ -42,26 +41,29 @@
# Chaofeng Wang
# Stevan Gavrilovic
-import sys, os, json
import argparse
+import json
+import os
+import sys
from pathlib import Path
-sys.path.insert(0, os.path.dirname(os.path.realpath(__file__)))
+sys.path.insert(0, os.path.dirname(os.path.realpath(__file__))) # noqa: PTH120
import whale.main as whale
-from whale.main import log_msg, log_div
-
-
-def runSWhale(inputs,
- WF,
- assetID = None,
- assetAIM = 'AIM.json',
- prep_app_sequence = ['Event', 'Modeling', 'EDP', 'Simulation'],
- WF_app_sequence = ['Event', 'Modeling', 'EDP', 'Simulation'],
- asset_type = None,
- copy_resources = False,
- force_cleanup = False) :
-
+from whale.main import log_div, log_msg
+
+
+def runSWhale( # noqa: N802, D103
+ inputs,
+ WF, # noqa: N803
+ assetID=None, # noqa: N803
+ assetAIM='AIM.json', # noqa: N803
+ prep_app_sequence=['Event', 'Modeling', 'EDP', 'Simulation'], # noqa: B006
+ WF_app_sequence=['Event', 'Modeling', 'EDP', 'Simulation'], # noqa: B006, N803
+ asset_type=None,
+ copy_resources=False, # noqa: FBT002
+ force_cleanup=False, # noqa: FBT002
+):
# update the runDir, if needed
# with open(input_file, 'r', encoding="utf-8") as f:
# inputs = json.load(f)
@@ -82,10 +84,14 @@ def runSWhale(inputs,
# "PrintLog": True
# })
#
-
- log_msg('\nStarting sWHALE workflow\n', prepend_timestamp=False, prepend_blank_space=False)
-# whale.print_system_info()
+ log_msg(
+ '\nStarting sWHALE workflow\n',
+ prepend_timestamp=False,
+ prepend_blank_space=False,
+ )
+
+ # whale.print_system_info()
# echo the inputs
log_div(prepend_blank_space=False)
@@ -94,46 +100,46 @@ def runSWhale(inputs,
log_div()
if WF.run_type != 'loss_only':
-
# initialize the working directory
# assetID is a unique asset identifier, assetAIM is the asset information model, e.g., 'AIM.json'
WF.init_simdir(assetID, assetAIM)
# prepare the input files for the simulation
WF.preprocess_inputs(prep_app_sequence, assetAIM, assetID, asset_type)
-
+
# create the workflow driver file
WF.create_driver_file(WF_app_sequence, assetID, assetAIM)
-
- # gather all Randomvariables and EDP's and place in new input file for UQ
- WF.gather_workflow_inputs(assetID, assetAIM);
+        # gather all RandomVariables and EDPs and place them in a new input file for UQ
+ WF.gather_workflow_inputs(assetID, assetAIM)
# run uq engine to simulate response
- WF.simulate_response(AIM_file_path = assetAIM, asst_id = assetID)
-
- if WF.run_type != 'set_up':
+ WF.simulate_response(AIM_file_path=assetAIM, asst_id=assetID)
+ if WF.run_type != 'set_up':
# run dl engine to estimate losses
# Use the templatedir/AIM.json for pelicun
- WF.estimate_losses(AIM_file_path = assetAIM,
- asst_id = assetID,
- asset_type = asset_type,
- input_file = inputs,
- copy_resources=copy_resources)
-
+ WF.estimate_losses(
+ AIM_file_path=assetAIM,
+ asst_id=assetID,
+ asset_type=asset_type,
+ input_file=inputs,
+ copy_resources=copy_resources,
+ )
# run performance engine to assess asset performance, e.g., recovery
- WF.estimate_performance(AIM_file_path = assetAIM,
- asst_id = assetID,
- asset_type = asset_type,
- input_file = inputs,
- copy_resources=copy_resources)
-
- #When used in rWhale, delete the origional AIM since it is the same with asset_id/templatedir/AIM
+ WF.estimate_performance(
+ AIM_file_path=assetAIM,
+ asst_id=assetID,
+ asset_type=asset_type,
+ input_file=inputs,
+ copy_resources=copy_resources,
+ )
+
+    # When used in rWhale, delete the original AIM since it is the same as asset_id/templatedir/AIM
if assetAIM != 'AIM.json':
- os.remove(assetAIM)
+ os.remove(assetAIM) # noqa: PTH107
if force_cleanup:
- #clean up intermediate files from the simulation
+ # clean up intermediate files from the simulation
WF.cleanup_simdir(assetID)
log_msg('Workflow completed.')
@@ -141,30 +147,27 @@ def runSWhale(inputs,
log_div(prepend_blank_space=False)
-def main(run_type, input_file, app_registry, working_dir, app_dir, log_file):
-
+def main(run_type, input_file, app_registry, working_dir, app_dir, log_file): # noqa: ARG001, D103
# update the runDir, if needed
- with open(input_file, 'r', encoding="utf-8") as f:
+ with open(input_file, encoding='utf-8') as f: # noqa: PTH123
inputs = json.load(f)
- runDir = inputs['runDir']
+ runDir = inputs['runDir'] # noqa: N806
if working_dir is not None:
- runDir = working_dir
+ runDir = working_dir # noqa: N806
else:
- runDir = inputs['runDir']
-
+ runDir = inputs['runDir'] # noqa: N806
whale.log_file = runDir + '/log.txt'
-
+
# initialize log file
- whale.set_options({
- "LogFile": runDir + '/log.txt',
- "LogShowMS": False,
- "PrintLog": True
- })
-
- log_msg('\nsWHALE workflow\n',
- prepend_timestamp=False, prepend_blank_space=False)
+ whale.set_options(
+ {'LogFile': runDir + '/log.txt', 'LogShowMS': False, 'PrintLog': True}
+ )
+
+ log_msg(
+ '\nsWHALE workflow\n', prepend_timestamp=False, prepend_blank_space=False
+ )
whale.print_system_info()
@@ -178,22 +181,35 @@ def main(run_type, input_file, app_registry, working_dir, app_dir, log_file):
try:
if inputs['DL']['Demands']['DemandFilePath'] is not None:
run_type = 'loss_only'
- except:
+ except: # noqa: S110, E722
pass
- WF = whale.Workflow(run_type, input_file, app_registry,
- app_type_list = ['Event', 'Modeling', 'EDP', 'Simulation', 'UQ', 'DL', 'Performance'],
- working_dir = working_dir,
- app_dir = app_dir)
-
- runSWhale(inputs = input_file,
- WF = WF,
- prep_app_sequence = ['Event', 'Modeling', 'EDP', 'Simulation'],
- WF_app_sequence = ['Event', 'Modeling', 'EDP', 'Simulation'])
+ WF = whale.Workflow( # noqa: N806
+ run_type,
+ input_file,
+ app_registry,
+ app_type_list=[
+ 'Event',
+ 'Modeling',
+ 'EDP',
+ 'Simulation',
+ 'UQ',
+ 'DL',
+ 'Performance',
+ ],
+ working_dir=working_dir,
+ app_dir=app_dir,
+ )
+
+ runSWhale(
+ inputs=input_file,
+ WF=WF,
+ prep_app_sequence=['Event', 'Modeling', 'EDP', 'Simulation'],
+ WF_app_sequence=['Event', 'Modeling', 'EDP', 'Simulation'],
+ )
if __name__ == '__main__':
-
"""
if len(sys.argv) != 4:
print('\nNeed three arguments, e.g.:\n')
@@ -204,42 +220,60 @@ def main(run_type, input_file, app_registry, working_dir, app_dir, log_file):
main(run_type=sys.argv[1], input_file=sys.argv[2], app_registry=sys.argv[3])
"""
- #Defining the command line arguments
-
- workflowArgParser = argparse.ArgumentParser(
- "Run the NHERI SimCenter sWHALE workflow for a single asset.",
- allow_abbrev=False)
-
- workflowArgParser.add_argument("runType",
- help="Specifies the type of run requested.")
- workflowArgParser.add_argument("inputFile",
- help="Specifies the input file for the workflow.")
- workflowArgParser.add_argument("registry",
- default=os.path.join(os.path.dirname(os.path.abspath(__file__)),
- "WorkflowApplications.json"),
- help="Path to file containing registered workflow applications")
- workflowArgParser.add_argument("-w", "--workDir",
+ # Defining the command line arguments
+
+ workflowArgParser = argparse.ArgumentParser( # noqa: N816
+ 'Run the NHERI SimCenter sWHALE workflow for a single asset.',
+ allow_abbrev=False,
+ )
+
+ workflowArgParser.add_argument(
+ 'runType', help='Specifies the type of run requested.'
+ )
+ workflowArgParser.add_argument(
+ 'inputFile', help='Specifies the input file for the workflow.'
+ )
+ workflowArgParser.add_argument(
+ 'registry',
+ default=os.path.join( # noqa: PTH118
+ os.path.dirname(os.path.abspath(__file__)), # noqa: PTH100, PTH120
+ 'WorkflowApplications.json',
+ ),
+ help='Path to file containing registered workflow applications',
+ )
+ workflowArgParser.add_argument(
+ '-w',
+ '--workDir',
default=None,
- help="Absolute path to the working directory.")
- workflowArgParser.add_argument("-a", "--appDir",
+ help='Absolute path to the working directory.',
+ )
+ workflowArgParser.add_argument(
+ '-a',
+ '--appDir',
default=None,
- help="Absolute path to the local application directory.")
- workflowArgParser.add_argument("-l", "--logFile",
+ help='Absolute path to the local application directory.',
+ )
+ workflowArgParser.add_argument(
+ '-l',
+ '--logFile',
default='log.txt',
- help="Path where the log file will be saved.")
+ help='Path where the log file will be saved.',
+ )
- #Parsing the command line arguments
- wfArgs = workflowArgParser.parse_args()
+ # Parsing the command line arguments
+ wfArgs = workflowArgParser.parse_args() # noqa: N816
# update the local app dir with the default - if needed
if wfArgs.appDir is None:
- workflow_dir = Path(os.path.dirname(os.path.abspath(__file__))).resolve()
+ workflow_dir = Path(os.path.dirname(os.path.abspath(__file__))).resolve() # noqa: PTH100, PTH120
wfArgs.appDir = workflow_dir.parents[1]
- #Calling the main workflow method and passing the parsed arguments
- main(run_type = wfArgs.runType,
- input_file = Path(wfArgs.inputFile).resolve(),
- app_registry = wfArgs.registry,
- working_dir = wfArgs.workDir,
- app_dir = wfArgs.appDir,
- log_file = wfArgs.logFile)
+ # Calling the main workflow method and passing the parsed arguments
+ main(
+ run_type=wfArgs.runType,
+ input_file=Path(wfArgs.inputFile).resolve(),
+ app_registry=wfArgs.registry,
+ working_dir=wfArgs.workDir,
+ app_dir=wfArgs.appDir,
+ log_file=wfArgs.logFile,
+ )
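The `# noqa: B006` markers on `prep_app_sequence` and `WF_app_sequence` in `runSWhale` waive ruff's warning about mutable default arguments: a default list is built once at definition time and shared by every call that relies on it. A short, self-contained illustration of the hazard and of the usual `None`-default idiom (both function names are hypothetical):

    # Why ruff raises B006 for list defaults.
    def append_shared(item, seq=[]):  # noqa: B006  - one list shared by all calls
        seq.append(item)
        return seq


    def append_isolated(item, seq=None):  # conventional fix: default to None
        if seq is None:
            seq = []
        seq.append(item)
        return seq


    print(append_shared('a'))    # ['a']
    print(append_shared('b'))    # ['a', 'b']  <- state leaked between calls
    print(append_isolated('a'))  # ['a']
    print(append_isolated('b'))  # ['b']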
diff --git a/modules/Workflow/siteResponseWHALE.py b/modules/Workflow/siteResponseWHALE.py
index 79234fc15..140ba013f 100644
--- a/modules/Workflow/siteResponseWHALE.py
+++ b/modules/Workflow/siteResponseWHALE.py
@@ -1,5 +1,4 @@
-# -*- coding: utf-8 -*-
-#
+# # noqa: INP001, D100
# Copyright (c) 2019 The Regents of the University of California
# Copyright (c) 2019 Leland Stanford Junior University
#
@@ -41,74 +40,92 @@
# Michael Gardner
# Chaofeng Wang
-import sys, os, json
import argparse
+import importlib
+import json
+import os
+import sys
from pathlib import Path
+
from createGM4BIM import createFilesForEventGrid
-import importlib
-sys.path.insert(0, os.path.dirname(os.path.realpath(__file__)))
+sys.path.insert(0, os.path.dirname(os.path.realpath(__file__))) # noqa: PTH120
import whale.main as whale
-from whale.main import log_msg, log_div
-
from sWHALE import runSWhale
-
-def main(run_type, input_file, app_registry,
- force_cleanup, bldg_id_filter, reference_dir,
- working_dir, app_dir, log_file, output_dir,
- parallelType, mpiExec, numPROC):
-
- numP = 1
- procID = 0
- doParallel = False
-
- mpi_spec = importlib.util.find_spec("mpi4py")
+from whale.main import log_div, log_msg
+
+
+def main( # noqa: C901, D103
+ run_type,
+ input_file,
+ app_registry,
+ force_cleanup,
+ bldg_id_filter,
+ reference_dir,
+ working_dir,
+ app_dir,
+ log_file,
+ output_dir,
+ parallelType, # noqa: N803
+ mpiExec, # noqa: N803
+ numPROC, # noqa: N803
+):
+ numP = 1 # noqa: N806
+ procID = 0 # noqa: N806
+ doParallel = False # noqa: N806
+
+ mpi_spec = importlib.util.find_spec('mpi4py')
found = mpi_spec is not None
if found:
- import mpi4py
- from mpi4py import MPI
+ from mpi4py import MPI # noqa: PLC0415
+
comm = MPI.COMM_WORLD
- numP = comm.Get_size()
- procID = comm.Get_rank()
- parallelType = 'parRUN'
- if numP < 2:
- doParallel = False
- numP = 1
- parallelType = 'seqRUN'
- procID = 0
+ numP = comm.Get_size() # noqa: N806
+ procID = comm.Get_rank() # noqa: N806
+ parallelType = 'parRUN' # noqa: N806
+ if numP < 2: # noqa: PLR2004
+ doParallel = False # noqa: N806
+ numP = 1 # noqa: N806
+ parallelType = 'seqRUN' # noqa: N806
+ procID = 0 # noqa: N806
else:
- doParallel = True;
-
+ doParallel = True # noqa: N806
- print('siteResponse (doParallel, procID, numP):', doParallel, procID, numP, mpiExec, numPROC)
+ print( # noqa: T201
+ 'siteResponse (doParallel, procID, numP):',
+ doParallel,
+ procID,
+ numP,
+ mpiExec,
+ numPROC,
+ )
# save the reference dir in the input file
- with open(input_file, 'r', encoding="utf-8") as f:
+ with open(input_file, encoding='utf-8') as f: # noqa: PTH123
inputs = json.load(f)
- print('WORKING_DIR', working_dir)
-
+ print('WORKING_DIR', working_dir) # noqa: T201
+
if procID == 0:
- if not os.path.exists(working_dir):
- os.mkdir(working_dir)
+ if not os.path.exists(working_dir): # noqa: PTH110
+ os.mkdir(working_dir) # noqa: PTH102
- if doParallel == True:
+ if doParallel == True: # noqa: E712
comm.Barrier()
-
+
# initialize log file
if log_file == 'log.txt':
log_file_path = working_dir + '/log.txt' + '.' + str(procID)
else:
log_file_path = log_file + '.' + str(procID)
- whale.set_options({
- "LogFile": log_file_path,
- "LogShowMS": False,
- "PrintLog": True
- })
- log_msg('\nrWHALE workflow\n',
- prepend_timestamp=False, prepend_blank_space=False)
+ whale.set_options(
+ {'LogFile': log_file_path, 'LogShowMS': False, 'PrintLog': True}
+ )
+ log_msg(
+ '\nrWHALE workflow\n', prepend_timestamp=False, prepend_blank_space=False
+ )
whale.print_system_info()
@@ -122,209 +139,206 @@ def main(run_type, input_file, app_registry,
log_msg('Forced cleanup turned on.')
#
- # parse regionalEventAppData, create new input file
+ # parse regionalEventAppData, create new input file
# for the rWHALE workflow
#
- randomVariables = []
- if "randomVariables" in inputs.keys():
- randomVariables = inputs["randomVariables"]
+ randomVariables = [] # noqa: N806
+ if 'randomVariables' in inputs.keys(): # noqa: SIM118
+ randomVariables = inputs['randomVariables'] # noqa: N806
- inputApplications = inputs["Applications"]
- regionalApplication = inputApplications["RegionalEvent"]
- appData = regionalApplication["ApplicationData"]
- regionalData = inputs["RegionalEvent"]
- regionalData["eventFile"]=appData["inputEventFilePath"] + "/" + appData["inputEventFile"]
- regionalData["eventFilePath"]=appData["inputEventFilePath"]
+ inputApplications = inputs['Applications'] # noqa: N806
+ regionalApplication = inputApplications['RegionalEvent'] # noqa: N806
+ appData = regionalApplication['ApplicationData'] # noqa: N806
+ regionalData = inputs['RegionalEvent'] # noqa: N806
+ regionalData['eventFile'] = (
+ appData['inputEventFilePath'] + '/' + appData['inputEventFile']
+ )
+ regionalData['eventFilePath'] = appData['inputEventFilePath']
- siteFilter = appData["filter"]
+ siteFilter = appData['filter'] # noqa: N806
# KZ: 10/19/2022, adding new attributes for the refactored whale
- remoteAppDir = inputs.get('remoteAppDir', "")
- localAppDir = inputs.get('localAppDir', "")
- if localAppDir == "":
- localAppDir = remoteAppDir
- if remoteAppDir == "":
- remoteAppDir = localAppDir
-
- siteResponseInput = {
- "units": inputs["units"],
- "outputs": {
- "IM": True,
- "EDP": False,
- "DM": False,
- "AIM": False,
- "DV": False,
- "every_realization": False
- },
- "RegionalEvent": regionalData,
- "randomVariables" : randomVariables,
- "Applications": {
- "RegionalMapping": inputApplications["RegionalMapping"],
- "UQ": inputApplications["UQ"],
- "Assets" : {
- "Buildings": {
- "Application": "CSV_to_AIM",
- "ApplicationData": {
- "assetSourceFile": appData["soilGridParametersFilePath"] + "/" + appData["soilGridParametersFile"],
- "filter": siteFilter
- }
+ remoteAppDir = inputs.get('remoteAppDir', '') # noqa: N806
+ localAppDir = inputs.get('localAppDir', '') # noqa: N806
+ if localAppDir == '': # noqa: PLC1901
+ localAppDir = remoteAppDir # noqa: N806
+ if remoteAppDir == '': # noqa: PLC1901
+ remoteAppDir = localAppDir # noqa: N806
+
+ siteResponseInput = { # noqa: N806
+ 'units': inputs['units'],
+ 'outputs': {
+ 'IM': True,
+ 'EDP': False,
+ 'DM': False,
+ 'AIM': False,
+ 'DV': False,
+ 'every_realization': False,
+ },
+ 'RegionalEvent': regionalData,
+ 'randomVariables': randomVariables,
+ 'Applications': {
+ 'RegionalMapping': inputApplications['RegionalMapping'],
+ 'UQ': inputApplications['UQ'],
+ 'Assets': {
+ 'Buildings': {
+ 'Application': 'CSV_to_AIM',
+ 'ApplicationData': {
+ 'assetSourceFile': appData['soilGridParametersFilePath']
+ + '/'
+ + appData['soilGridParametersFile'],
+ 'filter': siteFilter,
+ },
}
},
- "EDP": {
- "Application": "DummyEDP",
- "ApplicationData": {}
- },
- "Events": [
+ 'EDP': {'Application': 'DummyEDP', 'ApplicationData': {}},
+ 'Events': [
{
- "EventClassification": "Earthquake",
- "Application": "RegionalSiteResponse",
- "ApplicationData": {
- "pathEventData": "inputMotions",
- "mainScript": appData["siteResponseScript"],
- "modelPath": appData["siteResponseScriptPath"],
- "ndm": 3
- }
+ 'EventClassification': 'Earthquake',
+ 'Application': 'RegionalSiteResponse',
+ 'ApplicationData': {
+ 'pathEventData': 'inputMotions',
+ 'mainScript': appData['siteResponseScript'],
+ 'modelPath': appData['siteResponseScriptPath'],
+ 'ndm': 3,
+ },
}
- ]
- },
- "UQ": inputs.get('UQ', dict()),
- "localAppDir": localAppDir,
- "remoteAppDir": remoteAppDir,
- "runType": inputs.get('runType', ""),
- "DefaultValues": {
- "driverFile": "driver",
- "edpFiles": [
- "EDP.json"
- ],
- "filenameDL": "BIM.json",
- "filenameEDP": "EDP.json",
- "filenameEVENT": "EVENT.json",
- "filenameSAM": "SAM.json",
- "filenameSIM": "SIM.json",
- "rvFiles": [
- "SAM.json",
- "EVENT.json",
- "SIM.json"
],
- "workflowInput": "scInput.json",
- "workflowOutput": "EDP.json"
- }
- }
+ },
+ 'UQ': inputs.get('UQ', dict()), # noqa: C408
+ 'localAppDir': localAppDir,
+ 'remoteAppDir': remoteAppDir,
+ 'runType': inputs.get('runType', ''),
+ 'DefaultValues': {
+ 'driverFile': 'driver',
+ 'edpFiles': ['EDP.json'],
+ 'filenameDL': 'BIM.json',
+ 'filenameEDP': 'EDP.json',
+ 'filenameEVENT': 'EVENT.json',
+ 'filenameSAM': 'SAM.json',
+ 'filenameSIM': 'SIM.json',
+ 'rvFiles': ['SAM.json', 'EVENT.json', 'SIM.json'],
+ 'workflowInput': 'scInput.json',
+ 'workflowOutput': 'EDP.json',
+ },
+ }
- #siteResponseInputFile = 'tmpSiteResponseInput.json'
- #siteResponseInputFile = os.path.join(os.path.dirname(input_file),'tmpSiteResponseInput.json')
+ # siteResponseInputFile = 'tmpSiteResponseInput.json'
+ # siteResponseInputFile = os.path.join(os.path.dirname(input_file),'tmpSiteResponseInput.json')
# KZ: 10/19/2022, fixing the json file path
- siteResponseInputFile = os.path.join(os.path.dirname(reference_dir),'tmpSiteResponseInput.json')
+ siteResponseInputFile = os.path.join( # noqa: PTH118, N806
+ os.path.dirname(reference_dir), # noqa: PTH120
+ 'tmpSiteResponseInput.json',
+ )
if procID == 0:
- with open(siteResponseInputFile, 'w') as json_file:
- json_file.write(json.dumps(siteResponseInput, indent=2))
-
-
- WF = whale.Workflow(run_type,
- siteResponseInputFile,
- app_registry,
- app_type_list = ['Assets', 'RegionalMapping', 'Event', 'EDP', 'UQ'],
- reference_dir = reference_dir,
- working_dir = working_dir,
- app_dir = app_dir,
- parType = parallelType,
- mpiExec = mpiExec,
- numProc = numPROC)
+ with open(siteResponseInputFile, 'w') as json_file: # noqa: FURB103, PLW1514, PTH123
+ json_file.write(json.dumps(siteResponseInput, indent=2))
+
+ WF = whale.Workflow( # noqa: N806
+ run_type,
+ siteResponseInputFile,
+ app_registry,
+ app_type_list=['Assets', 'RegionalMapping', 'Event', 'EDP', 'UQ'],
+ reference_dir=reference_dir,
+ working_dir=working_dir,
+ app_dir=app_dir,
+ parType=parallelType,
+ mpiExec=mpiExec,
+ numProc=numPROC,
+ )
if bldg_id_filter is not None:
- print(bldg_id_filter)
- log_msg(
- f'Overriding simulation scope; running buildings {bldg_id_filter}')
+ print(bldg_id_filter) # noqa: T201
+ log_msg(f'Overriding simulation scope; running buildings {bldg_id_filter}')
# If a Min or Max attribute is used when calling the script, we need to
# update the min and max values in the input file.
- WF.workflow_apps['Building'].pref["filter"] = bldg_id_filter
+ WF.workflow_apps['Building'].pref['filter'] = bldg_id_filter
if procID == 0:
-
- # initialize the working directory
+ # initialize the working directory
WF.init_workdir()
# prepare the basic inputs for individual buildings
asset_files = WF.create_asset_files()
- if doParallel == True:
+ if doParallel == True: # noqa: E712
comm.Barrier()
- asset_files = WF.augment_asset_files()
-
- if procID == 0:
- for asset_type, assetIt in asset_files.items() :
+ asset_files = WF.augment_asset_files()
+ if procID == 0:
+ for asset_type, assetIt in asset_files.items(): # noqa: N806
# perform the regional mapping
- #WF.perform_regional_mapping(assetIt)
+ # WF.perform_regional_mapping(assetIt)
# KZ: 10/19/2022, adding the required argument for the new whale
- print('0 STARTING MAPPING')
+ print('0 STARTING MAPPING') # noqa: T201
# FMK _ PARALLEL WF.perform_regional_mapping(assetIt, asset_type, False)
- # WF.perform_regional_mapping(assetIt, asset_type)
- WF.perform_regional_mapping(assetIt, asset_type, False)
+ # WF.perform_regional_mapping(assetIt, asset_type)
+ WF.perform_regional_mapping(assetIt, asset_type, False) # noqa: FBT003
# get all other processes to wait till we are here
- if doParallel == True:
- comm.Barrier()
+ if doParallel == True: # noqa: E712
+ comm.Barrier()
- print("BARRIER AFTER PERFORM REGIONAL MAPPING")
+ print('BARRIER AFTER PERFORM REGIONAL MAPPING') # noqa: T201
count = 0
- for asset_type, assetIt in asset_files.items() :
-
- # TODO: not elegant code, fix later
- with open(assetIt, 'r', encoding="utf-8") as f:
+ for asset_type, assetIt in asset_files.items(): # noqa: N806
+ # TODO: not elegant code, fix later # noqa: TD002
+ with open(assetIt, encoding='utf-8') as f: # noqa: PTH123
asst_data = json.load(f)
-
+
# The preprocess app sequence (previously get_RV)
preprocess_app_sequence = ['Event', 'EDP']
-
+
# The workflow app sequence
- WF_app_sequence = ['Assets', 'Event', 'EDP']
+ WF_app_sequence = ['Assets', 'Event', 'EDP'] # noqa: N806
# For each asset
for asst in asst_data:
-
- if count % numP == procID:
+ if count % numP == procID:
log_msg('', prepend_timestamp=False)
log_div(prepend_blank_space=False)
log_msg(f"{asset_type} id {asst['id']} in file {asst['file']}")
log_div()
-
+
# Run sWhale
- print('COUNT: ', count, ' ID: ', procID)
-
- runSWhale(inputs = None,
- WF = WF,
- assetID = asst['id'],
- assetAIM = asst['file'],
- prep_app_sequence = preprocess_app_sequence,
- WF_app_sequence = WF_app_sequence,
- asset_type = asset_type,
- copy_resources = True,
- force_cleanup = force_cleanup)
-
- count = count+1
-
- if doParallel == True:
+ print('COUNT: ', count, ' ID: ', procID) # noqa: T201
+
+ runSWhale(
+ inputs=None,
+ WF=WF,
+ assetID=asst['id'],
+ assetAIM=asst['file'],
+ prep_app_sequence=preprocess_app_sequence,
+ WF_app_sequence=WF_app_sequence,
+ asset_type=asset_type,
+ copy_resources=True,
+ force_cleanup=force_cleanup,
+ )
+
+ count = count + 1 # noqa: PLR6104
+
+ if doParallel == True: # noqa: E712
comm.Barrier()
-
- if procID == 0:
- createFilesForEventGrid(os.path.join(working_dir,'Buildings'),
- output_dir,
- force_cleanup)
- # aggregate results
- #WF.aggregate_results(bldg_data = bldg_data)
- # KZ: 10/19/2022, chaning bldg_data to asst_data
- WF.aggregate_results(asst_data= asst_data)
+ if procID == 0:
+ createFilesForEventGrid(
+ os.path.join(working_dir, 'Buildings'), # noqa: PTH118
+ output_dir,
+ force_cleanup,
+ )
+ # aggregate results
+ # WF.aggregate_results(bldg_data = bldg_data)
+        # KZ: 10/19/2022, changing bldg_data to asst_data
+ WF.aggregate_results(asst_data=asst_data)
- if doParallel == True:
+ if doParallel == True: # noqa: E712
comm.Barrier()
# clean up intermediate files from the working directory
@@ -336,124 +350,160 @@ def main(run_type, input_file, app_registry,
log_div(prepend_blank_space=False)
log_div(prepend_blank_space=False)
-if __name__ == '__main__':
- pwd1 = os.getcwd()
- if os.path.basename(pwd1) == 'Results':
+if __name__ == '__main__':
+ pwd1 = os.getcwd() # noqa: PTH109
+ if os.path.basename(pwd1) == 'Results': # noqa: PTH119
os.chdir('..')
-
+
#
# little bit of preprocessing
#
-
- thisScriptPath = Path(os.path.dirname(os.path.abspath(__file__))).resolve()
- registryFile = thisScriptPath / "WorkflowApplications.json"
- applicationDir = Path(thisScriptPath).parents[1]
- pwd = os.getcwd()
- currentDir = Path(pwd)
- referenceDir = currentDir / "input_data"
- siteResponseOutputDir = referenceDir / "siteResponseWorkingDir"
- siteResponseAggregatedResultsDir = referenceDir / "siteResponseOutputMotions"
-
- print('PWD: ', pwd);
- print('currentDir: ', currentDir);
- print('referenceDir: ', referenceDir);
- print('siteResponseOutputDir: ', siteResponseOutputDir);
-
-
+
+ thisScriptPath = Path(os.path.dirname(os.path.abspath(__file__))).resolve() # noqa: PTH100, PTH120, N816
+ registryFile = thisScriptPath / 'WorkflowApplications.json' # noqa: N816
+ applicationDir = Path(thisScriptPath).parents[1] # noqa: N816
+ pwd = os.getcwd() # noqa: PTH109
+ currentDir = Path(pwd) # noqa: N816
+ referenceDir = currentDir / 'input_data' # noqa: N816
+ siteResponseOutputDir = referenceDir / 'siteResponseWorkingDir' # noqa: N816
+ siteResponseAggregatedResultsDir = referenceDir / 'siteResponseOutputMotions' # noqa: N816
+
+ print('PWD: ', pwd) # noqa: T201
+ print('currentDir: ', currentDir) # noqa: T201
+ print('referenceDir: ', referenceDir) # noqa: T201
+ print('siteResponseOutputDir: ', siteResponseOutputDir) # noqa: T201
#
# parse command line
#
-
- workflowArgParser = argparse.ArgumentParser(
- "Run the NHERI SimCenter rWHALE workflow for a set of assets.",
- allow_abbrev=False)
- workflowArgParser.add_argument("-i", "--input",
+ workflowArgParser = argparse.ArgumentParser( # noqa: N816
+ 'Run the NHERI SimCenter rWHALE workflow for a set of assets.',
+ allow_abbrev=False,
+ )
+
+ workflowArgParser.add_argument(
+ '-i',
+ '--input',
default=None,
- help="Configuration file specifying the applications and data to be "
- "used")
- workflowArgParser.add_argument("-F", "--filter",
+        help='Configuration file specifying the applications and data to be used',
+ )
+ workflowArgParser.add_argument(
+ '-F',
+ '--filter',
default=None,
- help="Provide a subset of building ids to run")
- workflowArgParser.add_argument("-c", "--check",
- help="Check the configuration file")
- workflowArgParser.add_argument("-r", "--registry",
- default = registryFile,
- help="Path to file containing registered workflow applications")
- workflowArgParser.add_argument("-f", "--forceCleanup",
- action="store_true",
- help="Remove working directories after the simulation is completed.")
- workflowArgParser.add_argument("-d", "--referenceDir",
- default = str(referenceDir),
- help="Relative paths in the config file are referenced to this directory.")
- workflowArgParser.add_argument("-w", "--workDir",
+ help='Provide a subset of building ids to run',
+ )
+ workflowArgParser.add_argument(
+ '-c', '--check', help='Check the configuration file'
+ )
+ workflowArgParser.add_argument(
+ '-r',
+ '--registry',
+ default=registryFile,
+ help='Path to file containing registered workflow applications',
+ )
+ workflowArgParser.add_argument(
+ '-f',
+ '--forceCleanup',
+ action='store_true',
+ help='Remove working directories after the simulation is completed.',
+ )
+ workflowArgParser.add_argument(
+ '-d',
+ '--referenceDir',
+ default=str(referenceDir),
+ help='Relative paths in the config file are referenced to this directory.',
+ )
+ workflowArgParser.add_argument(
+ '-w',
+ '--workDir',
default=str(siteResponseOutputDir),
- help="Absolute path to the working directory.")
- workflowArgParser.add_argument("-o", "--outputDir",
+ help='Absolute path to the working directory.',
+ )
+ workflowArgParser.add_argument(
+ '-o',
+ '--outputDir',
default=str(siteResponseAggregatedResultsDir),
- help="Absolute path to the working directory.")
- workflowArgParser.add_argument("-a", "--appDir",
+        help='Absolute path to the output directory.',
+ )
+ workflowArgParser.add_argument(
+ '-a',
+ '--appDir',
default=None,
- help="Absolute path to the local application directory.")
- workflowArgParser.add_argument("-l", "--logFile",
+ help='Absolute path to the local application directory.',
+ )
+ workflowArgParser.add_argument(
+ '-l',
+ '--logFile',
default='log.txt',
- help="Path where the log file will be saved.")
+ help='Path where the log file will be saved.',
+ )
# adding some parallel stuff
- workflowArgParser.add_argument("-p", "--parallelType",
+ workflowArgParser.add_argument(
+ '-p',
+ '--parallelType',
default='seqRUN',
- help="How parallel runs: options seqRUN, parSETUP, parRUN")
- workflowArgParser.add_argument("-m", "--mpiexec",
+        help='How to run in parallel; options: seqRUN, parSETUP, parRUN',
+ )
+ workflowArgParser.add_argument(
+ '-m',
+ '--mpiexec',
default='mpiexec',
- help="How mpi runs, e.g. ibrun, mpirun, mpiexec")
- workflowArgParser.add_argument("-n", "--numP",
+        help='MPI launcher to use, e.g. ibrun, mpirun, mpiexec',
+ )
+ workflowArgParser.add_argument(
+ '-n',
+ '--numP',
default='8',
- help="If parallel, how many jobs to start with mpiexec option")
+ help='If parallel, how many jobs to start with mpiexec option',
+ )
+
+ # Parsing the command line arguments
+ wfArgs = workflowArgParser.parse_args() # noqa: N816
- #Parsing the command line arguments
- wfArgs = workflowArgParser.parse_args()
-
# update the local app dir with the default - if needed
if wfArgs.appDir is None:
- workflow_dir = Path(os.path.dirname(os.path.abspath(__file__))).resolve()
+ workflow_dir = Path(os.path.dirname(os.path.abspath(__file__))).resolve() # noqa: PTH100, PTH120
wfArgs.appDir = workflow_dir.parents[1]
if wfArgs.check:
run_type = 'set_up'
else:
- #run_type = 'run'
+ # run_type = 'run'
# KZ: 10/19/22, changing to the new run type for the refactored whale
run_type = 'runningLocal'
#
# Calling the main workflow method and passing the parsed arguments
#
- print('FMK siteResponse main: WORKDIR: ', wfArgs.workDir)
- numPROC = int(wfArgs.numP)
-
- main(run_type = run_type,
- input_file = wfArgs.input,
- app_registry = wfArgs.registry,
- force_cleanup = wfArgs.forceCleanup,
- bldg_id_filter = wfArgs.filter,
- reference_dir = wfArgs.referenceDir,
- working_dir = wfArgs.workDir,
- app_dir = wfArgs.appDir,
- log_file = wfArgs.logFile,
- output_dir = wfArgs.outputDir,
- parallelType = wfArgs.parallelType,
- mpiExec = wfArgs.mpiexec,
- numPROC = numPROC)
+ print('FMK siteResponse main: WORKDIR: ', wfArgs.workDir) # noqa: T201
+ numPROC = int(wfArgs.numP) # noqa: N816
+
+ main(
+ run_type=run_type,
+ input_file=wfArgs.input,
+ app_registry=wfArgs.registry,
+ force_cleanup=wfArgs.forceCleanup,
+ bldg_id_filter=wfArgs.filter,
+ reference_dir=wfArgs.referenceDir,
+ working_dir=wfArgs.workDir,
+ app_dir=wfArgs.appDir,
+ log_file=wfArgs.logFile,
+ output_dir=wfArgs.outputDir,
+ parallelType=wfArgs.parallelType,
+ mpiExec=wfArgs.mpiexec,
+ numPROC=numPROC,
+ )
#
# now create new event file, sites and record files
#
-
- #createFilesForEventGrid(wfArgs.workDir,
+
+ # createFilesForEventGrid(wfArgs.workDir,
# wfArgs.outputDir,
# wfArgs.forceCleanup)
-
# chdir again back to where ADAM starts!
os.chdir(pwd1)
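siteResponseWHALE.py probes for mpi4py with `importlib.util.find_spec` and quietly falls back to a sequential run when the package is missing or only a single rank is available. A standalone sketch of that detection logic, with lower-case names standing in for `numP`, `procID` and `doParallel`:

    # Optional-MPI detection, mirroring the logic in siteResponseWHALE.py.
    import importlib.util

    num_procs, proc_id, do_parallel = 1, 0, False

    if importlib.util.find_spec('mpi4py') is not None:
        from mpi4py import MPI

        comm = MPI.COMM_WORLD
        num_procs = comm.Get_size()
        proc_id = comm.Get_rank()
        if num_procs < 2:
            # a single rank behaves like a plain sequential run
            num_procs, proc_id, do_parallel = 1, 0, False
        else:
            do_parallel = True

    print('parallel:', do_parallel, 'rank:', proc_id, 'of', num_procs)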
diff --git a/modules/Workflow/whale/__init__.py b/modules/Workflow/whale/__init__.py
index d094ead40..e84de852c 100644
--- a/modules/Workflow/whale/__init__.py
+++ b/modules/Workflow/whale/__init__.py
@@ -1,5 +1,4 @@
-# -*- coding: utf-8 -*-
-#
+# # noqa: D104
# Copyright (c) 2019 The Regents of the University of California
# Copyright (c) 2019 Leland Stanford Junior University
#
@@ -41,11 +40,11 @@
# Michael Gardner
# Chaofeng Wang
-name = "whale"
+name = 'whale'
__version__ = '2.4.1'
__copyright__ = """Copyright (c) 2018 The Regents of the University of
California and Leland Stanford Junior University"""
-__license__ = "BSD 3-Clause License"
\ No newline at end of file
+__license__ = 'BSD 3-Clause License'
diff --git a/modules/Workflow/whale/main.py b/modules/Workflow/whale/main.py
index 75dfa48da..6f92dd86a 100644
--- a/modules/Workflow/whale/main.py
+++ b/modules/Workflow/whale/main.py
@@ -1,4 +1,3 @@
-# -*- coding: utf-8 -*-
#
# Copyright (c) 2019 The Regents of the University of California
# Copyright (c) 2019 Leland Stanford Junior University
@@ -44,8 +43,7 @@
# Jinyan Zhao
# Sina Naeimi
-"""
-This module has classes and methods that handle everything at the moment.
+"""This module has classes and methods that handle everything at the moment.
.. rubric:: Contents
@@ -53,65 +51,61 @@
...
-"""
+""" # noqa: D404
-from time import strftime
-from datetime import datetime
-import sys, os, json
import argparse
import importlib
-
+import json
+import os
+import platform
+import posixpath
import pprint
import shlex
-
import shutil
-import subprocess
-
-from copy import deepcopy
-
+import subprocess # noqa: S404
+import sys
import warnings
-import posixpath
+from copy import deepcopy
+from datetime import datetime
+from pathlib import Path, PurePath
import numpy as np
import pandas as pd
+import shapely.geometry
+import shapely.wkt
-import platform
-from pathlib import Path, PurePath
-
-import shapely.wkt, shapely.geometry
-
-#import posixpath
-#import ntpath
+# import posixpath
+# import ntpath
pp = pprint.PrettyPrinter(indent=4)
# get the absolute path of the whale directory
-whale_path = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
+whale_path = os.path.dirname(os.path.dirname(os.path.abspath(__file__))) # noqa: PTH100, PTH120
-def str2bool(v):
+
+def str2bool(v): # noqa: D103
# courtesy of Maxim @ stackoverflow
if isinstance(v, bool):
- return v
- if v.lower() in ('yes', 'true', 'True', 't', 'y', '1'):
+ return v
+ if v.lower() in ('yes', 'true', 'True', 't', 'y', '1'): # noqa: PLR6201
return True
- elif v.lower() in ('no', 'false', 'False', 'f', 'n', '0'):
+ elif v.lower() in ('no', 'false', 'False', 'f', 'n', '0'): # noqa: PLR6201, RET505
return False
else:
- raise argparse.ArgumentTypeError('Boolean value expected.')
+ raise argparse.ArgumentTypeError('Boolean value expected.') # noqa: EM101, TRY003
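    # Illustrative sketch, not part of whale/main.py: a converter like the
    # str2bool above is typically registered as an argparse `type` so that
    # string flags such as '--flag yes' or '--flag 0' become real booleans.
    import argparse

    def to_bool(value):
        if isinstance(value, bool):
            return value
        if value.lower() in ('yes', 'true', 't', 'y', '1'):
            return True
        if value.lower() in ('no', 'false', 'f', 'n', '0'):
            return False
        raise argparse.ArgumentTypeError('Boolean value expected.')

    parser = argparse.ArgumentParser()
    parser.add_argument('--copy-resources', type=to_bool,
                        nargs='?', const=True, default=False)

    print(parser.parse_args(['--copy-resources', 'yes']).copy_resources)  # True
    print(parser.parse_args([]).copy_resources)                           # False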
-class Options(object):
+class Options: # noqa: D101
def __init__(self):
-
self._log_show_ms = False
self._print_log = False
self.reset_log_strings()
@property
- def log_show_ms(self):
+ def log_show_ms(self): # noqa: D102
return self._log_show_ms
@log_show_ms.setter
@@ -121,112 +115,110 @@ def log_show_ms(self, value):
self.reset_log_strings()
@property
- def log_pref(self):
+ def log_pref(self): # noqa: D102
return self._log_pref
@property
- def log_div(self):
+ def log_div(self): # noqa: D102
return self._log_div
@property
- def log_time_format(self):
+ def log_time_format(self): # noqa: D102
return self._log_time_format
@property
- def log_file(self):
+ def log_file(self): # noqa: D102
return globals()['log_file']
@log_file.setter
- def log_file(self, value):
-
+ def log_file(self, value): # noqa: PLR6301
if value is None:
globals()['log_file'] = value
else:
-
filepath = Path(value).resolve()
try:
globals()['log_file'] = str(filepath)
- with open(filepath, 'w', encoding="utf-8") as f:
+ with open(filepath, 'w', encoding='utf-8') as f: # noqa: FURB103, PTH123
f.write('')
- except:
- raise ValueError(f"The filepath provided does not point to a "
- f"valid location: {filepath}")
+ except: # noqa: E722
+ raise ValueError( # noqa: B904, TRY003
+ f'The filepath provided does not point to a ' # noqa: EM102
+ f'valid location: {filepath}'
+ )
@property
- def print_log(self):
+ def print_log(self): # noqa: D102
return self._print_log
@print_log.setter
def print_log(self, value):
self._print_log = str2bool(value)
- def reset_log_strings(self):
-
+ def reset_log_strings(self): # noqa: D102
if self._log_show_ms:
self._log_time_format = '%H:%M:%S:%f'
- self._log_pref = ' ' * 16 # the length of the time string in the log file
- self._log_div = '-' * (80 - 17) # to have a total length of 80 with the time added
+ self._log_pref = (
+ ' ' * 16
+ ) # the length of the time string in the log file
+ self._log_div = '-' * (
+ 80 - 17
+ ) # to have a total length of 80 with the time added
else:
self._log_time_format = '%H:%M:%S'
self._log_pref = ' ' * 9
self._log_div = '-' * (80 - 9)
+
options = Options()
log_file = None
-def set_options(config_options):
+def set_options(config_options): # noqa: D103
if config_options is not None:
-
for key, value in config_options.items():
-
- if key == "LogShowMS":
+ if key == 'LogShowMS':
options.log_show_ms = value
- elif key == "LogFile":
+ elif key == 'LogFile':
options.log_file = value
- elif key == "PrintLog":
+ elif key == 'PrintLog':
options.print_log = value
-
# Monkeypatch warnings to get prettier messages
-def _warning(message, category, filename, lineno, file=None, line=None):
+def _warning(message, category, filename, lineno, file=None, line=None): # noqa: ARG001
if '\\' in filename:
file_path = filename.split('\\')
elif '/' in filename:
file_path = filename.split('/')
python_file = '/'.join(file_path[-3:])
- print('WARNING in {} at line {}\n{}\n'.format(python_file, lineno, message))
+ print(f'WARNING in {python_file} at line {lineno}\n{message}\n') # noqa: T201
-warnings.showwarning = _warning
-def log_div(prepend_timestamp=False, prepend_blank_space=True):
- """
- Print a divider line to the log file
-
- """
+warnings.showwarning = _warning
- if prepend_timestamp:
- msg = options.log_div
- elif prepend_blank_space:
+def log_div(prepend_timestamp=False, prepend_blank_space=True): # noqa: FBT002
+ """Print a divider line to the log file""" # noqa: D400
+ if prepend_timestamp or prepend_blank_space:
msg = options.log_div
else:
msg = '-' * 80
- log_msg(msg, prepend_timestamp = prepend_timestamp,
- prepend_blank_space = prepend_blank_space)
+ log_msg(
+ msg,
+ prepend_timestamp=prepend_timestamp,
+ prepend_blank_space=prepend_blank_space,
+ )
-def log_msg(msg='', prepend_timestamp=True, prepend_blank_space=True):
- """
- Print a message to the screen with the current time as prefix
+def log_msg(msg='', prepend_timestamp=True, prepend_blank_space=True): # noqa: FBT002
+ """Print a message to the screen with the current time as prefix
The time is in ISO-8601 format, e.g. 2018-06-16T20:24:04Z
@@ -235,58 +227,59 @@ def log_msg(msg='', prepend_timestamp=True, prepend_blank_space=True):
msg: string
Message to print.
- """
-
+ """ # noqa: D400
msg_lines = msg.split('\n')
for msg_i, msg_line in enumerate(msg_lines):
-
- if (prepend_timestamp and (msg_i==0)):
- formatted_msg = '{} {}'.format(
- datetime.now().strftime(options.log_time_format), msg_line)
- elif prepend_timestamp:
- formatted_msg = options.log_pref + msg_line
- elif prepend_blank_space:
+ if prepend_timestamp and (msg_i == 0):
+ formatted_msg = (
+ f'{datetime.now().strftime(options.log_time_format)} {msg_line}' # noqa: DTZ005
+ )
+ elif prepend_timestamp or prepend_blank_space:
formatted_msg = options.log_pref + msg_line
else:
formatted_msg = msg_line
if options.print_log:
- print(formatted_msg)
+ print(formatted_msg) # noqa: T201
if globals()['log_file'] is not None:
- with open(globals()['log_file'], 'a', encoding="utf-8") as f:
- f.write('\n'+formatted_msg)
+ with open(globals()['log_file'], 'a', encoding='utf-8') as f: # noqa: PTH123
+ f.write('\n' + formatted_msg)
+
def log_error(msg):
- """
- Print an error message to the screen
+ """Print an error message to the screen
Parameters
----------
msg: string
Message to print.
- """
+ """ # noqa: D400
log_div()
- log_msg(''*(80-21-6) + ' ERROR')
+ log_msg('' * (80 - 21 - 6) + ' ERROR')
log_msg(msg)
log_div()
-def print_system_info():
- log_msg('System Information:',
- prepend_timestamp=False, prepend_blank_space=False)
- log_msg(f' local time zone: {datetime.utcnow().astimezone().tzinfo}\n'
- f' start time: {datetime.now().strftime("%Y-%m-%dT%H:%M:%S")}\n'
- f' python: {sys.version}\n'
- f' numpy: {np.__version__}\n'
- f' pandas: {pd.__version__}\n',
- prepend_timestamp=False, prepend_blank_space=False)
+def print_system_info(): # noqa: D103
+ log_msg(
+ 'System Information:', prepend_timestamp=False, prepend_blank_space=False
+ )
+ log_msg(
+ f' local time zone: {datetime.utcnow().astimezone().tzinfo}\n'
+ f' start time: {datetime.now().strftime("%Y-%m-%dT%H:%M:%S")}\n' # noqa: DTZ005
+ f' python: {sys.version}\n'
+ f' numpy: {np.__version__}\n'
+ f' pandas: {pd.__version__}\n',
+ prepend_timestamp=False,
+ prepend_blank_space=False,
+ )
+
def create_command(command_list, enforced_python=None):
- """
- Short description
+ """Short description
Long description
@@ -294,9 +287,9 @@ def create_command(command_list, enforced_python=None):
----------
command_list: array of unicode strings
Explain...
- """
- if command_list[0] == 'python':
+ """ # noqa: D400
+ if command_list[0] == 'python':
# replace python with...
if enforced_python is None:
# the full path to the python interpreter
@@ -305,21 +298,23 @@ def create_command(command_list, enforced_python=None):
# the prescribed path to the python interpreter
python_exe = enforced_python
- command = '"{}" "{}" '.format(python_exe, command_list[1])# + ' '.join(command_list[2:])
+ command = (
+ f'"{python_exe}" "{command_list[1]}" ' # + ' '.join(command_list[2:])
+ )
for command_arg in command_list[2:]:
- command += '"{}" '.format(command_arg)
+ command += f'"{command_arg}" '
else:
- command = '"{}" '.format(command_list[0])# + ' '.join(command_list[1:])
+ command = f'"{command_list[0]}" ' # + ' '.join(command_list[1:])
for command_arg in command_list[1:]:
- command += '"{}" '.format(command_arg)
+ command += f'"{command_arg}" '
return command
+
def run_command(command):
- """
- Short description
+ """Short description
Long description
@@ -328,19 +323,17 @@ def run_command(command):
command_list: array of unicode strings
Explain...
- """
-
+ """ # noqa: D400
# If it is a python script, we do not run it, but rather import the main
# function. This ensures that the script is run using the same python
# interpreter that this script uses and it is also faster because we do not
# need to run multiple python interpreters simultaneously.
- Frank_trusts_this_approach = False
+ Frank_trusts_this_approach = False # noqa: N806
if command[:6] == 'python' and Frank_trusts_this_approach:
-
- import importlib # only import this when it's needed
+ import importlib # only import this when it's needed # noqa: PLC0415
command_list = command.split()[1:]
- #py_args = command_list[1:]
+ # py_args = command_list[1:]
# get the dir and file name
py_script_dir, py_script_file = os.path.split(command_list[0][1:-1])
@@ -350,30 +343,37 @@ def run_command(command):
# import the file
py_script = importlib.__import__(
- py_script_file[:-3], globals(), locals(), ['main',], 0)
+ py_script_file[:-3],
+ globals(),
+ locals(),
+ [
+ 'main',
+ ],
+ 0,
+ )
# remove the quotes from the arguments
arg_list = [c[1:-1] for c in command_list[1:]]
py_script.main(arg_list)
- return "", ""
+ return '', ''
- else:
-
+ else: # noqa: RET505
# fmk with Shell=True not working on older windows machines, new approach needed for quoted command .. turn into a list
command = shlex.split(command)
-
- try:
- result = subprocess.check_output(command, stderr=subprocess.STDOUT, text=True)
+ try:
+ result = subprocess.check_output( # noqa: S603
+ command, stderr=subprocess.STDOUT, text=True
+ )
returncode = 0
except subprocess.CalledProcessError as e:
result = e.output
returncode = e.returncode
if returncode != 0:
- log_error('return code: {}'.format(returncode))
+ log_error(f'return code: {returncode}')
# if platform.system() == 'Windows':
# return result.decode(sys.stdout.encoding), returncode
@@ -383,19 +383,20 @@ def run_command(command):
return result, returncode
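    # Usage sketch: create_command() wraps every token in quotes and
    # run_command() shlex-splits the string before subprocess executes it.
    cmd = create_command(['python', '-c', 'print(6 * 7)'])
    out, returncode = run_command(cmd)  # out == '42\n', returncode == 0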
-def show_warning(warning_msg):
- warnings.warn(UserWarning(warning_msg))
-def resolve_path(target_path, ref_path):
+def show_warning(warning_msg): # noqa: D103
+ warnings.warn(UserWarning(warning_msg)) # noqa: B028
+
+def resolve_path(target_path, ref_path): # noqa: D103
ref_path = Path(ref_path)
target_path = str(target_path).strip()
- while target_path.startswith('/') or target_path.startswith('\\'):
+ while target_path.startswith('/') or target_path.startswith('\\'): # noqa: PIE810
target_path = target_path[1:]
- if target_path == "":
+ if target_path == '': # noqa: PLC1901
target_path = ref_path
else:
@@ -407,51 +408,51 @@ def resolve_path(target_path, ref_path):
if target_path.exists():
target_path = target_path.resolve()
else:
- #raise ValueError(
+ # raise ValueError(
# f"{target_path} does not point to a valid location")
- print(f"{target_path} does not point to a valid location")
+ print(f'{target_path} does not point to a valid location') # noqa: T201
return target_path
-def _parse_app_registry(registry_path, app_types, list_available_apps=False):
- """
- Load the information about available workflow applications.
+
+def _parse_app_registry(registry_path, app_types, list_available_apps=False): # noqa: FBT002
+ """Load the information about available workflow applications.
Parameters
----------
registry_path: string
- Path to the JSON file with the app registry information. By default,
+ Path to the JSON file with the app registry information. By default,
this file is stored at applications/Workflow/WorkflowApplications.json
app_types: list of strings
- List of application types (e.g., Assets, Modeling, DL) to parse from the
+ List of application types (e.g., Assets, Modeling, DL) to parse from the
registry
list_available_apps: bool, optional, default: False
If True, all available applications of the requested types are printed
in the log file.
-
+
Returns
-------
app_registry: dict
A dictionary with WorkflowApplication objects. Primary keys are
the type of application (e.g., Assets, Modeling, DL); secondary keys
- are the name of the specific application (e.g, MDOF-LU). See the
+        are the name of the specific application (e.g., MDOF-LU). See the
documentation for more details.
default_values: dict
Default values of filenames used to pass data between applications. Keys
are the placeholder names (e.g., filenameAIM) and values are the actual
filenames (e.g,. AIM.json)
- """
+ """
log_msg('Parsing application registry file')
# open the registry file
log_msg('Loading the json file...', prepend_timestamp=False)
- with open(registry_path, 'r', encoding="utf-8") as f:
+ with open(registry_path, encoding='utf-8') as f: # noqa: PTH123
app_registry_data = json.load(f)
log_msg(' OK', prepend_timestamp=False)
# initialize the app registry
- app_registry = dict([(a, dict()) for a in app_types])
+ app_registry = dict([(a, dict()) for a in app_types]) # noqa: C404, C408
log_msg('Loading default values...', prepend_timestamp=False)
@@ -462,11 +463,9 @@ def _parse_app_registry(registry_path, app_types, list_available_apps=False):
log_msg('Collecting application data...', prepend_timestamp=False)
# for each application type
for app_type in sorted(app_registry.keys()):
-
# if the registry contains information about it
- app_type_long = app_type+'Applications'
+ app_type_long = app_type + 'Applications'
if app_type_long in app_registry_data:
-
# get the list of available applications
available_apps = app_registry_data[app_type_long]['Applications']
api_info = app_registry_data[app_type_long]['API']
@@ -476,9 +475,10 @@ def _parse_app_registry(registry_path, app_types, list_available_apps=False):
api_info.update({'DefaultValues': default_values})
# and create a workflow application for each app of this type
- for app in available_apps:
+ for app in available_apps:
app_registry[app_type][app['Name']] = WorkflowApplication(
- app_type=app_type, app_info=app, api_info=api_info)
+ app_type=app_type, app_info=app, api_info=api_info
+ )
log_msg(' OK', prepend_timestamp=False)
@@ -486,102 +486,97 @@ def _parse_app_registry(registry_path, app_types, list_available_apps=False):
log_msg('Available applications:', prepend_timestamp=False)
for app_type, app_list in app_registry.items():
- for app_name, app_object in app_list.items():
- log_msg(' {} : {}'.format(app_type, app_name),
- prepend_timestamp=False)
+ for app_name, app_object in app_list.items(): # noqa: B007, PERF102
+ log_msg(f' {app_type} : {app_name}', prepend_timestamp=False)
- #pp.pprint(self.app_registry)
+ # pp.pprint(self.app_registry)
- log_msg('Successfully parsed application registry',
- prepend_timestamp=False)
+ log_msg('Successfully parsed application registry', prepend_timestamp=False)
log_div()
return app_registry, default_values
-class WorkFlowInputError(Exception):
+
+class WorkFlowInputError(Exception): # noqa: D101
def __init__(self, value):
self.value = value
- def __str__(self):
+ def __str__(self): # noqa: D105
return repr(self.value)
-class WorkflowApplication(object):
- """
- Short description.
-
+class WorkflowApplication:
+ """Short description.
Longer description.
Parameters
----------
- """
+ """ # noqa: D414
def __init__(self, app_type, app_info, api_info):
+ # print('APP_TYPE', app_type)
+ # print('APP_INFO', app_info)
+ # print('API_INFO', api_info)
+ # print('APP_RELPATH', app_info['ExecutablePath'])
- #print('APP_TYPE', app_type)
- #print('APP_INFO', app_info)
- #print('API_INFO', api_info)
- #print('APP_RELPATH', app_info['ExecutablePath'])
-
self.name = app_info['Name']
self.app_type = app_type
self.rel_path = app_info['ExecutablePath']
-
- if 'RunsParallel' in app_info.keys():
- self.runsParallel = app_info['RunsParallel']
+
+ if 'RunsParallel' in app_info.keys(): # noqa: SIM118
+ self.runsParallel = app_info['RunsParallel']
else:
- self.runsParallel = False;
-
- self.app_spec_inputs = app_info.get('ApplicationSpecificInputs',[])
+ self.runsParallel = False
+
+ self.app_spec_inputs = app_info.get('ApplicationSpecificInputs', [])
self.inputs = api_info['Inputs']
self.outputs = api_info['Outputs']
- if 'DefaultValues' in api_info.keys():
+ if 'DefaultValues' in api_info.keys(): # noqa: SIM118
self.defaults = api_info['DefaultValues']
else:
- self.defaults = None;
+ self.defaults = None
def set_pref(self, preferences, ref_path):
- """
- Short description
+ """Short description
Parameters
----------
preferences: dictionary
Explain...
- """
+
+ """ # noqa: D400
self.pref = preferences
# parse the relative paths (if any)
- ASI = [inp['id'] for inp in self.app_spec_inputs]
+ ASI = [inp['id'] for inp in self.app_spec_inputs] # noqa: N806
for preference in list(self.pref.keys()):
if preference in ASI:
input_id = np.where([preference == asi for asi in ASI])[0][0]
input_type = self.app_spec_inputs[input_id]['type']
if input_type == 'path':
-
if 'PelicunDefault' in self.pref[preference]:
continue
self.pref[preference] = resolve_path(
- self.pref[preference], ref_path)
+ self.pref[preference], ref_path
+ )
- def get_command_list(self, app_path, force_posix=False):
- """
- Short description
+ def get_command_list(self, app_path, force_posix=False): # noqa: FBT002, C901
+ """Short description
Parameters
----------
app_path: Path
Explain...
- """
+ """ # noqa: D400
abs_path = Path(app_path) / self.rel_path
-
- #abs_path = posixpath.join(app_path, self.rel_path)
+
+ # abs_path = posixpath.join(app_path, self.rel_path)
arg_list = []
@@ -589,12 +584,12 @@ def get_command_list(self, app_path, force_posix=False):
arg_list.append('python')
if force_posix:
- arg_list.append(u'{}'.format(abs_path.as_posix()))
+ arg_list.append(f'{abs_path.as_posix()}')
else:
- arg_list.append(u'{}'.format(abs_path))
+ arg_list.append(f'{abs_path}')
for in_arg in self.inputs:
- arg_list.append(u'--{}'.format(in_arg['id']))
+ arg_list.append('--{}'.format(in_arg['id']))
# Default values are protected, they cannot be overwritten simply
# by providing application specific inputs in the config file
@@ -603,29 +598,31 @@ def get_command_list(self, app_path, force_posix=False):
# If the user also provided an input, let them know that their
# input is invalid
- if in_arg['id'] in self.pref.keys():
- log_msg('\nWARNING: Application specific parameters cannot '
- 'overwrite default workflow\nparameters. See the '
- 'documentation on how to edit workflowDefault '
- 'inputs.\n', prepend_timestamp=False,
- prepend_blank_space=False)
-
- elif in_arg['id'] in self.pref.keys():
+ if in_arg['id'] in self.pref.keys(): # noqa: SIM118
+ log_msg(
+ '\nWARNING: Application specific parameters cannot '
+ 'overwrite default workflow\nparameters. See the '
+ 'documentation on how to edit workflowDefault '
+ 'inputs.\n',
+ prepend_timestamp=False,
+ prepend_blank_space=False,
+ )
+
+ elif in_arg['id'] in self.pref.keys(): # noqa: SIM118
arg_value = self.pref[in_arg['id']]
else:
arg_value = in_arg['default']
if isinstance(arg_value, Path) and force_posix:
- arg_list.append(u'{}'.format(arg_value.as_posix()))
+ arg_list.append(f'{arg_value.as_posix()}')
else:
- arg_list.append(u'{}'.format(arg_value))
+ arg_list.append(f'{arg_value}')
for out_arg in self.outputs:
- out_id = u'--{}'.format(out_arg['id'])
+ out_id = '--{}'.format(out_arg['id'])
if out_id not in arg_list:
-
arg_list.append(out_id)
# Default values are protected, they cannot be overwritten simply
@@ -635,50 +632,51 @@ def get_command_list(self, app_path, force_posix=False):
# If the user also provided an input, let them know that
# their input is invalid
- if out_arg['id'] in self.pref.keys():
- log_msg('\nWARNING: Application specific parameters '
- 'cannot overwrite default workflow\nparameters. '
- 'See the documentation on how to edit '
- 'workflowDefault inputs.\n',
- prepend_timestamp=False,
- prepend_blank_space=False)
-
- elif out_arg['id'] in self.pref.keys():
+ if out_arg['id'] in self.pref.keys(): # noqa: SIM118
+ log_msg(
+ '\nWARNING: Application specific parameters '
+ 'cannot overwrite default workflow\nparameters. '
+ 'See the documentation on how to edit '
+ 'workflowDefault inputs.\n',
+ prepend_timestamp=False,
+ prepend_blank_space=False,
+ )
+
+ elif out_arg['id'] in self.pref.keys(): # noqa: SIM118
arg_value = self.pref[out_arg['id']]
else:
arg_value = out_arg['default']
if isinstance(arg_value, Path) and force_posix:
- arg_list.append(u'{}'.format(arg_value.as_posix()))
+ arg_list.append(f'{arg_value.as_posix()}')
else:
- arg_list.append(u'{}'.format(arg_value))
+ arg_list.append(f'{arg_value}')
- ASI_list = [inp['id'] for inp in self.app_spec_inputs]
+ ASI_list = [inp['id'] for inp in self.app_spec_inputs] # noqa: N806
for pref_name, pref_value in self.pref.items():
# only pass those input arguments that are in the registry
if pref_name in ASI_list:
- pref_id = u'--{}'.format(pref_name)
+ pref_id = f'--{pref_name}'
if pref_id not in arg_list:
arg_list.append(pref_id)
if isinstance(pref_value, Path) and force_posix:
- arg_list.append(u'{}'.format(pref_value.as_posix()))
+ arg_list.append(f'{pref_value.as_posix()}')
else:
- arg_list.append(u'{}'.format(pref_value))
+ arg_list.append(f'{pref_value}')
- #pp.pprint(arg_list)
+ # pp.pprint(arg_list)
return arg_list
-class Workflow(object):
- """
- A class that collects methods common to all workflows developed by the
+
+class Workflow: # noqa: PLR0904
+ """A class that collects methods common to all workflows developed by the
SimCenter. Child-classes will be introduced later if needed.
Parameters
----------
-
run_type: string
Explain...
input_file: string
@@ -686,72 +684,90 @@ class Workflow(object):
app_registry: string
Explain...
- """
-
- def __init__(self,
- run_type,
- input_file,
- app_registry,
- app_type_list,
- reference_dir = None,
- working_dir = None,
- app_dir = None,
- parType="seqRUN",
- mpiExec="mpiExec",
- numProc=8):
-
+ """ # noqa: D205
+
+ def __init__(
+ self,
+ run_type,
+ input_file,
+ app_registry,
+ app_type_list,
+ reference_dir=None,
+ working_dir=None,
+ app_dir=None,
+ parType='seqRUN', # noqa: N803
+ mpiExec='mpiExec', # noqa: N803
+ numProc=8, # noqa: N803
+ ):
log_msg('Inputs provided:')
- log_msg('workflow input file: {}'.format(input_file),
- prepend_timestamp=False)
- log_msg('application registry file: {}'.format(app_registry),
- prepend_timestamp=False)
- log_msg('run type: {}'.format(run_type),
- prepend_timestamp=False)
+ log_msg(f'workflow input file: {input_file}', prepend_timestamp=False)
+ log_msg(
+ f'application registry file: {app_registry}',
+ prepend_timestamp=False,
+ )
+ log_msg(f'run type: {run_type}', prepend_timestamp=False)
log_div()
self.optional_apps = ['RegionalEvent', 'Modeling', 'EDP', 'UQ', 'DL', 'FEM']
-
+
# Create the asset registry
- self.asset_type_list = ['Buildings', 'WaterDistributionNetwork', 'TransportationNetwork']
- self.asset_registry = dict([(a, dict()) for a in self.asset_type_list])
+ self.asset_type_list = [
+ 'Buildings',
+ 'WaterDistributionNetwork',
+ 'TransportationNetwork',
+ ]
+ self.asset_registry = dict([(a, dict()) for a in self.asset_type_list]) # noqa: C404, C408
self.run_type = run_type
self.input_file = input_file
self.app_registry_file = app_registry
- self.modifiedRun = False # ADAM to fix
- self.parType = parType;
+ self.modifiedRun = False # ADAM to fix
+ self.parType = parType
self.mpiExec = mpiExec
- self.numProc = numProc
+ self.numProc = numProc
# if parallel setup, open script file to run
- self.inputFilePath = os.path.dirname(input_file)
- parCommandFileName = os.path.join(self.inputFilePath, 'sc_parScript.sh')
- if (parType == 'parSETUP'):
- self.parCommandFile = open(parCommandFileName, "w")
- self.parCommandFile.write("#!/bin/sh" + "\n")
-
- print('WF: parType, mpiExec, numProc: ', self.parType, self.mpiExec, self.numProc)
+ self.inputFilePath = os.path.dirname(input_file) # noqa: PTH120
+ parCommandFileName = os.path.join(self.inputFilePath, 'sc_parScript.sh') # noqa: PTH118, N806
+ if parType == 'parSETUP':
+ self.parCommandFile = open(parCommandFileName, 'w') # noqa: PLW1514, PTH123, SIM115
+ self.parCommandFile.write('#!/bin/sh' + '\n')
+
+ print( # noqa: T201
+ 'WF: parType, mpiExec, numProc: ',
+ self.parType,
+ self.mpiExec,
+ self.numProc,
+ )
self.numP = 1
self.procID = 0
self.doParallel = False
- if (parType == 'parRUN'):
- mpi_spec = importlib.util.find_spec("mpi4py")
+ if parType == 'parRUN':
+ mpi_spec = importlib.util.find_spec('mpi4py')
found = mpi_spec is not None
if found:
- import mpi4py
- from mpi4py import MPI
+ from mpi4py import MPI # noqa: PLC0415
+
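+ # Query the MPI communicator for rank/size; the code below falls back to a serial run if fewer than two ranks are available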
self.comm = MPI.COMM_WORLD
self.numP = self.comm.Get_size()
- self.procID = self.comm.Get_rank();
- if self.numP < 2:
+ self.procID = self.comm.Get_rank()
+ if self.numP < 2: # noqa: PLR2004
self.doParallel = False
self.numP = 1
self.procID = 0
else:
- self.doParallel = True;
+ self.doParallel = True
+
+ print( # noqa: T201
+ 'WF: parType, mpiExec, numProc, do? numP, procID: ',
+ self.parType,
+ self.mpiExec,
+ self.numProc,
+ self.doParallel,
+ self.numP,
+ self.procID,
+ )
- print('WF: parType, mpiExec, numProc, do? numP, procID: ', self.parType, self.mpiExec, self.numProc, self.doParallel, self.numP, self.procID)
-
if reference_dir is not None:
self.reference_dir = Path(reference_dir)
else:
@@ -770,168 +786,161 @@ def __init__(self,
self.app_type_list = app_type_list
if self.run_type == 'parSETUP':
- self.app_dir_local = self.app_dir_remote
+ self.app_dir_local = self.app_dir_remote
# parse the application registry
- self.app_registry, self.default_values = (
- _parse_app_registry(registry_path = self.app_registry_file,
- app_types=self.app_type_list))
+ self.app_registry, self.default_values = _parse_app_registry(
+ registry_path=self.app_registry_file, app_types=self.app_type_list
+ )
# parse the input file
self.workflow_apps = {}
self.workflow_assets = {}
self._parse_inputs()
- def __del__(self):
-
- # if parallel setup, add command to run this scipt with parellel option
- if (self.parType == 'parSETUP'):
- inputArgs = sys.argv
+ def __del__(self): # noqa: D105
+ # if parallel setup, add command to run this script with parallel option
+ if self.parType == 'parSETUP':
+ inputArgs = sys.argv # noqa: N806
length = len(inputArgs)
i = 0
while i < length:
- if 'parSETUP' == inputArgs[i]:
+ if inputArgs[i] == 'parSETUP':
inputArgs[i] = 'parRUN'
i += 1
-
- inputArgs.insert(0,'python')
+
+ inputArgs.insert(0, 'python')
command = create_command(inputArgs)
- self.parCommandFile.write("\n# Writing Final command to run this application in parallel\n")
- self.parCommandFile.write(self.mpiExec + " -n " + str(self.numProc) + " " + command)
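+ # Append the final mpiexec command that re-runs this workflow with the 'parRUN' flag to the generated shell script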
+ self.parCommandFile.write(
+ '\n# Writing Final command to run this application in parallel\n'
+ )
+ self.parCommandFile.write(
+ self.mpiExec + ' -n ' + str(self.numProc) + ' ' + command
+ )
self.parCommandFile.close()
-
- def _register_app_type(self, app_type, app_dict, sub_app = ''):
-
- """
- Function to register the applications provided in the input file into
+ def _register_app_type(self, app_type, app_dict, sub_app=''): # noqa: C901
+ """Function to register the applications provided in the input file into
memory, i.e., the 'App registry'
Parameters
----------
-
app_type - the type of application
-
+
app_dict - dictionary containing app data
- """
-
- if type(app_dict) is not dict :
+ """ # noqa: D205, D400, D401
+ if type(app_dict) is not dict:
return
- else :
- for itmKey, itm in app_dict.items() :
- self._register_app_type(app_type,itm,itmKey)
-
+ else: # noqa: RET505
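+ # Recurse into nested dictionary entries so that sub-applications keyed by name (e.g., one per asset type) are registered individually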
+ for itmKey, itm in app_dict.items(): # noqa: N806
+ self._register_app_type(app_type, itm, itmKey)
# The provided application
app_in = app_dict.get('Application')
-
+
# Check to ensure the applications key is provided in the input
- if app_in == None :
+ if app_in == None: # noqa: E711
return
- err = 'Need to provide the \'Application\' key in ' + app_type
- raise WorkFlowInputError(err)
-
+ err = "Need to provide the 'Application' key in " + app_type
+ raise WorkFlowInputError(err) # noqa: DOC501
+
# Check to see if the app type is in the application registry
app_type_obj = self.app_registry.get(app_type)
-
- if app_in == None :
+
+ if app_in == None: # noqa: E711
+ return
+
+ if app_in == 'None':
return
- if app_in == 'None' :
- return
-
- if app_type_obj == None :
- err = 'The application ' +app_type+' is not found in the app registry'
- raise WorkFlowInputError(err)
-
+ if app_type_obj == None: # noqa: E711
+ err = 'The application ' + app_type + ' is not found in the app registry'
+ raise WorkFlowInputError(err) # noqa: DOC501
+
# Finally check to see if the app registry contains the provided application
- if app_type_obj.get(app_in) == None :
- err = 'Could not find the provided application in the internal app registry, app name: ' + app_in
- print("Error",app_in)
- raise WorkFlowInputError(err)
-
+ if app_type_obj.get(app_in) == None: # noqa: E711
+ err = (
+ 'Could not find the provided application in the internal app registry, app name: '
+ + app_in
+ )
+ print('Error', app_in) # noqa: T201
+ raise WorkFlowInputError(err) # noqa: DOC501
+
+ appData = app_dict['ApplicationData'] # noqa: N806
+ #
+ # for itmKey, itm in appData.items() :
+ # self._register_app_type(app_type,itm,itmKey)
- appData = app_dict['ApplicationData']
-#
-# for itmKey, itm in appData.items() :
-# self._register_app_type(app_type,itm,itmKey)
-
# Make a deep copy of the app object
app_object = deepcopy(app_type_obj.get(app_in))
-
+
# Check if the app object was created successfully
if app_object is None:
- raise WorkFlowInputError('Application deep copy failed for {}'.format(app_type))
-
+ raise WorkFlowInputError(f'Application deep copy failed for {app_type}') # noqa: DOC501, EM102, TRY003
+
# only assign the app to the workflow if it has an executable
if app_object.rel_path is None:
log_msg(
f'{app_dict["Application"]} is '
'a passive application (i.e., it does not invoke '
'any calculation within the workflow.',
- prepend_timestamp=False)
+ prepend_timestamp=False,
+ )
else:
app_object.set_pref(appData, self.reference_dir)
-
- if len(sub_app) == 0 :
- log_msg(f'For {app_type}',prepend_timestamp=False)
+
+ if len(sub_app) == 0:
+ log_msg(f'For {app_type}', prepend_timestamp=False)
self.workflow_apps[app_type] = app_object
- else :
-
- if self.workflow_apps.get(app_type,None) is None :
+ else:
+ if self.workflow_apps.get(app_type, None) is None:
self.workflow_apps[app_type] = {}
-
- log_msg(f'For {sub_app} in {app_type}',prepend_timestamp=False)
+
+ log_msg(f'For {sub_app} in {app_type}', prepend_timestamp=False)
self.workflow_apps[app_type][sub_app] = app_object
-
- log_msg(f' Registering application {app_dict["Application"]} ',prepend_timestamp=False)
-
-
-
+
+ log_msg(
+ f' Registering application {app_dict["Application"]} ',
+ prepend_timestamp=False,
+ )
+
def _register_asset(self, asset_type, asset_dict):
-
- """
- Function to register the assets provided in the input file into memory
-
+ """Function to register the assets provided in the input file into memory
+
Parameters
----------
-
asset_type - the type of asset, e.g., buildings, water pipelines
-
- app_dict - dictionary containing asset data
- """
-
+ asset_dict - dictionary containing asset data
+ """ # noqa: D400, D401
# Check to see if the app type is in the application registry
asset_object = self.asset_registry.get(asset_type)
-
- if asset_object is None :
- err = 'The asset ' +asset_type+' is not found in the asset registry. Supported assets are '+' '.join(self.asset_type_list)
- raise WorkFlowInputError(err)
-
-
+
+ if asset_object is None:
+ err = (
+ 'The asset '
+ + asset_type
+ + ' is not found in the asset registry. Supported assets are '
+ + ' '.join(self.asset_type_list)
+ )
+ raise WorkFlowInputError(err) # noqa: DOC501
+
# Add the incoming asset to the workflow assets
self.workflow_assets[asset_type] = asset_dict
-
- log_msg(f'Found asset: {asset_type} ',prepend_timestamp=False)
-
-
- def _parse_inputs(self):
-
- """
- Load the information about the workflow to run
- """
+ log_msg(f'Found asset: {asset_type} ', prepend_timestamp=False)
+ def _parse_inputs(self): # noqa: C901
+ """Load the information about the workflow to run""" # noqa: D400
log_msg('Parsing workflow input file')
# open input file
log_msg('Loading the json file...', prepend_timestamp=False)
- with open(self.input_file, 'r', encoding="utf-8") as f:
+ with open(self.input_file, encoding='utf-8') as f: # noqa: PTH123
input_data = json.load(f)
log_msg(' OK', prepend_timestamp=False)
@@ -939,33 +948,38 @@ def _parse_inputs(self):
if 'units' in input_data:
self.units = input_data['units']
- log_msg('The following units were specified: ',
- prepend_timestamp=False)
+ log_msg('The following units were specified: ', prepend_timestamp=False)
for key, unit in self.units.items():
- log_msg(' {}: {}'.format(key, unit), prepend_timestamp=False)
+ log_msg(f' {key}: {unit}', prepend_timestamp=False)
else:
self.units = None
- log_msg('No units specified; using Standard units.',
- prepend_timestamp=False)
+ log_msg(
+ 'No units specified; using Standard units.', prepend_timestamp=False
+ )
# store the specified output types
self.output_types = input_data.get('outputs', None)
if self.output_types is None:
default_output_types = {
- "AIM": False,
- "EDP": True,
- "DM": True,
- "DV": True,
- "every_realization": False
+ 'AIM': False,
+ 'EDP': True,
+ 'DM': True,
+ 'DV': True,
+ 'every_realization': False,
}
- log_msg("Missing output type specification, using default "
- "settings.", prepend_timestamp=False)
+ log_msg(
+ 'Missing output type specification, using default ' 'settings.',
+ prepend_timestamp=False,
+ )
self.output_types = default_output_types
else:
- log_msg("The following output_types were requested: ", prepend_timestamp=False)
+ log_msg(
+ 'The following output_types were requested: ',
+ prepend_timestamp=False,
+ )
for out_type, flag in self.output_types.items():
if flag:
log_msg(f' {out_type}', prepend_timestamp=False)
@@ -975,28 +989,31 @@ def _parse_inputs(self):
if default_values is None:
default_values = {}
-
+
# workflow input is input file
- default_values['workflowInput']=os.path.basename(self.input_file)
+ default_values['workflowInput'] = os.path.basename(self.input_file) # noqa: PTH119
if default_values is not None:
-
- log_msg("The following workflow defaults were overwritten:", prepend_timestamp=False)
+ log_msg(
+ 'The following workflow defaults were overwritten:',
+ prepend_timestamp=False,
+ )
for key, value in default_values.items():
-
- if key in self.default_values.keys():
+ if key in self.default_values.keys(): # noqa: SIM118
self.default_values[key] = value
else:
self.default_values.update({key: value})
- log_msg(f" {key}: {value}", prepend_timestamp=False)
+ log_msg(f' {key}: {value}', prepend_timestamp=False)
# parse the shared data in the input file
self.shared_data = {}
- for shared_key in ['RegionalEvent',]:
+ for shared_key in [
+ 'RegionalEvent',
+ ]:
value = input_data.get(shared_key, None)
- if value != None:
+ if value != None: # noqa: E711
self.shared_data.update({shared_key: value})
# parse the location of the run_dir
@@ -1004,316 +1021,387 @@ def _parse_inputs(self):
self.run_dir = self.working_dir
elif 'runDir' in input_data:
self.run_dir = Path(input_data['runDir'])
- #else:
+ # else:
# raise WorkFlowInputError('Need a runDir entry in the input file')
# parse the location(s) of the applications directory
if 'localAppDir' in input_data:
self.app_dir_local = input_data['localAppDir']
- #else:
+ # else:
# raise WorkFlowInputError('Need a localAppDir entry in the input file')
if 'remoteAppDir' in input_data:
self.app_dir_remote = Path(input_data['remoteAppDir'])
else:
self.app_dir_remote = self.app_dir_local
- log_msg('remoteAppDir not specified. Using the value provided for '
- 'localAppDir instead.', prepend_timestamp=False)
+ log_msg(
+ 'remoteAppDir not specified. Using the value provided for '
+ 'localAppDir instead.',
+ prepend_timestamp=False,
+ )
- if self.app_dir_local == "" and self.app_dir_remote != "":
+ if self.app_dir_local == '' and self.app_dir_remote != '': # noqa: PLC1901
self.app_dir_local = self.app_dir_remote
- if self.app_dir_remote == "" and self.app_dir_local != "":
+ if self.app_dir_remote == '' and self.app_dir_local != '': # noqa: PLC1901
self.app_dir_remote = self.app_dir_local
-
if 'referenceDir' in input_data:
self.reference_dir = input_data['referenceDir']
for loc_name, loc_val in zip(
- ['Run dir', 'Local applications dir','Remote applications dir',
- 'Reference dir'],
- [self.run_dir, self.app_dir_local, self.app_dir_remote,
- self.reference_dir]):
- log_msg('{} : {}'.format(loc_name, loc_val),
- prepend_timestamp=False)
+ [
+ 'Run dir',
+ 'Local applications dir',
+ 'Remote applications dir',
+ 'Reference dir',
+ ],
+ [
+ self.run_dir,
+ self.app_dir_local,
+ self.app_dir_remote,
+ self.reference_dir,
+ ],
+ ):
+ log_msg(f'{loc_name} : {loc_val}', prepend_timestamp=False)
# get the list of requested applications
- log_msg('\nParsing the requested list of applications...', prepend_timestamp=False)
-
+ log_msg(
+ '\nParsing the requested list of applications...',
+ prepend_timestamp=False,
+ )
+
if 'Applications' in input_data:
requested_apps = input_data['Applications']
else:
- raise WorkFlowInputError('Need an Applications entry in the input file')
+ raise WorkFlowInputError('Need an Applications entry in the input file') # noqa: DOC501, EM101, TRY003
# create the requested applications
# Events are special because they are in an array
if 'Events' in requested_apps:
if len(requested_apps['Events']) > 1:
- raise WorkFlowInputError('Currently, WHALE only supports a single event.')
- for event in requested_apps['Events'][:1]: #this limitation can be relaxed in the future
+ raise WorkFlowInputError( # noqa: DOC501, TRY003
+ 'Currently, WHALE only supports a single event.' # noqa: EM101
+ )
+ for event in requested_apps['Events'][
+ :1
+ ]: # this limitation can be relaxed in the future
if 'EventClassification' in event:
- eventClassification = event['EventClassification']
- if eventClassification in ['Earthquake', 'Wind', 'Hurricane', 'Flood', 'Hydro', 'Tsunami', 'Surge', 'Lahar'] :
-
- app_object = deepcopy(self.app_registry['Event'].get(event['Application']))
+ eventClassification = event['EventClassification'] # noqa: N806
+ if eventClassification in [ # noqa: PLR6201
+ 'Earthquake',
+ 'Wind',
+ 'Hurricane',
+ 'Flood',
+ 'Hydro',
+ 'Tsunami',
+ 'Surge',
+ 'Lahar',
+ ]:
+ app_object = deepcopy(
+ self.app_registry['Event'].get(event['Application'])
+ )
if app_object is None:
- raise WorkFlowInputError('Application entry missing for {}'.format('Events'))
+ raise WorkFlowInputError( # noqa: DOC501
+ 'Application entry missing for {}'.format('Events') # noqa: EM103
+ )
- app_object.set_pref(event['ApplicationData'],self.reference_dir)
+ app_object.set_pref(
+ event['ApplicationData'], self.reference_dir
+ )
self.workflow_apps['Event'] = app_object
else:
- raise WorkFlowInputError(
- ('Currently, only earthquake and wind events are supported. '
- 'EventClassification must be Earthquake, not {}'
- ).format(eventClassification))
+ raise WorkFlowInputError( # noqa: DOC501, TRY003
+ 'Currently, only earthquake and wind events are supported. ' # noqa: EM102
+ f'EventClassification must be Earthquake, not {eventClassification}'
+ )
else:
- raise WorkFlowInputError('Need Event Classification')
-
+ raise WorkFlowInputError('Need Event Classification') # noqa: DOC501, EM101, TRY003
+
# Figure out what types of assets are coming into the analysis
- assetObjs = requested_apps.get('Assets', None)
-
+ assetObjs = requested_apps.get('Assets', None) # noqa: N806
+
# Check if an asset object exists
- if assetObjs != None :
- #raise WorkFlowInputError('Need to define the assets for analysis')
-
+ if assetObjs != None: # noqa: E711
+ # raise WorkFlowInputError('Need to define the assets for analysis')
+
# Check if asset list is not empty
- if len(assetObjs) == 0 :
- raise WorkFlowInputError('The provided asset object is empty')
-
+ if len(assetObjs) == 0:
+ raise WorkFlowInputError('The provided asset object is empty') # noqa: DOC501, EM101, TRY003
+
# Iterate through the asset objects
- for assetObj in assetObjs :
+ for assetObj in assetObjs: # noqa: N806
self._register_asset(assetObj, assetObjs[assetObj])
-
-
+
# Iterate through the app type list which is set when you instantiate the workflow
for app_type in self.app_type_list:
-
# If the app_type is not an event
if app_type == 'Event':
continue
-
- # Check to make sure the required app type is in the list of requested apps
- # i.e., the apps in provided in the input.json file
- if app_type in requested_apps:
+ # Check to make sure the required app type is in the list of requested apps
+ # i.e., the apps provided in the input.json file
+ if app_type in requested_apps:
self._register_app_type(app_type, requested_apps[app_type])
-
for app_type in self.optional_apps:
-
- if (app_type not in self.app_registry) and (app_type in self.app_type_list):
+ if (app_type not in self.app_registry) and (
+ app_type in self.app_type_list
+ ):
self.app_type_list.remove(app_type)
-
-
- def recursiveLog(app_type, app_object) :
-
- if type(app_object) is dict :
- for sub_app_type, sub_object in app_object.items() :
- log_msg(' {} : '.format(app_type), prepend_timestamp=False)
- recursiveLog(sub_app_type,sub_object)
- else :
- log_msg(' {} : {}'.format(app_type, app_object.name), prepend_timestamp=False)
+
+ def recursiveLog(app_type, app_object): # noqa: N802
+ if type(app_object) is dict:
+ for sub_app_type, sub_object in app_object.items():
+ log_msg(f' {app_type} : ', prepend_timestamp=False)
+ recursiveLog(sub_app_type, sub_object)
+ else:
+ log_msg(
+ f' {app_type} : {app_object.name}',
+ prepend_timestamp=False,
+ )
log_msg('\nRequested workflow:', prepend_timestamp=False)
-
+
for app_type, app_object in self.workflow_apps.items():
recursiveLog(app_type, app_object)
log_msg('\nSuccessfully parsed workflow inputs', prepend_timestamp=False)
log_div()
-
def create_asset_files(self):
- """
- Short description
+ """Short description
Longer description
Parameters
----------
- """
-
+ """ # noqa: D400, D414
log_msg('Creating files for individual assets')
-
+
# Open the input file - we'll need it later
- with open(self.input_file, 'r', encoding="utf-8") as f:
- input_data = json.load(f)
+ with open(self.input_file, encoding='utf-8') as f: # noqa: PTH123
+ input_data = json.load(f) # noqa: F841
# Get the workflow assets
- assetsWfapps = self.workflow_apps.get('Assets', None)
- assetWfList = self.workflow_assets.keys()
-
- # TODO: not elegant code, fix later
+ assetsWfapps = self.workflow_apps.get('Assets', None) # noqa: N806
+ assetWfList = self.workflow_assets.keys() # noqa: N806, F841
+
+ # TODO: not elegant code, fix later # noqa: TD002
os.chdir(self.run_dir)
-
- assetFilesList = {}
- #Iterate through the asset workflow apps
- for asset_type, asset_app in assetsWfapps.items() :
-
+ assetFilesList = {} # noqa: N806
+
+ # Iterate through the asset workflow apps
+ for asset_type, asset_app in assetsWfapps.items():
asset_folder = posixpath.join(self.run_dir, asset_type)
-
+
# Make a new directory for each asset
- os.mkdir(asset_folder)
-
+ os.mkdir(asset_folder) # noqa: PTH102
+
asset_file = posixpath.join(asset_folder, asset_type) + '.json'
-
- assetPrefs = asset_app.pref
-
+
+ assetPrefs = asset_app.pref # noqa: N806, F841
+
# filter assets (if needed)
asset_filter = asset_app.pref.get('filter', None)
- if asset_filter == "":
+ if asset_filter == '': # noqa: PLC1901
del asset_app.pref['filter']
asset_filter = None
if asset_filter is not None:
atag = [bs.split('-') for bs in asset_filter.split(',')]
- asset_file = Path(str(asset_file).replace(".json", f"{atag[0][0]}-{atag[-1][-1]}.json"))
-
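+ # Tag the asset file with the first and last asset id in the filter, e.g., filter '2-5,10-12' yields <asset_type>2-12.json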
+ asset_file = Path(
+ str(asset_file).replace(
+ '.json', f'{atag[0][0]}-{atag[-1][-1]}.json'
+ )
+ )
# store the path to the asset file
-
+
assetFilesList[asset_type] = str(asset_file)
-
+
for output in asset_app.outputs:
if output['id'] == 'assetFile':
output['default'] = asset_file
- asset_command_list = asset_app.get_command_list(app_path = self.app_dir_local)
+ asset_command_list = asset_app.get_command_list(
+ app_path=self.app_dir_local
+ )
# The GEOJSON_TO_ASSET application is special because it can be used
# for multiple asset types. "asset_type" needs to be added so the app
- # knows which asset_type it's processing.
- if asset_app.name == 'GEOJSON_TO_ASSET' or asset_app.name == 'INP_FILE':
- asset_command_list = asset_command_list + [u'--assetType',\
- asset_type, u'--inputJsonFile', self.input_file]
+ # knows which asset_type it's processing.
+ if asset_app.name == 'GEOJSON_TO_ASSET' or asset_app.name == 'INP_FILE': # noqa: PLR1714
+ asset_command_list = asset_command_list + [ # noqa: PLR6104, RUF005
+ '--assetType',
+ asset_type,
+ '--inputJsonFile',
+ self.input_file,
+ ]
- asset_command_list.append(u'--getRV')
+ asset_command_list.append('--getRV')
# Create the asset command list
command = create_command(asset_command_list)
-
- if (self.parType == 'parSETUP'):
-
- log_msg('\nWriting Asset Info command for asset type: '+asset_type+ ' to script', prepend_timestamp=False)
- self.parCommandFile.write("\n# Perform Asset File Creation for type: " + asset_type + " \n")
- if asset_app.runsParallel == False:
- self.parCommandFile.write(command + "\n")
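+ # In parSETUP mode, only write the asset-creation command to the parallel script; otherwise run it immediately below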
+ if self.parType == 'parSETUP':
+ log_msg(
+ '\nWriting Asset Info command for asset type: '
+ + asset_type
+ + ' to script',
+ prepend_timestamp=False,
+ )
+ self.parCommandFile.write(
+ '\n# Perform Asset File Creation for type: ' + asset_type + ' \n'
+ )
+
+ if asset_app.runsParallel == False: # noqa: E712
+ self.parCommandFile.write(command + '\n')
else:
- self.parCommandFile.write(self.mpiExec + " -n " + str(self.numProc) + " " + command + "\n")
+ self.parCommandFile.write(
+ self.mpiExec
+ + ' -n '
+ + str(self.numProc)
+ + ' '
+ + command
+ + '\n'
+ )
else:
+ log_msg(
+ '\nCreating initial asset information model (AIM) files for '
+ + asset_type,
+ prepend_timestamp=False,
+ )
+ log_msg(
+ f'\n{command}\n',
+ prepend_timestamp=False,
+ prepend_blank_space=False,
+ )
- log_msg('\nCreating initial asset information model (AIM) files for '+asset_type, prepend_timestamp=False)
- log_msg('\n{}\n'.format(command), prepend_timestamp=False, prepend_blank_space=False)
-
result, returncode = run_command(command)
-
+
# Check if the command was completed successfully
- if returncode != 0 :
- print(result)
- raise WorkFlowInputError('Failed to create the AIM file for '+asset_type)
- else :
- log_msg('AIM files created for '+asset_type+'\n', prepend_timestamp=False)
-
-
- log_msg('Output: '+str(returncode), prepend_timestamp=False, prepend_blank_space=False)
- log_msg('\n{}\n'.format(result), prepend_timestamp=False, prepend_blank_space=False)
- log_msg('\nAsset Information Model (AIM) files successfully created.', prepend_timestamp=False)
-
- log_div()
-
- return assetFilesList
+ if returncode != 0:
+ print(result) # noqa: T201
+ raise WorkFlowInputError( # noqa: DOC501
+ 'Failed to create the AIM file for ' + asset_type
+ )
+ else: # noqa: RET506
+ log_msg(
+ 'AIM files created for ' + asset_type + '\n',
+ prepend_timestamp=False,
+ )
+
+ log_msg(
+ 'Output: ' + str(returncode),
+ prepend_timestamp=False,
+ prepend_blank_space=False,
+ )
+ log_msg(
+ f'\n{result}\n',
+ prepend_timestamp=False,
+ prepend_blank_space=False,
+ )
+ log_msg(
+ '\nAsset Information Model (AIM) files successfully created.',
+ prepend_timestamp=False,
+ )
+ log_div()
- def augment_asset_files(self):
+ return assetFilesList
- """
- Short description
+ def augment_asset_files(self): # noqa: C901
+ """Short description
Longer description
Parameters
----------
- """
-
+ """ # noqa: D400, D414
log_msg('Augmenting files for individual assets for Workflow')
# print('INPUT FILE:', self.input_file)
# Open the input file - we'll need it later
- with open(self.input_file, 'r', encoding="utf-8") as f:
+ with open(self.input_file, encoding='utf-8') as f: # noqa: PTH123
input_data = json.load(f)
# Get the workflow assets
- assetsWfapps = self.workflow_apps.get('Assets', None)
- assetWfList = self.workflow_assets.keys()
-
- # TODO: not elegant code, fix later
+ assetsWfapps = self.workflow_apps.get('Assets', None) # noqa: N806
+ assetWfList = self.workflow_assets.keys() # noqa: N806, F841
+
+ # TODO: not elegant code, fix later # noqa: TD002
os.chdir(self.run_dir)
-
- assetFilesList = {}
- #Iterate through the asset workflow apps
- for asset_type, asset_app in assetsWfapps.items() :
-
+ assetFilesList = {} # noqa: N806
+
+ # Iterate through the asset workflow apps
+ for asset_type, asset_app in assetsWfapps.items(): # noqa: PLR1702
asset_folder = posixpath.join(self.run_dir, asset_type)
-
+
asset_file = posixpath.join(asset_folder, asset_type) + '.json'
-
- assetPrefs = asset_app.pref
-
+
+ assetPrefs = asset_app.pref # noqa: N806, F841
+
# filter assets (if needed)
asset_filter = asset_app.pref.get('filter', None)
- if asset_filter == "":
+ if asset_filter == '': # noqa: PLC1901
del asset_app.pref['filter']
asset_filter = None
if asset_filter is not None:
atag = [bs.split('-') for bs in asset_filter.split(',')]
- asset_file = Path(str(asset_file).replace(".json", f"{atag[0][0]}-{atag[-1][-1]}.json"))
-
+ asset_file = Path(
+ str(asset_file).replace(
+ '.json', f'{atag[0][0]}-{atag[-1][-1]}.json'
+ )
+ )
# store the path to the asset file
assetFilesList[asset_type] = str(asset_file)
-
+
for output in asset_app.outputs:
if output['id'] == 'assetFile':
output['default'] = asset_file
# Check if the command was completed successfully
# FMK check AIM file exists
-
+
# Append workflow settings to the BIM file
log_msg('Appending additional settings to the AIM files...\n')
-
- with open(asset_file, 'r', encoding="utf-8") as f:
+
+ with open(asset_file, encoding='utf-8') as f: # noqa: PTH123
asset_data = json.load(f)
# extract the extra information from the input file for this asset type
- extra_input = {
- 'Applications': {}
- }
-
- if self.parType == "parRUN":
- extra_input['parType'] = self.parType;
- extra_input['mpiExec'] = self.mpiExec;
- extra_input['numProc'] = self.numProc;
-
- apps_of_interest = ['Events', 'Modeling', 'EDP', 'Simulation', 'UQ', 'DL']
+ extra_input = {'Applications': {}}
+
+ if self.parType == 'parRUN':
+ extra_input['parType'] = self.parType
+ extra_input['mpiExec'] = self.mpiExec
+ extra_input['numProc'] = self.numProc
+
+ apps_of_interest = [
+ 'Events',
+ 'Modeling',
+ 'EDP',
+ 'Simulation',
+ 'UQ',
+ 'DL',
+ ]
for app_type in apps_of_interest:
-
# Start with the app data under Applications
- if app_type in input_data['Applications'].keys():
+ if app_type in input_data['Applications'].keys(): # noqa: SIM118
if app_type == 'Events':
# Events are stored in an array, so they require special treatment
app_data_array = input_data['Applications'][app_type]
@@ -1321,7 +1409,6 @@ def augment_asset_files(self):
extra_input['Applications'][app_type] = []
for app_data in app_data_array:
-
if 'Application' in app_data:
app_info = app_data
elif asset_type in app_data:
@@ -1341,7 +1428,7 @@ def augment_asset_files(self):
extra_input['Applications'][app_type] = app_info
# Then, look at the app data in the root of the input json
- if app_type in input_data.keys():
+ if app_type in input_data.keys(): # noqa: SIM118
if app_type == 'Events':
# Events are stored in an array, so they require special treatment
app_data_array = input_data[app_type]
@@ -1349,7 +1436,6 @@ def augment_asset_files(self):
extra_input[app_type] = []
for app_data in app_data_array:
-
if asset_type in app_data:
extra_input[app_type].append(app_data[asset_type])
@@ -1362,64 +1448,66 @@ def augment_asset_files(self):
count = 0
for asst in asset_data:
-
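+ # Round-robin work split: this MPI rank (procID) handles every numP-th asset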
if count % self.numP == self.procID:
-
- AIM_file = asst['file']
+ AIM_file = asst['file'] # noqa: N806
# Open the AIM file and add the unit information to it
# print(count, self.numP, self.procID, AIM_file)
-
- with open(AIM_file, 'r', encoding="utf-8") as f:
- AIM_data = json.load(f)
- if 'DefaultValues' in input_data.keys():
- AIM_data.update({'DefaultValues':input_data['DefaultValues']})
+ with open(AIM_file, encoding='utf-8') as f: # noqa: PTH123
+ AIM_data = json.load(f) # noqa: N806
+
+ if 'DefaultValues' in input_data.keys(): # noqa: SIM118
+ AIM_data.update(
+ {'DefaultValues': input_data['DefaultValues']}
+ )
- if 'commonFileDir' in input_data.keys():
- commonFileDir=input_data['commonFileDir']
+ if 'commonFileDir' in input_data.keys(): # noqa: SIM118
+ commonFileDir = input_data['commonFileDir'] # noqa: N806
if self.inputFilePath not in commonFileDir:
- commonFileDir = os.path.join(self.inputFilePath,input_data['commonFileDir'])
- AIM_data.update({'commonFileDir':commonFileDir})
-
-
- if 'remoteAppDir' in input_data.keys():
- AIM_data.update({'remoteAppDir':input_data['remoteAppDir']})
-
- if 'localAppDir' in input_data.keys():
- AIM_data.update({'localAppDir':input_data['localAppDir']})
-
- if self.units != None:
+ commonFileDir = os.path.join( # noqa: PTH118, N806
+ self.inputFilePath, input_data['commonFileDir']
+ )
+ AIM_data.update({'commonFileDir': commonFileDir})
+
+ if 'remoteAppDir' in input_data.keys(): # noqa: SIM118
+ AIM_data.update({'remoteAppDir': input_data['remoteAppDir']})
+
+ if 'localAppDir' in input_data.keys(): # noqa: SIM118
+ AIM_data.update({'localAppDir': input_data['localAppDir']})
+
+ if self.units != None: # noqa: E711
AIM_data.update({'units': self.units})
-
- # TODO: remove this after all apps have been updated to use the
+
+ # TODO: remove this after all apps have been updated to use the # noqa: TD002
# above location to get units
AIM_data['GeneralInformation'].update({'units': self.units})
-
+
AIM_data.update({'outputs': self.output_types})
-
+
for key, value in self.shared_data.items():
AIM_data[key] = value
-
+
# Save the asset type
AIM_data['assetType'] = asset_type
AIM_data.update(extra_input)
-
- with open(AIM_file, 'w', encoding="utf-8") as f:
+
+ with open(AIM_file, 'w', encoding='utf-8') as f: # noqa: PTH123
json.dump(AIM_data, f, indent=2)
-
- count = count + 1
-
- log_msg('\nAsset Information Model (AIM) files successfully augmented.', prepend_timestamp=False)
+
+ count = count + 1 # noqa: PLR6104
+
+ log_msg(
+ '\nAsset Information Model (AIM) files successfully augmented.',
+ prepend_timestamp=False,
+ )
log_div()
-
- return assetFilesList
- def perform_system_performance_assessment(self, asset_type):
+ return assetFilesList
- """
- For an asset type run the system level performance assesment application
+ def perform_system_performance_assessment(self, asset_type):
+ """For an asset type, run the system-level performance assessment application
Longer description
@@ -1428,59 +1516,82 @@ def perform_system_performance_assessment(self, asset_type):
asset_type: string
Asset type to run perform system assessment of
- """
-
- if 'SystemPerformance' in self.workflow_apps.keys():
+ """ # noqa: D400
+ if 'SystemPerformance' in self.workflow_apps.keys(): # noqa: SIM118
performance_app = self.workflow_apps['SystemPerformance'][asset_type]
else:
- log_msg(f'No Performance application to run for asset type: {asset_type}.', prepend_timestamp=False)
+ log_msg(
+ f'No Performance application to run for asset type: {asset_type}.',
+ prepend_timestamp=False,
+ )
log_div()
return False
- if performance_app.rel_path == None:
- log_msg(f'No Performance application to run for asset type: {asset_type}.', prepend_timestamp=False)
- log_div()
+ if performance_app.rel_path == None: # noqa: E711
+ log_msg(
+ f'No Performance application to run for asset type: {asset_type}.',
+ prepend_timestamp=False,
+ )
+ log_div()
return False
- log_msg('Performing System Performance Application for asset type: ' + asset_type, prepend_timestamp=False)
+ log_msg(
+ 'Performing System Performance Application for asset type: '
+ + asset_type,
+ prepend_timestamp=False,
+ )
log_div()
- app_command_list = performance_app.get_command_list(app_path = self.app_dir_local)
+ app_command_list = performance_app.get_command_list(
+ app_path=self.app_dir_local
+ )
#
# defaults added to a system performance app are asset_type, input_dir and running_parallel (default False)
#
-
- #app_command_list.append('--asset_type')
- #app_command_list.append(asset_type)
+
+ # app_command_list.append('--asset_type')
+ # app_command_list.append(asset_type)
app_command_list.append('--input')
- app_command_list.append(self.input_file)
- #app_command_list.append('--working_dir')
- #app_command_list.append(self.working_dir)
-
-
+ app_command_list.append(self.input_file)
+ # app_command_list.append('--working_dir')
+ # app_command_list.append(self.working_dir)
+
# Sina added this part for parallel run in REWET
- if (self.parType == 'parSETUP'):
- log_msg('\nParallel settings for System Performance for asset type:' + asset_type, prepend_timestamp=False)
+ if self.parType == 'parSETUP':
+ log_msg(
+ '\nParallel settings for System Performance for asset type:'
+ + asset_type,
+ prepend_timestamp=False,
+ )
app_command_list.append('--par')
-
+
command = create_command(app_command_list)
log_msg('Output: ', prepend_timestamp=False, prepend_blank_space=False)
- log_msg('\n{}\n'.format(command), prepend_timestamp=False, prepend_blank_space=False)
-
- result, returncode = run_command(command)
- log_msg('\n{}\n'.format(result), prepend_timestamp=False, prepend_blank_space=False)
-
- log_msg('System Performance Application Completed for asset type: ' + asset_type, prepend_timestamp=False)
-
-
- # end of Sina's odifications for parallel run
-
-
+ log_msg(
+ f'\n{command}\n',
+ prepend_timestamp=False,
+ prepend_blank_space=False,
+ )
+
+ result, returncode = run_command(command) # noqa: F841
+ log_msg(
+ f'\n{result}\n',
+ prepend_timestamp=False,
+ prepend_blank_space=False,
+ )
+
+ log_msg(
+ 'System Performance Application Completed for asset type: ' + asset_type,
+ prepend_timestamp=False,
+ )
+
+ # end of Sina's modifications for parallel run
+
# if (self.parType == 'parSETUP'):
- # log_msg('\nWriting System Performance application for asset type:' + asset_type, prepend_timestamp=False)
+ # log_msg('\nWriting System Performance application for asset type:' + asset_type, prepend_timestamp=False)
# self.parCommandFile.write("\n# Writing System Performance application for asset type:" + asset_type +"\n")
# if performance_app.runsParallel == False:
@@ -1489,274 +1600,322 @@ def perform_system_performance_assessment(self, asset_type):
# self.parCommandFile.write(self.mpiExec + " -n " + str(self.numProc) + " " + command + " --running_parallel True\n")
# else:
-
+
# log_msg('\n{}\n'.format(command), prepend_timestamp=False,
# prepend_blank_space=False)
- # result, returncode = run_command(command)
+ # result, returncode = run_command(command)
# log_msg('Output: ', prepend_timestamp=False, prepend_blank_space=False)
# log_msg('\n{}\n'.format(result), prepend_timestamp=False, prepend_blank_space=False)
# log_msg('System Performance Application Completed for asset type: ' + asset_type, prepend_timestamp=False)
-
+
log_div()
return True
-
def perform_regional_event(self):
-
- """
- Run an application to simulate a regional-scale hazard event.
+ """Run an application to simulate a regional-scale hazard event.
Longer description
Parameters
----------
- """
-
+ """ # noqa: D414
log_msg('Simulating regional event...')
- if 'RegionalEvent' in self.workflow_apps.keys():
+ if 'RegionalEvent' in self.workflow_apps.keys(): # noqa: SIM118
reg_event_app = self.workflow_apps['RegionalEvent']
else:
log_msg('No Regional Event Application to run.', prepend_timestamp=False)
log_div()
- return;
+ return
- if reg_event_app.rel_path == None:
+ if reg_event_app.rel_path == None: # noqa: E711
log_msg('No regional Event Application to run.', prepend_timestamp=False)
- log_div()
- return;
+ log_div()
+ return
- reg_event_command_list = reg_event_app.get_command_list(app_path = self.app_dir_local)
+ reg_event_command_list = reg_event_app.get_command_list(
+ app_path=self.app_dir_local
+ )
command = create_command(reg_event_command_list)
- if (self.parType == 'parSETUP'):
-
- log_msg('\nWriting Regional Event Command to script', prepend_timestamp=False)
- self.parCommandFile.write("\n# Perform Regional Event Simulation\n")
+ if self.parType == 'parSETUP':
+ log_msg(
+ '\nWriting Regional Event Command to script', prepend_timestamp=False
+ )
+ self.parCommandFile.write('\n# Perform Regional Event Simulation\n')
- if reg_event_app.runsParallel == False:
- self.parCommandFile.write(command + "\n")
+ if reg_event_app.runsParallel == False: # noqa: E712
+ self.parCommandFile.write(command + '\n')
else:
- self.parCommandFile.write(self.mpiExec + " -n " + str(self.numProc) + " " + command + "\n")
+ self.parCommandFile.write(
+ self.mpiExec + ' -n ' + str(self.numProc) + ' ' + command + '\n'
+ )
else:
-
- log_msg('\n{}\n'.format(command), prepend_timestamp=False,
- prepend_blank_space=False)
+ log_msg(
+ f'\n{command}\n',
+ prepend_timestamp=False,
+ prepend_blank_space=False,
+ )
- result, returncode = run_command(command)
+ result, returncode = run_command(command) # noqa: F841
log_msg('Output: ', prepend_timestamp=False, prepend_blank_space=False)
- log_msg('\n{}\n'.format(result), prepend_timestamp=False, prepend_blank_space=False)
+ log_msg(
+ f'\n{result}\n',
+ prepend_timestamp=False,
+ prepend_blank_space=False,
+ )
- log_msg('Regional event successfully simulated.', prepend_timestamp=False)
+ log_msg(
+ 'Regional event successfully simulated.', prepend_timestamp=False
+ )
log_div()
- def perform_regional_recovery(self, asset_keys):
- """
- Run an application to simulate regional recovery
+ def perform_regional_recovery(self, asset_keys): # noqa: ARG002
+ """Run an application to simulate regional recovery
Longer description
Parameters
----------
- """
-
+ """ # noqa: D400, D414
log_msg('Simulating Regional Recovery ...')
- if 'Recovery' in self.workflow_apps.keys():
+ if 'Recovery' in self.workflow_apps.keys(): # noqa: SIM118
reg_recovery_app = self.workflow_apps['Recovery']
else:
log_msg('No Recovery Application to run.', prepend_timestamp=False)
log_div()
- return;
+ return
- if reg_recovery_app.rel_path == None:
+ if reg_recovery_app.rel_path == None: # noqa: E711
log_msg('No regional Event Application to run.', prepend_timestamp=False)
- log_div()
- return;
+ log_div()
+ return
- reg_recovery_command_list = reg_recovery_app.get_command_list(app_path = self.app_dir_local)
+ reg_recovery_command_list = reg_recovery_app.get_command_list(
+ app_path=self.app_dir_local
+ )
command = create_command(reg_recovery_command_list)
- if (self.parType == 'parSETUP'):
-
- log_msg('\nWriting Regional Event Command to script', prepend_timestamp=False)
- self.parCommandFile.write("\n# Perform Regional Recovery Simulation\n")
+ if self.parType == 'parSETUP':
+ log_msg(
+ '\nWriting Regional Recovery Command to script', prepend_timestamp=False
+ )
+ self.parCommandFile.write('\n# Perform Regional Recovery Simulation\n')
- if reg_recovery_app.runsParallel == False:
- self.parCommandFile.write(command + "\n")
+ if reg_recovery_app.runsParallel == False: # noqa: E712
+ self.parCommandFile.write(command + '\n')
else:
- self.parCommandFile.write(self.mpiExec + " -n " + str(self.numProc) + " " + command + "\n")
+ self.parCommandFile.write(
+ self.mpiExec + ' -n ' + str(self.numProc) + ' ' + command + '\n'
+ )
else:
-
- log_msg('\n{}\n'.format(command), prepend_timestamp=False,
- prepend_blank_space=False)
+ log_msg(
+ f'\n{command}\n',
+ prepend_timestamp=False,
+ prepend_blank_space=False,
+ )
- result, returncode = run_command(command)
+ result, returncode = run_command(command) # noqa: F841
log_msg('Output: ', prepend_timestamp=False, prepend_blank_space=False)
- log_msg('\n{}\n'.format(result), prepend_timestamp=False, prepend_blank_space=False)
+ log_msg(
+ f'\n{result}\n',
+ prepend_timestamp=False,
+ prepend_blank_space=False,
+ )
- log_msg('Regional Recovery Successfully Simulated.', prepend_timestamp=False)
+ log_msg(
+ 'Regional Recovery Successfully Simulated.', prepend_timestamp=False
+ )
log_div()
-
- def perform_regional_mapping(self, AIM_file_path, assetType, doParallel=True):
-
- """
- Performs the regional mapping between the asset and a hazard event.
-
+ def perform_regional_mapping(self, AIM_file_path, assetType, doParallel=True): # noqa: FBT002, N803
+ """Performs the regional mapping between the asset and a hazard event.
Parameters
----------
- """
-
+ """ # noqa: D401, D414
log_msg('', prepend_timestamp=False, prepend_blank_space=False)
log_msg('Creating regional mapping...')
reg_mapping_app = self.workflow_apps['RegionalMapping'][assetType]
- # TODO: not elegant code, fix later
+ # TODO: not elegant code, fix later # noqa: TD002
for input_ in reg_mapping_app.inputs:
if input_['id'] == 'assetFile':
input_['default'] = str(AIM_file_path)
- reg_mapping_app.inputs.append({
- 'id': 'filenameEVENTgrid',
- 'type': 'path',
- 'default': resolve_path(
- self.shared_data['RegionalEvent']['eventFile'],
- self.reference_dir)
- })
+ reg_mapping_app.inputs.append(
+ {
+ 'id': 'filenameEVENTgrid',
+ 'type': 'path',
+ 'default': resolve_path(
+ self.shared_data['RegionalEvent']['eventFile'],
+ self.reference_dir,
+ ),
+ }
+ )
reg_mapping_command_list = reg_mapping_app.get_command_list(
- app_path = self.app_dir_local)
+ app_path=self.app_dir_local
+ )
command = create_command(reg_mapping_command_list)
- log_msg('\n{}\n'.format(command), prepend_timestamp=False, prepend_blank_space=False)
+ log_msg(
+ f'\n{command}\n',
+ prepend_timestamp=False,
+ prepend_blank_space=False,
+ )
- if (self.parType == 'parSETUP'):
+ if self.parType == 'parSETUP':
+ self.parCommandFile.write(
+ '\n# Regional Mapping for asset type: ' + assetType + ' \n'
+ )
- self.parCommandFile.write("\n# Regional Mapping for asset type: "
- + assetType + " \n")
-
- if reg_mapping_app.runsParallel == False:
- self.parCommandFile.write(command + "\n")
+ if reg_mapping_app.runsParallel == False: # noqa: E712
+ self.parCommandFile.write(command + '\n')
else:
- self.parCommandFile.write(self.mpiExec + " -n " + str(self.numProc) + " " + command + " --doParallel " + str(doParallel) + " -m " + self.mpiExec + " --numP " + str(self.numProc) + "\n")
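+ # Launch the mapping app under mpiexec and forward the parallel settings (--doParallel, -m, --numP) to it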
+ self.parCommandFile.write(
+ self.mpiExec
+ + ' -n '
+ + str(self.numProc)
+ + ' '
+ + command
+ + ' --doParallel '
+ + str(doParallel)
+ + ' -m '
+ + self.mpiExec
+ + ' --numP '
+ + str(self.numProc)
+ + '\n'
+ )
+
+ log_msg(
+ 'Regional mapping command added to parallel script.',
+ prepend_timestamp=False,
+ )
- log_msg('Regional mapping command added to parallel script.', prepend_timestamp=False)
-
else:
-
result, returncode = run_command(command)
- log_msg('Output: ' + str(returncode), prepend_timestamp=False, prepend_blank_space=False)
- log_msg('\n{}\n'.format(result), prepend_timestamp=False, prepend_blank_space=False)
+ log_msg(
+ 'Output: ' + str(returncode),
+ prepend_timestamp=False,
+ prepend_blank_space=False,
+ )
+ log_msg(
+ f'\n{result}\n',
+ prepend_timestamp=False,
+ prepend_blank_space=False,
+ )
+
+ log_msg(
+ 'Regional mapping successfully created.', prepend_timestamp=False
+ )
- log_msg('Regional mapping successfully created.', prepend_timestamp=False)
-
log_div()
+ def init_simdir(self, asst_id=None, AIM_file_path='AIM.json'): # noqa: C901, N803
+ """Initializes the simulation directory for each asset.
- def init_simdir(self, asst_id=None, AIM_file_path = 'AIM.json'):
- """
- Initializes the simulation directory for each asset.
-
In the current directory where the Asset Information Model (AIM) file resides, e.g., ./Buildings/2000-AIM.json, a new directory is created with the asset id, e.g., ./Buildings/2000, and within that directory a template directory is created (templatedir) ./Buildings/2000/templatedir. The AIM file is copied over to the template dir. It is within this template dir that the analysis is run for the individual asset.
Parameters
----------
-
asst_id - the asset id
AIM_file - file path to the existing AIM file
- """
+
+ """ # noqa: D401
log_msg('Initializing the simulation directory\n')
- aimDir = os.path.dirname(AIM_file_path)
- aimFileName = os.path.basename(AIM_file_path)
-
+ aimDir = os.path.dirname(AIM_file_path) # noqa: PTH120, N806
+ aimFileName = os.path.basename(AIM_file_path) # noqa: PTH119, N806
+
# If the path is not provided, assume the AIM file is in the run dir
- if os.path.exists(aimDir) == False :
- aimDir = self.run_dir
- aimFileName = AIM_file_path
+ if os.path.exists(aimDir) == False: # noqa: PTH110, E712
+ aimDir = self.run_dir # noqa: N806
+ aimFileName = AIM_file_path # noqa: N806
os.chdir(aimDir)
if asst_id is not None:
-
# if the directory already exists, remove its contents
if asst_id in os.listdir(aimDir):
shutil.rmtree(asst_id, ignore_errors=True)
# create the asset_id dir and the template dir
- os.mkdir(asst_id)
+ os.mkdir(asst_id) # noqa: PTH102
os.chdir(asst_id)
- os.mkdir('templatedir')
+ os.mkdir('templatedir') # noqa: PTH102
os.chdir('templatedir')
# Make a copy of the AIM file
- src = posixpath.join(aimDir,aimFileName)
+ src = posixpath.join(aimDir, aimFileName)
dst = posixpath.join(aimDir, f'{asst_id}/templatedir/{aimFileName}')
# dst = posixpath.join(aimDir, f'{asst_id}/templatedir/AIM.json')
-
+
try:
- shutil.copy(src,dst)
-
- print("Copied AIM file to: ",dst)
+ shutil.copy(src, dst)
+
+ print('Copied AIM file to: ', dst) # noqa: T201
# os.remove(src)
-
- except:
- print("Error occurred while copying file: ",dst)
-
- else:
- for dir_or_file in os.listdir(os.getcwd()):
- if dir_or_file not in ['log.txt', 'templatedir', 'input_data']:
- if os.path.isdir(dir_or_file):
+ except: # noqa: E722
+ print('Error occurred while copying file: ', dst) # noqa: T201
+
+ else:
+ for dir_or_file in os.listdir(os.getcwd()): # noqa: PTH109
+ if dir_or_file not in ['log.txt', 'templatedir', 'input_data']: # noqa: PLR6201
+ if os.path.isdir(dir_or_file): # noqa: PTH112
shutil.rmtree(dir_or_file)
else:
- os.remove(dir_or_file)
+ os.remove(dir_or_file) # noqa: PTH107
- os.chdir('templatedir') #TODO: we might want to add a generic id dir to be consistent with the regional workflow here
+ os.chdir(
+ 'templatedir'
+ ) # TODO: we might want to add a generic id dir to be consistent with the regional workflow here # noqa: TD002
# Remove files with .j extensions that might be there from previous runs
- for file in os.listdir(os.getcwd()):
+ for file in os.listdir(os.getcwd()): # noqa: PTH109
if file.endswith('.j'):
- os.remove(file)
+ os.remove(file) # noqa: PTH107
# Make a copy of the input file and rename it to AIM.json
# This is a temporary fix, will be removed eventually.
- dst = Path(os.getcwd()) / AIM_file_path
- #dst = posixpath.join(os.getcwd(),AIM_file)
+ dst = Path(os.getcwd()) / AIM_file_path # noqa: PTH109
+ # dst = posixpath.join(os.getcwd(),AIM_file)
if AIM_file_path != self.input_file:
- shutil.copy(src = self.input_file, dst = dst)
-
- log_msg('Simulation directory successfully initialized.\n',prepend_timestamp=False)
+ shutil.copy(src=self.input_file, dst=dst)
+
+ log_msg(
+ 'Simulation directory successfully initialized.\n',
+ prepend_timestamp=False,
+ )
log_div()
return dst
def cleanup_simdir(self, asst_id):
- """
- Short description
+ """Short description
Longer description
Parameters
----------
- """
+ """ # noqa: D400, D414
log_msg('Cleaning up the simulation directory.')
os.chdir(self.run_dir)
@@ -1764,50 +1923,50 @@ def cleanup_simdir(self, asst_id):
if asst_id is not None:
os.chdir(asst_id)
- workdirs = os.listdir(os.getcwd())
+ workdirs = os.listdir(os.getcwd()) # noqa: PTH109
for workdir in workdirs:
if 'workdir' in workdir:
shutil.rmtree(workdir, ignore_errors=True)
- log_msg('Simulation directory successfully cleaned up.',
- prepend_timestamp=False)
+ log_msg(
+ 'Simulation directory successfully cleaned up.', prepend_timestamp=False
+ )
log_div()
def init_workdir(self):
- """
- Short description
+ """Short description
Longer description
Parameters
----------
- """
+ """ # noqa: D400, D414
log_msg('Initializing the working directory.')
os.chdir(self.run_dir)
- for dir_or_file in os.listdir(os.getcwd()):
+ for dir_or_file in os.listdir(os.getcwd()): # noqa: PTH109
if dir_or_file != 'log.txt':
- if os.path.isdir(dir_or_file):
+ if os.path.isdir(dir_or_file): # noqa: PTH112
shutil.rmtree(dir_or_file)
else:
- os.remove(dir_or_file)
+ os.remove(dir_or_file) # noqa: PTH107
- log_msg('Working directory successfully initialized.',
- prepend_timestamp=False)
+ log_msg(
+ 'Working directory successfully initialized.', prepend_timestamp=False
+ )
log_div()
def cleanup_workdir(self):
- """
- Short description
+ """Short description
Longer description
Parameters
----------
- """
+ """ # noqa: D400, D414
log_msg('Cleaning up the working directory.')
os.chdir(self.run_dir)
@@ -1815,33 +1974,36 @@ def cleanup_workdir(self):
workdir_contents = os.listdir(self.run_dir)
for file_or_dir in workdir_contents:
if (self.run_dir / file_or_dir).is_dir():
- #if os.path.isdir(posixpath.join(self.run_dir, file_or_dir)):
+ # if os.path.isdir(posixpath.join(self.run_dir, file_or_dir)):
shutil.rmtree(file_or_dir, ignore_errors=True)
log_msg('Working directory successfully cleaned up.')
log_div()
-
- def preprocess_inputs(self, app_sequence, AIM_file_path = 'AIM.json', asst_id=None, asset_type = None) :
- """
- Short description
+ def preprocess_inputs( # noqa: C901
+ self,
+ app_sequence,
+ AIM_file_path='AIM.json', # noqa: N803
+ asst_id=None,
+ asset_type=None,
+ ):
+ """Short description
Longer description
Parameters
----------
- """
-
+ """ # noqa: D400, D414
log_msg('Running preprocessing step random variables')
# Get the directory to the asset class dir, e.g., buildings
- aimDir = os.path.dirname(AIM_file_path)
- aimFileName = os.path.basename(AIM_file_path)
-
+ aimDir = os.path.dirname(AIM_file_path) # noqa: PTH120, N806
+ aimFileName = os.path.basename(AIM_file_path) # noqa: PTH119, N806, F841
+
# If the path is not provided, assume the AIM file is in the run dir
- if os.path.exists(aimDir) == False :
- aimDir = self.run_dir
+ if os.path.exists(aimDir) == False: # noqa: PTH110, E712
+ aimDir = self.run_dir # noqa: N806
os.chdir(aimDir)
@@ -1852,85 +2014,116 @@ def preprocess_inputs(self, app_sequence, AIM_file_path = 'AIM.json', asst_id=No
os.chdir('templatedir')
for app_type in self.optional_apps:
- if ((app_type in app_sequence) and
- (app_type not in self.workflow_apps.keys())):
+ if (app_type in app_sequence) and (
+ app_type not in self.workflow_apps.keys() # noqa: SIM118
+ ):
app_sequence.remove(app_type)
- for app_type in app_sequence:
-
+ for app_type in app_sequence: # noqa: PLR1702
workflow_app = self.workflow_apps[app_type]
-
- if (app_type != 'FEM'):
-
+
+ if app_type != 'FEM':
if AIM_file_path is not None:
-
- if type(workflow_app) is dict :
-
- for itemKey, item in workflow_app.items() :
- if asset_type is not None and asset_type != itemKey :
+ if type(workflow_app) is dict:
+ for itemKey, item in workflow_app.items(): # noqa: N806
+ if asset_type is not None and asset_type != itemKey:
continue
-
+
item.defaults['filenameAIM'] = AIM_file_path
-
- command_list = item.get_command_list(app_path = self.app_dir_local)
- command_list.append(u'--getRV')
-
+ command_list = item.get_command_list(
+ app_path=self.app_dir_local
+ )
+
+ command_list.append('--getRV')
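+                            # --getRV asks the app to report its random
+                            # variables for this preprocessing step.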
+
command = create_command(command_list)
-
- log_msg('\nRunning {} app at preprocessing step...'.format(app_type), prepend_timestamp=False)
- log_msg('\n{}\n'.format(command), prepend_timestamp=False, prepend_blank_space=False)
-
+
+ log_msg(
+ f'\nRunning {app_type} app at preprocessing step...',
+ prepend_timestamp=False,
+ )
+ log_msg(
+ f'\n{command}\n',
+ prepend_timestamp=False,
+ prepend_blank_space=False,
+ )
+
result, returncode = run_command(command)
- log_msg('Output: '+str(returncode), prepend_timestamp=False, prepend_blank_space=False)
- log_msg('\n{}\n'.format(result), prepend_timestamp=False, prepend_blank_space=False)
+ log_msg(
+ 'Output: ' + str(returncode),
+ prepend_timestamp=False,
+ prepend_blank_space=False,
+ )
+ log_msg(
+ f'\n{result}\n',
+ prepend_timestamp=False,
+ prepend_blank_space=False,
+ )
- #if returncode==0:
+ # if returncode==0:
# log_msg('Preprocessing successfully completed.', prepend_timestamp=False)
- #else:
+ # else:
# log_msg('Error in the preprocessor.', prepend_timestamp=False)
# exit(-1)
-
log_div()
-
+
else:
workflow_app.defaults['filenameAIM'] = AIM_file_path
-
- command_list = workflow_app.get_command_list(app_path = self.app_dir_local)
-
- command_list.append(u'--getRV')
-
+
+ command_list = workflow_app.get_command_list(
+ app_path=self.app_dir_local
+ )
+
+ command_list.append('--getRV')
+
command = create_command(command_list)
-
- log_msg('\nRunning {} app at preprocessing step...'.format(app_type), prepend_timestamp=False)
- log_msg('\n{}\n'.format(command), prepend_timestamp=False, prepend_blank_space=False)
-
+
+ log_msg(
+ f'\nRunning {app_type} app at preprocessing step...',
+ prepend_timestamp=False,
+ )
+ log_msg(
+ f'\n{command}\n',
+ prepend_timestamp=False,
+ prepend_blank_space=False,
+ )
+
result, returncode = run_command(command)
-
- log_msg('Output: '+str(returncode), prepend_timestamp=False, prepend_blank_space=False)
- log_msg('\n{}\n'.format(result), prepend_timestamp=False, prepend_blank_space=False)
-
- #if returncode==0:
+
+ log_msg(
+ 'Output: ' + str(returncode),
+ prepend_timestamp=False,
+ prepend_blank_space=False,
+ )
+ log_msg(
+ f'\n{result}\n',
+ prepend_timestamp=False,
+ prepend_blank_space=False,
+ )
+
+ # if returncode==0:
# log_msg('Preprocessing successfully completed.', prepend_timestamp=False)
- #else:
+ # else:
# log_msg('Error in the preprocessor.', prepend_timestamp=False)
# exit(-1)
log_div()
else:
- old_command_list = workflow_app.get_command_list(app_path = self.app_dir_local)
- old_command_list.append('--appKey')
- old_command_list.append('FEM')
+ old_command_list = workflow_app.get_command_list(
+ app_path=self.app_dir_local
+ )
+ old_command_list.append('--appKey')
+ old_command_list.append('FEM')
if old_command_list[0] == 'python':
-
- if self.run_type in ['set_up', 'runningRemote']:
- old_command_list.append('--runType')
+ if self.run_type in ['set_up', 'runningRemote']: # noqa: PLR6201
+ old_command_list.append('--runType')
old_command_list.append('runningRemote')
old_command_list.append('--osType')
- old_command_list.append('MacOS')
+ old_command_list.append('MacOS')
else:
old_command_list.append('--runType')
old_command_list.append('runningLocal')
@@ -1938,22 +2131,21 @@ def preprocess_inputs(self, app_sequence, AIM_file_path = 'AIM.json', asst_id=No
old_command_list.append('--osType')
old_command_list.append('Windows')
else:
- old_command_list.append('--osType')
+ old_command_list.append('--osType')
old_command_list.append('MacOS')
-
- command = create_command(old_command_list)
+
+ command = create_command(old_command_list)
else:
-
#
# FMK to modify C++ applications to take above
#
-
+
command_list = []
- command_list.append(old_command_list[0])
+ command_list.append(old_command_list[0])
command_list.append(self.input_file)
-
- if self.run_type in ['set_up', 'runningRemote']:
+
+ if self.run_type in ['set_up', 'runningRemote']: # noqa: PLR6201
command_list.append('runningRemote')
command_list.append('MacOS')
else:
@@ -1962,125 +2154,139 @@ def preprocess_inputs(self, app_sequence, AIM_file_path = 'AIM.json', asst_id=No
command_list.append('Windows')
else:
command_list.append('MacOS')
-
+
command_list.append(old_command_list[4])
-
+
command = create_command(command_list)
log_msg('\nRunning FEM app', prepend_timestamp=False)
- log_msg('\n{}\n'.format(command), prepend_timestamp=False, prepend_blank_space=False)
-
+ log_msg(
+ f'\n{command}\n',
+ prepend_timestamp=False,
+ prepend_blank_space=False,
+ )
+
result, returncode = run_command(command)
- log_msg('Output: '+str(returncode), prepend_timestamp=False,
- prepend_blank_space=False)
- log_msg('\n{}\n'.format(result), prepend_timestamp=False,
- prepend_blank_space=False)
+ log_msg(
+ 'Output: ' + str(returncode),
+ prepend_timestamp=False,
+ prepend_blank_space=False,
+ )
+ log_msg(
+ f'\n{result}\n',
+ prepend_timestamp=False,
+ prepend_blank_space=False,
+ )
# sy - trying adding exit command
- #if platform.system() == 'Windows':
+ # if platform.system() == 'Windows':
# with open("driver.bat","r", encoding="utf-8") as f:
# lines = f.readlines()
# #lines.append(r'if %errorlevel% neq 0 exit /b -1')
# with open("driver.bat","w", encoding="utf-8") as f:
# f.writelines(lines)
- #else:
+ # else:
# pass
-
- log_msg('Successfully Created Driver File for Workflow.',
- prepend_timestamp=False)
+
+ log_msg(
+ 'Successfully Created Driver File for Workflow.',
+ prepend_timestamp=False,
+ )
log_div()
- def gather_workflow_inputs(self, asst_id=None, AIM_file_path = 'AIM.json'):
+ def gather_workflow_inputs(self, asst_id=None, AIM_file_path='AIM.json'): # noqa: N803, D102
+ log_msg('Gathering Workflow Inputs.', prepend_timestamp=False)
- log_msg('Gathering Workflow Inputs.',prepend_timestamp=False)
-
-
- if 'UQ' in self.workflow_apps.keys():
-
+ if 'UQ' in self.workflow_apps.keys(): # noqa: SIM118
# Get the directory to the asset class dir, e.g., buildings
- aimDir = os.path.dirname(AIM_file_path)
-
+ aimDir = os.path.dirname(AIM_file_path) # noqa: PTH120, N806
+
# If the path is not provided, assume the AIM file is in the run dir
- if os.path.exists(aimDir) == False :
- aimDir = self.run_dir
-
+ if os.path.exists(aimDir) == False: # noqa: PTH110, E712
+ aimDir = self.run_dir # noqa: N806
+
os.chdir(aimDir)
-
+
if asst_id is not None:
os.chdir(asst_id)
-
+
os.chdir('templatedir')
- relPathCreateCommon = 'applications/performUQ/common/createStandardUQ_Input'
+ relPathCreateCommon = ( # noqa: N806
+ 'applications/performUQ/common/createStandardUQ_Input'
+ )
abs_path = Path(self.app_dir_local) / relPathCreateCommon
-
+
arg_list = []
- arg_list.append(u'{}'.format(abs_path.as_posix()))
+ arg_list.append(f'{abs_path.as_posix()}')
# arg_list.append(u'{}'.format(abs_path))
-
- #inputFilePath = os.path.dirname(self.input_file)
- inputFilePath = os.getcwd()
- inputFilename = os.path.basename(self.input_file)
- pathToScFile = posixpath.join(inputFilePath,'sc_'+inputFilename)
-
-
- #arg_list.append(u'{}'.format(self.input_file))
- arg_list.append(u'{}'.format(AIM_file_path))
- arg_list.append(u'{}'.format(pathToScFile))
- arg_list.append(u'{}'.format(self.default_values['driverFile']))
- arg_list.append(u'{}'.format('sc_'+self.default_values['driverFile']))
- arg_list.append(u'{}'.format(self.run_type))
-
+
+ # inputFilePath = os.path.dirname(self.input_file)
+ inputFilePath = os.getcwd() # noqa: PTH109, N806
+ inputFilename = os.path.basename(self.input_file) # noqa: PTH119, N806
+ pathToScFile = posixpath.join(inputFilePath, 'sc_' + inputFilename) # noqa: N806
+
+ # arg_list.append(u'{}'.format(self.input_file))
+ arg_list.append(f'{AIM_file_path}') # noqa: FURB113
+ arg_list.append(f'{pathToScFile}')
+ arg_list.append('{}'.format(self.default_values['driverFile']))
+ arg_list.append('{}'.format('sc_' + self.default_values['driverFile']))
+ arg_list.append(f'{self.run_type}')
+
if any(platform.win32_ver()):
arg_list.append('Windows')
else:
arg_list.append('MacOS')
-
- self.default_values['workflowInput']=pathToScFile
- #self.default_values['driverFile']='sc_'+self.default_values['driverFile']
- self.default_values['modDriverFile']='sc_'+self.default_values['driverFile']
- #self.default_values['driverFile']='driver'
- self.modifiedRun = True # ADAM to fix
+ self.default_values['workflowInput'] = pathToScFile
+ # self.default_values['driverFile']='sc_'+self.default_values['driverFile']
+ self.default_values['modDriverFile'] = (
+ 'sc_' + self.default_values['driverFile']
+ )
+ # self.default_values['driverFile']='driver'
+
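+        # modifiedRun tells simulate_response() to swap the sc_ input file
+        # (workflowInput) and driver (modDriverFile) into the UQ command.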
+ self.modifiedRun = True # ADAM to fix
command = create_command(arg_list)
-
+
# print('FMK- gather command:', command)
- result, returncode = run_command(command)
+ result, returncode = run_command(command) # noqa: F841
- log_msg('Output: ', prepend_timestamp=False,
- prepend_blank_space=False)
- log_msg('\n{}\n'.format(result), prepend_timestamp=False,
- prepend_blank_space=False)
-
- log_msg('Successfully Gathered Inputs.',
- prepend_timestamp=False)
+ log_msg('Output: ', prepend_timestamp=False, prepend_blank_space=False)
+ log_msg(
+ f'\n{result}\n',
+ prepend_timestamp=False,
+ prepend_blank_space=False,
+ )
+
+ log_msg('Successfully Gathered Inputs.', prepend_timestamp=False)
log_div()
-
-
- def create_driver_file(self, app_sequence, asst_id=None, AIM_file_path = 'AIM.json'):
- """
- This functipon creates a UQ driver file. This is only done if UQ is in the workflow apps
+ def create_driver_file( # noqa: C901
+ self,
+ app_sequence,
+ asst_id=None,
+ AIM_file_path='AIM.json', # noqa: N803
+ ):
+        """This function creates a UQ driver file. This is only done if UQ is in the workflow apps
Parameters
----------
- """
- if 'UQ' in self.workflow_apps.keys():
-
+ """ # noqa: D400, D401, D404, D414
+ if 'UQ' in self.workflow_apps.keys(): # noqa: SIM118
log_msg('Creating the workflow driver file')
- #print('ASSET_ID', asst_id)
- #print('AIM_FILE_PATH', AIM_file_path)
-
- aimDir = os.path.dirname(AIM_file_path)
- aimFile = os.path.basename(AIM_file_path)
-
+ # print('ASSET_ID', asst_id)
+ # print('AIM_FILE_PATH', AIM_file_path)
+
+ aimDir = os.path.dirname(AIM_file_path) # noqa: PTH120, N806
+ aimFile = os.path.basename(AIM_file_path) # noqa: PTH119, N806, F841
+
# If the path is not provided, assume the AIM file is in the run dir
- if os.path.exists(aimDir) == False :
- aimDir = self.run_dir
-
+ if os.path.exists(aimDir) == False: # noqa: PTH110, E712
+ aimDir = self.run_dir # noqa: N806
+
os.chdir(aimDir)
if asst_id is not None:
@@ -2088,199 +2294,215 @@ def create_driver_file(self, app_sequence, asst_id=None, AIM_file_path = 'AIM.js
os.chdir('templatedir')
- #print('PWD', os.getcwd())
+ # print('PWD', os.getcwd())
- driver_script = u''
+ driver_script = ''
for app_type in self.optional_apps:
- if ((app_type in app_sequence) and
- (app_type not in self.workflow_apps.keys())):
+ if (app_type in app_sequence) and (
+ app_type not in self.workflow_apps.keys() # noqa: SIM118
+ ):
app_sequence.remove(app_type)
for app_type in app_sequence:
-
workflow_app = self.workflow_apps[app_type]
- #print('FMK runtype', self.run_type)
- if self.run_type in ['set_up', 'runningRemote', 'parSETUP']:
-
- if type(workflow_app) is dict :
- for itemKey, item in workflow_app.items() :
-
- command_list = item.get_command_list(app_path = self.app_dir_remote, force_posix = True)
- driver_script += create_command(command_list, enforced_python='python3') + u'\n'
-
- else :
- command_list = workflow_app.get_command_list(app_path = self.app_dir_remote, force_posix = True)
- driver_script += create_command(command_list, enforced_python='python3') + u'\n'
-
- else:
-
- if type(workflow_app) is dict :
- for itemKey, item in workflow_app.items() :
-
- command_list = item.get_command_list(app_path = self.app_dir_local)
- driver_script += create_command(command_list) + u'\n'
-
+ # print('FMK runtype', self.run_type)
+ if self.run_type in ['set_up', 'runningRemote', 'parSETUP']: # noqa: PLR6201
+ if type(workflow_app) is dict:
+ for itemKey, item in workflow_app.items(): # noqa: B007, N806, PERF102
+ command_list = item.get_command_list(
+ app_path=self.app_dir_remote, force_posix=True
+ )
+ driver_script += (
+ create_command(
+ command_list, enforced_python='python3'
+ )
+ + '\n'
+ )
+
else:
- command_list = workflow_app.get_command_list(app_path = self.app_dir_local)
+ command_list = workflow_app.get_command_list(
+ app_path=self.app_dir_remote, force_posix=True
+ )
+ driver_script += (
+ create_command(command_list, enforced_python='python3')
+ + '\n'
+ )
+
+ elif type(workflow_app) is dict:
+ for itemKey, item in workflow_app.items(): # noqa: B007, N806, PERF102
+ command_list = item.get_command_list(
+ app_path=self.app_dir_local
+ )
+ driver_script += create_command(command_list) + '\n'
- driver_script += create_command(command_list) + u'\n'
+ else:
+ command_list = workflow_app.get_command_list(
+ app_path=self.app_dir_local
+ )
+
+ driver_script += create_command(command_list) + '\n'
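+                # For set_up/runningRemote/parSETUP runs the commands use the
+                # remote app dir, posix paths, and python3; otherwise the
+                # local app dir. One command is appended per workflow app.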
# sy - trying adding exit command
-
- #if platform.system() == 'Windows':
+
+ # if platform.system() == 'Windows':
# #driver_script += 'if %errorlevel% neq 0 exit /b -1 \n'
# pass
- #else:
+ # else:
# pass
- #log_msg('Workflow driver script:', prepend_timestamp=False)
- #log_msg('\n{}\n'.format(driver_script), prepend_timestamp=False, prepend_blank_space=False)
-
- driverFile = self.default_values['driverFile']
-
+ # log_msg('Workflow driver script:', prepend_timestamp=False)
+ # log_msg('\n{}\n'.format(driver_script), prepend_timestamp=False, prepend_blank_space=False)
+
+ driverFile = self.default_values['driverFile'] # noqa: N806
+
# KZ: for windows, to write bat
if platform.system() == 'Windows':
- driverFile = driverFile+'.bat'
+ driverFile = driverFile + '.bat' # noqa: N806, PLR6104
log_msg(driverFile)
- with open(driverFile,'w', newline='\n', encoding="utf-8") as f:
+ with open(driverFile, 'w', newline='\n', encoding='utf-8') as f: # noqa: PTH123
f.write(driver_script)
- log_msg('Workflow driver file successfully created.',prepend_timestamp=False)
+ log_msg(
+ 'Workflow driver file successfully created.', prepend_timestamp=False
+ )
log_div()
else:
log_msg('No UQ requested, workflow driver is not needed.')
log_div()
-
-
- def simulate_response(self, AIM_file_path = 'AIM.json', asst_id=None):
- """
- Short description
+ def simulate_response(self, AIM_file_path='AIM.json', asst_id=None): # noqa: C901, N803
+ """Short description
Longer description
Parameters
----------
- """
-
+
+ """ # noqa: D400, D414
# Get the directory to the asset class dir, e.g., buildings
- aimDir = os.path.dirname(AIM_file_path)
- aimFileName = os.path.basename(AIM_file_path)
-
+ aimDir = os.path.dirname(AIM_file_path) # noqa: PTH120, N806
+ aimFileName = os.path.basename(AIM_file_path) # noqa: PTH119, N806, F841
+
# If the path is not provided, assume the AIM file is in the run dir
- if os.path.exists(aimDir) == False :
- aimDir = self.run_dir
+ if os.path.exists(aimDir) == False: # noqa: PTH110, E712
+ aimDir = self.run_dir # noqa: N806
os.chdir(aimDir)
-
+
if asst_id is not None:
os.chdir(asst_id)
- if 'UQ' in self.workflow_apps.keys():
-
+ if 'UQ' in self.workflow_apps.keys(): # noqa: PLR1702, SIM118
log_msg('Running response simulation')
os.chdir('templatedir')
-
+
workflow_app = self.workflow_apps['UQ']
# FMK
if asst_id is not None:
- workflow_app=workflow_app['Buildings']
-
+ workflow_app = workflow_app['Buildings']
+
if AIM_file_path is not None:
-
workflow_app.defaults['filenameAIM'] = AIM_file_path
- #for input_var in workflow_app.inputs:
+ # for input_var in workflow_app.inputs:
# if input_var['id'] == 'filenameAIM':
# input_var['default'] = AIM_file_path
- command_list = workflow_app.get_command_list(
- app_path=self.app_dir_local)
+ command_list = workflow_app.get_command_list(app_path=self.app_dir_local)
- #ADAM to fix FMK
- if (self.modifiedRun):
+ # ADAM to fix FMK
+ if self.modifiedRun:
command_list[3] = self.default_values['workflowInput']
-
+
command_list[5] = self.default_values['modDriverFile']
-
+
# add the run type to the uq command list
- command_list.append(u'--runType')
- command_list.append(u'{}'.format(self.run_type))
+ command_list.append('--runType')
+ command_list.append(f'{self.run_type}')
- #if ('rvFiles' in self.default_values.keys()):
- # command_list.append('--filesWithRV')
+ # if ('rvFiles' in self.default_values.keys()):
+ # command_list.append('--filesWithRV')
# rvFiles = self.default_values['rvFiles']
# for rvFile in rvFiles:
# command_list.append(rvFile)
- #if ('edpFiles' in self.default_values.keys()):
- # command_list.append('--filesWithEDP')
+ # if ('edpFiles' in self.default_values.keys()):
+ # command_list.append('--filesWithEDP')
# edpFiles = self.default_values['edpFiles']
# for edpFile in edpFiles:
# command_list.append(edpFile)
-
+
command = create_command(command_list)
log_msg('Simulation command:', prepend_timestamp=False)
- log_msg('\n{}\n'.format(command), prepend_timestamp=False,
- prepend_blank_space=False)
-
- result, returncode = run_command(command)
-
- if self.run_type in ['run', 'runningLocal']:
-
- log_msg('Output: ', prepend_timestamp=False,
- prepend_blank_space=False)
- log_msg('\n{}\n'.format(result), prepend_timestamp=False,
- prepend_blank_space=False)
+ log_msg(
+ f'\n{command}\n',
+ prepend_timestamp=False,
+ prepend_blank_space=False,
+ )
+
+ result, returncode = run_command(command) # noqa: F841
+
+ if self.run_type in ['run', 'runningLocal']: # noqa: PLR6201
+ log_msg(
+ 'Output: ', prepend_timestamp=False, prepend_blank_space=False
+ )
+ log_msg(
+ f'\n{result}\n',
+ prepend_timestamp=False,
+ prepend_blank_space=False,
+ )
# create the response.csv file from the dakotaTab.out file
os.chdir(aimDir)
-
+
if asst_id is not None:
os.chdir(asst_id)
-
+
try:
- # sy, abs - added try-statement because dakota-reliability does not write DakotaTab.out
- dakota_out = pd.read_csv('dakotaTab.out', sep=r'\s+', header=0, index_col=0)
+ # sy, abs - added try-statement because dakota-reliability does not write DakotaTab.out
+ dakota_out = pd.read_csv(
+ 'dakotaTab.out', sep=r'\s+', header=0, index_col=0
+ )
# if the DL is coupled with response estimation, we need to sort the results
- DL_app = self.workflow_apps.get('DL', None)
+ DL_app = self.workflow_apps.get('DL', None) # noqa: N806
# FMK
- #if asst_id is not None:
+ # if asst_id is not None:
# KZ: 10/19/2022, minor patch
if asst_id is not None and DL_app is not None:
- DL_app=DL_app['Buildings']
+ DL_app = DL_app['Buildings'] # noqa: N806
if DL_app is not None:
-
is_coupled = DL_app.pref.get('coupled_EDP', None)
if is_coupled:
if 'eventID' in dakota_out.columns:
- events = dakota_out['eventID'].values
+ events = dakota_out['eventID'].values # noqa: PD011
events = [int(e.split('x')[-1]) for e in events]
sorter = np.argsort(events)
dakota_out = dakota_out.iloc[sorter, :]
dakota_out.index = np.arange(dakota_out.shape[0])
-
dakota_out.to_csv('response.csv')
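+                        # The response.csv written here is what estimate_losses()
+                        # and aggregate_results() read downstream.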
- #log_msg('Response simulation finished successfully.', prepend_timestamp=False)# sy - this message was showing up when quoFEM analysis failed
-
- except:
- log_msg('dakotaTab.out not found. Response.csv not created.',
- prepend_timestamp=False)
+ # log_msg('Response simulation finished successfully.', prepend_timestamp=False)# sy - this message was showing up when quoFEM analysis failed
- elif self.run_type in ['set_up', 'runningRemote']:
+ except: # noqa: E722
+ log_msg(
+ 'dakotaTab.out not found. Response.csv not created.',
+ prepend_timestamp=False,
+ )
- log_msg('Response simulation set up successfully',
- prepend_timestamp=False)
+ elif self.run_type in ['set_up', 'runningRemote']: # noqa: PLR6201
+ log_msg(
+ 'Response simulation set up successfully',
+ prepend_timestamp=False,
+ )
log_div()
@@ -2288,207 +2510,225 @@ def simulate_response(self, AIM_file_path = 'AIM.json', asst_id=None):
log_msg('No UQ requested, response simulation step is skipped.')
# copy the response.csv from the templatedir to the run dir
- shutil.copy(src = 'templatedir/response.csv', dst = 'response.csv')
+ shutil.copy(src='templatedir/response.csv', dst='response.csv')
log_div()
- def perform_asset_performance(asset_type):
-
- performanceWfapps = self.workflow_apps.get('Performance', None)
+ def perform_asset_performance(asset_type): # noqa: N805, D102
+ performanceWfapps = self.workflow_apps.get('Performance', None) # noqa: N806, F821
performance_app = performanceWfapps[asset_type]
- app_command_list = performance_app.get_command_list(app_path = self.app_dir_local)
+ app_command_list = performance_app.get_command_list(
+ app_path=self.app_dir_local # noqa: F821
+ )
command = create_command(app_command_list)
- result, returncode = run_command(command)
-
-
- def estimate_losses(self, AIM_file_path = 'AIM.json', asst_id = None,
- asset_type = None, input_file = None, copy_resources=False):
- """
- Short description
+ result, returncode = run_command(command) # noqa: F841
+
+ def estimate_losses( # noqa: C901
+ self,
+ AIM_file_path='AIM.json', # noqa: N803
+ asst_id=None,
+ asset_type=None,
+ input_file=None,
+ copy_resources=False, # noqa: FBT002
+ ):
+ """Short description
Longer description
Parameters
----------
- """
- if 'DL' in self.workflow_apps.keys():
-
+ """ # noqa: D400, D414
+ if 'DL' in self.workflow_apps.keys(): # noqa: SIM118
log_msg('Running damage and loss assessment')
-
+
# Get the directory to the asset class dir, e.g., buildings
- aimDir = os.path.dirname(AIM_file_path)
- aimFileName = os.path.basename(AIM_file_path)
-
+ aimDir = os.path.dirname(AIM_file_path) # noqa: PTH120, N806
+ aimFileName = os.path.basename(AIM_file_path) # noqa: PTH119, N806
+
# If the path is not provided, assume the AIM file is in the run dir
- if os.path.exists(aimDir) == False :
- aimDir = self.run_dir
- aimFileName = AIM_file_path
+ if os.path.exists(aimDir) == False: # noqa: PTH110, E712
+ aimDir = self.run_dir # noqa: N806
+ aimFileName = AIM_file_path # noqa: N806
os.chdir(aimDir)
if 'Assets' not in self.app_type_list:
-
# Copy the dakota.json file from the templatedir to the run_dir so that
# all the required inputs are in one place.
input_file = PurePath(input_file).name
- #input_file = ntpath.basename(input_file)
+ # input_file = ntpath.basename(input_file)
shutil.copy(
- src = aimDir / f'templatedir/{input_file}',
- dst = posixpath.join(aimDir,aimFileName))
- #src = posixpath.join(self.run_dir,'templatedir/{}'.format(input_file)),
- #dst = posixpath.join(self.run_dir,AIM_file_path))
+ src=aimDir / f'templatedir/{input_file}',
+ dst=posixpath.join(aimDir, aimFileName),
+ )
+ # src = posixpath.join(self.run_dir,'templatedir/{}'.format(input_file)),
+ # dst = posixpath.join(self.run_dir,AIM_file_path))
else:
-
- src = posixpath.join(aimDir,aimFileName)
+ src = posixpath.join(aimDir, aimFileName)
dst = posixpath.join(aimDir, f'{asst_id}/{aimFileName}')
-
+
# copy the AIM file from the main dir to the building dir
- shutil.copy(src,dst)
-
- #src = posixpath.join(self.run_dir, AIM_file_path),
- #dst = posixpath.join(self.run_dir,
+ shutil.copy(src, dst)
+
+ # src = posixpath.join(self.run_dir, AIM_file_path),
+ # dst = posixpath.join(self.run_dir,
# '{}/{}'.format(asst_id, AIM_file_path)))
os.chdir(str(asst_id))
workflow_app = self.workflow_apps['DL']
-
-
- if type(workflow_app) is dict :
-
- for itemKey, item in workflow_app.items() :
-
- if AIM_file_path is not None:
- item.defaults['filenameDL'] = AIM_file_path
- #for input_var in workflow_app.inputs:
- # if input_var['id'] == 'filenameDL':
- # input_var['default'] = AIM_file_path
-
- if asset_type != itemKey :
- continue
-
- command_list = item.get_command_list(app_path=self.app_dir_local)
-
- if copy_resources:
- command_list.append('--resource_dir')
- command_list.append(self.working_dir)
-
- command_list.append('--dirnameOutput')
- # Only add asset id if we are running a regional assessment
- if asst_id != None:
- command_list.append(f'{aimDir}/{asst_id}')
- else:
- command_list.append(f'{aimDir}')
-
- command = create_command(command_list)
-
- log_msg('Damage and loss assessment command (1):', prepend_timestamp=False)
- log_msg('\n{}\n'.format(command), prepend_timestamp=False,
- prepend_blank_space=False)
- result, returncode = run_command(command)
-
- log_msg(result, prepend_timestamp=False)
-
- # if multiple buildings are analyzed, copy the pelicun_log file to the root dir
- if 'Assets' in self.app_type_list:
-
- try:
- shutil.copy(
- src = aimDir / f'{asst_id}/{"pelicun_log.txt"}',
- dst = aimDir / f'pelicun_log_{asst_id}.txt')
-
- #src = posixpath.join(self.run_dir, '{}/{}'.format(asst_id, 'pelicun_log.txt')),
- #dst = posixpath.join(self.run_dir, 'pelicun_log_{}.txt'.format(asst_id)))
- except:
- pass
-
+ if type(workflow_app) is dict:
+ for itemKey, item in workflow_app.items(): # noqa: N806
+ if AIM_file_path is not None:
+ item.defaults['filenameDL'] = AIM_file_path
+ # for input_var in workflow_app.inputs:
+ # if input_var['id'] == 'filenameDL':
+ # input_var['default'] = AIM_file_path
+
+ if asset_type != itemKey:
+ continue
+
+ command_list = item.get_command_list(app_path=self.app_dir_local)
+
+ if copy_resources:
+ command_list.append('--resource_dir')
+ command_list.append(self.working_dir)
+
+ command_list.append('--dirnameOutput')
+ # Only add asset id if we are running a regional assessment
+ if asst_id != None: # noqa: E711
+ command_list.append(f'{aimDir}/{asst_id}')
+ else:
+ command_list.append(f'{aimDir}')
+
+ command = create_command(command_list)
+
+ log_msg(
+ 'Damage and loss assessment command (1):',
+ prepend_timestamp=False,
+ )
+ log_msg(
+ f'\n{command}\n',
+ prepend_timestamp=False,
+ prepend_blank_space=False,
+ )
+
+ result, returncode = run_command(command)
+
+ log_msg(result, prepend_timestamp=False)
+
+ # if multiple buildings are analyzed, copy the pelicun_log file to the root dir
+ if 'Assets' in self.app_type_list:
+ try: # noqa: SIM105
+ shutil.copy(
+ src=aimDir / f'{asst_id}/{"pelicun_log.txt"}',
+ dst=aimDir / f'pelicun_log_{asst_id}.txt',
+ )
+
+ # src = posixpath.join(self.run_dir, '{}/{}'.format(asst_id, 'pelicun_log.txt')),
+ # dst = posixpath.join(self.run_dir, 'pelicun_log_{}.txt'.format(asst_id)))
+ except: # noqa: S110, E722
+ pass
+
else:
-
if AIM_file_path is not None:
workflow_app.defaults['filenameDL'] = AIM_file_path
- #for input_var in workflow_app.inputs:
+ # for input_var in workflow_app.inputs:
# if input_var['id'] == 'filenameDL':
# input_var['default'] = AIM_file_path
-
+
command_list = self.workflow_apps['DL'].get_command_list(
- app_path=self.app_dir_local)
-
- command_list.append('--dirnameOutput')
+ app_path=self.app_dir_local
+ )
+
+ command_list.append('--dirnameOutput')
# Only add asset id if we are running a regional assessment
- if asst_id != None:
+ if asst_id != None: # noqa: E711
command_list.append(f'{aimDir}/{asst_id}')
else:
command_list.append(f'{aimDir}')
-
+
if copy_resources:
command_list.append('--resource_dir')
command_list.append(self.working_dir)
-
+
command = create_command(command_list)
-
- log_msg('Damage and loss assessment command (2):',
- prepend_timestamp=False)
- log_msg('\n{}\n'.format(command), prepend_timestamp=False,
- prepend_blank_space=False)
-
- result, returncode = run_command(command)
-
+
+ log_msg(
+ 'Damage and loss assessment command (2):',
+ prepend_timestamp=False,
+ )
+ log_msg(
+ f'\n{command}\n',
+ prepend_timestamp=False,
+ prepend_blank_space=False,
+ )
+
+ result, returncode = run_command(command) # noqa: F841
+
log_msg(result, prepend_timestamp=False)
-
+
# if multiple buildings are analyzed, copy the pelicun_log file to the root dir
if 'Building' in self.app_type_list:
-
- try:
+ try: # noqa: SIM105
shutil.copy(
- src = self.run_dir / f'{asst_id}/{"pelicun_log.txt"}',
- dst = self.run_dir / f'pelicun_log_{asst_id}.txt')
- #src = posixpath.join(self.run_dir, '{}/{}'.format(asst_id, 'pelicun_log.txt')),
- #dst = posixpath.join(self.run_dir, 'pelicun_log_{}.txt'.format(asst_id)))
- except:
+ src=self.run_dir / f'{asst_id}/{"pelicun_log.txt"}',
+ dst=self.run_dir / f'pelicun_log_{asst_id}.txt',
+ )
+ # src = posixpath.join(self.run_dir, '{}/{}'.format(asst_id, 'pelicun_log.txt')),
+ # dst = posixpath.join(self.run_dir, 'pelicun_log_{}.txt'.format(asst_id)))
+ except: # noqa: S110, E722
pass
# Remove the copied AIM since it is not used anymore
try:
dst = posixpath.join(aimDir, f'{asst_id}/{aimFileName}')
- os.remove(dst)
- except:
+ os.remove(dst) # noqa: PTH107
+ except: # noqa: S110, E722
pass
- log_msg('Damage and loss assessment finished successfully.',
- prepend_timestamp=False)
+ log_msg(
+ 'Damage and loss assessment finished successfully.',
+ prepend_timestamp=False,
+ )
log_div()
else:
log_msg('No DL requested, loss assessment step is skipped.')
            # Only regional simulations send in an asst id
- if asst_id != None:
-
- EDP_df = pd.read_csv('response.csv', header=0, index_col=0)
+ if asst_id != None: # noqa: E711
+ EDP_df = pd.read_csv('response.csv', header=0, index_col=0) # noqa: N806
col_info = []
for col in EDP_df.columns:
try:
# KZ: 10/19/2022, patches for masking dummy edps (TODO: this part could be optimized)
- if col in ['dummy']:
- col_info.append(['dummy','1','1'])
+ if col == 'dummy':
+ col_info.append(['dummy', '1', '1'])
continue
split_col = col.split('-')
- if len(split_col[1]) == 3:
+ if len(split_col[1]) == 3: # noqa: PLR2004
col_info.append(split_col[1:])
- except:
+ except: # noqa: S112, E722
continue
col_info = np.transpose(col_info)
- EDP_types = np.unique(col_info[0])
- EDP_locs = np.unique(col_info[1])
- EDP_dirs = np.unique(col_info[2])
+ EDP_types = np.unique(col_info[0]) # noqa: N806
+ EDP_locs = np.unique(col_info[1]) # noqa: N806
+ EDP_dirs = np.unique(col_info[2]) # noqa: N806
- MI = pd.MultiIndex.from_product(
+ MI = pd.MultiIndex.from_product( # noqa: N806
[EDP_types, EDP_locs, EDP_dirs, ['median', 'beta']],
- names=['type', 'loc', 'dir', 'stat'])
-
- df_res = pd.DataFrame(columns=MI, index=[0, ])
+ names=['type', 'loc', 'dir', 'stat'],
+ )
+
+ df_res = pd.DataFrame(
+ columns=MI,
+ index=[
+ 0,
+ ],
+ )
if ('PID', '0') in df_res.columns:
del df_res[('PID', '0')]
@@ -2496,15 +2736,21 @@ def estimate_losses(self, AIM_file_path = 'AIM.json', asst_id = None,
for col in np.transpose(col_info):
# KZ: 10/19/2022, patches for masking dummy edps (TODO: this part could be optimized)
if 'dummy' in col:
- df_res.loc[0, (col[0], col[1], col[2], 'median')] = EDP_df['dummy'].median()
- df_res.loc[0, (col[0], col[1], col[2], 'beta')] = np.log(EDP_df['dummy']).std()
+ df_res.loc[0, (col[0], col[1], col[2], 'median')] = EDP_df[
+ 'dummy'
+ ].median()
+ df_res.loc[0, (col[0], col[1], col[2], 'beta')] = np.log(
+ EDP_df['dummy']
+ ).std()
continue
df_res.loc[0, (col[0], col[1], col[2], 'median')] = EDP_df[
- '1-{}-{}-{}'.format(col[0], col[1], col[2])].median()
+ f'1-{col[0]}-{col[1]}-{col[2]}'
+ ].median()
df_res.loc[0, (col[0], col[1], col[2], 'beta')] = np.log(
- EDP_df['1-{}-{}-{}'.format(col[0], col[1], col[2])]).std()
+ EDP_df[f'1-{col[0]}-{col[1]}-{col[2]}']
+ ).std()
- df_res.dropna(axis=1, how='all', inplace=True)
+ df_res.dropna(axis=1, how='all', inplace=True) # noqa: PD002
df_res = df_res.astype(float)
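+            # With no DL app, the EDP results are still summarized per
+            # type/loc/dir as 'median' and 'beta' (std of the log values).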
@@ -2513,78 +2759,82 @@ def estimate_losses(self, AIM_file_path = 'AIM.json', asst_id = None,
log_div()
-
- def estimate_performance(self,
- AIM_file_path = 'AIM.json',
- asst_id = None,
- asset_type = None,
- input_file = None,
- copy_resources=False) :
-
- if 'Performance' not in self.workflow_apps.keys():
- log_msg('No performance assessment requested, performance assessment step is skipped.')
+ def estimate_performance( # noqa: D102
+ self,
+ AIM_file_path='AIM.json', # noqa: N803
+ asst_id=None,
+ asset_type=None, # noqa: ARG002
+ input_file=None, # noqa: ARG002
+ copy_resources=False, # noqa: FBT002, ARG002
+ ):
+ if 'Performance' not in self.workflow_apps.keys(): # noqa: SIM118
+ log_msg(
+ 'No performance assessment requested, performance assessment step is skipped.'
+ )
log_div()
- return
-
+ return
+
log_msg('Running performance assessment')
-
+
# Get the directory to the asset class dir, e.g., buildings
- aimDir = os.path.dirname(AIM_file_path)
- aimFileName = os.path.basename(AIM_file_path)
-
+ aimDir = os.path.dirname(AIM_file_path) # noqa: PTH120, N806
+ aimFileName = os.path.basename(AIM_file_path) # noqa: PTH119, N806
+
# If the path is not provided, assume the AIM file is in the run dir
- if os.path.exists(aimDir) == False :
- aimDir = self.run_dir
- aimFileName = AIM_file_path
+ if os.path.exists(aimDir) == False: # noqa: PTH110, E712
+ aimDir = self.run_dir # noqa: N806
+ aimFileName = AIM_file_path # noqa: N806, F841
os.chdir(aimDir)
workflow_app = self.workflow_apps['Performance']
command_list = workflow_app.get_command_list(app_path=self.app_dir_local)
-
- command_list.append('--dirnameOutput')
-
+
+ command_list.append('--dirnameOutput')
+
# Only add asset id if we are running a regional assessment
- if asst_id != None:
+ if asst_id != None: # noqa: E711
command_list.append(f'{aimDir}/{asst_id}')
else:
command_list.append(f'{aimDir}')
command = create_command(command_list)
- log_msg('Performance assessment command:',
- prepend_timestamp=False)
- log_msg('\n{}\n'.format(command), prepend_timestamp=False,
- prepend_blank_space=False)
+ log_msg('Performance assessment command:', prepend_timestamp=False)
+ log_msg(
+ f'\n{command}\n',
+ prepend_timestamp=False,
+ prepend_blank_space=False,
+ )
- result, returncode = run_command(command)
+ result, returncode = run_command(command) # noqa: F841
log_msg(result, prepend_timestamp=False)
- log_msg('Performance assessment finished.',
- prepend_timestamp=False)
+ log_msg('Performance assessment finished.', prepend_timestamp=False)
log_div()
-
- def aggregate_results(self, asst_data, asset_type = '',
-
- #out_types = ['IM', 'BIM', 'EDP', 'DM', 'DV', 'every_realization'],
- out_types = ['AIM', 'EDP', 'DMG', 'DV', 'every_realization'],
- headers = None):
- """
- Short description
+ def aggregate_results( # noqa: C901, PLR0912, PLR0914, PLR0915
+ self,
+ asst_data,
+ asset_type='',
+ # out_types = ['IM', 'BIM', 'EDP', 'DM', 'DV', 'every_realization'],
+ out_types=['AIM', 'EDP', 'DMG', 'DV', 'every_realization'], # noqa: B006
+ headers=None,
+ ):
+ """Short description
Longer description
Parameters
----------
- """
- log_msg('Collecting '+asset_type+' damage and loss results')
+ """ # noqa: D400, D414
+ log_msg('Collecting ' + asset_type + ' damage and loss results')
- R2D_res_out_types = []
- with open(self.input_file, 'r') as f:
+ R2D_res_out_types = [] # noqa: N806
+ with open(self.input_file) as f: # noqa: PLW1514, PTH123
input_data = json.load(f)
requested_output = input_data['outputs']
for key, item in requested_output.items():
@@ -2592,112 +2842,119 @@ def aggregate_results(self, asst_data, asset_type = '',
R2D_res_out_types.append(key)
run_path = self.run_dir
-
- if asset_type != '' :
- run_path = posixpath.join(run_path,asset_type)
-
+
+ if asset_type != '': # noqa: PLC1901
+ run_path = posixpath.join(run_path, asset_type)
+
os.chdir(run_path)
-
- min_id = min([int(x['id']) for x in asst_data]) #min_id = int(asst_data[0]['id'])
- max_id = max([int(x['id']) for x in asst_data]) #max_id = int(asst_data[0]['id'])
+
+ min_id = min(
+ [int(x['id']) for x in asst_data] # noqa: C419
+ ) # min_id = int(asst_data[0]['id'])
+ max_id = max(
+ [int(x['id']) for x in asst_data] # noqa: C419
+ ) # max_id = int(asst_data[0]['id'])
#
- # TODO: ugly, ugly, I know.
+ # TODO: ugly, ugly, I know. # noqa: TD002
# Only temporary solution while we have both Pelicuns in parallel
# FMK - bug fix adding check on DL, not in siteResponse input file
#
-
- if 'DL' in self.workflow_apps and self.workflow_apps['DL'][asset_type].name == 'Pelicun3':
+
+ if ( # noqa: PLR1702
+ 'DL' in self.workflow_apps
+ and self.workflow_apps['DL'][asset_type].name == 'Pelicun3'
+ ):
initialize_dicts = True
for a_i, asst in enumerate(asst_data):
-
- bldg_dir = Path(os.path.dirname(asst_data[a_i]['file'])).resolve()
+ bldg_dir = Path(os.path.dirname(asst_data[a_i]['file'])).resolve() # noqa: PTH120
main_dir = bldg_dir
- assetTypeHierarchy = [bldg_dir.name]
+ assetTypeHierarchy = [bldg_dir.name] # noqa: N806
while main_dir.parent.name != 'Results':
main_dir = bldg_dir.parent
- assetTypeHierarchy = [main_dir.name] + assetTypeHierarchy
+ assetTypeHierarchy = [main_dir.name] + assetTypeHierarchy # noqa: N806, RUF005
asset_id = asst['id']
- asset_dir = bldg_dir/asset_id
+ asset_dir = bldg_dir / asset_id
# always get the AIM info
- AIM_file = None
+ AIM_file = None # noqa: N806
- if f"{asset_id}-AIM_ap.json" in os.listdir(asset_dir):
- AIM_file = asset_dir / f"{asset_id}-AIM_ap.json"
+ if f'{asset_id}-AIM_ap.json' in os.listdir(asset_dir):
+ AIM_file = asset_dir / f'{asset_id}-AIM_ap.json' # noqa: N806
- elif f"{asset_id}-AIM.json" in os.listdir(asset_dir):
- AIM_file = asset_dir / f"{asset_id}-AIM.json"
+ elif f'{asset_id}-AIM.json' in os.listdir(asset_dir):
+ AIM_file = asset_dir / f'{asset_id}-AIM.json' # noqa: N806
else:
# skip this asset if there is no AIM file available
show_warning(
- f"Couldn't find AIM file for {assetTypeHierarchy[-1]} {asset_id}")
+ f"Couldn't find AIM file for {assetTypeHierarchy[-1]} {asset_id}"
+ )
continue
- with open(AIM_file, 'r', encoding="utf-8") as f:
- AIM_data_i = json.load(f)
+ with open(AIM_file, encoding='utf-8') as f: # noqa: PTH123
+ AIM_data_i = json.load(f) # noqa: N806
- sample_size = AIM_data_i['Applications']['DL']['ApplicationData']['Realizations']
+ sample_size = AIM_data_i['Applications']['DL']['ApplicationData'][
+ 'Realizations'
+ ]
# initialize the output dict if this is the first asset
if initialize_dicts:
-
# We assume all assets have the same output sample size
# Variable sample size doesn't seem to make sense
- realizations = {rlz_i:{asset_type:{}}\
- for rlz_i in range(sample_size)}
+ realizations = {
+ rlz_i: {asset_type: {}} for rlz_i in range(sample_size)
+ }
# We also create a dict to collect deterministic info, i.e.,
# data that is identical for all realizations
deterministic = {asset_type: {}}
initialize_dicts = False
- # Check if the asset type hierarchy exist in deterministic and
+                # Check if the asset type hierarchy exists in deterministic and
# realizations. Create a hierarchy if it doesn't exist.
deter_pointer = deterministic
- rlzn_pointer = {rlz_i:realizations[rlz_i]\
- for rlz_i in range(sample_size)}
- for assetTypeIter in assetTypeHierarchy:
- if assetTypeIter not in deter_pointer.keys():
+ rlzn_pointer = {
+ rlz_i: realizations[rlz_i] for rlz_i in range(sample_size)
+ }
+ for assetTypeIter in assetTypeHierarchy: # noqa: N806
+ if assetTypeIter not in deter_pointer.keys(): # noqa: SIM118
deter_pointer.update({assetTypeIter: {}})
deter_pointer = deter_pointer[assetTypeIter]
for rlz_i in range(sample_size):
- if assetTypeIter not in rlzn_pointer[rlz_i].keys():
+ if assetTypeIter not in rlzn_pointer[rlz_i].keys(): # noqa: SIM118
rlzn_pointer[rlz_i].update({assetTypeIter: {}})
rlzn_pointer[rlz_i] = rlzn_pointer[rlz_i][assetTypeIter]
- # Currently, all GI data is deterministic
- GI_data_i_det = AIM_data_i['GeneralInformation']
-
- # TODO: later update this to handle probabilistic GI attributes
- GI_data_i_prob = {}
+ # Currently, all GI data is deterministic
+ GI_data_i_det = AIM_data_i['GeneralInformation'] # noqa: N806
+
+ # TODO: later update this to handle probabilistic GI attributes # noqa: TD002
+ GI_data_i_prob = {} # noqa: N806
for rlz_i in range(sample_size):
rlzn_pointer[rlz_i].update(
- {asset_id:{'GeneralInformation':GI_data_i_prob}})
-
+ {asset_id: {'GeneralInformation': GI_data_i_prob}}
+ )
- deter_pointer.update({asset_id:
- {'GeneralInformation':GI_data_i_det}})
- deter_pointer[asset_id].update({
- "R2Dres":{}
- })
+ deter_pointer.update(
+ {asset_id: {'GeneralInformation': GI_data_i_det}}
+ )
+ deter_pointer[asset_id].update({'R2Dres': {}})
if 'EDP' in out_types:
-
edp_out_file_i = 'DEM_sample.json'
if edp_out_file_i not in os.listdir(asset_dir):
-
show_warning(
- f"Couldn't find EDP file for {assetTypeHierarchy[-1]} {asset_id}")
+ f"Couldn't find EDP file for {assetTypeHierarchy[-1]} {asset_id}"
+ )
else:
-
- with open(asset_dir/edp_out_file_i, 'r', encoding="utf-8") as f:
+ with open(asset_dir / edp_out_file_i, encoding='utf-8') as f: # noqa: PTH123
edp_data_i = json.load(f)
# remove the ONE demand
@@ -2706,28 +2963,30 @@ def aggregate_results(self, asst_data, asset_type = '',
# extract EDP unit info
edp_units = edp_data_i['Units']
del edp_data_i['Units']
-
+
# parse the demand data into a DataFrame
# we assume demands are stored in JSON with a SimpleIndex
edp_data_i = pd.DataFrame(edp_data_i)
# convert to a realization-by-realization format
edp_output = {
- int(rlz_i):{col:float(edp_data_i.loc[rlz_i,col])
- for col in edp_data_i.columns
- }
+ int(rlz_i): {
+ col: float(edp_data_i.loc[rlz_i, col])
+ for col in edp_data_i.columns
+ }
for rlz_i in edp_data_i.index
}
# save the EDP intensities in each realization
for rlz_i in range(sample_size):
rlzn_pointer[rlz_i][asset_id].update(
- {'Demand':edp_output[rlz_i]})
+ {'Demand': edp_output[rlz_i]}
+ )
# save the EDP units
- deter_pointer[asset_id].update({
- "Demand": {"Units": edp_units}
- })
+ deter_pointer[asset_id].update(
+ {'Demand': {'Units': edp_units}}
+ )
if 'EDP' in R2D_res_out_types:
pass
# meanValues = edp_data_i.mean()
@@ -2744,20 +3003,18 @@ def aggregate_results(self, asst_data, asset_type = '',
# "R2Dres":r2d_res_i
# })
if 'DMG' in out_types:
-
dmg_out_file_i = 'DMG_grp.json'
if dmg_out_file_i not in os.listdir(asset_dir):
-
show_warning(
- f"Couldn't find DMG file for {assetTypeHierarchy[-1]} {asset_id}")
+ f"Couldn't find DMG file for {assetTypeHierarchy[-1]} {asset_id}"
+ )
else:
-
- with open(asset_dir/dmg_out_file_i, 'r', encoding="utf-8") as f:
+ with open(asset_dir / dmg_out_file_i, encoding='utf-8') as f: # noqa: PTH123
dmg_data_i = json.load(f)
- # remove damage unit info
+ # remove damage unit info
del dmg_data_i['Units']
# parse damage data into a DataFrame
@@ -2766,53 +3023,55 @@ def aggregate_results(self, asst_data, asset_type = '',
# convert to realization-by-realization format
dmg_output = {}
for rlz_i in dmg_data_i.index:
-
rlz_output = {}
for col in dmg_data_i.columns:
+ if not pd.isna(dmg_data_i.loc[rlz_i, col]):
+ rlz_output.update(
+ {col: int(dmg_data_i.loc[rlz_i, col])}
+ )
- if not pd.isna(dmg_data_i.loc[rlz_i,col]):
- rlz_output.update({col: int(dmg_data_i.loc[rlz_i,col])})
-
- dmg_output.update({rlz_i: rlz_output})
+ dmg_output.update({rlz_i: rlz_output})
# we assume that damage information is condensed
- #TODO: implement condense_ds flag in DL_calc
+ # TODO: implement condense_ds flag in DL_calc # noqa: TD002
for rlz_i in range(sample_size):
rlzn_pointer[rlz_i][asset_id].update(
- {'Damage':dmg_output[rlz_i]})
+ {'Damage': dmg_output[rlz_i]}
+ )
if 'DM' in R2D_res_out_types:
# use forward fill in case of multiple modes
- meanValues = dmg_data_i.mode().ffill().mean()
- stdValues = dmg_data_i.std()
- r2d_res_dmg = dict()
+ meanValues = dmg_data_i.mode().ffill().mean() # noqa: N806, F841
+ stdValues = dmg_data_i.std() # noqa: N806, F841
+ r2d_res_dmg = dict() # noqa: C408
# for key in dmg_data_i.columns:
# meanKey = f'R2Dres_mode_{key}'
# stdKey = f'R2Dres_std_{key}'
# r2d_res_dmg.update({meanKey:meanValues[key],\
# stdKey:stdValues[key]})
- r2d_res_dmg.update({
- "R2Dres_MostLikelyCriticalDamageState":\
- dmg_data_i.max(axis = 1).mode().mean()})
- r2d_res_i = deter_pointer[asset_id].get('R2Dres', {})
+ r2d_res_dmg.update(
+ {
+ 'R2Dres_MostLikelyCriticalDamageState': dmg_data_i.max(
+ axis=1
+ )
+ .mode()
+ .mean()
+ }
+ )
+ r2d_res_i = deter_pointer[asset_id].get('R2Dres', {})
r2d_res_i.update(r2d_res_dmg)
- deter_pointer[asset_id].update({
- "R2Dres":r2d_res_i
- })
-
+ deter_pointer[asset_id].update({'R2Dres': r2d_res_i})
if 'DV' in out_types:
-
dv_out_file_i = 'DV_repair_grp.json'
if dv_out_file_i not in os.listdir(asset_dir):
-
show_warning(
- f"Couldn't find DV file for {assetTypeHierarchy[-1]} {asset_id}")
+ f"Couldn't find DV file for {assetTypeHierarchy[-1]} {asset_id}"
+ )
else:
-
- with open(asset_dir/dv_out_file_i, 'r', encoding="utf-8") as f:
+ with open(asset_dir / dv_out_file_i, encoding='utf-8') as f: # noqa: PTH123
dv_data_i = json.load(f)
# extract DV unit info
@@ -2821,343 +3080,433 @@ def aggregate_results(self, asst_data, asset_type = '',
# parse decision variable data into a DataFrame
dv_data_i = pd.DataFrame(dv_data_i)
-
+
# get a list of dv types
dv_types = np.unique(
- [col.split('-')[0] for col in dv_data_i.columns])
+ [col.split('-')[0] for col in dv_data_i.columns]
+ )
# convert to realization-by-realization format
dv_output = {
- int(rlz_i):{
- dv_type:{
- col[len(dv_type)+1:]:
- float(dv_data_i.loc[rlz_i,col])
- for col in dv_data_i.columns
+ int(rlz_i): {
+ dv_type: {
+ col[len(dv_type) + 1 :]: float(
+ dv_data_i.loc[rlz_i, col]
+ )
+ for col in dv_data_i.columns
if col.startswith(dv_type)
- }
+ }
for dv_type in dv_types
- }
+ }
for rlz_i in dv_data_i.index
}
# save loss data
for rlz_i in range(sample_size):
rlzn_pointer[rlz_i][asset_id].update(
- {'Loss':{'Repair':dv_output[rlz_i]}})
+ {'Loss': {'Repair': dv_output[rlz_i]}}
+ )
# save DV units
- deter_pointer[asset_id].update({
- "Loss": {"Units": dv_units}
- })
-
+ deter_pointer[asset_id].update({'Loss': {'Units': dv_units}})
+
if 'DV' in R2D_res_out_types:
- r2d_res_dv = dict()
- cost_columns = [col for col in dv_data_i.columns if col.startswith('Cost')]
- if len(cost_columns) !=0:
+ r2d_res_dv = dict() # noqa: C408
+ cost_columns = [
+ col
+ for col in dv_data_i.columns
+ if col.startswith('Cost')
+ ]
+ if len(cost_columns) != 0:
cost_data = dv_data_i[cost_columns].mean()
cost_data_std = dv_data_i[cost_columns].std()
cost_key = cost_data.idxmax()
- meanKey = f'R2Dres_mean_RepairCost_{dv_units[cost_key]}'
- stdKey = f'R2Dres_std_RepairCost_{dv_units[cost_key]}'
- r2d_res_dv.update({meanKey:cost_data[cost_key],\
- stdKey:cost_data_std[cost_key]})
- time_columns = [col for col in dv_data_i.columns if col.startswith('Time')]
- if len(time_columns) !=0:
+ meanKey = ( # noqa: N806
+ f'R2Dres_mean_RepairCost_{dv_units[cost_key]}'
+ )
+ stdKey = ( # noqa: N806
+ f'R2Dres_std_RepairCost_{dv_units[cost_key]}'
+ )
+ r2d_res_dv.update(
+ {
+ meanKey: cost_data[cost_key],
+ stdKey: cost_data_std[cost_key],
+ }
+ )
+ time_columns = [
+ col
+ for col in dv_data_i.columns
+ if col.startswith('Time')
+ ]
+ if len(time_columns) != 0:
time_data = dv_data_i[time_columns].mean()
time_data_std = dv_data_i[time_columns].std()
time_key = time_data.idxmax()
- meanKey = f'R2Dres_mean_RepairTime_{dv_units[time_key]}'
- stdKey = f'R2Dres_std_RepairTime_{dv_units[time_key]}'
- r2d_res_dv.update({meanKey:time_data[time_key],\
- stdKey:time_data_std[time_key]})
-
- r2d_res_i = deter_pointer[asset_id].get('R2Dres', {})
+ meanKey = ( # noqa: N806
+ f'R2Dres_mean_RepairTime_{dv_units[time_key]}'
+ )
+ stdKey = ( # noqa: N806
+ f'R2Dres_std_RepairTime_{dv_units[time_key]}'
+ )
+ r2d_res_dv.update(
+ {
+ meanKey: time_data[time_key],
+ stdKey: time_data_std[time_key],
+ }
+ )
+
+ r2d_res_i = deter_pointer[asset_id].get('R2Dres', {})
r2d_res_i.update(r2d_res_dv)
- deter_pointer[asset_id].update({
- "R2Dres":r2d_res_i
- })
+ deter_pointer[asset_id].update({'R2Dres': r2d_res_i})
- # This is also ugly but necessary for backward compatibility so that
+ # This is also ugly but necessary for backward compatibility so that
# file structure created from apps other than GeoJSON_TO_ASSET can be
# dealt with
if len(assetTypeHierarchy) == 1:
- if assetTypeHierarchy[0] == "Buildings":
- deterministic = {"Buildings":{"Building":deterministic["Buildings"]}}
- for rlz_i in realizations.keys():
- realizations[rlz_i] = {"Buildings":{"Building":realizations[rlz_i]["Buildings"]}}
+ if assetTypeHierarchy[0] == 'Buildings':
+ deterministic = {
+ 'Buildings': {'Building': deterministic['Buildings']}
+ }
+ for rlz_i in realizations:
+ realizations[rlz_i] = {
+ 'Buildings': {
+ 'Building': realizations[rlz_i]['Buildings']
+ }
+ }
else:
deterministic = {assetTypeHierarchy[0]: deterministic}
- for rlz_i in realizations.keys():
- realizations[rlz_i] = {assetTypeHierarchy[0]:realizations[rlz_i]}
+ for rlz_i in realizations: # noqa: PLC0206
+ realizations[rlz_i] = {
+ assetTypeHierarchy[0]: realizations[rlz_i]
+ }
# save outputs to JSON files
for rlz_i, rlz_data in realizations.items():
-
- with open(main_dir/f"{asset_type}_{rlz_i}.json", 'w', encoding="utf-8") as f:
+ with open( # noqa: PTH123
+ main_dir / f'{asset_type}_{rlz_i}.json', 'w', encoding='utf-8'
+ ) as f:
json.dump(rlz_data, f, indent=2)
- with open(main_dir/f"{asset_type}_det.json", 'w', encoding="utf-8") as f:
+ with open( # noqa: PTH123
+ main_dir / f'{asset_type}_det.json', 'w', encoding='utf-8'
+ ) as f:
json.dump(deterministic, f, indent=2)
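+            # One {asset_type}_{rlz}.json is written per realization, plus a
+            # single {asset_type}_det.json holding the deterministic data.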
else:
# This is legacy for Pelicun 2 runs
out_types = ['IM', 'BIM', 'EDP', 'DM', 'DV', 'every_realization']
- if headers is None :
- headers = dict(
- IM = [0, 1, 2, 3],
- AIM = [0, ],
- EDP = [0, 1, 2, 3],
- DM = [0, 1, 2],
- DV = [0, 1, 2, 3])
+ if headers is None:
+ headers = dict( # noqa: C408
+ IM=[0, 1, 2, 3],
+ AIM=[
+ 0,
+ ],
+ EDP=[0, 1, 2, 3],
+ DM=[0, 1, 2],
+ DV=[0, 1, 2, 3],
+ )
for out_type in out_types:
- if ((self.output_types is None) or
- (self.output_types.get(out_type, False))):
-
+ if (self.output_types is None) or (
+ self.output_types.get(out_type, False)
+ ):
if out_type == 'every_realization':
+ realizations_EDP = None # noqa: N806
+ realizations_DL = None # noqa: N806
- realizations_EDP = None
- realizations_DL = None
-
- for asst in asst_data:
-
- print("ASSET", asst);
-
- asst_file = asst['file']
-
- # Get the folder containing the results
- aimDir = os.path.dirname(asst_file)
-
- asst_id = asst['id']
- min_id = min(int(asst_id), min_id)
- max_id = max(int(asst_id), max_id)
-
- # save all EDP realizations
-
- df_i = pd.read_csv(aimDir+'/'+asst_id+'/response.csv', header=0, index_col=0)
-
- if realizations_EDP == None:
- realizations_EDP = dict([(col, []) for col in df_i.columns])
-
- for col in df_i.columns:
- vals = df_i.loc[:,col].to_frame().T
- vals.index = [asst_id,]
- realizations_EDP[col].append(vals)
-
- # If damage and loss assessment is part of the workflow
- # then save the DL outputs too
- if 'DL' in self.workflow_apps.keys():
-
- try:
- #if True:
- df_i = pd.read_csv(aimDir+'/'+asst_id+f'/DL_summary.csv',
- header=0, index_col=0)
-
- if realizations_DL == None:
- realizations_DL = dict([(col, []) for col in df_i.columns])
-
- for col in df_i.columns:
- vals = df_i.loc[:,col].to_frame().T
- vals.index = [asst_id,]
- realizations_DL[col].append(vals)
-
- except:
- log_msg(f'Error reading DL realization data for asset {asset_type} {asst_id}',
- prepend_timestamp=False)
-
- for d_type in realizations_EDP.keys():
- d_agg = pd.concat(realizations_EDP[d_type], axis=0, sort=False)
+ for asst in asst_data:
+ print('ASSET', asst) # noqa: T201
+ asst_file = asst['file']
+
+ # Get the folder containing the results
+ aimDir = os.path.dirname(asst_file) # noqa: PTH120, N806
+
+ asst_id = asst['id']
+ min_id = min(int(asst_id), min_id)
+ max_id = max(int(asst_id), max_id)
+
+ # save all EDP realizations
+
+ df_i = pd.read_csv(
+ aimDir + '/' + asst_id + '/response.csv',
+ header=0,
+ index_col=0,
+ )
+
+ if realizations_EDP == None: # noqa: E711
+ realizations_EDP = dict( # noqa: C404, N806
+ [(col, []) for col in df_i.columns]
+ )
+
+ for col in df_i.columns:
+ vals = df_i.loc[:, col].to_frame().T
+ vals.index = [
+ asst_id,
+ ]
+ realizations_EDP[col].append(vals)
+
+ # If damage and loss assessment is part of the workflow
+ # then save the DL outputs too
+ if 'DL' in self.workflow_apps.keys(): # noqa: SIM118
+ try:
+ # if True:
+ df_i = pd.read_csv(
+ aimDir + '/' + asst_id + '/DL_summary.csv',
+ header=0,
+ index_col=0,
+ )
+
+ if realizations_DL == None: # noqa: E711
+ realizations_DL = dict( # noqa: C404, N806
+ [(col, []) for col in df_i.columns]
+ )
+
+ for col in df_i.columns:
+ vals = df_i.loc[:, col].to_frame().T
+ vals.index = [
+ asst_id,
+ ]
+ realizations_DL[col].append(vals)
+
+ except: # noqa: E722
+ log_msg(
+ f'Error reading DL realization data for asset {asset_type} {asst_id}',
+ prepend_timestamp=False,
+ )
+
+ for d_type in realizations_EDP.keys(): # noqa: SIM118
+ d_agg = pd.concat(
+ realizations_EDP[d_type], axis=0, sort=False
+ )
+
+ with warnings.catch_warnings():
+ warnings.simplefilter(action='ignore')
+
+ d_agg.to_hdf(
+ f'realizations_{min_id}-{max_id}.hdf',
+ f'EDP-{d_type}',
+ mode='a',
+ format='fixed',
+ )
+
+ if 'DL' in self.workflow_apps.keys(): # noqa: SIM118
+ for d_type in realizations_DL.keys(): # noqa: SIM118
+ d_agg = pd.concat(
+ realizations_DL[d_type], axis=0, sort=False
+ )
+ # d_agg.sort_index(axis=0, inplace=True)
with warnings.catch_warnings():
warnings.simplefilter(action='ignore')
- d_agg.to_hdf(f'realizations_{min_id}-{max_id}.hdf', f'EDP-{d_type}', mode='a', format='fixed')
-
- if 'DL' in self.workflow_apps.keys():
- for d_type in realizations_DL.keys():
- d_agg = pd.concat(realizations_DL[d_type], axis=0, sort=False)
- #d_agg.sort_index(axis=0, inplace=True)
-
- with warnings.catch_warnings():
- warnings.simplefilter(action='ignore')
-
- d_agg.to_hdf(f'realizations_{min_id}-{max_id}.hdf', f'DL-{d_type}', mode='a', format='fixed')
-
-
+ d_agg.to_hdf(
+ f'realizations_{min_id}-{max_id}.hdf',
+ f'DL-{d_type}',
+ mode='a',
+ format='fixed',
+ )
else:
-
out_list = []
- count = 0;
+ count = 0
for asst in asst_data:
-
if count % self.numP == self.procID:
-
- print("ASSET", self.procID, self.numP, asst['file']);
-
+ print('ASSET', self.procID, self.numP, asst['file']) # noqa: T201
asst_file = asst['file']
-
+
# Get the folder containing the results
- aimDir = os.path.dirname(asst_file)
-
+ aimDir = os.path.dirname(asst_file) # noqa: PTH120, N806
+
asst_id = asst['id']
min_id = min(int(asst_id), min_id)
max_id = max(int(asst_id), max_id)
try:
- #if True:
-
- csvPath = aimDir+'/'+asst_id+f'/{out_type}.csv'
-
+ # if True:
+
+ csvPath = ( # noqa: N806
+ aimDir + '/' + asst_id + f'/{out_type}.csv'
+ )
+
# EDP data
- df_i = pd.read_csv(csvPath, header=headers[out_type], index_col=0)
-
- df_i.index = [asst_id,]
-
+ df_i = pd.read_csv(
+ csvPath,
+ header=headers[out_type],
+ index_col=0,
+ )
+
+ df_i.index = [
+ asst_id,
+ ]
+
out_list.append(df_i)
-
- except:
- log_msg(f'Error reading {out_type} data for asset {asset_type} {asst_id}', prepend_timestamp=False)
- # increment counter
- count = count + 1
+ except: # noqa: E722
+ log_msg(
+ f'Error reading {out_type} data for asset {asset_type} {asst_id}',
+ prepend_timestamp=False,
+ )
+ # increment counter
+ count = count + 1 # noqa: PLR6104
# save the collected DataFrames as csv files
if self.procID == 0:
- outPath = posixpath.join(run_path,f'{out_type}.csv')
+ outPath = posixpath.join(run_path, f'{out_type}.csv') # noqa: N806
else:
- outPath = posixpath.join(run_path,f'{out_type}_tmp_{self.procID}.csv')
+ outPath = posixpath.join( # noqa: N806
+ run_path, f'{out_type}_tmp_{self.procID}.csv'
+ )
- # if not P0 output file & barrier
+                        # if not P0: write this rank's temp output file, then wait at the barrier
if self.procID != 0:
-
- out_agg = pd.DataFrame() if len(out_list) < 1 else pd.concat(out_list, axis=0, sort=False)
+ out_agg = (
+ pd.DataFrame()
+ if len(out_list) < 1
+ else pd.concat(out_list, axis=0, sort=False)
+ )
out_agg.to_csv(outPath)
self.comm.Barrier()
- else:
-
+ else:
                            # P0: if parallel, barrier, then read the other ranks' files and merge
if self.numP > 1:
-
self.comm.Barrier()
-
+
# fileList = []
- for i in range (1, self.numP):
- fileToAppend = posixpath.join(run_path,f'{out_type}_tmp_{i}.csv')
- #fileList.append(fileToAppend)
- out_list.append(pd.read_csv(fileToAppend, header=headers[out_type], index_col=0))
-
+ for i in range(1, self.numP):
+ fileToAppend = posixpath.join( # noqa: N806
+ run_path, f'{out_type}_tmp_{i}.csv'
+ )
+ # fileList.append(fileToAppend)
+ out_list.append(
+ pd.read_csv(
+ fileToAppend,
+ header=headers[out_type],
+ index_col=0,
+ )
+ )
# write file
- out_agg = pd.DataFrame() if len(out_list) < 1 else pd.concat(out_list, axis=0, sort=False)
+ out_agg = (
+ pd.DataFrame()
+ if len(out_list) < 1
+ else pd.concat(out_list, axis=0, sort=False)
+ )
out_agg.to_csv(outPath)
-
- log_msg('Damage and loss results collected successfully.', prepend_timestamp=False)
+ log_msg(
+ 'Damage and loss results collected successfully.',
+ prepend_timestamp=False,
+ )
log_div()
- def compile_r2d_results_geojson(self, asset_files):
+ def compile_r2d_results_geojson(self, asset_files): # noqa: D102
run_path = self.run_dir
- with open(self.input_file, 'r', encoding="utf-8") as f:
+ with open(self.input_file, encoding='utf-8') as f: # noqa: PTH123
input_data = json.load(f)
- with open(run_path/'Results_det.json', encoding="utf-8") as f:
+ with open(run_path / 'Results_det.json', encoding='utf-8') as f: # noqa: PTH123
res_det = json.load(f)
- metadata = {"Name": input_data["Name"],
- "Units": input_data["units"],
- "Author": input_data["Author"],
- "WorkflowType": input_data["WorkflowType"],
- "Time": datetime.now().strftime('%m-%d-%Y %H:%M:%S')}
- ## create the geojson for R2D visualization
+ metadata = {
+ 'Name': input_data['Name'],
+ 'Units': input_data['units'],
+ 'Author': input_data['Author'],
+ 'WorkflowType': input_data['WorkflowType'],
+ 'Time': datetime.now().strftime('%m-%d-%Y %H:%M:%S'), # noqa: DTZ005
+ }
+ # create the geojson for R2D visualization
geojson_result = {
- "type": "FeatureCollection",
- "crs": {
- "type": "name",
- "properties": {
- "name": "urn:ogc:def:crs:OGC:1.3:CRS84"
- }
+ 'type': 'FeatureCollection',
+ 'crs': {
+ 'type': 'name',
+ 'properties': {'name': 'urn:ogc:def:crs:OGC:1.3:CRS84'},
},
- "metadata":metadata,
- "features":[]
+ 'metadata': metadata,
+ 'features': [],
}
- for asset_type in asset_files.keys():
- for assetSubtype, subtypeResult in res_det[asset_type].items():
- allAssetIds = sorted([int(x) for x in subtypeResult.keys()])
+ for asset_type in asset_files.keys(): # noqa: SIM118
+ for assetSubtype, subtypeResult in res_det[asset_type].items(): # noqa: N806
+ allAssetIds = sorted([int(x) for x in subtypeResult.keys()]) # noqa: SIM118, N806
for asset_id in allAssetIds:
- ft = {"type":"Feature"}
- asst_GI = subtypeResult[str(asset_id)]['GeneralInformation'].copy()
- asst_GI.update({"assetType":asset_type})
+ ft = {'type': 'Feature'}
+ asst_GI = subtypeResult[str(asset_id)][ # noqa: N806
+ 'GeneralInformation'
+ ].copy()
+ asst_GI.update({'assetType': asset_type})
try:
- if "geometry" in asst_GI:
- asst_geom = shapely.wkt.loads(asst_GI["geometry"])
+ if 'geometry' in asst_GI:
+ asst_geom = shapely.wkt.loads(asst_GI['geometry'])
asst_geom = shapely.geometry.mapping(asst_geom)
- asst_GI.pop("geometry")
- elif "Footprint" in asst_GI:
- asst_geom = json.loads(asst_GI["Footprint"])["geometry"]
- asst_GI.pop("Footprint")
+ asst_GI.pop('geometry')
+ elif 'Footprint' in asst_GI:
+ asst_geom = json.loads(asst_GI['Footprint'])['geometry']
+ asst_GI.pop('Footprint')
else:
- #raise ValueError("No valid geometric information in GI.")
+ # raise ValueError("No valid geometric information in GI.")
asst_lat = asst_GI['location']['latitude']
asst_lon = asst_GI['location']['longitude']
- asst_geom = { "type": "Point", "coordinates": [\
- asst_lon, asst_lat]}
- asst_GI.pop("location")
- except:
- warnings.warn(UserWarning(
- f"Geospatial info is missing in {assetSubtype} {asset_id}"))
+ asst_geom = {
+ 'type': 'Point',
+ 'coordinates': [asst_lon, asst_lat],
+ }
+ asst_GI.pop('location')
+ except: # noqa: E722
+ warnings.warn( # noqa: B028
+ UserWarning(
+ f'Geospatial info is missing in {assetSubtype} {asset_id}'
+ )
+ )
continue
- if asst_GI.get("units", None) is not None:
- asst_GI.pop("units")
- ft.update({"geometry":asst_geom})
- ft.update({"properties":asst_GI})
- ft["properties"].update(subtypeResult[str(asset_id)]['R2Dres'])
- geojson_result["features"].append(ft)
- with open(run_path/"R2D_results.geojson", 'w', encoding="utf-8") as f:
+ if asst_GI.get('units', None) is not None:
+ asst_GI.pop('units')
+ ft.update({'geometry': asst_geom})
+ ft.update({'properties': asst_GI})
+ ft['properties'].update(subtypeResult[str(asset_id)]['R2Dres'])
+ geojson_result['features'].append(ft)
+ with open(run_path / 'R2D_results.geojson', 'w', encoding='utf-8') as f: # noqa: PTH123
json.dump(geojson_result, f, indent=2)
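
compile_r2d_results_geojson therefore emits a FeatureCollection in which every feature carries the asset's GeneralInformation plus its R2Dres summary, falling back to a Point at the recorded location when no geometry or footprint is available. A hand-rolled sketch of one such feature; all attribute values are invented for illustration:

    import json

    metadata = {
        'Name': 'example workflow',   # illustrative values only
        'Units': {'length': 'm'},
        'Author': 'unknown',
        'WorkflowType': 'Regional',
        'Time': '01-01-2024 00:00:00',
    }

    feature = {
        'type': 'Feature',
        # the fallback geometry used when neither 'geometry' nor 'Footprint' is present
        'geometry': {'type': 'Point', 'coordinates': [-122.25, 37.87]},
        'properties': {'assetType': 'Buildings', 'AIM_id': '1'},
    }

    geojson_result = {
        'type': 'FeatureCollection',
        'crs': {'type': 'name', 'properties': {'name': 'urn:ogc:def:crs:OGC:1.3:CRS84'}},
        'metadata': metadata,
        'features': [feature],
    }

    with open('R2D_results_example.geojson', 'w', encoding='utf-8') as f:
        json.dump(geojson_result, f, indent=2)
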
-
- def combine_assets_results(self, asset_files):
+ def combine_assets_results(self, asset_files): # noqa: D102
asset_types = list(asset_files.keys())
for asset_type in asset_types:
if self.workflow_apps['DL'][asset_type].name != 'Pelicun3':
# isPelicun3 = False
asset_files.pop(asset_type)
- if asset_files: # If any asset_type uses Pelicun3 as DL app
- with open(self.input_file, 'r', encoding="utf-8") as f:
+ if asset_files: # If any asset_type uses Pelicun3 as DL app
+ with open(self.input_file, encoding='utf-8') as f: # noqa: PTH123
input_data = json.load(f)
sample_size = []
- for asset_type, assetIt in asset_files.items():
- sample_size.append(input_data['Applications']['DL'][asset_type]\
- ["ApplicationData"]['Realizations'])
+ for asset_type, assetIt in asset_files.items(): # noqa: B007, N806, PERF102
+ sample_size.append(
+ input_data['Applications']['DL'][asset_type]['ApplicationData'][
+ 'Realizations'
+ ]
+ )
sample_size = min(sample_size)
- ## Create the Results_det.json and Results_rlz_i.json for recoverary
+            # Create the Results_det.json and Results_rlz_i.json for recovery
deterministic = {}
- realizations = {rlz_i:{} for rlz_i in range(sample_size)}
- for asset_type in asset_files.keys():
- asset_dir = self.run_dir/asset_type
- determine_file = asset_dir/f"{asset_type}_det.json"
- with open(determine_file, 'r', encoding="utf-8") as f:
+ realizations = {rlz_i: {} for rlz_i in range(sample_size)}
+ for asset_type in asset_files.keys(): # noqa: SIM118
+ asset_dir = self.run_dir / asset_type
+ determine_file = asset_dir / f'{asset_type}_det.json'
+ with open(determine_file, encoding='utf-8') as f: # noqa: PTH123
determ_i = json.load(f)
deterministic.update(determ_i)
for rlz_i in range(sample_size):
- rlz_i_file = asset_dir/f"{asset_type}_{rlz_i}.json"
- with open(rlz_i_file, 'r', encoding="utf-8") as f:
+ rlz_i_file = asset_dir / f'{asset_type}_{rlz_i}.json'
+ with open(rlz_i_file, encoding='utf-8') as f: # noqa: PTH123
rlz_i_i = json.load(f)
realizations[rlz_i].update(rlz_i_i)
-
- determine_file = self.run_dir/"Results_det.json"
- with open (determine_file, 'w', encoding="utf-8") as f:
+
+ determine_file = self.run_dir / 'Results_det.json'
+ with open(determine_file, 'w', encoding='utf-8') as f: # noqa: PTH123
json.dump(deterministic, f, indent=2)
for rlz_i, rlz_data in realizations.items():
- with open(self.run_dir/f"Results_{rlz_i}.json", 'w', encoding="utf-8") as f:
+ with open( # noqa: PTH123
+ self.run_dir / f'Results_{rlz_i}.json', 'w', encoding='utf-8'
+ ) as f:
json.dump(rlz_data, f, indent=2)
else:
pass
# print("Visualizing results of asset types besides buildings is only supported when Pelicun3 is used as the DL for all asset types")
-
-
-
-
-
diff --git a/modules/common/Units.h b/modules/common/Units.h
index 7fe471f47..2956d291a 100644
--- a/modules/common/Units.h
+++ b/modules/common/Units.h
@@ -43,13 +43,13 @@ TimeUnit ParseTimeUnit(const char* timeUnitString);
// This method parses a string to a force enumerator
ForceUnit ParseForceUnit(const char *forceUnit);
-//This method finds the convertion factor from one length unit to another
+//This method finds the conversion factor from one length unit to another
double GetLengthFactor(UnitSystem& fromUnit, UnitSystem& toUnit);
-//This method finds the convertion factor from one time unit to another
+//This method finds the conversion factor from one time unit to another
double GetTimeFactor(UnitSystem& fromUnit, UnitSystem& toUnit);
-//This method finds the convertion factor from one acceleration unit to another
+//This method finds the conversion factor from one acceleration unit to another
double GetAccelerationFactor(UnitSystem& fromUnit, UnitSystem& toUnit);
// This method finds the conversion factor from one force unit to another
diff --git a/modules/common/simcenter_common.py b/modules/common/simcenter_common.py
index 125b6266a..03f3a1054 100644
--- a/modules/common/simcenter_common.py
+++ b/modules/common/simcenter_common.py
@@ -1,5 +1,4 @@
-# -*- coding: utf-8 -*-
-#
+# # noqa: INP001, D100
# Copyright (c) 2018 Leland Stanford Junior University
# Copyright (c) 2018 The Regents of the University of California
#
@@ -41,22 +40,26 @@
import warnings
from datetime import datetime
+
# Monkeypatch warnings to get prettier messages
-def _warning(message, category, filename, lineno, file=None, line=None):
+def _warning(message, category, filename, lineno, file=None, line=None): # noqa: ARG001
if '\\' in filename:
file_path = filename.split('\\')
elif '/' in filename:
file_path = filename.split('/')
python_file = '/'.join(file_path[-3:])
- print('WARNING in {} at line {}\n{}\n'.format(python_file, lineno, message))
+ print(f'WARNING in {python_file} at line {lineno}\n{message}\n') # noqa: T201
+
+
warnings.showwarning = _warning
-def show_warning(warning_msg):
- warnings.warn(UserWarning(warning_msg))
-def log_msg(msg='', prepend_timestamp=True):
- """
- Print a message to the screen with the current time as prefix
+def show_warning(warning_msg): # noqa: D103
+ warnings.warn(UserWarning(warning_msg)) # noqa: B028
+
+
+def log_msg(msg='', prepend_timestamp=True): # noqa: FBT002
+ """Print a message to the screen with the current time as prefix
The time is in ISO-8601 format, e.g. 2018-06-16T20:24:04Z
@@ -65,74 +68,77 @@ def log_msg(msg='', prepend_timestamp=True):
msg: string
Message to print.
- """
+ """ # noqa: D400
if prepend_timestamp:
- formatted_msg = '{} {}'.format(datetime.utcnow().strftime(
- '%Y-%m-%dT%H:%M:%S:%fZ')[:-4], msg)
+ formatted_msg = '{} {}'.format(
+ datetime.utcnow().strftime('%Y-%m-%dT%H:%M:%S:%fZ')[:-4], # noqa: DTZ003
+ msg,
+ )
else:
formatted_msg = msg
- print(formatted_msg)
+ print(formatted_msg) # noqa: T201
if globals().get('log_file', None) is not None:
- with open(globals()['log_file'], 'a') as f:
- f.write('\n'+formatted_msg)
+ with open(globals()['log_file'], 'a') as f: # noqa: PLW1514, PTH123
+ f.write('\n' + formatted_msg)
+
# Constants for unit conversion to standard units
unit_types = {
- 'time' : ['sec', 'minute', 'h', 'day'],
- 'length' : ['m', 'mm', 'cm', 'km', 'inch', 'ft', 'mile'],
- 'area' : ['m2', 'mm2', 'cm2', 'km2', 'inch2', 'ft2', 'mile2'],
- 'volume' : ['m3', 'mm3', 'cm3', 'km3', 'inch3', 'ft3', 'mile3'],
- 'speed' : ['cmps', 'mps', 'mph', 'inchps', 'ftps', 'kph', 'fps', 'kts'],
- 'acceleration' : ['mps2', 'cmps2', 'inchps2', 'ftps2', 'g'],
- 'mass' : ['kg', 'ton', 'lb'],
- 'force' : ['N', 'kN', 'lbf', 'kip', 'kips'],
- 'pressure' : ['Pa', 'kPa', 'MPa', 'GPa', 'psi', 'ksi', 'Mpsi']
+ 'time': ['sec', 'minute', 'h', 'day'],
+ 'length': ['m', 'mm', 'cm', 'km', 'inch', 'ft', 'mile'],
+ 'area': ['m2', 'mm2', 'cm2', 'km2', 'inch2', 'ft2', 'mile2'],
+ 'volume': ['m3', 'mm3', 'cm3', 'km3', 'inch3', 'ft3', 'mile3'],
+ 'speed': ['cmps', 'mps', 'mph', 'inchps', 'ftps', 'kph', 'fps', 'kts'],
+ 'acceleration': ['mps2', 'cmps2', 'inchps2', 'ftps2', 'g'],
+ 'mass': ['kg', 'ton', 'lb'],
+ 'force': ['N', 'kN', 'lbf', 'kip', 'kips'],
+ 'pressure': ['Pa', 'kPa', 'MPa', 'GPa', 'psi', 'ksi', 'Mpsi'],
}
# time
-sec = 1.
+sec = 1.0
-minute = 60. * sec
-h = 60. * minute
-day = 24. * h
+minute = 60.0 * sec
+h = 60.0 * minute
+day = 24.0 * h
-sec2 = sec**2.
+sec2 = sec**2.0
# distance, area, volume
-m = 1.
+m = 1.0
mm = 0.001 * m
cm = 0.01 * m
-km = 1000. * m
+km = 1000.0 * m
inch = 0.0254
-ft = 12. * inch
-mile = 5280. * ft
+ft = 12.0 * inch
+mile = 5280.0 * ft
# area
-m2 = m**2.
+m2 = m**2.0
-mm2 = mm**2.
-cm2 = cm**2.
-km2 = km**2.
+mm2 = mm**2.0
+cm2 = cm**2.0
+km2 = km**2.0
-inch2 = inch**2.
-ft2 = ft**2.
-mile2 = mile**2.
+inch2 = inch**2.0
+ft2 = ft**2.0
+mile2 = mile**2.0
# volume
-m3 = m**3.
+m3 = m**3.0
-mm3 = mm**3.
-cm3 = cm**3.
-km3 = km**3.
+mm3 = mm**3.0
+cm3 = cm**3.0
+km3 = km**3.0
-inch3 = inch**3.
-ft3 = ft**3.
-mile3 = mile**3.
+inch3 = inch**3.0
+ft3 = ft**3.0
+mile3 = mile**3.0
# speed / velocity
cmps = cm / sec
@@ -154,25 +160,25 @@ def log_msg(msg='', prepend_timestamp=True):
g = 9.80665 * mps2
# mass
-kg = 1.
+kg = 1.0
-ton = 1000. * kg
+ton = 1000.0 * kg
lb = 0.453592 * kg
# force
N = kg * m / sec2
-kN = 1e3 * N
+kN = 1e3 * N # noqa: N816
lbf = lb * g
-kip = 1000. * lbf
+kip = 1000.0 * lbf
kips = kip
# pressure / stress
Pa = N / m2
-kPa = 1e3 * Pa
+kPa = 1e3 * Pa # noqa: N816
MPa = 1e6 * Pa
GPa = 1e9 * Pa
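
Every constant in this block expresses one unit as a multiple of the SI base for its type (metres, seconds, kilograms), so converting a value is just a ratio of two factors. A quick stand-alone check; the constants are mirrored from the module, with mps2 and ftps2 reconstructed the same way the neighbouring ones are defined:

    # copies of a few constants above; SI base units are 1.0
    sec = 1.0
    sec2 = sec**2.0
    m = 1.0
    inch = 0.0254
    ft = 12.0 * inch
    mps2 = m / sec2
    ftps2 = ft / sec2
    g = 9.80665 * mps2

    print(3.0 * ft)   # 3 ft in metres -> 0.9144
    print(g / ftps2)  # 1 g in ft/s^2  -> ~32.174
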
@@ -182,14 +188,14 @@ def log_msg(msg='', prepend_timestamp=True):
# KZ: unit bases decouple
unit_bases = {
- 'm2' :{'length': 'm'},
+ 'm2': {'length': 'm'},
'mm2': {'length': 'mm'},
'cm2': {'length': 'cm'},
'km2': {'length': 'km'},
'inch2': {'length': 'in'},
'ft2': {'length': 'ft'},
'mile2': {'length': 'mile'},
- 'm3' :{'length': 'm'},
+ 'm3': {'length': 'm'},
'mm3': {'length': 'mm'},
'cm3': {'length': 'cm'},
'km3': {'length': 'km'},
@@ -205,23 +211,18 @@ def log_msg(msg='', prepend_timestamp=True):
'cmps2': {'length': 'cm', 'time': 'sec'},
'inchps2': {'length': 'in', 'time': 'sec'},
'ftps2': {'length': 'ft', 'time': 'sec'},
- 'g': {}
+ 'g': {},
}
unit_decoupling_type_list = ['TH_file']
-def get_scale_factors(input_units, output_units):
- """
- Determine the scale factor to convert input event to internal event data
-
- """
+def get_scale_factors(input_units, output_units): # noqa: C901
+ """Determine the scale factor to convert input event to internal event data""" # noqa: D400
# special case: if the input unit is not specified then do not do any scaling
if input_units is None:
-
scale_factors = {'ALL': 1.0}
else:
-
# parse output units:
# if no length unit is specified, 'inch' is assumed
@@ -230,34 +231,29 @@ def get_scale_factors(input_units, output_units):
unit_length = 'inch'
f_length = globals().get(unit_length, None)
if f_length is None:
- raise ValueError(
- f"Specified length unit not recognized: {unit_length}")
+ raise ValueError(f'Specified length unit not recognized: {unit_length}') # noqa: DOC501, EM102, TRY003
# if no time unit is specified, 'sec' is assumed
unit_time = output_units.get('time', 'sec')
f_time = globals().get(unit_time, None)
if f_time is None:
- raise ValueError(
- f"Specified time unit not recognized: {unit_time}")
+ raise ValueError(f'Specified time unit not recognized: {unit_time}') # noqa: DOC501, EM102, TRY003
scale_factors = {}
for input_name, input_unit in input_units.items():
-
# exceptions
- if input_name in ['factor', ]:
+ if input_name == 'factor':
f_scale = 1.0
else:
-
# get the scale factor to standard units
if input_unit == 'in':
- input_unit = 'inch'
+ input_unit = 'inch' # noqa: PLW2901
f_in = globals().get(input_unit, None)
if f_in is None:
- raise ValueError(
- f"Input unit not recognized: {input_unit}")
+ raise ValueError(f'Input unit not recognized: {input_unit}') # noqa: DOC501, EM102, TRY003
unit_type = None
for base_unit_type, unit_set in globals()['unit_types'].items():
@@ -265,11 +261,11 @@ def get_scale_factors(input_units, output_units):
unit_type = base_unit_type
if unit_type is None:
- raise ValueError(f"Failed to identify unit type: {input_unit}")
+ raise ValueError(f'Failed to identify unit type: {input_unit}') # noqa: DOC501, EM102, TRY003
# the output unit depends on the unit type
if unit_type == 'acceleration':
- f_out = f_time ** 2.0 / f_length
+ f_out = f_time**2.0 / f_length
elif unit_type == 'speed':
f_out = f_time / f_length
@@ -278,7 +274,9 @@ def get_scale_factors(input_units, output_units):
f_out = 1.0 / f_length
else:
- raise ValueError(f"Unexpected unit type in workflow: {unit_type}")
+ raise ValueError( # noqa: DOC501, TRY003
+ f'Unexpected unit type in workflow: {unit_type}' # noqa: EM102
+ )
# the scale factor is the product of input and output scaling
f_scale = f_in * f_out
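
As a worked instance of that product: with the default output units (inch for length, sec for time) and an input acceleration recorded in g, f_in is 9.80665 and f_out is sec^2/inch, so the record is multiplied by roughly 386.1 to land in inch/s^2. A stand-alone rehearsal of the arithmetic, with the constants copied from the module rather than imported:

    sec = 1.0
    inch = 0.0254
    g = 9.80665                     # 1 g expressed in the SI base, m/s^2

    f_length = inch                 # output length unit (the 'inch' default)
    f_time = sec                    # output time unit (the 'sec' default)

    f_in = g                        # input unit factor
    f_out = f_time**2.0 / f_length  # acceleration branch above
    print(f_in * f_out)             # ~386.09
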
@@ -289,14 +287,9 @@ def get_scale_factors(input_units, output_units):
def get_unit_bases(input_units):
- """
- Decouple input units
-
- """
-
+ """Decouple input units""" # noqa: D400
# special case: if the input unit is not specified then do nothing
- if input_units is None:
-
+ if input_units is None: # noqa: PLR1702
input_unit_bases = {}
else:
@@ -304,9 +297,7 @@ def get_unit_bases(input_units):
unit_bases_dict = globals()['unit_bases']
for unit_type, input_unit in input_units.items():
if unit_type in globals()['unit_decoupling_type_list']:
- cur_unit_bases = {"length": "m",
- "force": "N",
- "time": "sec"}
+ cur_unit_bases = {'length': 'm', 'force': 'N', 'time': 'sec'}
for unit_name, unit_bases in unit_bases_dict.items():
if unit_name == input_unit:
for x, y in unit_bases.items():
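
The unit_bases table maps a compound unit back to the base units it is built from; the decoupling loop above presumably folds those entries over the m/N/sec defaults it starts from. An illustrative guess at that behaviour, not a verbatim copy of the function:

    # trimmed copy of the table above
    unit_bases = {
        'ftps2': {'length': 'ft', 'time': 'sec'},
        'g': {},
    }

    cur_unit_bases = {'length': 'm', 'force': 'N', 'time': 'sec'}  # TH_file defaults
    input_unit = 'ftps2'

    # assumed behaviour: the compound unit's bases replace the defaults
    for x, y in unit_bases.get(input_unit, {}).items():
        cur_unit_bases[x] = y

    print(cur_unit_bases)  # {'length': 'ft', 'force': 'N', 'time': 'sec'}
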
diff --git a/modules/createAIM/CSV_to_AIM/CSV_to_AIM.py b/modules/createAIM/CSV_to_AIM/CSV_to_AIM.py
index 9b1047a39..d5612e78b 100644
--- a/modules/createAIM/CSV_to_AIM/CSV_to_AIM.py
+++ b/modules/createAIM/CSV_to_AIM/CSV_to_AIM.py
@@ -1,5 +1,4 @@
-# -*- coding: utf-8 -*-
-#
+# # noqa: INP001, D100
# Copyright (c) 2019 The Regents of the University of California
# Copyright (c) 2019 Leland Stanford Junior University
#
@@ -40,158 +39,166 @@
# Wael Elhaddad
# Stevan Gavrilovic
-import argparse, sys, os
+import argparse
+import os
+import sys
-def create_asset_files(output_file, asset_source_file, asset_filter, doParallel):
+def create_asset_files(output_file, asset_source_file, asset_filter, doParallel): # noqa: C901, N803, D103
# these imports are here to save time when the app is called without
# the -getRV flag
- import json
- import numpy as np
- import pandas as pd
- import importlib
+ import importlib # noqa: PLC0415
+ import json # noqa: PLC0415
+
+ import numpy as np # noqa: PLC0415
+ import pandas as pd # noqa: PLC0415
# check if running parallel
- numP = 1
- procID = 0
- runParallel = False
+ numP = 1 # noqa: N806
+ procID = 0 # noqa: N806
+ runParallel = False # noqa: N806
- if doParallel == "True":
- mpi_spec = importlib.util.find_spec("mpi4py")
+ if doParallel == 'True':
+ mpi_spec = importlib.util.find_spec('mpi4py')
found = mpi_spec is not None
if found:
- import mpi4py
- from mpi4py import MPI
- runParallel = True
+ from mpi4py import MPI # noqa: PLC0415
+
+ runParallel = True # noqa: N806
comm = MPI.COMM_WORLD
- numP = comm.Get_size()
- procID = comm.Get_rank();
- if numP < 2:
- doParallel = "False"
- runParallel = False
- numP = 1
- procID = 0
+ numP = comm.Get_size() # noqa: N806
+ procID = comm.Get_rank() # noqa: N806
+ if numP < 2: # noqa: PLR2004
+ doParallel = 'False' # noqa: N806
+ runParallel = False # noqa: N806
+ numP = 1 # noqa: N806
+ procID = 0 # noqa: N806
# Get the out dir, may not always be in the results folder if multiple assets are used
- outDir = os.path.dirname(output_file)
-
+ outDir = os.path.dirname(output_file) # noqa: PTH120, N806
+
# check if a filter is provided
if asset_filter is not None:
assets_requested = []
for assets in asset_filter.split(','):
- if "-" in assets:
- asset_low, asset_high = assets.split("-")
- assets_requested += list(range(int(asset_low), int(asset_high)+1))
+ if '-' in assets:
+ asset_low, asset_high = assets.split('-')
+ assets_requested += list(range(int(asset_low), int(asset_high) + 1))
else:
assets_requested.append(int(assets))
assets_requested = np.array(assets_requested)
-
+
# load the CSV file with the asset information
assets_df = pd.read_csv(asset_source_file, header=0, index_col=0)
# if there is a filter, then pull out only the required assets
if asset_filter is not None:
- assets_available = assets_df.index.values
+ assets_available = assets_df.index.values # noqa: PD011
assets_to_run = assets_requested[
- np.where(np.in1d(assets_requested, assets_available))[0]]
+ np.where(np.isin(assets_requested, assets_available))[0]
+ ]
selected_assets = assets_df.loc[assets_to_run]
else:
selected_assets = assets_df
# identify the labels
- labels = selected_assets.columns.values
+ labels = selected_assets.columns.values # noqa: PD011
assets_array = []
# for each asset...
count = 0
for asset_id, asset in selected_assets.iterrows():
-
- if runParallel == False or (count % numP) == procID:
-
+ if runParallel == False or (count % numP) == procID: # noqa: E712
# initialize the AIM file
- AIM_i = {
- "RandomVariables": [],
- "GeneralInformation": dict(
- AIM_id = str(int(asset_id)),
- location = {
- 'latitude': asset["Latitude"],
- 'longitude': asset["Longitude"]
- }
- )
+ AIM_i = { # noqa: N806
+ 'RandomVariables': [],
+ 'GeneralInformation': dict( # noqa: C408
+ AIM_id=str(int(asset_id)),
+ location={
+ 'latitude': asset['Latitude'],
+ 'longitude': asset['Longitude'],
+ },
+ ),
}
# save every label as-is
for label in labels:
- AIM_i["GeneralInformation"].update({label: asset[label]})
+ AIM_i['GeneralInformation'].update({label: asset[label]})
+
+ AIM_file_name = f'{asset_id}-AIM.json' # noqa: N806
- AIM_file_name = "{}-AIM.json".format(asset_id)
-
- AIM_file_name = os.path.join(outDir,AIM_file_name)
-
- with open(AIM_file_name, 'w', encoding="utf-8") as f:
+ AIM_file_name = os.path.join(outDir, AIM_file_name) # noqa: PTH118, N806
+
+ with open(AIM_file_name, 'w', encoding='utf-8') as f: # noqa: PTH123
json.dump(AIM_i, f, indent=2)
- assets_array.append(dict(id=str(asset_id), file=AIM_file_name))
+ assets_array.append(dict(id=str(asset_id), file=AIM_file_name)) # noqa: C408
- count = count + 1
+ count = count + 1 # noqa: PLR6104
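
Each {asset_id}-AIM.json written in this loop has the same small shape: an empty RandomVariables list plus a GeneralInformation block seeded with the id and location and then extended with every CSV column as-is. A compact sketch with an invented inventory row:

    import json

    # invented inventory row, mirroring the columns the label loop copies verbatim
    asset_id, row = 101, {'Latitude': 37.87, 'Longitude': -122.25, 'NumberOfStories': 2}

    AIM_i = {
        'RandomVariables': [],
        'GeneralInformation': dict(
            AIM_id=str(asset_id),
            location={'latitude': row['Latitude'], 'longitude': row['Longitude']},
            **row,  # every column is kept as-is
        ),
    }

    with open(f'{asset_id}-AIM.json', 'w', encoding='utf-8') as f:
        json.dump(AIM_i, f, indent=2)
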
if procID != 0:
-
# if not P0, write data to output file with procID in name and barrier
- output_file = os.path.join(outDir,f'tmp_{procID}.json')
+ output_file = os.path.join(outDir, f'tmp_{procID}.json') # noqa: PTH118
- with open(output_file, 'w', encoding="utf-8") as f:
+ with open(output_file, 'w', encoding='utf-8') as f: # noqa: PTH123
json.dump(assets_array, f, indent=0)
-
- comm.Barrier()
-
- else:
- if runParallel == True:
+ comm.Barrier()
+ else:
+ if runParallel == True: # noqa: E712
# if parallel & P0, barrier so that all files written above, then loop over other processor files: open, load data and append
- comm.Barrier()
+ comm.Barrier()
for i in range(1, numP):
- fileToAppend = os.path.join(outDir,f'tmp_{i}.json')
- with open(fileToAppend, 'r', encoding="utf-8") as data_file:
+ fileToAppend = os.path.join(outDir, f'tmp_{i}.json') # noqa: PTH118, N806
+ with open(fileToAppend, encoding='utf-8') as data_file: # noqa: FURB101, PTH123
json_data = data_file.read()
- assetsToAppend = json.loads(json_data)
+ assetsToAppend = json.loads(json_data) # noqa: N806
assets_array += assetsToAppend
- with open(output_file, 'w', encoding="utf-8") as f:
+ with open(output_file, 'w', encoding='utf-8') as f: # noqa: PTH123
json.dump(assets_array, f, indent=2)
-
if __name__ == '__main__':
-
parser = argparse.ArgumentParser()
- parser.add_argument('--assetFile',
- help = "Path to the file that will contain a list of asset ids and "
- "corresponding AIM filenames")
- parser.add_argument('--assetSourceFile',
- help = "Path to the CSV file with the asset inventory")
- parser.add_argument('--filter',
- help = "Filter applied to select a subset of assets from the "
- "inventory",
- default=None)
- parser.add_argument('--doParallel', default="False")
- parser.add_argument("-n", "--numP", default='8')
- parser.add_argument("-m", "--mpiExec", default='mpiexec')
- parser.add_argument('--getRV',
- help = "Identifies the preparational stage of the workflow. This app "
- "is only used in that stage, so it does not do anything if "
- "called without this flag.",
+ parser.add_argument(
+ '--assetFile',
+ help='Path to the file that will contain a list of asset ids and '
+ 'corresponding AIM filenames',
+ )
+ parser.add_argument(
+ '--assetSourceFile', help='Path to the CSV file with the asset inventory'
+ )
+ parser.add_argument(
+ '--filter',
+        help='Filter applied to select a subset of assets from the inventory',
+ default=None,
+ )
+ parser.add_argument('--doParallel', default='False')
+ parser.add_argument('-n', '--numP', default='8')
+ parser.add_argument('-m', '--mpiExec', default='mpiexec')
+ parser.add_argument(
+ '--getRV',
+        help='Identifies the preparatory stage of the workflow. This app '
+ 'is only used in that stage, so it does not do anything if '
+ 'called without this flag.',
default=False,
- nargs='?', const=True)
+ nargs='?',
+ const=True,
+ )
args = parser.parse_args()
if args.getRV:
- sys.exit(create_asset_files(args.assetFile, args.assetSourceFile, args.filter, args.doParallel))
+ sys.exit(
+ create_asset_files(
+ args.assetFile, args.assetSourceFile, args.filter, args.doParallel
+ )
+ )
else:
- pass # not used
+ pass # not used
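
CSV_to_AIM and the generators that follow share one parallelization skeleton: probe for mpi4py, drop back to serial when fewer than two ranks are available, and hand asset i to rank i % numP, with rank 0 later merging the tmp_<rank>.json lists. A stripped-down sketch of just that skeleton; process_asset stands in for the real per-asset work:

    import importlib.util

    def process_asset(asset_id):
        # placeholder for the real work (writing an AIM file, appending to assets_array)
        print('processing', asset_id)

    def run(asset_ids, do_parallel='True'):
        num_p, proc_id, run_parallel = 1, 0, False
        if do_parallel == 'True' and importlib.util.find_spec('mpi4py') is not None:
            from mpi4py import MPI
            comm = MPI.COMM_WORLD
            num_p, proc_id = comm.Get_size(), comm.Get_rank()
            run_parallel = num_p >= 2
            if not run_parallel:
                num_p, proc_id = 1, 0
        # round-robin assignment: rank r handles assets whose index % num_p == r
        for count, asset_id in enumerate(asset_ids):
            if not run_parallel or count % num_p == proc_id:
                process_asset(asset_id)

    run(['101', '102', '103'], do_parallel='False')
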
diff --git a/modules/createAIM/GeoJSON_to_ASSET/GeoJSON_to_ASSET.py b/modules/createAIM/GeoJSON_to_ASSET/GeoJSON_to_ASSET.py
index f00809d71..af8ec317d 100644
--- a/modules/createAIM/GeoJSON_to_ASSET/GeoJSON_to_ASSET.py
+++ b/modules/createAIM/GeoJSON_to_ASSET/GeoJSON_to_ASSET.py
@@ -1,123 +1,147 @@
-import os, sys, argparse, posixpath, ntpath, json,importlib
+import argparse # noqa: CPY001, D100, INP001
+import importlib
+import json
+import os
+import posixpath
+import shutil
+import sys
+import warnings
+
+import geopandas as gpd
+import momepy
import numpy as np
import pandas as pd
-import geopandas as gpd
-import shapely, warnings, momepy, shutil
+import shapely
+
# https://stackoverflow.com/questions/50916422/python-typeerror-object-of-type-int64-is-not-json-serializable
-class NpEncoder(json.JSONEncoder):
- def default(self, obj):
+class NpEncoder(json.JSONEncoder): # noqa: D101
+ def default(self, obj): # noqa: D102
if isinstance(obj, np.integer):
return int(obj)
if isinstance(obj, np.floating):
return float(obj)
if isinstance(obj, np.ndarray):
return obj.tolist()
- return super(NpEncoder, self).default(obj)
-
+ return super(NpEncoder, self).default(obj) # noqa: UP008
+
+
class generalAIMGenerator:
- '''
- The generator of general AIM such as buildings, bridges, tunnels
+ """The generator of general AIM such as buildings, bridges, tunnels
:param : The arg is used for ...
:type arg: str
:param `*args`: The variable arguments are used for ...
:param `**kwargs`: The keyword arguments are used for ...
:ivar arg: This is where we store arg
:vartype arg: str
- '''
+ """ # noqa: D205, D400
+
def __init__(self, output_file):
self.output_file = output_file
self.gdf = None
self.filter = None
- def load_asset_gdf(self, source_file):
+
+ def load_asset_gdf(self, source_file): # noqa: D102
asset_gdf = gpd.read_file(source_file)
self.gdf = asset_gdf
- def set_asset_gdf(self, asset_gdf):
+
+ def set_asset_gdf(self, asset_gdf): # noqa: D102
self.gdf = asset_gdf
- def selectAssets(self, filter):
+
+ def selectAssets(self, filter): # noqa: A002, N802, D102
self.filter = filter
- # check if a filter is provided for bridges
+ # check if a filter is provided for bridges
if self.filter is not None:
- asset_requested = []
+ asset_requested = [] # noqa: F841
for assets in self.filter.split(','):
- if "-" in assets:
- asset_low, asset_high = assets.split("-")
- assets_requested += list(range(int(asset_low), int(asset_high)+1))
+ if '-' in assets:
+ asset_low, asset_high = assets.split('-')
+ assets_requested += list( # noqa: F821
+ range(int(asset_low), int(asset_high) + 1)
+ )
else:
assets_requested.append(int(assets))
assets_requested = np.array(assets_requested)
- assets_available = self.gdf.index.values
+ assets_available = self.gdf.index.values # noqa: PD011
assets_to_run = assets_requested[
- np.where(np.in1d(assets_requested, assets_available))[0]]
+ np.where(np.isin(assets_requested, assets_available))[0]
+ ]
else:
- assets_to_run = self.gdf.index.values
- self.gdf = self.gdf.loc[assets_to_run,:]
+ assets_to_run = self.gdf.index.values # noqa: PD011
+ self.gdf = self.gdf.loc[assets_to_run, :]
return assets_to_run
- def createAIM(self, asset_idx, component_type = None):
+
+ def createAIM(self, asset_idx, component_type=None): # noqa: ARG002, N802, D102
# initialize the AIM file
# if component_type is not None:
# asset_id = component_type+"_"+str(asset_idx)
# else:
# asset_id = str(asset_idx)
asset_id = asset_idx
- asset = self.gdf.loc[asset_idx,:]
- AIM_i = {
- "RandomVariables": [],
- "GeneralInformation": dict(
- AIM_id = str(asset_id),
- location = {
- 'latitude': asset["geometry"].centroid.coords[0][1],
- 'longitude': asset["geometry"].centroid.coords[0][0]
- }
- )
+ asset = self.gdf.loc[asset_idx, :]
+ AIM_i = { # noqa: N806
+ 'RandomVariables': [],
+ 'GeneralInformation': dict( # noqa: C408
+ AIM_id=str(asset_id),
+ location={
+ 'latitude': asset['geometry'].centroid.coords[0][1],
+ 'longitude': asset['geometry'].centroid.coords[0][0],
+ },
+ ),
}
# save every label as-is
- AIM_i["GeneralInformation"].update(asset)
- AIM_i["GeneralInformation"]['geometry'] = AIM_i["GeneralInformation"]['geometry'].wkt
+ AIM_i['GeneralInformation'].update(asset)
+ AIM_i['GeneralInformation']['geometry'] = AIM_i['GeneralInformation'][
+ 'geometry'
+ ].wkt
# if component_type is not None:
# AIM_i["GeneralInformation"].update({"assetSubtype":component_type})
return AIM_i
- def dumpAIM(self, AIM_i):
+
+ def dumpAIM(self, AIM_i): # noqa: N802, N803, D102
# assetSubtype = AIM_i['GeneralInformation'].get("assetSubtype", None)
- componentType = AIM_i['GeneralInformation'].get("type", None)
- outDir = os.path.dirname(self.output_file)
+ componentType = AIM_i['GeneralInformation'].get('type', None) # noqa: N806
+ outDir = os.path.dirname(self.output_file) # noqa: PTH120, N806
if componentType:
- outDir = os.path.join(outDir, componentType)
- asset_id = AIM_i["GeneralInformation"]["AIM_id"]
- AIM_file_name = "{}-AIM.json".format(asset_id)
- AIM_file_name = os.path.join(outDir,AIM_file_name)
- with open(AIM_file_name, 'w', encoding="utf-8") as f:
+ outDir = os.path.join(outDir, componentType) # noqa: PTH118, N806
+ asset_id = AIM_i['GeneralInformation']['AIM_id']
+ AIM_file_name = f'{asset_id}-AIM.json' # noqa: N806
+ AIM_file_name = os.path.join(outDir, AIM_file_name) # noqa: PTH118, N806
+ with open(AIM_file_name, 'w', encoding='utf-8') as f: # noqa: PTH123
json.dump(AIM_i, f, indent=2, cls=NpEncoder)
return AIM_file_name
-class lineAIMGenerator(generalAIMGenerator):
- def breakDownLongLines(self, delta, tolerance = 10e-3):
+
+class lineAIMGenerator(generalAIMGenerator): # noqa: D101
+ def breakDownLongLines(self, delta, tolerance=10e-3): # noqa: N802, D102
edges = self.gdf
- dropedEdges = []
- newEdges = []
+ dropedEdges = [] # noqa: N806
+ newEdges = [] # noqa: N806
crs = edges.crs
- edgesOrig = edges.copy()
+ edgesOrig = edges.copy() # noqa: N806
# edgesOrig["IDbase"] = edgesOrig["OID"].apply(lambda x: x.split('_')[0])
- edgesOrig["IDbase"] = edgesOrig.index
- num_segExistingMap = edgesOrig.groupby("IDbase").count().iloc[:,0].to_dict()
- edges_dict = edges.reset_index().to_crs("epsg:6500")
+ edgesOrig['IDbase'] = edgesOrig.index
+ num_segExistingMap = edgesOrig.groupby('IDbase').count().iloc[:, 0].to_dict() # noqa: N806
+ edges_dict = edges.reset_index().to_crs('epsg:6500')
edges_dict = edges_dict.to_dict(orient='records')
for row_ind in range(len(edges_dict)):
- LS = edges_dict[row_ind]["geometry"]
- num_seg = int(np.ceil(LS.length/delta))
+ LS = edges_dict[row_ind]['geometry'] # noqa: N806
+ num_seg = int(np.ceil(LS.length / delta))
if num_seg == 1:
continue
- distances = np.linspace(0, LS.length, num_seg+1)
- points = shapely.MultiPoint([LS.interpolate(distance) for distance in \
- distances[:-1]] + [LS.coords[-1]])
- LS = shapely.ops.snap(LS, points, tolerance)
- with warnings.catch_warnings(): #Suppress the warning of points not on
+ distances = np.linspace(0, LS.length, num_seg + 1)
+ points = shapely.MultiPoint(
+ [LS.interpolate(distance) for distance in distances[:-1]]
+ + [LS.coords[-1]]
+ )
+ LS = shapely.ops.snap(LS, points, tolerance) # noqa: N806
+ with warnings.catch_warnings(): # Suppress the warning of points not on
# LS. Shaply will first project the point to the line and then split
- warnings.simplefilter("ignore")
- splittedLS = shapely.ops.split(LS,points).geoms
- currentEdge = edges_dict[row_ind].copy()
- num_segExisting = num_segExistingMap[currentEdge["id"]]
- for sLS_ind, sLS in enumerate(splittedLS):
+ warnings.simplefilter('ignore')
+ splittedLS = shapely.ops.split(LS, points).geoms # noqa: N806
+ currentEdge = edges_dict[row_ind].copy() # noqa: N806
+ num_segExisting = num_segExistingMap[currentEdge['id']] # noqa: N806, F841
+ for sLS_ind, sLS in enumerate(splittedLS): # noqa: N806
# create new edge
# if sLS_ind ==0:
# newID = currentEdge["id"]
@@ -125,178 +149,212 @@ def breakDownLongLines(self, delta, tolerance = 10e-3):
# newID = currentEdge["id"]+"_"+str(num_segExisting)
# num_segExisting +=1
# num_segExistingMap[currentEdge["id"]] += 1
- newID = currentEdge["id"]
- newGeom = sLS
- newEdge = currentEdge.copy()
- newEdge.update({"id":newID,"geometry":newGeom,\
- "segID":sLS_ind})
+ newID = currentEdge['id'] # noqa: N806
+ newGeom = sLS # noqa: N806
+ newEdge = currentEdge.copy() # noqa: N806
+ newEdge.update({'id': newID, 'geometry': newGeom, 'segID': sLS_ind})
newEdges.append(newEdge)
- dropedEdges.append(edges_dict[row_ind]["id"])
+ dropedEdges.append(edges_dict[row_ind]['id'])
edges = edges.drop(dropedEdges)
- edges = edges.reset_index() # Convert "id" from index into a column
- if len(newEdges)>0:
- edges["segID"] = 0
- newEdges = gpd.GeoDataFrame(newEdges, crs="epsg:6500").to_crs(crs)
+ edges = edges.reset_index() # Convert "id" from index into a column
+ if len(newEdges) > 0:
+ edges['segID'] = 0
+ newEdges = gpd.GeoDataFrame(newEdges, crs='epsg:6500').to_crs(crs) # noqa: N806
edges = pd.concat([edges, newEdges], ignore_index=True)
- edges = edges.sort_values(['id','segID'])
- edges = edges.reset_index().rename(columns = {
- "id":"idBeforeSegment","index":"id"}).drop(columns = ["segID"])
+ edges = edges.sort_values(['id', 'segID'])
+ edges = (
+ edges.reset_index()
+ .rename(columns={'id': 'idBeforeSegment', 'index': 'id'})
+ .drop(columns=['segID'])
+ )
# self.gdf = edges.reset_index().rename(columns={"index":"AIM_id"})
self.gdf = edges
- return
- def defineConnectivities(self, AIM_id_prefix = None, edges_file_name = None,\
- nodes_file_name = None):
+
+ def defineConnectivities( # noqa: N802, D102
+ self,
+ AIM_id_prefix=None, # noqa: N803
+ edges_file_name=None,
+ nodes_file_name=None,
+ ):
# Convert find connectivity and add start_node, end_node attributes
edges = self.gdf
datacrs = edges.crs
- graph = momepy.gdf_to_nx(edges.to_crs("epsg:6500"), approach='primal')
- with warnings.catch_warnings(): #Suppress the warning of disconnected components in the graph
- warnings.simplefilter("ignore")
- nodes, edges, sw = momepy.nx_to_gdf(graph, points=True, lines=True,
- spatial_weights=True)
+ graph = momepy.gdf_to_nx(edges.to_crs('epsg:6500'), approach='primal')
+ with warnings.catch_warnings(): # Suppress the warning of disconnected components in the graph
+ warnings.simplefilter('ignore')
+ nodes, edges, sw = momepy.nx_to_gdf( # noqa: F841
+ graph, points=True, lines=True, spatial_weights=True
+ )
# edges = edges.set_index('ind')
- ### Some edges has start_node as the last point in the geometry and end_node as the first point, check and reorder
+        # Some edges have start_node as the last point in the geometry and end_node as the first point; check and reorder
for ind in edges.index:
- start = nodes.loc[edges.loc[ind, "node_start"],"geometry"]
- end = nodes.loc[edges.loc[ind, "node_end"],"geometry"]
- first = shapely.geometry.Point(edges.loc[ind,"geometry"].coords[0])
- last = shapely.geometry.Point(edges.loc[ind,"geometry"].coords[-1])
- #check if first and last are the same
- if (start == first and end == last):
+ start = nodes.loc[edges.loc[ind, 'node_start'], 'geometry']
+ end = nodes.loc[edges.loc[ind, 'node_end'], 'geometry']
+ first = shapely.geometry.Point(edges.loc[ind, 'geometry'].coords[0])
+ last = shapely.geometry.Point(edges.loc[ind, 'geometry'].coords[-1])
+ # check if first and last are the same
+ if start == first and end == last:
continue
- elif (start == last and end == first):
- newStartID = edges.loc[ind, "node_end"]
- newEndID = edges.loc[ind, "node_start"]
- edges.loc[ind,"node_start"] = newStartID
- edges.loc[ind,"node_end"] = newEndID
+ elif start == last and end == first: # noqa: RET507
+ newStartID = edges.loc[ind, 'node_end'] # noqa: N806
+ newEndID = edges.loc[ind, 'node_start'] # noqa: N806
+ edges.loc[ind, 'node_start'] = newStartID
+ edges.loc[ind, 'node_end'] = newEndID
else:
- print(ind, "th row of edges has wrong start/first, end/last pairs, likely a bug of momepy.gdf_to_nx function")
+ print( # noqa: T201
+ ind,
+                    'th row of edges has wrong start/first, end/last pairs, likely a bug in momepy.gdf_to_nx function',
+ )
# locationGS = gpd.GeoSeries(edges["geometry"].apply(lambda x: x.centroid),crs = edges.crs).to_crs(datacrs)
- edges = edges.drop("mm_len", axis = 1).rename(columns={"node_start":\
- "StartNode", "node_end":"EndNode"}).to_crs(datacrs)
+ edges = (
+ edges.drop('mm_len', axis=1)
+ .rename(columns={'node_start': 'StartNode', 'node_end': 'EndNode'})
+ .to_crs(datacrs)
+ )
# edges["location_lon"] = locationGS.apply(lambda x:x.x)
# edges["location_lat"] = locationGS.apply(lambda x:x.y)
- edges = edges.rename(columns={"id":"AIM_id"})
+ edges = edges.rename(columns={'id': 'AIM_id'})
if AIM_id_prefix is not None:
- edges["AIM_id"] = edges["AIM_id"].apply(lambda x:AIM_id_prefix+"_"+str(x))
- outDir = os.path.dirname(self.output_file)
+ edges['AIM_id'] = edges['AIM_id'].apply(
+ lambda x: AIM_id_prefix + '_' + str(x)
+ )
+ outDir = os.path.dirname(self.output_file) # noqa: PTH120, N806
if edges_file_name is not None:
- edges.to_file(os.path.join(outDir,f"{edges_file_name}.geojson"),\
- driver = "GeoJSON")
+ edges.to_file(
+ os.path.join(outDir, f'{edges_file_name}.geojson'), # noqa: PTH118
+ driver='GeoJSON',
+ )
if nodes_file_name is not None:
- nodesNeeded = list(set(edges["StartNode"].values.tolist() +\
- edges["EndNode"].values.tolist()))
- nodes = nodes.loc[nodesNeeded,:]
- nodes = nodes.to_crs(datacrs)[["nodeID","geometry"]]
- nodes.to_file(os.path.join(outDir,f"{nodes_file_name}.geojson"),\
- driver = "GeoJSON")
+ nodesNeeded = list( # noqa: N806
+ set(
+ edges['StartNode'].values.tolist() # noqa: PD011
+ + edges['EndNode'].values.tolist() # noqa: PD011
+ )
+ )
+ nodes = nodes.loc[nodesNeeded, :]
+ nodes = nodes.to_crs(datacrs)[['nodeID', 'geometry']]
+ nodes.to_file(
+ os.path.join(outDir, f'{nodes_file_name}.geojson'), # noqa: PTH118
+ driver='GeoJSON',
+ )
self.gdf = edges
- return
-def split_and_select_components(input_config, asset_source_file):
- component_dict = dict()
- with open(asset_source_file, 'r', encoding="utf-8") as f:
+
+def split_and_select_components(input_config, asset_source_file): # noqa: C901, D103
+ component_dict = dict() # noqa: C408
+ with open(asset_source_file, encoding='utf-8') as f: # noqa: PTH123
source_data = json.load(f)
- crs = source_data["crs"]
- featureList = source_data["features"]
- requested_dict = dict()
+ crs = source_data['crs']
+ featureList = source_data['features'] # noqa: N806
+ requested_dict = dict() # noqa: C408
for key, value in input_config.items():
if isinstance(value, dict):
- filterString = value.get('filter', None)
+ filterString = value.get('filter', None) # noqa: N806
if filterString is None:
continue
assets_requested = []
- if filterString == '':
+ if filterString == '': # noqa: PLC1901
assets_requested = np.array(assets_requested)
- requested_dict.update({key:assets_requested})
- component_dict.update({key:[]})
+ requested_dict.update({key: assets_requested})
+ component_dict.update({key: []})
else:
for assets in filterString.split(','):
- if "-" in assets:
- asset_low, asset_high = assets.split("-")
- assets_requested += list(range(int(asset_low), int(asset_high)+1))
+ if '-' in assets:
+ asset_low, asset_high = assets.split('-')
+ assets_requested += list(
+ range(int(asset_low), int(asset_high) + 1)
+ )
else:
assets_requested.append(int(assets))
assets_requested = np.array(assets_requested)
- requested_dict.update({key:assets_requested})
- component_dict.update({key:[]})
+ requested_dict.update({key: assets_requested})
+ component_dict.update({key: []})
for feat in featureList:
- component_type = feat["properties"].get("type", None)
- if (component_type in component_dict.keys()):
- feat_id = int(feat["id"])
+ component_type = feat['properties'].get('type', None)
+ if component_type in component_dict:
+ feat_id = int(feat['id'])
if requested_dict[component_type].size == 0:
component_dict.pop(component_type)
continue
- if (feat_id in requested_dict[component_type]):
- feat["properties"].update({"id":feat_id})
+ if feat_id in requested_dict[component_type]:
+ feat['properties'].update({'id': feat_id})
component_dict[component_type].append(feat)
- for component in component_dict.keys():
- component_dict[component] = gpd.GeoDataFrame.from_features(\
- component_dict[component],crs=crs["properties"]["name"])\
- .set_index('id')
+ for component in component_dict: # noqa: PLC0206
+ component_dict[component] = gpd.GeoDataFrame.from_features(
+ component_dict[component], crs=crs['properties']['name']
+ ).set_index('id')
return component_dict
-def init_workdir(component_dict, outDir):
+
+
+def init_workdir(component_dict, outDir): # noqa: N803, D103
os.chdir(outDir)
for dir_or_file in os.listdir(outDir):
if dir_or_file != 'log.txt':
- if os.path.isdir(dir_or_file):
+ if os.path.isdir(dir_or_file): # noqa: PTH112
shutil.rmtree(dir_or_file)
else:
- os.remove(dir_or_file)
- component_dir = dict()
- for comp in component_dict.keys():
- compDir = posixpath.join(outDir, comp)
- os.mkdir(compDir)
- component_dir.update({comp:compDir})
+ os.remove(dir_or_file) # noqa: PTH107
+ component_dir = dict() # noqa: C408
+ for comp in component_dict.keys(): # noqa: SIM118
+ compDir = posixpath.join(outDir, comp) # noqa: N806
+ os.mkdir(compDir) # noqa: PTH102
+ component_dir.update({comp: compDir})
return component_dir
-
-def create_asset_files(output_file, asset_source_file,
- asset_type, input_file, doParallel):
+
+
+def create_asset_files( # noqa: C901, D103
+ output_file,
+ asset_source_file,
+ asset_type,
+ input_file,
+ doParallel, # noqa: N803
+):
# check if running parallel
- numP = 1
- procID = 0
- runParallel = False
+ numP = 1 # noqa: N806
+ procID = 0 # noqa: N806
+ runParallel = False # noqa: N806
- if doParallel == "True":
- mpi_spec = importlib.util.find_spec("mpi4py")
+ if doParallel == 'True':
+ mpi_spec = importlib.util.find_spec('mpi4py')
found = mpi_spec is not None
if found:
- import mpi4py
- from mpi4py import MPI
- runParallel = True
+ from mpi4py import MPI # noqa: PLC0415
+
+ runParallel = True # noqa: N806
comm = MPI.COMM_WORLD
- numP = comm.Get_size()
- procID = comm.Get_rank();
- if numP < 2:
- doParallel = "False"
- runParallel = False
- numP = 1
- procID = 0
- outDir = os.path.dirname(output_file)
-
- with open(input_file, 'r', encoding="utf-8") as f:
+ numP = comm.Get_size() # noqa: N806
+ procID = comm.Get_rank() # noqa: N806
+ if numP < 2: # noqa: PLR2004
+ doParallel = 'False' # noqa: N806
+ runParallel = False # noqa: N806
+ numP = 1 # noqa: N806
+ procID = 0 # noqa: N806
+ outDir = os.path.dirname(output_file) # noqa: PTH120, N806
+
+ with open(input_file, encoding='utf-8') as f: # noqa: PTH123
input_data = json.load(f)
- input_config = input_data["Applications"]["Assets"][asset_type]\
- ["ApplicationData"]
+ input_config = input_data['Applications']['Assets'][asset_type][
+ 'ApplicationData'
+ ]
# if input_config.get("Roadway", None):
# roadSegLength = float(input_config['Roadway'].get('maxRoadLength_m', "100000"))
# assetSourceFile passed through command may be different from input_config when run on designsafe
component_dict = split_and_select_components(input_config, asset_source_file)
- component_dir = init_workdir(component_dict, outDir)
+ component_dir = init_workdir(component_dict, outDir) # noqa: F841
assets_array = []
for component_type, component_data in component_dict.items():
- geom_type = type(component_data['geometry'].values[0])
- if geom_type in [shapely.Point, shapely.Polygon]:
- # if component_type in ["HwyBridge", "HwyTunnel"]:
- AIMgenerator = generalAIMGenerator(output_file)
+ geom_type = type(component_data['geometry'].values[0]) # noqa: PD011
+ if geom_type in [shapely.Point, shapely.Polygon]: # noqa: PLR6201
+ # if component_type in ["HwyBridge", "HwyTunnel"]:
+ AIMgenerator = generalAIMGenerator(output_file) # noqa: N806
AIMgenerator.set_asset_gdf(component_data)
- selected_Asset_idxs = AIMgenerator.selectAssets(None)
+ selected_Asset_idxs = AIMgenerator.selectAssets(None) # noqa: N806
# elif component_type in ["Roadway"]:
- elif geom_type in [shapely.LineString]:
- AIMgenerator = lineAIMGenerator(output_file)
+ elif geom_type == shapely.LineString:
+ AIMgenerator = lineAIMGenerator(output_file) # noqa: N806
AIMgenerator.set_asset_gdf(component_data)
- selected_Asset_idxs = AIMgenerator.selectAssets(None)
+ selected_Asset_idxs = AIMgenerator.selectAssets(None) # noqa: N806
# AIMgenerator.breakDownLongLines(roadSegLength)
# # AIMgenerator.defineConnectivities(None, "hwy_edges",\
# # "hwy_nodes")
@@ -304,56 +362,68 @@ def create_asset_files(output_file, asset_source_file,
# # Run this to select all assets
# selected_Asset_idxs = AIMgenerator.selectAssets(None)
else:
- sys.exit((f"The geometry type {geom_type} defined for the") + \
- (f"components {component_type} is not supported in ")+\
- (f"the assets {asset_type}"))
+ sys.exit(
+            (f'The geometry type {geom_type} defined for the ')  # noqa: ISC003
+ + (f'components {component_type} is not supported in ')
+ + (f'the assets {asset_type}')
+ )
# for each asset...
count = 0
for asset_idx in selected_Asset_idxs:
- if runParallel == False or (count % numP) == procID:
+ if runParallel == False or (count % numP) == procID: # noqa: E712
# initialize the AIM file
- AIM_i = AIMgenerator.createAIM(asset_idx, component_type)
- AIM_file_name = AIMgenerator.dumpAIM(AIM_i)
- assets_array.append(dict(id=AIM_i['GeneralInformation']['AIM_id'], file=AIM_file_name))
- count = count + 1
+ AIM_i = AIMgenerator.createAIM(asset_idx, component_type) # noqa: N806
+ AIM_file_name = AIMgenerator.dumpAIM(AIM_i) # noqa: N806
+ assets_array.append(
+ dict( # noqa: C408
+ id=AIM_i['GeneralInformation']['AIM_id'], file=AIM_file_name
+ )
+ )
+ count = count + 1 # noqa: PLR6104
if procID != 0:
- # if not P0, write data to output file with procID in name and barrier
- output_file_p = os.path.join(outDir,f'tmp_{procID}.json')
- with open(output_file_p, 'w', encoding="utf-8") as f:
+ # if not P0, write data to output file with procID in name and barrier
+ output_file_p = os.path.join(outDir, f'tmp_{procID}.json') # noqa: PTH118
+ with open(output_file_p, 'w', encoding='utf-8') as f: # noqa: PTH123
json.dump(assets_array, f, indent=0)
- comm.Barrier()
+ comm.Barrier()
else:
- if runParallel == True:
+ if runParallel == True: # noqa: E712
# if parallel & P0, barrier so that all files written above, then loop over other processor files: open, load data and append
- comm.Barrier()
+ comm.Barrier()
for i in range(1, numP):
- fileToAppend = os.path.join(outDir,f'tmp_{i}.json')
- with open(fileToAppend, 'r', encoding="utf-8") as data_file:
+ fileToAppend = os.path.join(outDir, f'tmp_{i}.json') # noqa: PTH118, N806
+ with open(fileToAppend, encoding='utf-8') as data_file: # noqa: FURB101, PTH123
json_data = data_file.read()
- assetsToAppend = json.loads(json_data)
+ assetsToAppend = json.loads(json_data) # noqa: N806
assets_array += assetsToAppend
- with open(output_file, 'w', encoding="utf-8") as f:
+ with open(output_file, 'w', encoding='utf-8') as f: # noqa: PTH123
json.dump(assets_array, f, indent=2, cls=NpEncoder)
# else:
# print(f"The asset_type {asset_type} is not one of Buildings, TransportationNetwork or WaterNetwork, and is currently not supported")
# sys.exit(1)
-if __name__ == '__main__':
+if __name__ == '__main__':
parser = argparse.ArgumentParser()
parser.add_argument('--assetFile')
parser.add_argument('--assetSourceFile')
parser.add_argument('--assetType')
parser.add_argument('--inputJsonFile')
- parser.add_argument('--doParallel', default="False")
- parser.add_argument("-n", "--numP", default='8')
- parser.add_argument("-m", "--mpiExec", default='mpiexec')
+ parser.add_argument('--doParallel', default='False')
+ parser.add_argument('-n', '--numP', default='8')
+ parser.add_argument('-m', '--mpiExec', default='mpiexec')
parser.add_argument('--getRV', nargs='?', const=True, default=False)
args = parser.parse_args()
if args.getRV:
- sys.exit(create_asset_files(args.assetFile, args.assetSourceFile,\
- args.assetType,\
- args.inputJsonFile, args.doParallel))
+ sys.exit(
+ create_asset_files(
+ args.assetFile,
+ args.assetSourceFile,
+ args.assetType,
+ args.inputJsonFile,
+ args.doParallel,
+ )
+ )
else:
- pass # not used
\ No newline at end of file
+ pass # not used
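
lineAIMGenerator.breakDownLongLines cuts long line features into roughly delta-length pieces by interpolating points along the geometry, snapping them onto it, and splitting. The core shapely moves, isolated on a toy LineString (shapely 2.x assumed, as implied by the top-level shapely.MultiPoint usage above; coordinates are arbitrary and in the same projected units as delta):

    import numpy as np
    import shapely
    import shapely.ops

    line = shapely.LineString([(0.0, 0.0), (100.0, 0.0)])  # 100 units long
    delta, tolerance = 30.0, 10e-3

    num_seg = int(np.ceil(line.length / delta))             # 4 segments here
    distances = np.linspace(0, line.length, num_seg + 1)
    points = shapely.MultiPoint(
        [line.interpolate(d) for d in distances[:-1]] + [line.coords[-1]]
    )
    line = shapely.ops.snap(line, points, tolerance)
    pieces = shapely.ops.split(line, points).geoms
    print([round(p.length, 2) for p in pieces])             # four 25-unit pieces
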
diff --git a/modules/createAIM/GeoJSON_to_BIM/GeoJSON_to_BIM.py b/modules/createAIM/GeoJSON_to_BIM/GeoJSON_to_BIM.py
index 807a2adce..e92a10495 100644
--- a/modules/createAIM/GeoJSON_to_BIM/GeoJSON_to_BIM.py
+++ b/modules/createAIM/GeoJSON_to_BIM/GeoJSON_to_BIM.py
@@ -1,8 +1,9 @@
-import os, sys, argparse, posixpath, ntpath, json
+import argparse # noqa: CPY001, D100, INP001
+import json
+import sys
-def create_building_files(output_file, building_source_file,
- min_id, max_id):
+def create_building_files(output_file, building_source_file, min_id, max_id): # noqa: D103
# check if the min and max values are provided in the right order
if (min_id is not None) and (max_id is not None):
if min_id > max_id:
@@ -10,44 +11,42 @@ def create_building_files(output_file, building_source_file,
min_id = max_id
max_id = tmp
- with open(building_source_file, 'r', encoding="utf-8") as f:
- building_source_list = json.load(f)["features"]
+ with open(building_source_file, encoding='utf-8') as f: # noqa: PTH123
+ building_source_list = json.load(f)['features']
buildings_array = []
for bldg_src in building_source_list:
- bldg_id = int(bldg_src["id"])
+ bldg_id = int(bldg_src['id'])
- if (((min_id is not None) and (bldg_id < min_id)) or
- ((max_id is not None) and (bldg_id > max_id))):
+ if ((min_id is not None) and (bldg_id < min_id)) or (
+ (max_id is not None) and (bldg_id > max_id)
+ ):
continue
bldg_loc = bldg_src['geometry']['coordinates']
- BIM_i = {
- "RandomVariables": [],
- "GI": dict(
- BIM_id = str(bldg_id),
- location = {
- 'latitude': bldg_loc[1],
- 'longitude': bldg_loc[0]
- },
- **bldg_src["properties"]
- )
+ BIM_i = { # noqa: N806
+ 'RandomVariables': [],
+ 'GI': dict(
+ BIM_id=str(bldg_id),
+ location={'latitude': bldg_loc[1], 'longitude': bldg_loc[0]},
+ **bldg_src['properties'],
+ ),
}
- BIM_file_name = "{}-BIM.json".format(bldg_id)
+ BIM_file_name = f'{bldg_id}-BIM.json' # noqa: N806
- with open(BIM_file_name, 'w', encoding="utf-8") as f:
+ with open(BIM_file_name, 'w', encoding='utf-8') as f: # noqa: PTH123
json.dump(BIM_i, f, indent=2)
- buildings_array.append(dict(id=str(bldg_id), file=BIM_file_name))
+ buildings_array.append(dict(id=str(bldg_id), file=BIM_file_name)) # noqa: C408
- with open(output_file, 'w', encoding="utf-8") as f:
+ with open(output_file, 'w', encoding='utf-8') as f: # noqa: PTH123
json.dump(buildings_array, f, indent=2)
-if __name__ == '__main__':
+if __name__ == '__main__':
parser = argparse.ArgumentParser()
parser.add_argument('--buildingFile')
parser.add_argument('--buildingSourceFile')
@@ -57,7 +56,13 @@ def create_building_files(output_file, building_source_file,
args = parser.parse_args()
if args.getRV:
- sys.exit(create_building_files(args.buildingFile, args.buildingSourceFile,
- int(args.Min), int(args.Max)))
+ sys.exit(
+ create_building_files(
+ args.buildingFile,
+ args.buildingSourceFile,
+ int(args.Min),
+ int(args.Max),
+ )
+ )
else:
- pass # not used
\ No newline at end of file
+ pass # not used
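
The NpEncoder defined in GeoJSON_to_ASSET above, and again in INP_FILE below, exists because attribute values pulled out of a GeoDataFrame tend to be numpy scalars, which json.dump rejects by default. The same subclass-of-JSONEncoder trick in isolation:

    import json

    import numpy as np

    class NpEncoder(json.JSONEncoder):
        # convert numpy scalars and arrays to plain Python types before encoding
        def default(self, obj):
            if isinstance(obj, np.integer):
                return int(obj)
            if isinstance(obj, np.floating):
                return float(obj)
            if isinstance(obj, np.ndarray):
                return obj.tolist()
            return super().default(obj)

    record = {'AIM_id': np.int64(12), 'lat': np.float64(37.87)}
    print(json.dumps(record, cls=NpEncoder))  # {"AIM_id": 12, "lat": 37.87}
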
diff --git a/modules/createAIM/INP_FILE/INP_FILE.py b/modules/createAIM/INP_FILE/INP_FILE.py
index 19e58b1f3..a2ada986f 100644
--- a/modules/createAIM/INP_FILE/INP_FILE.py
+++ b/modules/createAIM/INP_FILE/INP_FILE.py
@@ -1,123 +1,147 @@
-import os, sys, argparse, posixpath, ntpath, json,importlib
+import argparse # noqa: CPY001, D100, INP001
+import importlib
+import json
+import os
+import posixpath
+import shutil
+import sys
+import warnings
+
+import geopandas as gpd
+import momepy
import numpy as np
import pandas as pd
-import geopandas as gpd
-import shapely, warnings, momepy, shutil
+import shapely
+
# https://stackoverflow.com/questions/50916422/python-typeerror-object-of-type-int64-is-not-json-serializable
-class NpEncoder(json.JSONEncoder):
- def default(self, obj):
+class NpEncoder(json.JSONEncoder): # noqa: D101
+ def default(self, obj): # noqa: D102
if isinstance(obj, np.integer):
return int(obj)
if isinstance(obj, np.floating):
return float(obj)
if isinstance(obj, np.ndarray):
return obj.tolist()
- return super(NpEncoder, self).default(obj)
-
+ return super(NpEncoder, self).default(obj) # noqa: UP008
+
+
class generalAIMGenerator:
- '''
- The generator of general AIM such as buildings, bridges, tunnels
+ """The generator of general AIM such as buildings, bridges, tunnels
:param : The arg is used for ...
:type arg: str
:param `*args`: The variable arguments are used for ...
:param `**kwargs`: The keyword arguments are used for ...
:ivar arg: This is where we store arg
:vartype arg: str
- '''
+ """ # noqa: D205, D400
+
def __init__(self, output_file):
self.output_file = output_file
self.gdf = None
self.filter = None
- def load_asset_gdf(self, source_file):
+
+ def load_asset_gdf(self, source_file): # noqa: D102
asset_gdf = gpd.read_file(source_file)
self.gdf = asset_gdf
- def set_asset_gdf(self, asset_gdf):
+
+ def set_asset_gdf(self, asset_gdf): # noqa: D102
self.gdf = asset_gdf
- def selectAssets(self, filter):
+
+ def selectAssets(self, filter): # noqa: A002, N802, D102
self.filter = filter
- # check if a filter is provided for bridges
+ # check if a filter is provided for bridges
if self.filter is not None:
- asset_requested = []
+ asset_requested = [] # noqa: F841
for assets in self.filter.split(','):
- if "-" in assets:
- asset_low, asset_high = assets.split("-")
- assets_requested += list(range(int(asset_low), int(asset_high)+1))
+ if '-' in assets:
+ asset_low, asset_high = assets.split('-')
+ assets_requested += list( # noqa: F821
+ range(int(asset_low), int(asset_high) + 1)
+ )
else:
assets_requested.append(int(assets))
assets_requested = np.array(assets_requested)
- assets_available = self.gdf.index.values
+ assets_available = self.gdf.index.values # noqa: PD011
assets_to_run = assets_requested[
- np.where(np.in1d(assets_requested, assets_available))[0]]
+ np.where(np.isin(assets_requested, assets_available))[0]
+ ]
else:
- assets_to_run = self.gdf.index.values
- self.gdf = self.gdf.loc[assets_to_run,:]
+ assets_to_run = self.gdf.index.values # noqa: PD011
+ self.gdf = self.gdf.loc[assets_to_run, :]
return assets_to_run
- def createAIM(self, asset_idx, component_type = None):
+
+ def createAIM(self, asset_idx, component_type=None): # noqa: ARG002, N802, D102
# initialize the AIM file
# if component_type is not None:
# asset_id = component_type+"_"+str(asset_idx)
# else:
# asset_id = str(asset_idx)
asset_id = asset_idx
- asset = self.gdf.loc[asset_idx,:]
- AIM_i = {
- "RandomVariables": [],
- "GeneralInformation": dict(
- AIM_id = str(asset_id),
- location = {
- 'latitude': asset["geometry"].centroid.coords[0][1],
- 'longitude': asset["geometry"].centroid.coords[0][0]
- }
- )
+ asset = self.gdf.loc[asset_idx, :]
+ AIM_i = { # noqa: N806
+ 'RandomVariables': [],
+ 'GeneralInformation': dict( # noqa: C408
+ AIM_id=str(asset_id),
+ location={
+ 'latitude': asset['geometry'].centroid.coords[0][1],
+ 'longitude': asset['geometry'].centroid.coords[0][0],
+ },
+ ),
}
# save every label as-is
- AIM_i["GeneralInformation"].update(asset)
- AIM_i["GeneralInformation"]['geometry'] = AIM_i["GeneralInformation"]['geometry'].wkt
+ AIM_i['GeneralInformation'].update(asset)
+ AIM_i['GeneralInformation']['geometry'] = AIM_i['GeneralInformation'][
+ 'geometry'
+ ].wkt
# if component_type is not None:
# AIM_i["GeneralInformation"].update({"assetSubtype":component_type})
return AIM_i
- def dumpAIM(self, AIM_i):
+
+ def dumpAIM(self, AIM_i): # noqa: N802, N803, D102
# assetSubtype = AIM_i['GeneralInformation'].get("assetSubtype", None)
- componentType = AIM_i['GeneralInformation'].get("type", None)
- outDir = os.path.dirname(self.output_file)
+ componentType = AIM_i['GeneralInformation'].get('type', None) # noqa: N806
+ outDir = os.path.dirname(self.output_file) # noqa: PTH120, N806
if componentType:
- outDir = os.path.join(outDir, componentType)
- asset_id = AIM_i["GeneralInformation"]["AIM_id"]
- AIM_file_name = "{}-AIM.json".format(asset_id)
- AIM_file_name = os.path.join(outDir,AIM_file_name)
- with open(AIM_file_name, 'w', encoding="utf-8") as f:
+ outDir = os.path.join(outDir, componentType) # noqa: PTH118, N806
+ asset_id = AIM_i['GeneralInformation']['AIM_id']
+ AIM_file_name = f'{asset_id}-AIM.json' # noqa: N806
+ AIM_file_name = os.path.join(outDir, AIM_file_name) # noqa: PTH118, N806
+ with open(AIM_file_name, 'w', encoding='utf-8') as f: # noqa: PTH123
json.dump(AIM_i, f, indent=2, cls=NpEncoder)
return AIM_file_name
-class lineAIMGenerator(generalAIMGenerator):
- def breakDownLongLines(self, delta, tolerance = 10e-3):
+
+class lineAIMGenerator(generalAIMGenerator): # noqa: D101
+ def breakDownLongLines(self, delta, tolerance=10e-3): # noqa: N802, D102
edges = self.gdf
- dropedEdges = []
- newEdges = []
+ dropedEdges = [] # noqa: N806
+ newEdges = [] # noqa: N806
crs = edges.crs
- edgesOrig = edges.copy()
+ edgesOrig = edges.copy() # noqa: N806
# edgesOrig["IDbase"] = edgesOrig["OID"].apply(lambda x: x.split('_')[0])
- edgesOrig["IDbase"] = edgesOrig.index
- num_segExistingMap = edgesOrig.groupby("IDbase").count().iloc[:,0].to_dict()
- edges_dict = edges.reset_index().to_crs("epsg:6500")
+ edgesOrig['IDbase'] = edgesOrig.index
+ num_segExistingMap = edgesOrig.groupby('IDbase').count().iloc[:, 0].to_dict() # noqa: N806
+ edges_dict = edges.reset_index().to_crs('epsg:6500')
edges_dict = edges_dict.to_dict(orient='records')
for row_ind in range(len(edges_dict)):
- LS = edges_dict[row_ind]["geometry"]
- num_seg = int(np.ceil(LS.length/delta))
+ LS = edges_dict[row_ind]['geometry'] # noqa: N806
+ num_seg = int(np.ceil(LS.length / delta))
if num_seg == 1:
continue
- distances = np.linspace(0, LS.length, num_seg+1)
- points = shapely.MultiPoint([LS.interpolate(distance) for distance in \
- distances[:-1]] + [LS.coords[-1]])
- LS = shapely.ops.snap(LS, points, tolerance)
- with warnings.catch_warnings(): #Suppress the warning of points not on
+ distances = np.linspace(0, LS.length, num_seg + 1)
+ points = shapely.MultiPoint(
+ [LS.interpolate(distance) for distance in distances[:-1]]
+ + [LS.coords[-1]]
+ )
+ LS = shapely.ops.snap(LS, points, tolerance) # noqa: N806
+ with warnings.catch_warnings(): # Suppress the warning of points not on
            # LS. Shapely will first project the point to the line and then split
- warnings.simplefilter("ignore")
- splittedLS = shapely.ops.split(LS,points).geoms
- currentEdge = edges_dict[row_ind].copy()
- num_segExisting = num_segExistingMap[currentEdge["id"]]
- for sLS_ind, sLS in enumerate(splittedLS):
+ warnings.simplefilter('ignore')
+ splittedLS = shapely.ops.split(LS, points).geoms # noqa: N806
+ currentEdge = edges_dict[row_ind].copy() # noqa: N806
+ num_segExisting = num_segExistingMap[currentEdge['id']] # noqa: N806, F841
+ for sLS_ind, sLS in enumerate(splittedLS): # noqa: N806
# create new edge
# if sLS_ind ==0:
# newID = currentEdge["id"]
@@ -125,177 +149,205 @@ def breakDownLongLines(self, delta, tolerance = 10e-3):
# newID = currentEdge["id"]+"_"+str(num_segExisting)
# num_segExisting +=1
# num_segExistingMap[currentEdge["id"]] += 1
- newID = currentEdge["id"]
- newGeom = sLS
- newEdge = currentEdge.copy()
- newEdge.update({"id":newID,"geometry":newGeom,\
- "segID":sLS_ind})
+ newID = currentEdge['id'] # noqa: N806
+ newGeom = sLS # noqa: N806
+ newEdge = currentEdge.copy() # noqa: N806
+ newEdge.update({'id': newID, 'geometry': newGeom, 'segID': sLS_ind})
newEdges.append(newEdge)
- dropedEdges.append(edges_dict[row_ind]["id"])
+ dropedEdges.append(edges_dict[row_ind]['id'])
edges = edges.drop(dropedEdges)
- edges = edges.reset_index() # Convert "id" from index into a column
- if len(newEdges)>0:
- edges["segID"] = 0
- newEdges = gpd.GeoDataFrame(newEdges, crs="epsg:6500").to_crs(crs)
+ edges = edges.reset_index() # Convert "id" from index into a column
+ if len(newEdges) > 0:
+ edges['segID'] = 0
+ newEdges = gpd.GeoDataFrame(newEdges, crs='epsg:6500').to_crs(crs) # noqa: N806
edges = pd.concat([edges, newEdges], ignore_index=True)
- edges = edges.sort_values(['id','segID'])
- edges = edges.reset_index().rename(columns = {
- "id":"idBeforeSegment","index":"id"}).drop(columns = ["segID"])
+ edges = edges.sort_values(['id', 'segID'])
+ edges = (
+ edges.reset_index()
+ .rename(columns={'id': 'idBeforeSegment', 'index': 'id'})
+ .drop(columns=['segID'])
+ )
# self.gdf = edges.reset_index().rename(columns={"index":"AIM_id"})
self.gdf = edges
- return
- def defineConnectivities(self, AIM_id_prefix = None, edges_file_name = None,\
- nodes_file_name = None):
+
+ def defineConnectivities( # noqa: N802, D102
+ self,
+ AIM_id_prefix=None, # noqa: N803
+ edges_file_name=None,
+ nodes_file_name=None,
+ ):
# Convert find connectivity and add start_node, end_node attributes
edges = self.gdf
datacrs = edges.crs
- graph = momepy.gdf_to_nx(edges.to_crs("epsg:6500"), approach='primal')
- with warnings.catch_warnings(): #Suppress the warning of disconnected components in the graph
- warnings.simplefilter("ignore")
- nodes, edges, sw = momepy.nx_to_gdf(graph, points=True, lines=True,
- spatial_weights=True)
+ graph = momepy.gdf_to_nx(edges.to_crs('epsg:6500'), approach='primal')
+ with warnings.catch_warnings(): # Suppress the warning of disconnected components in the graph
+ warnings.simplefilter('ignore')
+ nodes, edges, sw = momepy.nx_to_gdf( # noqa: F841
+ graph, points=True, lines=True, spatial_weights=True
+ )
# edges = edges.set_index('ind')
- ### Some edges has start_node as the last point in the geometry and end_node as the first point, check and reorder
+        # Some edges have start_node as the last point in the geometry and end_node as the first point; check and reorder
for ind in edges.index:
- start = nodes.loc[edges.loc[ind, "node_start"],"geometry"]
- end = nodes.loc[edges.loc[ind, "node_end"],"geometry"]
- first = shapely.geometry.Point(edges.loc[ind,"geometry"].coords[0])
- last = shapely.geometry.Point(edges.loc[ind,"geometry"].coords[-1])
- #check if first and last are the same
- if (start == first and end == last):
+ start = nodes.loc[edges.loc[ind, 'node_start'], 'geometry']
+ end = nodes.loc[edges.loc[ind, 'node_end'], 'geometry']
+ first = shapely.geometry.Point(edges.loc[ind, 'geometry'].coords[0])
+ last = shapely.geometry.Point(edges.loc[ind, 'geometry'].coords[-1])
+ # check if first and last are the same
+ if start == first and end == last:
continue
- elif (start == last and end == first):
- newStartID = edges.loc[ind, "node_end"]
- newEndID = edges.loc[ind, "node_start"]
- edges.loc[ind,"node_start"] = newStartID
- edges.loc[ind,"node_end"] = newEndID
+ elif start == last and end == first: # noqa: RET507
+ newStartID = edges.loc[ind, 'node_end'] # noqa: N806
+ newEndID = edges.loc[ind, 'node_start'] # noqa: N806
+ edges.loc[ind, 'node_start'] = newStartID
+ edges.loc[ind, 'node_end'] = newEndID
else:
- print(ind, "th row of edges has wrong start/first, end/last pairs, likely a bug of momepy.gdf_to_nx function")
+ print( # noqa: T201
+ ind,
+ 'th row of edges has wrong start/first, end/last pairs, likely a bug of momepy.gdf_to_nx function',
+ )
# locationGS = gpd.GeoSeries(edges["geometry"].apply(lambda x: x.centroid),crs = edges.crs).to_crs(datacrs)
- edges = edges.drop("mm_len", axis = 1).rename(columns={"node_start":\
- "StartNode", "node_end":"EndNode"}).to_crs(datacrs)
+ edges = (
+ edges.drop('mm_len', axis=1)
+ .rename(columns={'node_start': 'StartNode', 'node_end': 'EndNode'})
+ .to_crs(datacrs)
+ )
# edges["location_lon"] = locationGS.apply(lambda x:x.x)
# edges["location_lat"] = locationGS.apply(lambda x:x.y)
- edges = edges.rename(columns={"id":"AIM_id"})
+ edges = edges.rename(columns={'id': 'AIM_id'})
if AIM_id_prefix is not None:
- edges["AIM_id"] = edges["AIM_id"].apply(lambda x:AIM_id_prefix+"_"+str(x))
- outDir = os.path.dirname(self.output_file)
+ edges['AIM_id'] = edges['AIM_id'].apply(
+ lambda x: AIM_id_prefix + '_' + str(x)
+ )
+ outDir = os.path.dirname(self.output_file) # noqa: PTH120, N806
if edges_file_name is not None:
- edges.to_file(os.path.join(outDir,f"{edges_file_name}.geojson"),\
- driver = "GeoJSON")
+ edges.to_file(
+ os.path.join(outDir, f'{edges_file_name}.geojson'), # noqa: PTH118
+ driver='GeoJSON',
+ )
if nodes_file_name is not None:
- nodesNeeded = list(set(edges["StartNode"].values.tolist() +\
- edges["EndNode"].values.tolist()))
- nodes = nodes.loc[nodesNeeded,:]
- nodes = nodes.to_crs(datacrs)[["nodeID","geometry"]]
- nodes.to_file(os.path.join(outDir,f"{nodes_file_name}.geojson"),\
- driver = "GeoJSON")
+ nodesNeeded = list( # noqa: N806
+ set(
+ edges['StartNode'].values.tolist() # noqa: PD011
+ + edges['EndNode'].values.tolist() # noqa: PD011
+ )
+ )
+ nodes = nodes.loc[nodesNeeded, :]
+ nodes = nodes.to_crs(datacrs)[['nodeID', 'geometry']]
+ nodes.to_file(
+ os.path.join(outDir, f'{nodes_file_name}.geojson'), # noqa: PTH118
+ driver='GeoJSON',
+ )
self.gdf = edges
- return
-def split_and_select_components(input_config):
- component_dict = dict()
- asset_source_file = input_config["assetSourceFile"]
- with open(asset_source_file, 'r', encoding="utf-8") as f:
+
+def split_and_select_components(input_config): # noqa: C901, D103
+ component_dict = dict() # noqa: C408
+ asset_source_file = input_config['assetSourceFile']
+ with open(asset_source_file, encoding='utf-8') as f: # noqa: PTH123
source_data = json.load(f)
- crs = source_data["crs"]
- featureList = source_data["features"]
- requested_dict = dict()
+ crs = source_data['crs']
+ featureList = source_data['features'] # noqa: N806
+ requested_dict = dict() # noqa: C408
for key, value in input_config.items():
if isinstance(value, dict):
- filterString = value.get('filter', None)
+ filterString = value.get('filter', None) # noqa: N806
if filterString is None:
continue
assets_requested = []
- if filterString == '':
+ if filterString == '': # noqa: PLC1901
assets_requested = np.array(assets_requested)
- requested_dict.update({key:assets_requested})
- component_dict.update({key:[]})
+ requested_dict.update({key: assets_requested})
+ component_dict.update({key: []})
else:
for assets in filterString.split(','):
- if "-" in assets:
- asset_low, asset_high = assets.split("-")
- assets_requested += list(range(int(asset_low), int(asset_high)+1))
+ if '-' in assets:
+ asset_low, asset_high = assets.split('-')
+ assets_requested += list(
+ range(int(asset_low), int(asset_high) + 1)
+ )
else:
assets_requested.append(int(assets))
assets_requested = np.array(assets_requested)
- requested_dict.update({key:assets_requested})
- component_dict.update({key:[]})
+ requested_dict.update({key: assets_requested})
+ component_dict.update({key: []})
for feat in featureList:
- component_type = feat["properties"].get("type", None)
- if (component_type in component_dict.keys()):
- feat_id = int(feat["id"])
+ component_type = feat['properties'].get('type', None)
+ if component_type in component_dict:
+ feat_id = int(feat['id'])
if requested_dict[component_type].size == 0:
component_dict.pop(component_type)
continue
- if (feat_id in requested_dict[component_type]):
- feat["properties"].update({"id":feat_id})
+ if feat_id in requested_dict[component_type]:
+ feat['properties'].update({'id': feat_id})
component_dict[component_type].append(feat)
- for component in component_dict.keys():
- component_dict[component] = gpd.GeoDataFrame.from_features(\
- component_dict[component],crs=crs["properties"]["name"])\
- .set_index('id')
+ for component in component_dict: # noqa: PLC0206
+ component_dict[component] = gpd.GeoDataFrame.from_features(
+ component_dict[component], crs=crs['properties']['name']
+ ).set_index('id')
return component_dict
-def init_workdir(component_dict, outDir):
+
+
+def init_workdir(component_dict, outDir): # noqa: N803, D103
os.chdir(outDir)
for dir_or_file in os.listdir(outDir):
if dir_or_file != 'log.txt':
- if os.path.isdir(dir_or_file):
+ if os.path.isdir(dir_or_file): # noqa: PTH112
shutil.rmtree(dir_or_file)
else:
- os.remove(dir_or_file)
- component_dir = dict()
- for comp in component_dict.keys():
- compDir = posixpath.join(outDir, comp)
- os.mkdir(compDir)
- component_dir.update({comp:compDir})
+ os.remove(dir_or_file) # noqa: PTH107
+ component_dir = dict() # noqa: C408
+ for comp in component_dict.keys(): # noqa: SIM118
+ compDir = posixpath.join(outDir, comp) # noqa: N806
+ os.mkdir(compDir) # noqa: PTH102
+ component_dir.update({comp: compDir})
return component_dir
-
-def create_asset_files(output_file,
- asset_type, input_file, doParallel):
+
+
+def create_asset_files(output_file, asset_type, input_file, doParallel): # noqa: C901, N803, D103
# check if running parallel
- numP = 1
- procID = 0
- runParallel = False
+ numP = 1 # noqa: N806
+ procID = 0 # noqa: N806
+ runParallel = False # noqa: N806
- if doParallel == "True":
- mpi_spec = importlib.util.find_spec("mpi4py")
+ if doParallel == 'True':
+ mpi_spec = importlib.util.find_spec('mpi4py')
found = mpi_spec is not None
if found:
- import mpi4py
- from mpi4py import MPI
- runParallel = True
+ from mpi4py import MPI # noqa: PLC0415
+
+ runParallel = True # noqa: N806
comm = MPI.COMM_WORLD
- numP = comm.Get_size()
- procID = comm.Get_rank();
- if numP < 2:
- doParallel = "False"
- runParallel = False
- numP = 1
- procID = 0
- outDir = os.path.dirname(output_file)
-
- with open(input_file, 'r', encoding="utf-8") as f:
+ numP = comm.Get_size() # noqa: N806
+ procID = comm.Get_rank() # noqa: N806
+ if numP < 2: # noqa: PLR2004
+ doParallel = 'False' # noqa: N806
+ runParallel = False # noqa: N806
+ numP = 1 # noqa: N806
+ procID = 0 # noqa: N806
+ outDir = os.path.dirname(output_file) # noqa: PTH120, N806
+
+ with open(input_file, encoding='utf-8') as f: # noqa: PTH123
input_data = json.load(f)
- input_config = input_data["Applications"]["Assets"][asset_type]\
- ["ApplicationData"]
+ input_config = input_data['Applications']['Assets'][asset_type][
+ 'ApplicationData'
+ ]
# if input_config.get("Roadway", None):
# roadSegLength = float(input_config['Roadway'].get('maxRoadLength_m', "100000"))
component_dict = split_and_select_components(input_config)
- component_dir = init_workdir(component_dict, outDir)
+ component_dir = init_workdir(component_dict, outDir) # noqa: F841
assets_array = []
for component_type, component_data in component_dict.items():
- geom_type = type(component_data['geometry'].values[0])
- if geom_type in [shapely.Point, shapely.Polygon]:
- # if component_type in ["HwyBridge", "HwyTunnel"]:
- AIMgenerator = generalAIMGenerator(output_file)
+ geom_type = type(component_data['geometry'].values[0]) # noqa: PD011
+ if geom_type in [shapely.Point, shapely.Polygon]: # noqa: PLR6201
+ # if component_type in ["HwyBridge", "HwyTunnel"]:
+ AIMgenerator = generalAIMGenerator(output_file) # noqa: N806
AIMgenerator.set_asset_gdf(component_data)
- selected_Asset_idxs = AIMgenerator.selectAssets(None)
+ selected_Asset_idxs = AIMgenerator.selectAssets(None) # noqa: N806
# elif component_type in ["Roadway"]:
- elif geom_type in [shapely.LineString]:
- AIMgenerator = lineAIMGenerator(output_file)
+ elif geom_type == shapely.LineString:
+ AIMgenerator = lineAIMGenerator(output_file) # noqa: N806
AIMgenerator.set_asset_gdf(component_data)
- selected_Asset_idxs = AIMgenerator.selectAssets(None)
+ selected_Asset_idxs = AIMgenerator.selectAssets(None) # noqa: N806
# AIMgenerator.breakDownLongLines(roadSegLength)
# # AIMgenerator.defineConnectivities(None, "hwy_edges",\
# # "hwy_nodes")
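For reference, the segmentation that breakDownLongLines performs above (and that the commented-out AIMgenerator.breakDownLongLines call would trigger) reduces to a small shapely recipe: sample points roughly every delta along a LineString, snap the line to those points, then split it. A minimal sketch, not part of the patch; the helper name split_line is illustrative only.

import numpy as np
import shapely
import shapely.ops

def split_line(line, delta, tolerance=1e-3):
    # Split a shapely LineString into pieces of roughly `delta` length.
    num_seg = int(np.ceil(line.length / delta))
    if num_seg <= 1:
        return [line]
    # interior sample points; the endpoints already bound the line
    distances = np.linspace(0, line.length, num_seg + 1)[1:-1]
    points = shapely.MultiPoint([line.interpolate(d) for d in distances])
    # snap guards against floating-point points that are not exactly on the line
    snapped = shapely.ops.snap(line, points, tolerance)
    return list(shapely.ops.split(snapped, points).geoms)

# a 10-unit line with delta=3 comes back as four segments of length 2.5
pieces = split_line(shapely.LineString([(0, 0), (10, 0)]), delta=3.0)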
@@ -303,54 +355,63 @@ def create_asset_files(output_file,
# # Run this to select all assets
# selected_Asset_idxs = AIMgenerator.selectAssets(None)
else:
- sys.exit((f"The geometry type {geom_type} defined for the") + \
- (f"components {component_type} is not supported in ")+\
- (f"the assets {asset_type}"))
+ sys.exit(
+                (f'The geometry type {geom_type} defined for the ')  # noqa: ISC003
+ + (f'components {component_type} is not supported in ')
+ + (f'the assets {asset_type}')
+ )
# for each asset...
count = 0
for asset_idx in selected_Asset_idxs:
- if runParallel == False or (count % numP) == procID:
+ if runParallel == False or (count % numP) == procID: # noqa: E712
# initialize the AIM file
- AIM_i = AIMgenerator.createAIM(asset_idx, component_type)
- AIM_file_name = AIMgenerator.dumpAIM(AIM_i)
- assets_array.append(dict(id=AIM_i['GeneralInformation']['AIM_id'], file=AIM_file_name))
- count = count + 1
+ AIM_i = AIMgenerator.createAIM(asset_idx, component_type) # noqa: N806
+ AIM_file_name = AIMgenerator.dumpAIM(AIM_i) # noqa: N806
+ assets_array.append(
+ dict( # noqa: C408
+ id=AIM_i['GeneralInformation']['AIM_id'], file=AIM_file_name
+ )
+ )
+ count = count + 1 # noqa: PLR6104
if procID != 0:
- # if not P0, write data to output file with procID in name and barrier
- output_file_p = os.path.join(outDir,f'tmp_{procID}.json')
- with open(output_file_p, 'w', encoding="utf-8") as f:
+ # if not P0, write data to output file with procID in name and barrier
+ output_file_p = os.path.join(outDir, f'tmp_{procID}.json') # noqa: PTH118
+ with open(output_file_p, 'w', encoding='utf-8') as f: # noqa: PTH123
json.dump(assets_array, f, indent=0)
- comm.Barrier()
+ comm.Barrier()
else:
- if runParallel == True:
+ if runParallel == True: # noqa: E712
# if parallel & P0, barrier so that all files written above, then loop over other processor files: open, load data and append
- comm.Barrier()
+ comm.Barrier()
for i in range(1, numP):
- fileToAppend = os.path.join(outDir,f'tmp_{i}.json')
- with open(fileToAppend, 'r', encoding="utf-8") as data_file:
+ fileToAppend = os.path.join(outDir, f'tmp_{i}.json') # noqa: PTH118, N806
+ with open(fileToAppend, encoding='utf-8') as data_file: # noqa: FURB101, PTH123
json_data = data_file.read()
- assetsToAppend = json.loads(json_data)
+ assetsToAppend = json.loads(json_data) # noqa: N806
assets_array += assetsToAppend
- with open(output_file, 'w', encoding="utf-8") as f:
+ with open(output_file, 'w', encoding='utf-8') as f: # noqa: PTH123
json.dump(assets_array, f, indent=2, cls=NpEncoder)
# else:
# print(f"The asset_type {asset_type} is not one of Buildings, TransportationNetwork or WaterNetwork, and is currently not supported")
# sys.exit(1)
-if __name__ == '__main__':
+if __name__ == '__main__':
parser = argparse.ArgumentParser()
parser.add_argument('--assetFile')
parser.add_argument('--assetType')
parser.add_argument('--inputJsonFile')
- parser.add_argument('--doParallel', default="False")
- parser.add_argument("-n", "--numP", default='8')
- parser.add_argument("-m", "--mpiExec", default='mpiexec')
+ parser.add_argument('--doParallel', default='False')
+ parser.add_argument('-n', '--numP', default='8')
+ parser.add_argument('-m', '--mpiExec', default='mpiexec')
parser.add_argument('--getRV', nargs='?', const=True, default=False)
args = parser.parse_args()
if args.getRV:
- sys.exit(create_asset_files(args.assetFile, args.assetType,\
- args.inputJsonFile, args.doParallel))
+ sys.exit(
+ create_asset_files(
+ args.assetFile, args.assetType, args.inputJsonFile, args.doParallel
+ )
+ )
else:
- pass # not used
\ No newline at end of file
+ pass # not used
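Both selectAssets above and the per-type filter blocks rely on the same filter-string convention: comma-separated ids with optional dash ranges, expanded and then intersected with the ids that actually exist in the GeoDataFrame index. A self-contained sketch, not part of the patch; parse_asset_filter is a hypothetical helper name.

import numpy as np

def parse_asset_filter(filter_string):
    # Expand a filter such as '1-5,8,12' into an array of integer asset ids.
    requested = []
    for token in filter_string.split(','):
        if '-' in token:
            low, high = token.split('-')
            requested += list(range(int(low), int(high) + 1))
        else:
            requested.append(int(token))
    return np.array(requested)

requested = parse_asset_filter('1-3,7')                          # [1, 2, 3, 7]
available = np.array([2, 3, 4, 5, 7])                            # e.g. gdf.index.values
to_run = requested[np.where(np.isin(requested, available))[0]]   # [2, 3, 7]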
diff --git a/modules/createAIM/JSON_to_AIM/GeoJSON_to_AIM_transport.py b/modules/createAIM/JSON_to_AIM/GeoJSON_to_AIM_transport.py
index 2a4b2cb71..a67d39b10 100644
--- a/modules/createAIM/JSON_to_AIM/GeoJSON_to_AIM_transport.py
+++ b/modules/createAIM/JSON_to_AIM/GeoJSON_to_AIM_transport.py
@@ -1,5 +1,4 @@
-# -*- coding: utf-8 -*-
-#
+# # noqa: INP001, D100
# Copyright (c) 2019 The Regents of the University of California
# Copyright (c) 2019 Leland Stanford Junior University
#
@@ -41,99 +40,122 @@
# Stevan Gavrilovic
# Jinyan Zhao
-import argparse, sys, os
+import argparse
+import importlib
import json
+import os
+import sys
+import warnings
+
+import geopandas as gpd
+import momepy
import numpy as np
import pandas as pd
-import importlib
import shapely
-import geopandas as gpd
-import momepy
-import warnings
+
+
# Break down long roads according to delta
-def breakDownLongEdges(edges, delta, tolerance = 10e-3):
- dropedEdges = []
- newEdges = []
+def breakDownLongEdges(edges, delta, tolerance=10e-3): # noqa: N802, D103
+ dropedEdges = [] # noqa: N806
+ newEdges = [] # noqa: N806
crs = edges.crs
- edgesOrig = edges.copy()
- edgesOrig["IDbase"] = edgesOrig["ID"].apply(lambda x: x.split('_')[0])
- num_segExistingMap = edgesOrig.groupby("IDbase").count()["ID"].to_dict()
- edges_dict = edges.reset_index().to_crs("epsg:6500")
+ edgesOrig = edges.copy() # noqa: N806
+ edgesOrig['IDbase'] = edgesOrig['ID'].apply(lambda x: x.split('_')[0])
+ num_segExistingMap = edgesOrig.groupby('IDbase').count()['ID'].to_dict() # noqa: N806
+ edges_dict = edges.reset_index().to_crs('epsg:6500')
edges_dict = edges_dict.to_dict(orient='records')
for row_ind in range(len(edges_dict)):
- LS = edges_dict[row_ind]["geometry"]
- num_seg = int(np.ceil(LS.length/delta))
+ LS = edges_dict[row_ind]['geometry'] # noqa: N806
+ num_seg = int(np.ceil(LS.length / delta))
if num_seg == 1:
continue
- distances = np.linspace(0, LS.length, num_seg+1)
- points = shapely.MultiPoint([LS.interpolate(distance) for distance in distances[:-1]] + [LS.boundary.geoms[1]])
- LS = shapely.ops.snap(LS, points, tolerance)
- splittedLS = shapely.ops.split(LS,points).geoms
- currentEdge = edges_dict[row_ind].copy()
- num_segExisting = num_segExistingMap[currentEdge["ID"].split('_')[0]]
- for sLS_ind, sLS in enumerate(splittedLS):
+ distances = np.linspace(0, LS.length, num_seg + 1)
+ points = shapely.MultiPoint(
+ [LS.interpolate(distance) for distance in distances[:-1]]
+ + [LS.boundary.geoms[1]]
+ )
+ LS = shapely.ops.snap(LS, points, tolerance) # noqa: N806
+ splittedLS = shapely.ops.split(LS, points).geoms # noqa: N806
+ currentEdge = edges_dict[row_ind].copy() # noqa: N806
+ num_segExisting = num_segExistingMap[currentEdge['ID'].split('_')[0]] # noqa: N806
+ for sLS_ind, sLS in enumerate(splittedLS): # noqa: N806
# create new edge
- if sLS_ind ==0:
- newID = currentEdge["ID"]
+ if sLS_ind == 0:
+ newID = currentEdge['ID'] # noqa: N806
else:
- newID = currentEdge["ID"].split("_")[0]+"_"+str(num_segExisting+1)
- num_segExisting +=1
- num_segExistingMap[currentEdge["ID"].split('_')[0]] = num_segExistingMap[currentEdge["ID"].split('_')[0]]+1
- newGeom = sLS
- newEdge = currentEdge.copy()
- newEdge.update({"ID":newID,
- "roadType":currentEdge["roadType"],
- "geometry":newGeom,
- "maxMPH":currentEdge["maxMPH"],
- "lanes":currentEdge["lanes"]})
+ newID = ( # noqa: N806
+ currentEdge['ID'].split('_')[0] + '_' + str(num_segExisting + 1)
+ )
+ num_segExisting += 1 # noqa: N806
+ num_segExistingMap[currentEdge['ID'].split('_')[0]] = ( # noqa: PLR6104
+ num_segExistingMap[currentEdge['ID'].split('_')[0]] + 1
+ )
+ newGeom = sLS # noqa: N806
+ newEdge = currentEdge.copy() # noqa: N806
+ newEdge.update(
+ {
+ 'ID': newID,
+ 'roadType': currentEdge['roadType'],
+ 'geometry': newGeom,
+ 'maxMPH': currentEdge['maxMPH'],
+ 'lanes': currentEdge['lanes'],
+ }
+ )
newEdges.append(newEdge)
dropedEdges.append(row_ind)
edges = edges.drop(dropedEdges)
- if len(newEdges)>0:
- newEdges = gpd.GeoDataFrame(newEdges, crs="epsg:6500").to_crs(crs)
+ if len(newEdges) > 0:
+ newEdges = gpd.GeoDataFrame(newEdges, crs='epsg:6500').to_crs(crs) # noqa: N806
edges = pd.concat([edges, newEdges], ignore_index=True)
edges = edges.reset_index(drop=True)
- return edges
-
-def create_asset_files(output_file, asset_source_road,
- asset_source_bridge, asset_source_tunnel,
- bridge_filter,tunnel_filter, road_filter, doParallel, roadSegLength):
-
+ return edges # noqa: RET504
+
+
+def create_asset_files( # noqa: C901, D103, PLR0915
+ output_file,
+ asset_source_road,
+ asset_source_bridge,
+ asset_source_tunnel,
+ bridge_filter,
+ tunnel_filter,
+ road_filter,
+ doParallel, # noqa: N803
+ roadSegLength, # noqa: N803
+):
# these imports are here to save time when the app is called without
# the -getRV flag
-
# check if running parallel
- numP = 1
- procID = 0
- runParallel = False
+ numP = 1 # noqa: N806
+ procID = 0 # noqa: N806
+ runParallel = False # noqa: N806
- if doParallel == "True":
- mpi_spec = importlib.util.find_spec("mpi4py")
+ if doParallel == 'True':
+ mpi_spec = importlib.util.find_spec('mpi4py')
found = mpi_spec is not None
if found:
- import mpi4py
- from mpi4py import MPI
- runParallel = True
+ from mpi4py import MPI # noqa: PLC0415
+
+ runParallel = True # noqa: N806
comm = MPI.COMM_WORLD
- numP = comm.Get_size()
- procID = comm.Get_rank();
- if numP < 2:
- doParallel = "False"
- runParallel = False
- numP = 1
- procID = 0
+ numP = comm.Get_size() # noqa: N806
+ procID = comm.Get_rank() # noqa: N806
+ if numP < 2: # noqa: PLR2004
+ doParallel = 'False' # noqa: N806
+ runParallel = False # noqa: N806
+ numP = 1 # noqa: N806
+ procID = 0 # noqa: N806
# Get the out dir, may not always be in the results folder if multiple assets are used
- outDir = os.path.dirname(output_file)
-
+ outDir = os.path.dirname(output_file) # noqa: PTH120, N806
+
# check if a filter is provided for bridges
if bridge_filter is not None:
bridges_requested = []
for assets in bridge_filter.split(','):
- if "-" in assets:
- asset_low, asset_high = assets.split("-")
- bridges_requested += list(range(int(asset_low), int(asset_high)+1))
+ if '-' in assets:
+ asset_low, asset_high = assets.split('-')
+ bridges_requested += list(range(int(asset_low), int(asset_high) + 1))
else:
bridges_requested.append(int(assets))
bridges_requested = np.array(bridges_requested)
@@ -141,9 +163,9 @@ def create_asset_files(output_file, asset_source_road,
if tunnel_filter is not None:
tunnels_requested = []
for assets in tunnel_filter.split(','):
- if "-" in assets:
- asset_low, asset_high = assets.split("-")
- tunnels_requested += list(range(int(asset_low), int(asset_high)+1))
+ if '-' in assets:
+ asset_low, asset_high = assets.split('-')
+ tunnels_requested += list(range(int(asset_low), int(asset_high) + 1))
else:
tunnels_requested.append(int(assets))
tunnels_requested = np.array(tunnels_requested)
@@ -151,93 +173,117 @@ def create_asset_files(output_file, asset_source_road,
if road_filter is not None:
roads_requested = []
for assets in road_filter.split(','):
- if "-" in assets:
- asset_low, asset_high = assets.split("-")
- roads_requested += list(range(int(asset_low), int(asset_high)+1))
+ if '-' in assets:
+ asset_low, asset_high = assets.split('-')
+ roads_requested += list(range(int(asset_low), int(asset_high) + 1))
else:
roads_requested.append(int(assets))
roads_requested = np.array(roads_requested)
-
+
# load the GeoJSON file with the asset information
if asset_source_road is not None:
- roadsGDF = gpd.read_file(asset_source_road)
+ roadsGDF = gpd.read_file(asset_source_road) # noqa: N806
datacrs = roadsGDF.crs
else:
- roadsGDF = gpd.GeoDataFrame.from_dict({})
+ roadsGDF = gpd.GeoDataFrame.from_dict({}) # noqa: N806
if asset_source_bridge is not None:
- bridgesGDF = gpd.read_file(asset_source_bridge)
+ bridgesGDF = gpd.read_file(asset_source_bridge) # noqa: N806
else:
- bridgesGDF = gpd.GeoDataFrame.from_dict({})
+ bridgesGDF = gpd.GeoDataFrame.from_dict({}) # noqa: N806
if asset_source_tunnel is not None:
- tunnelsGDF = gpd.read_file(asset_source_tunnel)
+ tunnelsGDF = gpd.read_file(asset_source_tunnel) # noqa: N806
else:
- tunnelsGDF = gpd.GeoDataFrame.from_dict({})
-
+ tunnelsGDF = gpd.GeoDataFrame.from_dict({}) # noqa: N806
+
# if there is a filter, then pull out only the required bridges
if bridge_filter is not None:
- assets_available = bridgesGDF.index.values
+ assets_available = bridgesGDF.index.values # noqa: PD011
bridges_to_run = bridges_requested[
- np.where(np.in1d(bridges_requested, assets_available))[0]]
+ np.where(np.isin(bridges_requested, assets_available))[0]
+ ]
selected_bridges = bridgesGDF.loc[bridges_to_run]
else:
selected_bridges = bridgesGDF
- bridges_to_run = bridgesGDF.index.values
+ bridges_to_run = bridgesGDF.index.values # noqa: PD011
# if there is a filter, then pull out only the required tunnels
if tunnel_filter is not None:
- assets_available = tunnelsGDF.index.values
+ assets_available = tunnelsGDF.index.values # noqa: PD011
tunnels_to_run = tunnels_requested[
- np.where(np.in1d(tunnels_requested, assets_available))[0]]
+ np.where(np.isin(tunnels_requested, assets_available))[0]
+ ]
selected_tunnels = tunnelsGDF.loc[tunnels_to_run]
else:
selected_tunnels = tunnelsGDF
- tunnels_to_run = tunnelsGDF.index.values
+ tunnels_to_run = tunnelsGDF.index.values # noqa: PD011
# if there is a filter, then pull out only the required roads
if road_filter is not None:
- assets_available = roadsGDF.index.values
+ assets_available = roadsGDF.index.values # noqa: PD011
roads_to_run = roads_requested[
- np.where(np.in1d(roads_requested, assets_available))[0]]
+ np.where(np.isin(roads_requested, assets_available))[0]
+ ]
selected_roads = roadsGDF.loc[roads_to_run]
else:
selected_roads = roadsGDF
- roads_to_run = roadsGDF.index.values
+ roads_to_run = roadsGDF.index.values # noqa: PD011
- if len(selected_roads)>0:
+ if len(selected_roads) > 0:
# Break down road network
edges = breakDownLongEdges(selected_roads, roadSegLength)
# Convert find connectivity and add start_node, end_node attributes
- graph = momepy.gdf_to_nx(edges.to_crs("epsg:6500"), approach='primal')
- with warnings.catch_warnings(): #Suppress the warning of disconnected components in the graph
- warnings.simplefilter("ignore")
- nodes, edges, sw = momepy.nx_to_gdf(graph, points=True, lines=True,
- spatial_weights=True)
- ### Some edges has start_node as the last point in the geometry and end_node as the first point, check and reorder
+ graph = momepy.gdf_to_nx(edges.to_crs('epsg:6500'), approach='primal')
+ with warnings.catch_warnings(): # Suppress the warning of disconnected components in the graph
+ warnings.simplefilter('ignore')
+ nodes, edges, sw = momepy.nx_to_gdf( # noqa: F841
+ graph, points=True, lines=True, spatial_weights=True
+ )
+            # Some edges have start_node as the last point in the geometry and end_node as the first point; check and reorder
for ind in edges.index:
- start = nodes.loc[edges.loc[ind, "node_start"],"geometry"]
- end = nodes.loc[edges.loc[ind, "node_end"],"geometry"]
- first = shapely.geometry.Point(edges.loc[ind,"geometry"].coords[0])
- last = shapely.geometry.Point(edges.loc[ind,"geometry"].coords[-1])
- #check if first and last are the same
- if (start == first and end == last):
+ start = nodes.loc[edges.loc[ind, 'node_start'], 'geometry']
+ end = nodes.loc[edges.loc[ind, 'node_end'], 'geometry']
+ first = shapely.geometry.Point(edges.loc[ind, 'geometry'].coords[0])
+ last = shapely.geometry.Point(edges.loc[ind, 'geometry'].coords[-1])
+ # check if first and last are the same
+ if start == first and end == last:
continue
- elif (start == last and end == first):
- newStartID = edges.loc[ind, "node_end"]
- newEndID = edges.loc[ind, "node_start"]
- edges.loc[ind,"node_start"] = newStartID
- edges.loc[ind,"node_end"] = newEndID
+ elif start == last and end == first: # noqa: RET507
+ newStartID = edges.loc[ind, 'node_end'] # noqa: N806
+ newEndID = edges.loc[ind, 'node_start'] # noqa: N806
+ edges.loc[ind, 'node_start'] = newStartID
+ edges.loc[ind, 'node_end'] = newEndID
else:
- print(ind, "th row of edges has wrong start/first, end/last pairs, likely a bug of momepy.gdf_to_nx function")
- locationGS = gpd.GeoSeries(edges["geometry"].apply(lambda x: x.centroid),crs = edges.crs).to_crs(datacrs)
- edges = edges.drop("mm_len", axis = 1).rename(columns={"node_start":"start_node", "node_end":"end_node"}).to_crs(datacrs)
- edges["location_lon"] = locationGS.apply(lambda x:x.x)
- edges["location_lat"] = locationGS.apply(lambda x:x.y)
- edges = edges.reset_index().rename(columns={"index":"AIM_id"})
- edges["AIM_id"] = edges["AIM_id"].apply(lambda x:"r"+str(x))
- edges.to_file(os.path.join(outDir,"roadNetworkEdgesSelected.geojson"), driver = "GeoJSON")
- nodesNeeded = list(set(edges["start_node"].values.tolist() + edges["end_node"].values.tolist()))
- nodes = nodes.loc[nodesNeeded,:]
- nodes = nodes.to_crs(datacrs)[["nodeID","geometry"]]
- nodes.to_file(os.path.join(outDir,"roadNetworkNodesSelected.geojson"), driver = "GeoJSON")
+ print( # noqa: T201
+ ind,
+ 'th row of edges has wrong start/first, end/last pairs, likely a bug of momepy.gdf_to_nx function',
+ )
+ locationGS = gpd.GeoSeries( # noqa: N806
+ edges['geometry'].apply(lambda x: x.centroid), crs=edges.crs
+ ).to_crs(datacrs)
+ edges = (
+ edges.drop('mm_len', axis=1)
+ .rename(columns={'node_start': 'start_node', 'node_end': 'end_node'})
+ .to_crs(datacrs)
+ )
+ edges['location_lon'] = locationGS.apply(lambda x: x.x)
+ edges['location_lat'] = locationGS.apply(lambda x: x.y)
+ edges = edges.reset_index().rename(columns={'index': 'AIM_id'})
+ edges['AIM_id'] = edges['AIM_id'].apply(lambda x: 'r' + str(x))
+ edges.to_file(
+ os.path.join(outDir, 'roadNetworkEdgesSelected.geojson'), # noqa: PTH118
+ driver='GeoJSON',
+ )
+ nodesNeeded = list( # noqa: N806
+ set(
+ edges['start_node'].values.tolist() # noqa: PD011
+ + edges['end_node'].values.tolist() # noqa: PD011
+ )
+ )
+ nodes = nodes.loc[nodesNeeded, :]
+ nodes = nodes.to_crs(datacrs)[['nodeID', 'geometry']]
+ nodes.to_file(
+ os.path.join(outDir, 'roadNetworkNodesSelected.geojson'), # noqa: PTH118
+ driver='GeoJSON',
+ )
else:
edges = gpd.GeoDataFrame.from_dict({})
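The connectivity step in this hunk (and in defineConnectivities earlier) is a momepy round trip: convert the edge GeoDataFrame to a graph and back, which attaches node_start/node_end ids to every edge. A short sketch under the same assumptions as the code above (projected CRS epsg:6500, disconnected-component warnings suppressed); not part of the patch.

import warnings
import momepy

def extract_connectivity(edges_gdf):
    graph = momepy.gdf_to_nx(edges_gdf.to_crs('epsg:6500'), approach='primal')
    with warnings.catch_warnings():
        # momepy warns about disconnected components; the scripts above ignore it
        warnings.simplefilter('ignore')
        nodes, edges, _ = momepy.nx_to_gdf(
            graph, points=True, lines=True, spatial_weights=True
        )
    # edges now carries integer node_start / node_end columns indexing `nodes`
    return nodes, edges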
@@ -245,173 +291,187 @@ def create_asset_files(output_file, asset_source_road,
ind = 0
assets_array = []
for ind, asset in selected_bridges.iterrows():
- asset_id = "b" + str(bridges_to_run[ind])
- ind += 1
- if runParallel == False or (count % numP) == procID:
-
+ asset_id = 'b' + str(bridges_to_run[ind])
+ ind += 1 # noqa: PLW2901
+ if runParallel == False or (count % numP) == procID: # noqa: E712
# initialize the AIM file
# locationNodeID = str(asset["location"])
- AIM_i = {
- "RandomVariables": [],
- "GeneralInformation": dict(
- AIM_id = asset_id,
- location = {
- 'latitude': asset["geometry"].centroid.coords[0][1],
- 'longitude': asset["geometry"].centroid.coords[0][0]
- }
- )
+ AIM_i = { # noqa: N806
+ 'RandomVariables': [],
+ 'GeneralInformation': dict( # noqa: C408
+ AIM_id=asset_id,
+ location={
+ 'latitude': asset['geometry'].centroid.coords[0][1],
+ 'longitude': asset['geometry'].centroid.coords[0][0],
+ },
+ ),
}
- asset.pop("geometry")
+ asset.pop('geometry')
# save every label as-is
- AIM_i["GeneralInformation"].update(asset)
+ AIM_i['GeneralInformation'].update(asset)
# AIM_i["GeneralInformation"].update({"locationNode":locationNodeID})
- AIM_i["GeneralInformation"].update({"assetSubtype":"hwyBridge"})
- AIM_file_name = "{}-AIM.json".format(asset_id)
-
- AIM_file_name = os.path.join(outDir,AIM_file_name)
-
- with open(AIM_file_name, 'w', encoding="utf-8") as f:
+ AIM_i['GeneralInformation'].update({'assetSubtype': 'hwyBridge'})
+ AIM_file_name = f'{asset_id}-AIM.json' # noqa: N806
+
+ AIM_file_name = os.path.join(outDir, AIM_file_name) # noqa: PTH118, N806
+
+ with open(AIM_file_name, 'w', encoding='utf-8') as f: # noqa: PTH123
json.dump(AIM_i, f, indent=2)
- assets_array.append(dict(id=str(asset_id), file=AIM_file_name))
+ assets_array.append(dict(id=str(asset_id), file=AIM_file_name)) # noqa: C408
+
+ count = count + 1 # noqa: PLR6104
- count = count + 1
-
ind = 0
for ind, asset in selected_tunnels.iterrows():
- asset_id = "t" + str(tunnels_to_run[ind])
- ind += 1
- if runParallel == False or (count % numP) == procID:
-
+ asset_id = 't' + str(tunnels_to_run[ind])
+ ind += 1 # noqa: PLW2901
+ if runParallel == False or (count % numP) == procID: # noqa: E712
# initialize the AIM file
# locationNodeID = str(asset["location"])
- AIM_i = {
- "RandomVariables": [],
- "GeneralInformation": dict(
- AIM_id = asset_id,
- location = {
- 'latitude': asset["geometry"].centroid.coords[0][1],
- 'longitude': asset["geometry"].centroid.coords[0][0]
- }
- )
+ AIM_i = { # noqa: N806
+ 'RandomVariables': [],
+ 'GeneralInformation': dict( # noqa: C408
+ AIM_id=asset_id,
+ location={
+ 'latitude': asset['geometry'].centroid.coords[0][1],
+ 'longitude': asset['geometry'].centroid.coords[0][0],
+ },
+ ),
}
- asset.pop("geometry")
+ asset.pop('geometry')
# save every label as-is
- AIM_i["GeneralInformation"].update(asset)
+ AIM_i['GeneralInformation'].update(asset)
# AIM_i["GeneralInformation"].update({"locationNode":locationNodeID})
- AIM_i["GeneralInformation"].update({"assetSubtype":"hwyTunnel"})
- AIM_file_name = "{}-AIM.json".format(asset_id)
-
- AIM_file_name = os.path.join(outDir,AIM_file_name)
-
- with open(AIM_file_name, 'w', encoding="utf-8") as f:
+ AIM_i['GeneralInformation'].update({'assetSubtype': 'hwyTunnel'})
+ AIM_file_name = f'{asset_id}-AIM.json' # noqa: N806
+
+ AIM_file_name = os.path.join(outDir, AIM_file_name) # noqa: PTH118, N806
+
+ with open(AIM_file_name, 'w', encoding='utf-8') as f: # noqa: PTH123
json.dump(AIM_i, f, indent=2)
- assets_array.append(dict(id=str(asset_id), file=AIM_file_name))
+ assets_array.append(dict(id=str(asset_id), file=AIM_file_name)) # noqa: C408
+
+ count = count + 1 # noqa: PLR6104
- count = count + 1
-
ind = 0
for row_ind in edges.index:
- asset_id = "r" + str(row_ind)
+ asset_id = 'r' + str(row_ind)
ind += 1
- if runParallel == False or (count % numP) == procID:
-
+ if runParallel == False or (count % numP) == procID: # noqa: E712
# initialize the AIM file
- AIM_i = {
- "RandomVariables": [],
- "GeneralInformation": dict(
- AIM_id = asset_id,
- location = {
- 'latitude': edges.loc[row_ind,"location_lat"],
- 'longitude': edges.loc[row_ind,"location_lon"]
- }
- )
+ AIM_i = { # noqa: N806
+ 'RandomVariables': [],
+ 'GeneralInformation': dict( # noqa: C408
+ AIM_id=asset_id,
+ location={
+ 'latitude': edges.loc[row_ind, 'location_lat'],
+ 'longitude': edges.loc[row_ind, 'location_lon'],
+ },
+ ),
+ }
+ AIM_i['GeneralInformation'].update(
+ edges.loc[row_ind, :]
+ .drop(['geometry', 'location_lat', 'location_lon'])
+ .to_dict()
+ )
+ geom = {
+ 'type': 'LineString',
+ 'coordinates': [
+ [pt[0], pt[1]]
+ for pt in list(edges.loc[row_ind, 'geometry'].coords)
+ ],
}
- AIM_i["GeneralInformation"].update(edges.loc[row_ind,:].drop(["geometry","location_lat","location_lon"]).to_dict())
- geom = {"type":"LineString","coordinates":[[pt[0], pt[1]] for pt in list(edges.loc[row_ind,"geometry"].coords)]}
- AIM_i["GeneralInformation"].update({"geometry":str(geom)})
- AIM_i["GeneralInformation"].update({"assetSubtype":"roadway"})
- AIM_file_name = "{}-AIM.json".format(asset_id)
-
- AIM_file_name = os.path.join(outDir,AIM_file_name)
-
- with open(AIM_file_name, 'w', encoding="utf-8") as f:
+ AIM_i['GeneralInformation'].update({'geometry': str(geom)})
+ AIM_i['GeneralInformation'].update({'assetSubtype': 'roadway'})
+ AIM_file_name = f'{asset_id}-AIM.json' # noqa: N806
+
+ AIM_file_name = os.path.join(outDir, AIM_file_name) # noqa: PTH118, N806
+
+ with open(AIM_file_name, 'w', encoding='utf-8') as f: # noqa: PTH123
json.dump(AIM_i, f, indent=2)
- assets_array.append(dict(id=str(asset_id), file=AIM_file_name))
+ assets_array.append(dict(id=str(asset_id), file=AIM_file_name)) # noqa: C408
- count = count + 1
+ count = count + 1 # noqa: PLR6104
if procID != 0:
-
# if not P0, write data to output file with procID in name and barrier
- output_file = os.path.join(outDir,f'tmp_{procID}.json')
+ output_file = os.path.join(outDir, f'tmp_{procID}.json') # noqa: PTH118
- with open(output_file, 'w', encoding="utf-8") as f:
+ with open(output_file, 'w', encoding='utf-8') as f: # noqa: PTH123
json.dump(assets_array, f, indent=0)
-
- comm.Barrier()
- else:
-
- if runParallel == True:
+ comm.Barrier()
+ else:
+ if runParallel == True: # noqa: E712
# if parallel & P0, barrier so that all files written above, then loop over other processor files: open, load data and append
- comm.Barrier()
+ comm.Barrier()
for i in range(1, numP):
- fileToAppend = os.path.join(outDir,f'tmp_{i}.json')
- with open(fileToAppend, 'r', encoding="utf-8") as data_file:
+ fileToAppend = os.path.join(outDir, f'tmp_{i}.json') # noqa: PTH118, N806
+ with open(fileToAppend, encoding='utf-8') as data_file: # noqa: FURB101, PTH123
json_data = data_file.read()
- assetsToAppend = json.loads(json_data)
+ assetsToAppend = json.loads(json_data) # noqa: N806
assets_array += assetsToAppend
- with open(output_file, 'w', encoding="utf-8") as f:
+ with open(output_file, 'w', encoding='utf-8') as f: # noqa: PTH123
json.dump(assets_array, f, indent=2)
-
if __name__ == '__main__':
-
parser = argparse.ArgumentParser()
- parser.add_argument('--assetFile',
- help = "Path to the file that will contain a list of asset ids and "
- "corresponding AIM filenames")
- parser.add_argument('--assetSourceFileRoad',
- help = "Path to the GIS file with the roads")
- parser.add_argument('--assetSourceFileBridge',
- help = "Path to the JSON file with the bridges")
- parser.add_argument('--assetSourceFileTunnel',
- help = "Path to the JSON file with the tunnels")
- parser.add_argument('--bridgesFilter',
- help = "Filter applied to select a subset of bridges from the "
- "inventory",
- default=None)
- parser.add_argument('--tunnelsFilter',
- help = "Filter applied to select a subset of assets from the "
- "inventory",
- default=None)
- parser.add_argument('--roadsFilter',
- help = "Filter applied to select a subset of assets from the "
- "inventory",
- default=None)
- parser.add_argument('--roadSegLength',
- help = "Maximum length (m) of road segments in the created AIM "
- "files",
+ parser.add_argument(
+ '--assetFile',
+ help='Path to the file that will contain a list of asset ids and '
+ 'corresponding AIM filenames',
+ )
+ parser.add_argument(
+ '--assetSourceFileRoad', help='Path to the GIS file with the roads'
+ )
+ parser.add_argument(
+ '--assetSourceFileBridge', help='Path to the JSON file with the bridges'
+ )
+ parser.add_argument(
+ '--assetSourceFileTunnel', help='Path to the JSON file with the tunnels'
+ )
+ parser.add_argument(
+ '--bridgesFilter',
+        help='Filter applied to select a subset of bridges from the inventory',
+ default=None,
+ )
+ parser.add_argument(
+ '--tunnelsFilter',
+        help='Filter applied to select a subset of assets from the inventory',
+ default=None,
+ )
+ parser.add_argument(
+ '--roadsFilter',
+        help='Filter applied to select a subset of assets from the inventory',
+ default=None,
+ )
+ parser.add_argument(
+ '--roadSegLength',
+        help='Maximum length (m) of road segments in the created AIM files',
type=float,
- default=100)
- parser.add_argument('--doParallel', default="False")
- parser.add_argument("-n", "--numP", default='8')
- parser.add_argument("-m", "--mpiExec", default='mpiexec')
- parser.add_argument('--getRV',
- help = "Identifies the preparational stage of the workflow. This app "
- "is only used in that stage, so it does not do anything if "
- "called without this flag.",
+ default=100,
+ )
+ parser.add_argument('--doParallel', default='False')
+ parser.add_argument('-n', '--numP', default='8')
+ parser.add_argument('-m', '--mpiExec', default='mpiexec')
+ parser.add_argument(
+ '--getRV',
+        help='Identifies the preparation stage of the workflow. This app '
+ 'is only used in that stage, so it does not do anything if '
+ 'called without this flag.',
default=False,
- nargs='?', const=True)
+ nargs='?',
+ const=True,
+ )
# parser.add_argument('--saveFullNetwork',
# help = "Save the full network into edges and nodes.",
# default=False,
@@ -420,8 +480,18 @@ def create_asset_files(output_file, asset_source_road,
args = parser.parse_args()
if args.getRV:
- sys.exit(create_asset_files(args.assetFile, args.assetSourceFileRoad,
- args.assetSourceFileBridge, args.assetSourceFileTunnel,
- args.bridgesFilter,args.tunnelsFilter, args.roadsFilter, args.doParallel, args.roadSegLength))
+ sys.exit(
+ create_asset_files(
+ args.assetFile,
+ args.assetSourceFileRoad,
+ args.assetSourceFileBridge,
+ args.assetSourceFileTunnel,
+ args.bridgesFilter,
+ args.tunnelsFilter,
+ args.roadsFilter,
+ args.doParallel,
+ args.roadSegLength,
+ )
+ )
else:
- pass # not used
+ pass # not used
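The MPI logic repeated in each create_asset_files variant follows one pattern: every rank handles every numP-th asset in round-robin order, non-root ranks dump their partial results to tmp_<rank>.json, and rank 0 merges everything after a barrier. A condensed sketch, not part of the patch; the asset ids and output file names are placeholders.

import json

from mpi4py import MPI

comm = MPI.COMM_WORLD
numP = comm.Get_size()
procID = comm.Get_rank()

results = []
asset_ids = list(range(100))            # placeholder ids; the real scripts iterate selected assets
for count, asset_id in enumerate(asset_ids):
    if (count % numP) == procID:
        results.append({'id': asset_id})  # stand-in for the per-asset AIM creation/dump

if procID != 0:
    with open(f'tmp_{procID}.json', 'w', encoding='utf-8') as f:
        json.dump(results, f, indent=0)
    comm.Barrier()
else:
    if numP > 1:
        comm.Barrier()                    # wait for the other ranks to finish writing
        for i in range(1, numP):
            with open(f'tmp_{i}.json', encoding='utf-8') as f:
                results += json.load(f)
    with open('assets.json', 'w', encoding='utf-8') as f:
        json.dump(results, f, indent=2)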
diff --git a/modules/createAIM/JSON_to_AIM/JSON_to_AIM_transport.py b/modules/createAIM/JSON_to_AIM/JSON_to_AIM_transport.py
index c906ee9ab..35281c681 100644
--- a/modules/createAIM/JSON_to_AIM/JSON_to_AIM_transport.py
+++ b/modules/createAIM/JSON_to_AIM/JSON_to_AIM_transport.py
@@ -1,5 +1,4 @@
-# -*- coding: utf-8 -*-
-#
+# # noqa: INP001, D100
# Copyright (c) 2019 The Regents of the University of California
# Copyright (c) 2019 Leland Stanford Junior University
#
@@ -41,170 +40,214 @@
# Stevan Gavrilovic
# Jinyan Zhao
-import argparse, sys, os
+import argparse
+import importlib
import json
+import os
+import sys
+import warnings
+
+import geopandas as gpd
+import momepy
import numpy as np
import pandas as pd
-import importlib
import shapely
-import geopandas as gpd
-import momepy
-import warnings
-# Remove the nodes with 2 neibours
+
+
+# Remove the nodes with 2 neighbors
# https://stackoverflow.com/questions/56380053/combine-edges-when-node-degree-is-n-in-networkx
# Needs parallel
-def remove2neibourEdges(nodesID_to_remove, nodes_to_remove, edges, graph):
+def remove2neibourEdges(nodesID_to_remove, nodes_to_remove, edges, graph): # noqa: N802, N803, D103
# For each of those nodes
- removedID_list = [] # nodes with two neighbors. Removed from graph
- skippedID_list = [] # nodes involved in loops. Skipped removing.
- error_list = [] #nodes run into error. Left the node in the graph as is.
+ removedID_list = [] # nodes with two neighbors. Removed from graph # noqa: N806
+ skippedID_list = [] # nodes involved in loops. Skipped removing. # noqa: N806
+    error_list = []  # nodes that ran into an error; left in the graph as is.
for i in range(len(nodesID_to_remove)):
nodeid = nodesID_to_remove[i]
node = nodes_to_remove[i]
- edge1 = edges[edges["node_end"] == nodeid]
- edge2 = edges[edges["node_start"] == nodeid]
- if (edge1.shape[0]==1 and edge2.shape[0]==1 and
- edge1["node_start"].values[0]!= edge2["node_end"].values[0]):
- pass # Do things after continue
- elif(edge1.shape[0]==0 and edge2.shape[0]==2):
- ns = edges.loc[edge2.index[0],"node_start"]
- ne = edges.loc[edge2.index[0],"node_end"]
- edges.loc[edge2.index[0],"node_start"] = ne
- edges.loc[edge2.index[0],"node_end"] = ns
+ edge1 = edges[edges['node_end'] == nodeid]
+ edge2 = edges[edges['node_start'] == nodeid]
+ if (
+ edge1.shape[0] == 1
+ and edge2.shape[0] == 1
+ and edge1['node_start'].values[0] != edge2['node_end'].values[0] # noqa: PD011
+ ):
+ pass # Do things after continue
+ elif edge1.shape[0] == 0 and edge2.shape[0] == 2: # noqa: PLR2004
+ ns = edges.loc[edge2.index[0], 'node_start']
+ ne = edges.loc[edge2.index[0], 'node_end']
+ edges.loc[edge2.index[0], 'node_start'] = ne
+ edges.loc[edge2.index[0], 'node_end'] = ns
# edges.loc[edge2.index[0],"geometry"] = shapely.LineString(list(edges.loc[edge2.index[0],"geometry"].coords)[::-1])
- edges.loc[edge2.index[0],"geometry"] = edges.loc[edge2.index[0],"geometry"].reverse()
- edge1 = edges[edges["node_end"] == nodeid]
- edge2 = edges[edges["node_start"] == nodeid]
- elif(edge1.shape[0]==2 and edge2.shape[0]==0):
- ns = edges.loc[edge1.index[1],"node_start"]
- ne = edges.loc[edge1.index[1],"node_end"]
- edges.loc[edge1.index[1],"node_start"] = ne
- edges.loc[edge1.index[1],"node_end"] = ns
+ edges.loc[edge2.index[0], 'geometry'] = edges.loc[
+ edge2.index[0], 'geometry'
+ ].reverse()
+ edge1 = edges[edges['node_end'] == nodeid]
+ edge2 = edges[edges['node_start'] == nodeid]
+ elif edge1.shape[0] == 2 and edge2.shape[0] == 0: # noqa: PLR2004
+ ns = edges.loc[edge1.index[1], 'node_start']
+ ne = edges.loc[edge1.index[1], 'node_end']
+ edges.loc[edge1.index[1], 'node_start'] = ne
+ edges.loc[edge1.index[1], 'node_end'] = ns
# edges.loc[edge1.index[1],"geometry"] = shapely.LineString(list(edges.loc[edge1.index[1],"geometry"].coords)[::-1])
- edges.loc[edge1.index[1],"geometry"] = edges.loc[edge1.index[1],"geometry"].reverse()
- edge1 = edges[edges["node_end"] == nodeid]
- edge2 = edges[edges["node_start"] == nodeid]
+ edges.loc[edge1.index[1], 'geometry'] = edges.loc[
+ edge1.index[1], 'geometry'
+ ].reverse()
+ edge1 = edges[edges['node_end'] == nodeid]
+ edge2 = edges[edges['node_start'] == nodeid]
else:
skippedID_list.append(nodeid)
continue
try:
removedID_list.append(nodeid)
- newLineCoords = list(edge1["geometry"].values[0].coords)+list(edge2["geometry"].values[0].coords[1:])
+ newLineCoords = ( # noqa: N806
+ list(
+ edge1['geometry'].values[0].coords # noqa: PD011
+ )
+ + list(
+ edge2['geometry'].values[0].coords[1:] # noqa: PD011
+ )
+ )
# newLineCoords.append(edge2["geometry"].values[0].coords[1:])
- edges.loc[edge1.index,"geometry"] = shapely.LineString(newLineCoords)
- edges.loc[edge1.index, "node_end"] = edge2["node_end"].values[0]
- edges.drop(edge2.index, axis = 0, inplace=True)
- newEdge = list(graph.neighbors(node))
+ edges.loc[edge1.index, 'geometry'] = shapely.LineString(newLineCoords)
+ edges.loc[edge1.index, 'node_end'] = edge2['node_end'].values[0] # noqa: PD011
+ edges.drop(edge2.index, axis=0, inplace=True) # noqa: PD002
+ newEdge = list(graph.neighbors(node)) # noqa: N806
graph.add_edge(newEdge[0], newEdge[1])
# And delete the node
graph.remove_node(node)
- except:
+ except: # noqa: E722
error_list.append(nodeid)
return edges
+
+
# Break down long roads according to delta
-def breakDownLongEdges(edges, delta, roadDF, nodesDF, tolerance = 10e-3):
- dropedEdges = []
- newEdges = []
+def breakDownLongEdges(edges, delta, roadDF, nodesDF, tolerance=10e-3): # noqa: N802, N803, D103
+ dropedEdges = [] # noqa: N806
+ newEdges = [] # noqa: N806
crs = edges.crs
edges_dict = edges.reset_index()
nodes_dict = nodesDF.reset_index()
edges_dict = edges.to_dict(orient='records')
nodes_dict = nodesDF.to_dict(orient='records')
- roadDF["IDbase"] = roadDF["ID"].apply(lambda x: x.split('_')[0])
- num_segExistingMap = roadDF.groupby("IDbase").count()["ID"].to_dict()
+ roadDF['IDbase'] = roadDF['ID'].apply(lambda x: x.split('_')[0])
+ num_segExistingMap = roadDF.groupby('IDbase').count()['ID'].to_dict() # noqa: N806
for row_ind in range(len(edges_dict)):
- LS = edges_dict[row_ind]["geometry"]
- num_seg = int(np.ceil(LS.length/delta))
+ LS = edges_dict[row_ind]['geometry'] # noqa: N806
+ num_seg = int(np.ceil(LS.length / delta))
if num_seg == 1:
continue
- distances = np.linspace(0, LS.length, num_seg+1)
- points = shapely.MultiPoint([LS.interpolate(distance) for distance in distances[:-1]] + [LS.boundary.geoms[1]])
- LS = shapely.ops.snap(LS, points, tolerance)
- splittedLS = shapely.ops.split(LS,points).geoms
- currentEdge = edges_dict[row_ind].copy()
- num_segExisting = num_segExistingMap[currentEdge["ID"].split('_')[0]]
- newNodes = []
- currentNodesNum = len(nodes_dict)-1 # nodeID starts with 0
+ distances = np.linspace(0, LS.length, num_seg + 1)
+ points = shapely.MultiPoint(
+ [LS.interpolate(distance) for distance in distances[:-1]]
+ + [LS.boundary.geoms[1]]
+ )
+ LS = shapely.ops.snap(LS, points, tolerance) # noqa: N806
+ splittedLS = shapely.ops.split(LS, points).geoms # noqa: N806
+ currentEdge = edges_dict[row_ind].copy() # noqa: N806
+ num_segExisting = num_segExistingMap[currentEdge['ID'].split('_')[0]] # noqa: N806
+ newNodes = [] # noqa: N806
+ currentNodesNum = len(nodes_dict) - 1 # nodeID starts with 0 # noqa: N806
for pt in points.geoms:
- newNode_dict = {"nodeID":currentNodesNum,"oldNodeID":np.nan, "geometry":pt}
- currentNodesNum += 1
+ newNode_dict = { # noqa: N806
+ 'nodeID': currentNodesNum,
+ 'oldNodeID': np.nan,
+ 'geometry': pt,
+ }
+ currentNodesNum += 1 # noqa: N806
newNodes.append(newNode_dict)
- newNodes = newNodes[1:-1] #The first and last points already exists in the nodes DF. delete them
- nodes_dict = nodes_dict + newNodes
- for sLS_ind, sLS in enumerate(splittedLS):
+ newNodes = newNodes[ # noqa: N806
+ 1:-1
+        ]  # The first and last points already exist in the nodes DF; delete them
+ nodes_dict = nodes_dict + newNodes # noqa: PLR6104
+ for sLS_ind, sLS in enumerate(splittedLS): # noqa: N806
# create new edge
- if sLS_ind ==0:
- newID = currentEdge["ID"]
+ if sLS_ind == 0:
+ newID = currentEdge['ID'] # noqa: N806
else:
- newID = currentEdge["ID"].split("_")[0]+"_"+str(num_segExisting+1)
- num_segExisting +=1
- num_segExistingMap[currentEdge["ID"].split('_')[0]] = num_segExistingMap[currentEdge["ID"].split('_')[0]]+1
- newGeom = sLS
- if sLS_ind ==0:
- newEdge_ns = currentEdge["node_start"]
- newEdge_ne = newNodes[sLS_ind]["nodeID"]
- elif sLS_ind < len(splittedLS)-1:
- newEdge_ns = newNodes[sLS_ind-1]["nodeID"]
- newEdge_ne = newNodes[sLS_ind]["nodeID"]
+ newID = ( # noqa: N806
+ currentEdge['ID'].split('_')[0] + '_' + str(num_segExisting + 1)
+ )
+ num_segExisting += 1 # noqa: N806
+ num_segExistingMap[currentEdge['ID'].split('_')[0]] = ( # noqa: PLR6104
+ num_segExistingMap[currentEdge['ID'].split('_')[0]] + 1
+ )
+ newGeom = sLS # noqa: N806
+ if sLS_ind == 0:
+ newEdge_ns = currentEdge['node_start'] # noqa: N806
+ newEdge_ne = newNodes[sLS_ind]['nodeID'] # noqa: N806
+ elif sLS_ind < len(splittedLS) - 1:
+ newEdge_ns = newNodes[sLS_ind - 1]['nodeID'] # noqa: N806
+ newEdge_ne = newNodes[sLS_ind]['nodeID'] # noqa: N806
else:
- newEdge_ns = newNodes[sLS_ind-1]["nodeID"]
- newEdge_ne = currentEdge["node_end"]
- newEdge = currentEdge.copy()
- newEdge.update({"ID":newID,
- "roadType":currentEdge["roadType"],
- "geometry":newGeom,
- "node_start":newEdge_ns,
- "node_end":newEdge_ne,
- "maxMPH":currentEdge["maxMPH"],
- "lanes":currentEdge["lanes"]})
+ newEdge_ns = newNodes[sLS_ind - 1]['nodeID'] # noqa: N806
+ newEdge_ne = currentEdge['node_end'] # noqa: N806
+ newEdge = currentEdge.copy() # noqa: N806
+ newEdge.update(
+ {
+ 'ID': newID,
+ 'roadType': currentEdge['roadType'],
+ 'geometry': newGeom,
+ 'node_start': newEdge_ns,
+ 'node_end': newEdge_ne,
+ 'maxMPH': currentEdge['maxMPH'],
+ 'lanes': currentEdge['lanes'],
+ }
+ )
newEdges.append(newEdge)
dropedEdges.append(row_ind)
edges = edges.drop(dropedEdges)
- if len(newEdges)>0:
- newEdges = gpd.GeoDataFrame(newEdges, crs=crs)
+ if len(newEdges) > 0:
+ newEdges = gpd.GeoDataFrame(newEdges, crs=crs) # noqa: N806
edges = pd.concat([edges, newEdges], ignore_index=True)
edges = edges.reset_index(drop=True)
- nodesDF = gpd.GeoDataFrame(nodes_dict, crs = crs)
+ nodesDF = gpd.GeoDataFrame(nodes_dict, crs=crs) # noqa: N806
return edges, nodesDF
-def create_asset_files(output_file, asset_source_file, bridge_filter,
- tunnel_filter, road_filter, doParallel, roadSegLength):
+def create_asset_files( # noqa: C901, D103, PLR0914, PLR0915
+ output_file,
+ asset_source_file,
+ bridge_filter,
+ tunnel_filter,
+ road_filter,
+ doParallel, # noqa: N803
+ roadSegLength, # noqa: N803
+):
# these imports are here to save time when the app is called without
# the -getRV flag
-
# check if running parallel
- numP = 1
- procID = 0
- runParallel = False
+ numP = 1 # noqa: N806
+ procID = 0 # noqa: N806
+ runParallel = False # noqa: N806
- if doParallel == "True":
- mpi_spec = importlib.util.find_spec("mpi4py")
+ if doParallel == 'True':
+ mpi_spec = importlib.util.find_spec('mpi4py')
found = mpi_spec is not None
if found:
- import mpi4py
- from mpi4py import MPI
- runParallel = True
+ from mpi4py import MPI # noqa: PLC0415
+
+ runParallel = True # noqa: N806
comm = MPI.COMM_WORLD
- numP = comm.Get_size()
- procID = comm.Get_rank();
- if numP < 2:
- doParallel = "False"
- runParallel = False
- numP = 1
- procID = 0
+ numP = comm.Get_size() # noqa: N806
+ procID = comm.Get_rank() # noqa: N806
+ if numP < 2: # noqa: PLR2004
+ doParallel = 'False' # noqa: N806
+ runParallel = False # noqa: N806
+ numP = 1 # noqa: N806
+ procID = 0 # noqa: N806
# Get the out dir, may not always be in the results folder if multiple assets are used
- outDir = os.path.dirname(output_file)
-
+ outDir = os.path.dirname(output_file) # noqa: PTH120, N806
+
# check if a filter is provided for bridges
if bridge_filter is not None:
bridges_requested = []
for assets in bridge_filter.split(','):
- if "-" in assets:
- asset_low, asset_high = assets.split("-")
- bridges_requested += list(range(int(asset_low), int(asset_high)+1))
+ if '-' in assets:
+ asset_low, asset_high = assets.split('-')
+ bridges_requested += list(range(int(asset_low), int(asset_high) + 1))
else:
bridges_requested.append(int(assets))
bridges_requested = np.array(bridges_requested)
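A side note on the recurring np.in1d to np.isin change in these hunks: the two calls are equivalent for this element-membership test, and recent NumPy releases deprecate np.in1d in favor of np.isin, so the replacement is behavior-preserving. A tiny illustration, not part of the patch:

import numpy as np

requested = np.array([1, 2, 5, 9])
available = np.array([2, 3, 5, 7])
mask = np.isin(requested, available)        # array([False, True, True, False])
selected = requested[np.where(mask)[0]]     # array([2, 5]), same result as np.in1d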
@@ -212,9 +255,9 @@ def create_asset_files(output_file, asset_source_file, bridge_filter,
if tunnel_filter is not None:
tunnels_requested = []
for assets in tunnel_filter.split(','):
- if "-" in assets:
- asset_low, asset_high = assets.split("-")
- tunnels_requested += list(range(int(asset_low), int(asset_high)+1))
+ if '-' in assets:
+ asset_low, asset_high = assets.split('-')
+ tunnels_requested += list(range(int(asset_low), int(asset_high) + 1))
else:
tunnels_requested.append(int(assets))
tunnels_requested = np.array(tunnels_requested)
@@ -222,26 +265,32 @@ def create_asset_files(output_file, asset_source_file, bridge_filter,
if road_filter is not None:
roads_requested = []
for assets in road_filter.split(','):
- if "-" in assets:
- asset_low, asset_high = assets.split("-")
- roads_requested += list(range(int(asset_low), int(asset_high)+1))
+ if '-' in assets:
+ asset_low, asset_high = assets.split('-')
+ roads_requested += list(range(int(asset_low), int(asset_high) + 1))
else:
roads_requested.append(int(assets))
roads_requested = np.array(roads_requested)
-
+
# load the JSON file with the asset information
- with open(asset_source_file, "r", encoding="utf-8") as sourceFile:
+ with open(asset_source_file, encoding='utf-8') as sourceFile: # noqa: PTH123, N806
assets_dict = json.load(sourceFile)
-
- bridges_array = assets_dict.get("hwy_bridges",None)
- tunnels_array = assets_dict.get("hwy_tunnels",None)
- roads_array = assets_dict.get("roadways", None)
- nodes_dict= assets_dict.get("nodes", None)
+
+ bridges_array = assets_dict.get('hwy_bridges', None)
+ tunnels_array = assets_dict.get('hwy_tunnels', None)
+ roads_array = assets_dict.get('roadways', None)
+ nodes_dict = assets_dict.get('nodes', None)
if nodes_dict is None:
- print("JSON_to_AIM_tranportation ERROR: A key of 'nodes' is not found in the asset file: " + asset_source_file)
+ print( # noqa: T201
+ "JSON_to_AIM_transportation ERROR: The 'nodes' key is not found in the asset file: "
+ + asset_source_file
+ )
return
if tunnels_array is None and bridges_array is None and roads_array is None:
- print("JSON_to_AIM_tranportation ERROR: None of 'hwy_bridges', 'hwy_tunnels', nor 'roadways' is not found in the asset file: " + asset_source_file)
+ print( # noqa: T201
+ "JSON_to_AIM_transportation ERROR: None of 'hwy_bridges', 'hwy_tunnels', or 'roadways' was found in the asset file: "
+ + asset_source_file
+ )
return
assets_array = []
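# A hedged sketch of the asset source file layout this function appears to
# expect, inferred from the keys read above ('crs', 'nodes', 'hwy_bridges',
# 'hwy_tunnels', 'roadways'); the field values are illustrative only.
example_asset_source = {
    'crs': 'epsg:4326',
    'nodes': {'1': {'lat': 37.87, 'lon': -122.27}},
    'hwy_bridges': [{'ID': 0, 'location': 1}],  # 'location' refers to a node id
    'hwy_tunnels': [],
    'roadways': [
        {'ID': 0, 'start_node': 1, 'end_node': 1,
         'roadType': 'residential', 'lanes': 2, 'maxMPH': 25},
    ],
}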
@@ -250,271 +299,351 @@ def create_asset_files(output_file, asset_source_file, bridge_filter,
if bridge_filter is not None:
assets_available = np.arange(len(bridges_array))
bridges_to_run = bridges_requested[
- np.where(np.in1d(bridges_requested, assets_available))[0]]
+ np.where(np.isin(bridges_requested, assets_available))[0]
+ ]
for i in bridges_to_run:
- selected_bridges.append(bridges_array[i])
+ selected_bridges.append(bridges_array[i]) # noqa: PERF401
else:
selected_bridges = bridges_array
- bridges_to_run = list(range(0, len(bridges_array)))
+ bridges_to_run = list(range(len(bridges_array)))
# if there is a filter, then pull out only the required tunnels
selected_tunnels = []
if tunnel_filter is not None:
assets_available = np.arange(len(tunnels_array))
tunnels_to_run = tunnels_requested[
- np.where(np.in1d(tunnels_requested, assets_available))[0]]
+ np.where(np.isin(tunnels_requested, assets_available))[0]
+ ]
for i in tunnels_to_run:
- selected_tunnels.append(tunnels_array[i])
+ selected_tunnels.append(tunnels_array[i]) # noqa: PERF401
else:
selected_tunnels = tunnels_array
- tunnels_to_run = list(range(0, len(tunnels_array)))
+ tunnels_to_run = list(range(len(tunnels_array)))
# if there is a filter, then pull out only the required roads
selected_roads = []
if road_filter is not None:
assets_available = np.arange(len(roads_array))
roads_to_run = roads_requested[
- np.where(np.in1d(roads_requested, assets_available))[0]]
+ np.where(np.isin(roads_requested, assets_available))[0]
+ ]
for i in roads_to_run:
- selected_roads.append(roads_array[i])
+ selected_roads.append(roads_array[i]) # noqa: PERF401
else:
selected_roads = roads_array
- roads_to_run = list(range(0, len(roads_array)))
+ roads_to_run = list(range(len(roads_array)))
# Reconstruct road network
- datacrs = assets_dict.get("crs", None)
+ datacrs = assets_dict.get('crs', None)
if datacrs is None:
- print("JSON_to_AIM_tranportation WARNING: 'crs' is not found in the asset file: " + asset_source_file)
- print("The CRS epsg:4326 is used by default")
- datacrs = "epsg:4326"
-
- if len(selected_roads)>0:
- roadDF = pd.DataFrame.from_dict(selected_roads)
- LineStringList = []
+ print( # noqa: T201
+ "JSON_to_AIM_transportation WARNING: 'crs' is not found in the asset file: "
+ + asset_source_file
+ )
+ print('The CRS epsg:4326 is used by default') # noqa: T201
+ datacrs = 'epsg:4326'
+
+ if len(selected_roads) > 0:
+ roadDF = pd.DataFrame.from_dict(selected_roads) # noqa: N806
+ LineStringList = [] # noqa: N806
for ind in roadDF.index:
- start_node = nodes_dict[str(roadDF.loc[ind, "start_node"])]
- end_node = nodes_dict[str(roadDF.loc[ind, "end_node"])]
- LineStringList.append(shapely.geometry.LineString([(start_node["lon"], start_node["lat"]), (end_node["lon"], end_node["lat"])]))
- roadDF["geometry"] = LineStringList
- roadDF = roadDF[["ID","roadType","lanes","maxMPH","geometry"]]
- roadGDF = gpd.GeoDataFrame(roadDF, geometry="geometry", crs=datacrs)
- graph = momepy.gdf_to_nx(roadGDF.to_crs("epsg:6500"), approach='primal')
- with warnings.catch_warnings(): #Suppress the warning of disconnected components in the graph
- warnings.simplefilter("ignore")
- nodes, edges, sw = momepy.nx_to_gdf(graph, points=True, lines=True,
- spatial_weights=True)
+ start_node = nodes_dict[str(roadDF.loc[ind, 'start_node'])]
+ end_node = nodes_dict[str(roadDF.loc[ind, 'end_node'])]
+ LineStringList.append(
+ shapely.geometry.LineString(
+ [
+ (start_node['lon'], start_node['lat']),
+ (end_node['lon'], end_node['lat']),
+ ]
+ )
+ )
+ roadDF['geometry'] = LineStringList
+ roadDF = roadDF[['ID', 'roadType', 'lanes', 'maxMPH', 'geometry']] # noqa: N806
+ roadGDF = gpd.GeoDataFrame(roadDF, geometry='geometry', crs=datacrs) # noqa: N806
+ graph = momepy.gdf_to_nx(roadGDF.to_crs('epsg:6500'), approach='primal')
+ with warnings.catch_warnings(): # Suppress the warning of disconnected components in the graph
+ warnings.simplefilter('ignore')
+ nodes, edges, sw = momepy.nx_to_gdf( # noqa: F841
+ graph, points=True, lines=True, spatial_weights=True
+ )
# Oneway or twoway is not considered in D&L, remove duplicated edges
- edges = edges[edges.duplicated(['node_start', 'node_end'], keep="first")==False]
- edges = edges.reset_index(drop=True).drop("mm_len",axis=1)
- ### Some edges has start_node as the last point in the geometry and end_node as the first point, check and reorder
+ edges = edges[
+ edges.duplicated(['node_start', 'node_end'], keep='first') == False # noqa: E712
+ ]
+ edges = edges.reset_index(drop=True).drop('mm_len', axis=1)
+ # Some edges have start_node as the last point in the geometry and end_node as the first point; check and reorder them
for ind in edges.index:
- start = nodes.loc[edges.loc[ind, "node_start"],"geometry"]
- end = nodes.loc[edges.loc[ind, "node_end"],"geometry"]
- first = shapely.geometry.Point(edges.loc[ind,"geometry"].coords[0])
- last = shapely.geometry.Point(edges.loc[ind,"geometry"].coords[-1])
- #check if first and last are the same
- if (start == first and end == last):
+ start = nodes.loc[edges.loc[ind, 'node_start'], 'geometry']
+ end = nodes.loc[edges.loc[ind, 'node_end'], 'geometry']
+ first = shapely.geometry.Point(edges.loc[ind, 'geometry'].coords[0])
+ last = shapely.geometry.Point(edges.loc[ind, 'geometry'].coords[-1])
+ # check if first and last are the same
+ if start == first and end == last:
continue
- elif (start == last and end == first):
- newStartID = edges.loc[ind, "node_end"]
- newEndID = edges.loc[ind, "node_start"]
- edges.loc[ind,"node_start"] = newStartID
- edges.loc[ind,"node_end"] = newEndID
+ elif start == last and end == first: # noqa: RET507
+ newStartID = edges.loc[ind, 'node_end'] # noqa: N806
+ newEndID = edges.loc[ind, 'node_start'] # noqa: N806
+ edges.loc[ind, 'node_start'] = newStartID
+ edges.loc[ind, 'node_end'] = newEndID
else:
- print(ind, "th row of edges has wrong start/first, end/last pairs, likely a bug of momepy.gdf_to_nx function")
- nodesID_to_remove = [i for i, n in enumerate(graph.nodes) if len(list(graph.neighbors(n))) == 2]
- nodes_to_remove = [n for i, n in enumerate(graph.nodes) if len(list(graph.neighbors(n))) == 2]
-
+ print( # noqa: T201
+ ind,
+ 'th row of edges has mismatched start/first, end/last pairs, likely a bug in the momepy.gdf_to_nx function',
+ )
+ nodesID_to_remove = [ # noqa: N806
+ i
+ for i, n in enumerate(graph.nodes)
+ if len(list(graph.neighbors(n))) == 2 # noqa: PLR2004
+ ]
+ nodes_to_remove = [
+ n
+ for i, n in enumerate(graph.nodes)
+ if len(list(graph.neighbors(n))) == 2 # noqa: PLR2004
+ ]
+
edges = remove2neibourEdges(nodesID_to_remove, nodes_to_remove, edges, graph)
- remainingNodesOldID = list(set(edges["node_start"].values.tolist() + edges["node_end"].values.tolist()))
- nodes = nodes.loc[remainingNodesOldID,:].sort_index()
- nodes = nodes.reset_index(drop=True).reset_index().rename(columns={"index":"nodeID", "nodeID":"oldNodeID"})
- edges = edges.merge(nodes[["nodeID", "oldNodeID"]], left_on="node_start",
- right_on = "oldNodeID", how="left").drop(["node_start", "oldNodeID"], axis=1).rename(columns = {"nodeID":"node_start"})
- edges = edges.merge(nodes[["nodeID", "oldNodeID"]], left_on="node_end",
- right_on = "oldNodeID", how="left").drop(["node_end", "oldNodeID"], axis=1).rename(columns = {"nodeID":"node_end"})
+ remainingNodesOldID = list( # noqa: N806
+ set(
+ edges['node_start'].values.tolist() # noqa: PD011
+ + edges['node_end'].values.tolist() # noqa: PD011
+ )
+ )
+ nodes = nodes.loc[remainingNodesOldID, :].sort_index()
+ nodes = (
+ nodes.reset_index(drop=True)
+ .reset_index()
+ .rename(columns={'index': 'nodeID', 'nodeID': 'oldNodeID'})
+ )
+ edges = (
+ edges.merge(
+ nodes[['nodeID', 'oldNodeID']],
+ left_on='node_start',
+ right_on='oldNodeID',
+ how='left',
+ )
+ .drop(['node_start', 'oldNodeID'], axis=1)
+ .rename(columns={'nodeID': 'node_start'})
+ )
+ edges = (
+ edges.merge(
+ nodes[['nodeID', 'oldNodeID']],
+ left_on='node_end',
+ right_on='oldNodeID',
+ how='left',
+ )
+ .drop(['node_end', 'oldNodeID'], axis=1)
+ .rename(columns={'nodeID': 'node_end'})
+ )
edges, nodes = breakDownLongEdges(edges, roadSegLength, roadDF, nodes)
- locationGS = gpd.GeoSeries(edges["geometry"].apply(lambda x: x.centroid),crs = edges.crs).to_crs(datacrs)
- edges = edges.to_crs(datacrs).rename(columns = {"node_start":"start_node","node_end":"end_node"})
- edges["location_lon"] = locationGS.apply(lambda x:x.x)
- edges["location_lat"] = locationGS.apply(lambda x:x.y)
-
- edges = edges.reset_index().rename(columns={"index":"AIM_id"})
- edges["AIM_id"] = edges["AIM_id"].apply(lambda x:"r"+str(x))
- edges.to_file(os.path.join(outDir,"roadNetworkEdgesSelected.geojson"), driver = "GeoJSON")
-
- nodesNeeded = list(set(edges["start_node"].values.tolist() + edges["end_node"].values.tolist()))
- nodes = nodes.loc[nodesNeeded,:]
- nodes = nodes.to_crs(datacrs)[["nodeID","geometry"]]
- nodes.to_file(os.path.join(outDir,"roadNetworkNodesSelected.geojson"), driver = "GeoJSON")
+ locationGS = gpd.GeoSeries( # noqa: N806
+ edges['geometry'].apply(lambda x: x.centroid), crs=edges.crs
+ ).to_crs(datacrs)
+ edges = edges.to_crs(datacrs).rename(
+ columns={'node_start': 'start_node', 'node_end': 'end_node'}
+ )
+ edges['location_lon'] = locationGS.apply(lambda x: x.x)
+ edges['location_lat'] = locationGS.apply(lambda x: x.y)
+
+ edges = edges.reset_index().rename(columns={'index': 'AIM_id'})
+ edges['AIM_id'] = edges['AIM_id'].apply(lambda x: 'r' + str(x))
+ edges.to_file(
+ os.path.join(outDir, 'roadNetworkEdgesSelected.geojson'), # noqa: PTH118
+ driver='GeoJSON',
+ )
+
+ nodesNeeded = list( # noqa: N806
+ set(
+ edges['start_node'].values.tolist() # noqa: PD011
+ + edges['end_node'].values.tolist() # noqa: PD011
+ )
+ )
+ nodes = nodes.loc[nodesNeeded, :]
+ nodes = nodes.to_crs(datacrs)[['nodeID', 'geometry']]
+ nodes.to_file(
+ os.path.join(outDir, 'roadNetworkNodesSelected.geojson'), # noqa: PTH118
+ driver='GeoJSON',
+ )
else:
edges = pd.DataFrame.from_dict({})
count = 0
ind = 0
for asset in selected_bridges:
- asset_id = "b" + str(bridges_to_run[ind])
+ asset_id = 'b' + str(bridges_to_run[ind])
ind += 1
- if runParallel == False or (count % numP) == procID:
-
+ if runParallel == False or (count % numP) == procID: # noqa: E712
# initialize the AIM file
- locationNodeID = str(asset["location"])
- AIM_i = {
- "RandomVariables": [],
- "GeneralInformation": dict(
- AIM_id = asset_id,
- location = {
- 'latitude': nodes_dict[locationNodeID]["lat"],
- 'longitude': nodes_dict[locationNodeID]["lon"]
- }
- )
+ locationNodeID = str(asset['location']) # noqa: N806
+ AIM_i = { # noqa: N806
+ 'RandomVariables': [],
+ 'GeneralInformation': dict( # noqa: C408
+ AIM_id=asset_id,
+ location={
+ 'latitude': nodes_dict[locationNodeID]['lat'],
+ 'longitude': nodes_dict[locationNodeID]['lon'],
+ },
+ ),
}
- asset.pop("location")
+ asset.pop('location')
# save every label as-is
- AIM_i["GeneralInformation"].update(asset)
- AIM_i["GeneralInformation"].update({"locationNode":locationNodeID})
- AIM_i["GeneralInformation"].update({"assetSubtype":"hwy_bridge"})
- AIM_file_name = "{}-AIM.json".format(asset_id)
-
- AIM_file_name = os.path.join(outDir,AIM_file_name)
-
- with open(AIM_file_name, 'w', encoding="utf-8") as f:
+ AIM_i['GeneralInformation'].update(asset)
+ AIM_i['GeneralInformation'].update({'locationNode': locationNodeID})
+ AIM_i['GeneralInformation'].update({'assetSubtype': 'hwy_bridge'})
+ AIM_file_name = f'{asset_id}-AIM.json' # noqa: N806
+
+ AIM_file_name = os.path.join(outDir, AIM_file_name) # noqa: PTH118, N806
+
+ with open(AIM_file_name, 'w', encoding='utf-8') as f: # noqa: PTH123
json.dump(AIM_i, f, indent=2)
- assets_array.append(dict(id=str(asset_id), file=AIM_file_name))
+ assets_array.append(dict(id=str(asset_id), file=AIM_file_name)) # noqa: C408
+
+ count = count + 1 # noqa: PLR6104
- count = count + 1
-
ind = 0
for asset in selected_tunnels:
- asset_id = "t" + str(tunnels_to_run[ind])
+ asset_id = 't' + str(tunnels_to_run[ind])
ind += 1
- if runParallel == False or (count % numP) == procID:
-
+ if runParallel == False or (count % numP) == procID: # noqa: E712
# initialize the AIM file
- locationNodeID = str(asset["location"])
- AIM_i = {
- "RandomVariables": [],
- "GeneralInformation": dict(
- AIM_id = asset_id,
- location = {
- 'latitude': nodes_dict[locationNodeID]["lat"],
- 'longitude': nodes_dict[locationNodeID]["lon"]
- }
- )
+ locationNodeID = str(asset['location']) # noqa: N806
+ AIM_i = { # noqa: N806
+ 'RandomVariables': [],
+ 'GeneralInformation': dict( # noqa: C408
+ AIM_id=asset_id,
+ location={
+ 'latitude': nodes_dict[locationNodeID]['lat'],
+ 'longitude': nodes_dict[locationNodeID]['lon'],
+ },
+ ),
}
- asset.pop("location")
+ asset.pop('location')
# save every label as-is
- AIM_i["GeneralInformation"].update(asset)
- AIM_i["GeneralInformation"].update({"locationNode":locationNodeID})
- AIM_i["GeneralInformation"].update({"assetSubtype":"hwy_tunnel"})
- AIM_file_name = "{}-AIM.json".format(asset_id)
-
- AIM_file_name = os.path.join(outDir,AIM_file_name)
-
- with open(AIM_file_name, 'w', encoding="utf-8") as f:
+ AIM_i['GeneralInformation'].update(asset)
+ AIM_i['GeneralInformation'].update({'locationNode': locationNodeID})
+ AIM_i['GeneralInformation'].update({'assetSubtype': 'hwy_tunnel'})
+ AIM_file_name = f'{asset_id}-AIM.json' # noqa: N806
+
+ AIM_file_name = os.path.join(outDir, AIM_file_name) # noqa: PTH118, N806
+
+ with open(AIM_file_name, 'w', encoding='utf-8') as f: # noqa: PTH123
json.dump(AIM_i, f, indent=2)
- assets_array.append(dict(id=str(asset_id), file=AIM_file_name))
+ assets_array.append(dict(id=str(asset_id), file=AIM_file_name)) # noqa: C408
+
+ count = count + 1 # noqa: PLR6104
- count = count + 1
-
ind = 0
for row_ind in edges.index:
- asset_id = "r" + str(row_ind)
+ asset_id = 'r' + str(row_ind)
ind += 1
- if runParallel == False or (count % numP) == procID:
-
+ if runParallel == False or (count % numP) == procID: # noqa: E712
# initialize the AIM file
- AIM_i = {
- "RandomVariables": [],
- "GeneralInformation": dict(
- AIM_id = asset_id,
- location = {
- 'latitude': edges.loc[row_ind,"location_lat"],
- 'longitude': edges.loc[row_ind,"location_lon"]
- }
- )
+ AIM_i = { # noqa: N806
+ 'RandomVariables': [],
+ 'GeneralInformation': dict( # noqa: C408
+ AIM_id=asset_id,
+ location={
+ 'latitude': edges.loc[row_ind, 'location_lat'],
+ 'longitude': edges.loc[row_ind, 'location_lon'],
+ },
+ ),
}
- AIM_i["GeneralInformation"].update(edges.loc[row_ind,:].drop(["geometry","location_lat","location_lon"]).to_dict())
- geom = {"type":"LineString","coordinates":[[pt[0], pt[1]] for pt in list(edges.loc[row_ind,"geometry"].coords)]}
- AIM_i["GeneralInformation"].update({"geometry":str(geom)})
- AIM_i["GeneralInformation"].update({"assetSubtype":"roadway"})
- AIM_file_name = "{}-AIM.json".format(asset_id)
-
- AIM_file_name = os.path.join(outDir,AIM_file_name)
-
- with open(AIM_file_name, 'w', encoding="utf-8") as f:
+ AIM_i['GeneralInformation'].update(
+ edges.loc[row_ind, :]
+ .drop(['geometry', 'location_lat', 'location_lon'])
+ .to_dict()
+ )
+ geom = {
+ 'type': 'LineString',
+ 'coordinates': [
+ [pt[0], pt[1]]
+ for pt in list(edges.loc[row_ind, 'geometry'].coords)
+ ],
+ }
+ AIM_i['GeneralInformation'].update({'geometry': str(geom)})
+ AIM_i['GeneralInformation'].update({'assetSubtype': 'roadway'})
+ AIM_file_name = f'{asset_id}-AIM.json' # noqa: N806
+
+ AIM_file_name = os.path.join(outDir, AIM_file_name) # noqa: PTH118, N806
+
+ with open(AIM_file_name, 'w', encoding='utf-8') as f: # noqa: PTH123
json.dump(AIM_i, f, indent=2)
- assets_array.append(dict(id=str(asset_id), file=AIM_file_name))
+ assets_array.append(dict(id=str(asset_id), file=AIM_file_name)) # noqa: C408
- count = count + 1
+ count = count + 1 # noqa: PLR6104
if procID != 0:
-
# if not P0, write data to output file with procID in name and barrier
- output_file = os.path.join(outDir,f'tmp_{procID}.json')
+ output_file = os.path.join(outDir, f'tmp_{procID}.json') # noqa: PTH118
- with open(output_file, 'w', encoding="utf-8") as f:
+ with open(output_file, 'w', encoding='utf-8') as f: # noqa: PTH123
json.dump(assets_array, f, indent=0)
-
- comm.Barrier()
-
- else:
- if runParallel == True:
+ comm.Barrier()
+ else:
+ if runParallel == True: # noqa: E712
# if parallel & P0, barrier so that all files written above, then loop over other processor files: open, load data and append
- comm.Barrier()
+ comm.Barrier()
for i in range(1, numP):
- fileToAppend = os.path.join(outDir,f'tmp_{i}.json')
- with open(fileToAppend, 'r', encoding="utf-8") as data_file:
+ fileToAppend = os.path.join(outDir, f'tmp_{i}.json') # noqa: PTH118, N806
+ with open(fileToAppend, encoding='utf-8') as data_file: # noqa: FURB101, PTH123
json_data = data_file.read()
- assetsToAppend = json.loads(json_data)
+ assetsToAppend = json.loads(json_data) # noqa: N806
assets_array += assetsToAppend
- with open(output_file, 'w', encoding="utf-8") as f:
+ with open(output_file, 'w', encoding='utf-8') as f: # noqa: PTH123
json.dump(assets_array, f, indent=2)
-
if __name__ == '__main__':
-
parser = argparse.ArgumentParser()
- parser.add_argument('--assetFile',
- help = "Path to the file that will contain a list of asset ids and "
- "corresponding AIM filenames")
- parser.add_argument('--assetSourceFile',
- help = "Path to the JSON file with the transportation asset inventory")
- parser.add_argument('--bridgesFilter',
- help = "Filter applied to select a subset of bridges from the "
- "inventory",
- default=None)
- parser.add_argument('--tunnelsFilter',
- help = "Filter applied to select a subset of assets from the "
- "inventory",
- default=None)
- parser.add_argument('--roadsFilter',
- help = "Filter applied to select a subset of assets from the "
- "inventory",
- default=None)
- parser.add_argument('--roadSegLength',
- help = "Maximum length (m) of road segments in the created AIM "
- "files",
+ parser.add_argument(
+ '--assetFile',
+ help='Path to the file that will contain a list of asset ids and '
+ 'corresponding AIM filenames',
+ )
+ parser.add_argument(
+ '--assetSourceFile',
+ help='Path to the JSON file with the transportation asset inventory',
+ )
+ parser.add_argument(
+ '--bridgesFilter',
+ help='Filter applied to select a subset of bridges from the ' 'inventory',
+ default=None,
+ )
+ parser.add_argument(
+ '--tunnelsFilter',
+ help='Filter applied to select a subset of assets from the ' 'inventory',
+ default=None,
+ )
+ parser.add_argument(
+ '--roadsFilter',
+ help='Filter applied to select a subset of assets from the ' 'inventory',
+ default=None,
+ )
+ parser.add_argument(
+ '--roadSegLength',
+ help='Maximum length (m) of road segments in the created AIM ' 'files',
type=float,
- default=100)
- parser.add_argument('--doParallel', default="False")
- parser.add_argument("-n", "--numP", default='8')
- parser.add_argument("-m", "--mpiExec", default='mpiexec')
- parser.add_argument('--getRV',
- help = "Identifies the preparational stage of the workflow. This app "
- "is only used in that stage, so it does not do anything if "
- "called without this flag.",
+ default=100,
+ )
+ parser.add_argument('--doParallel', default='False')
+ parser.add_argument('-n', '--numP', default='8')
+ parser.add_argument('-m', '--mpiExec', default='mpiexec')
+ parser.add_argument(
+ '--getRV',
+ help='Identifies the preparation stage of the workflow. This app '
+ 'is only used in that stage, so it does not do anything if '
+ 'called without this flag.',
default=False,
- nargs='?', const=True)
+ nargs='?',
+ const=True,
+ )
# parser.add_argument('--saveFullNetwork',
# help = "Save the full network into edges and nodes.",
# default=False,
@@ -523,7 +652,16 @@ def create_asset_files(output_file, asset_source_file, bridge_filter,
args = parser.parse_args()
if args.getRV:
- sys.exit(create_asset_files(args.assetFile, args.assetSourceFile, args.bridgesFilter,
- args.tunnelsFilter, args.roadsFilter, args.doParallel, args.roadSegLength))
+ sys.exit(
+ create_asset_files(
+ args.assetFile,
+ args.assetSourceFile,
+ args.bridgesFilter,
+ args.tunnelsFilter,
+ args.roadsFilter,
+ args.doParallel,
+ args.roadSegLength,
+ )
+ )
else:
- pass # not used
+ pass # not used
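# A minimal sketch of the round-robin work split used above when doParallel is
# 'True': each MPI rank handles the assets whose running count satisfies
# count % numP == procID, writes them to tmp_<procID>.json, and rank 0 merges
# the per-rank files after the barrier. Names here are illustrative.
def assigned_rank(count, num_procs):
    # Rank that processes the asset with this running count
    return count % num_procs


assert [assigned_rank(c, 3) for c in range(6)] == [0, 1, 2, 0, 1, 2]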
diff --git a/modules/createEDP/simpleEDP/simpleEDP.py b/modules/createEDP/simpleEDP/simpleEDP.py
index f817da735..66931bdf3 100644
--- a/modules/createEDP/simpleEDP/simpleEDP.py
+++ b/modules/createEDP/simpleEDP/simpleEDP.py
@@ -1,54 +1,67 @@
-from __future__ import division, print_function
-import os, sys
+import sys # noqa: CPY001, D100, INP001
+
if sys.version.startswith('2'):
- range=xrange
- string_types = basestring
+ range = xrange # noqa: A001, F821
+ string_types = basestring # noqa: F821
else:
string_types = str
-import argparse, posixpath, ntpath, json
+import argparse
+import json
+
-def write_RV(AIM_input_path, EDP_input_path, EDP_type):
-
+def write_RV(AIM_input_path, EDP_input_path, EDP_type): # noqa: N802, N803, D103
# load the AIM file
- with open(AIM_input_path, 'r', encoding='utf-8') as f:
- AIM_in = json.load(f)
+ with open(AIM_input_path, encoding='utf-8') as f: # noqa: PTH123
+ AIM_in = json.load(f) # noqa: N806
- EDP_list = []
- if "EDP" in AIM_in.keys():
- for edp in AIM_in["EDP"]:
- EDP_list.append({
- "type": edp["type"],
- "cline": edp.get("cline", "1"),
- "floor": edp.get("floor", "1"),
- "dofs": edp.get("dofs", [1,]),
- "scalar_data": [],
- })
+ EDP_list = [] # noqa: N806
+ if 'EDP' in AIM_in.keys(): # noqa: SIM118
+ for edp in AIM_in['EDP']:
+ EDP_list.append( # noqa: PERF401
+ {
+ 'type': edp['type'],
+ 'cline': edp.get('cline', '1'),
+ 'floor': edp.get('floor', '1'),
+ 'dofs': edp.get(
+ 'dofs',
+ [
+ 1,
+ ],
+ ),
+ 'scalar_data': [],
+ }
+ )
else:
- EDP_list.append({
- "type": EDP_type,
- "cline": "1",
- "floor": "1",
- "dofs": [1,],
- "scalar_data": [],
- })
+ EDP_list.append(
+ {
+ 'type': EDP_type,
+ 'cline': '1',
+ 'floor': '1',
+ 'dofs': [
+ 1,
+ ],
+ 'scalar_data': [],
+ }
+ )
- EDP_json = {
- "RandomVariables": [],
- "total_number_edp": len(EDP_list),
- "EngineeringDemandParameters": [{
- "responses": EDP_list
- },]
+ EDP_json = { # noqa: N806
+ 'RandomVariables': [],
+ 'total_number_edp': len(EDP_list),
+ 'EngineeringDemandParameters': [
+ {'responses': EDP_list},
+ ],
}
- with open(EDP_input_path, 'w') as f:
+ with open(EDP_input_path, 'w') as f: # noqa: PLW1514, PTH123
json.dump(EDP_json, f, indent=2)
-def create_EDP(AIM_input_path, EDP_input_path, EDP_type):
- pass
-if __name__ == '__main__':
+def create_EDP(AIM_input_path, EDP_input_path, EDP_type): # noqa: ARG001, N802, N803, D103
+ pass
+
+if __name__ == '__main__':
parser = argparse.ArgumentParser()
parser.add_argument('--filenameAIM', default=None)
parser.add_argument('--filenameSAM', default=None)
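# A hedged sketch of the EDP file that write_RV() above produces when the AIM
# file has no 'EDP' entry; the EDP type ('PFA') is illustrative and simply
# echoes the EDP_type argument.
example_edp_json = {
    'RandomVariables': [],
    'total_number_edp': 1,
    'EngineeringDemandParameters': [
        {
            'responses': [
                {'type': 'PFA', 'cline': '1', 'floor': '1',
                 'dofs': [1], 'scalar_data': []},
            ],
        },
    ],
}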
diff --git a/modules/createEDP/standardEDP/StandardEDP.cpp b/modules/createEDP/standardEDP/StandardEDP.cpp
index 6fd686cdb..c7b71dfdd 100644
--- a/modules/createEDP/standardEDP/StandardEDP.cpp
+++ b/modules/createEDP/standardEDP/StandardEDP.cpp
@@ -95,7 +95,7 @@ int main(int argc, char **argv)
//
// create a json_array of ints indicating what dof the event patterns apply to
- // -- neeeded in EPD
+ // -- needed in EPD
int numDOF = 0;
json_t *theDOFs = json_array();
diff --git a/modules/createEDP/standardEarthquakeEDP/StandardEarthquakeEDP.cpp b/modules/createEDP/standardEarthquakeEDP/StandardEarthquakeEDP.cpp
index 6f9f6d69b..92874c931 100644
--- a/modules/createEDP/standardEarthquakeEDP/StandardEarthquakeEDP.cpp
+++ b/modules/createEDP/standardEarthquakeEDP/StandardEarthquakeEDP.cpp
@@ -140,7 +140,7 @@ int main(int argc, char **argv)
//
// create a json_array of ints indicating what dof the event patterns apply to
- // -- neeeded in EPD
+ // -- needed in EPD
int numDOF = 0;
json_t *theDOFs = json_array();
diff --git a/modules/createEDP/standardHydroEDP/StandardHydroEDP.cpp b/modules/createEDP/standardHydroEDP/StandardHydroEDP.cpp
index 197baf3d1..82ffac457 100644
--- a/modules/createEDP/standardHydroEDP/StandardHydroEDP.cpp
+++ b/modules/createEDP/standardHydroEDP/StandardHydroEDP.cpp
@@ -114,7 +114,7 @@ int main(int argc, char **argv)
//
// create a json_array of ints indicating what dof the event patterns apply to
- // -- neeeded in EPD
+ // -- needed in EPD
int numDOF = 0;
json_t *theDOFs = json_array();
diff --git a/modules/createEDP/standardStormSurgeEDP/StandardStormSurgeEDP.cpp b/modules/createEDP/standardStormSurgeEDP/StandardStormSurgeEDP.cpp
index f25a7baa6..9ab07ed46 100644
--- a/modules/createEDP/standardStormSurgeEDP/StandardStormSurgeEDP.cpp
+++ b/modules/createEDP/standardStormSurgeEDP/StandardStormSurgeEDP.cpp
@@ -114,7 +114,7 @@ int main(int argc, char **argv)
//
// create a json_array of ints indicating what dof the event patterns apply to
- // -- neeeded in EPD
+ // -- needed in EPD
int numDOF = 0;
json_t *theDOFs = json_array();
diff --git a/modules/createEDP/standardTsunamiEDP/StandardTsunamiEDP.cpp b/modules/createEDP/standardTsunamiEDP/StandardTsunamiEDP.cpp
index 699b50c5f..7b8d95b77 100644
--- a/modules/createEDP/standardTsunamiEDP/StandardTsunamiEDP.cpp
+++ b/modules/createEDP/standardTsunamiEDP/StandardTsunamiEDP.cpp
@@ -114,7 +114,7 @@ int main(int argc, char **argv)
//
// create a json_array of ints indicating what dof the event patterns apply to
- // -- neeeded in EPD
+ // -- needed in EPD
int numDOF = 0;
json_t *theDOFs = json_array();
diff --git a/modules/createEDP/standardWindEDP/StandardWindEDP.cpp b/modules/createEDP/standardWindEDP/StandardWindEDP.cpp
index 94c2b89e3..0c91fec62 100644
--- a/modules/createEDP/standardWindEDP/StandardWindEDP.cpp
+++ b/modules/createEDP/standardWindEDP/StandardWindEDP.cpp
@@ -114,7 +114,7 @@ int main(int argc, char **argv)
//
// create a json_array of ints indicating what dof the event patterns apply to
- // -- neeeded in EPD
+ // -- needed in EPD
int numDOF = 0;
json_t *theDOFs = json_array();
diff --git a/modules/createEDP/surrogateEDP/surrogateEDP.py b/modules/createEDP/surrogateEDP/surrogateEDP.py
index 5943b7e27..108ddda8c 100644
--- a/modules/createEDP/surrogateEDP/surrogateEDP.py
+++ b/modules/createEDP/surrogateEDP/surrogateEDP.py
@@ -1,48 +1,56 @@
-from __future__ import division, print_function
-import os, sys
+import os # noqa: CPY001, D100, INP001
+import sys
+
if sys.version.startswith('2'):
- range=xrange
- string_types = basestring
+ range = xrange # noqa: A001, F821
+ string_types = basestring # noqa: F821
else:
string_types = str
-import argparse, posixpath, ntpath, json
+import argparse
+import json
+
-def write_RV(AIM_input_path, EDP_input_path, EDP_type):
-
+def write_RV(AIM_input_path, EDP_input_path, EDP_type): # noqa: ARG001, N802, N803, D103
# load the AIM file
- with open(AIM_input_path, 'r', encoding='utf-8') as f:
- root_AIM = json.load(f)
+ with open(AIM_input_path, encoding='utf-8') as f: # noqa: PTH123
+ root_AIM = json.load(f) # noqa: N806
#
# Is this the correct application
#
- if root_AIM["Applications"]["Modeling"]["Application"] != "SurrogateGPBuildingModel":
- with open("../workflow.err","w") as f:
- f.write("Do not select [None] in the EDP tab. [None] is used only when using pre-trained surrogate, i.e. when [Surrogate] is selected in the SIM Tab.")
- exit(-1)
+ if (
+ root_AIM['Applications']['Modeling']['Application']
+ != 'SurrogateGPBuildingModel'
+ ):
+ with open('../workflow.err', 'w') as f: # noqa: FURB103, PLW1514, PTH123
+ f.write(
+ 'Do not select [None] in the EDP tab. [None] is used only when using pre-trained surrogate, i.e. when [Surrogate] is selected in the SIM Tab.'
+ )
+ exit(-1) # noqa: PLR1722
#
# Get EDP info from surrogate model file
#
- print("General Information tab is ignored")
- root_SAM = root_AIM['Applications']['Modeling']
+ print('General Information tab is ignored') # noqa: T201
+ root_SAM = root_AIM['Applications']['Modeling'] # noqa: N806
- surrogate_path = os.path.join(root_SAM['ApplicationData']['MS_Path'], root_SAM['ApplicationData']['mainScript'])
- print(surrogate_path)
+ surrogate_path = os.path.join( # noqa: PTH118
+ root_SAM['ApplicationData']['MS_Path'],
+ root_SAM['ApplicationData']['mainScript'],
+ )
+ print(surrogate_path) # noqa: T201
- with open(surrogate_path, 'r', encoding='utf-8') as f:
+ with open(surrogate_path, encoding='utf-8') as f: # noqa: PTH123
surrogate_model = json.load(f)
- root_EDP = surrogate_model['EDP']
-
-
+ root_EDP = surrogate_model['EDP'] # noqa: N806
# if it is surrogate,
# Save Load EDP.json from standard surrogate models and write it to EDP
- '''
+ """
EDP_list = []
if "EDP" in AIM_in.keys():
for edp in AIM_in["EDP"]:
@@ -69,15 +77,16 @@ def write_RV(AIM_input_path, EDP_input_path, EDP_type):
"responses": EDP_list
},]
}
- '''
- with open(EDP_input_path, 'w', encoding='utf-8') as f:
+ """ # noqa: W291
+ with open(EDP_input_path, 'w', encoding='utf-8') as f: # noqa: PTH123
json.dump(root_EDP, f, indent=2)
-def create_EDP(AIM_input_path, EDP_input_path, EDP_type):
- pass
-if __name__ == '__main__':
+def create_EDP(AIM_input_path, EDP_input_path, EDP_type): # noqa: ARG001, N802, N803, D103
+ pass
+
+if __name__ == '__main__':
parser = argparse.ArgumentParser()
parser.add_argument('--filenameAIM', default=None)
parser.add_argument('--filenameSAM', default=None)
diff --git a/modules/createEDP/userEDP_R/UserDefinedEDP.py b/modules/createEDP/userEDP_R/UserDefinedEDP.py
index aefc58041..160bedaf8 100644
--- a/modules/createEDP/userEDP_R/UserDefinedEDP.py
+++ b/modules/createEDP/userEDP_R/UserDefinedEDP.py
@@ -1,5 +1,4 @@
-# -*- coding: utf-8 -*-
-#
+# # noqa: INP001, D100
# Copyright (c) 2018 Leland Stanford Junior University
# Copyright (c) 2018 The Regents of the University of California
#
@@ -38,79 +37,80 @@
# Adam Zsarnóczay
#
+import argparse
+import json
import sys
-import argparse, json
-def write_RV(AIM_file, EVENT_file, EDP_file, EDP_specs):
+def write_RV(AIM_file, EVENT_file, EDP_file, EDP_specs): # noqa: N802, N803, D103
# We do this to provide an option for different behavior under setup,
# even though it is unlikely to have random variables for EDPs.
write_EDP(AIM_file, EVENT_file, EDP_file, EDP_specs)
-def write_EDP(AIM_file, EVENT_file, EDP_file, EDP_specs):
- with open(AIM_file, 'r') as f:
+def write_EDP(AIM_file, EVENT_file, EDP_file, EDP_specs): # noqa: N802, N803, D103
+ with open(AIM_file) as f: # noqa: PLW1514, PTH123
bim_file = json.load(f)
- with open(EVENT_file, 'r') as f:
- event_file = json.load(f)
+ with open(EVENT_file) as f: # noqa: PLW1514, PTH123
+ event_file = json.load(f) # noqa: F841
stories = bim_file['GeneralInformation']['NumberOfStories']
- with open(EDP_specs, 'r') as f:
+ with open(EDP_specs) as f: # noqa: PLW1514, PTH123
edp_specs = json.load(f)
- EDP_locs = edp_specs['locations']
- EDP_types = edp_specs['EDP_types']
+ EDP_locs = edp_specs['locations'] # noqa: N806
+ EDP_types = edp_specs['EDP_types'] # noqa: N806
- EDP_list = []
- total_EDP_num = 0
+ EDP_list = [] # noqa: N806
+ total_EDP_num = 0 # noqa: N806
for edp_name, edp_data in EDP_types.items():
for loc_id, loc_data in edp_data.items():
- for story in range(stories+1):
-
-
+ for story in range(stories + 1):
if edp_name == 'PID':
if story > 0:
- EDP_list.append({
- 'type' : edp_name,
- 'id' : int(loc_id) + story,
- 'cline' : loc_id,
- 'floor1' : story-1,
- 'floor2' : story,
- 'node' : [EDP_locs[loc_id][s]
- for s in [story-1, story]],
- 'dofs' : loc_data,
- 'scalar_data': []
- })
- total_EDP_num += len(loc_data)
+ EDP_list.append(
+ {
+ 'type': edp_name,
+ 'id': int(loc_id) + story,
+ 'cline': loc_id,
+ 'floor1': story - 1,
+ 'floor2': story,
+ 'node': [
+ EDP_locs[loc_id][s] for s in [story - 1, story]
+ ],
+ 'dofs': loc_data,
+ 'scalar_data': [],
+ }
+ )
+ total_EDP_num += len(loc_data) # noqa: N806
else:
- EDP_list.append({
- 'type' : edp_name,
- 'id' : int(loc_id) + story,
- 'cline' : loc_id,
- 'floor' : story,
- 'node' : EDP_locs[loc_id][story],
- 'dofs' : loc_data,
- 'scalar_data': []
- })
- total_EDP_num += len(loc_data)
+ EDP_list.append(
+ {
+ 'type': edp_name,
+ 'id': int(loc_id) + story,
+ 'cline': loc_id,
+ 'floor': story,
+ 'node': EDP_locs[loc_id][story],
+ 'dofs': loc_data,
+ 'scalar_data': [],
+ }
+ )
+ total_EDP_num += len(loc_data) # noqa: N806
edp_file = {
- "RandomVariables": [],
- "total_number_edp": total_EDP_num,
- "EngineeringDemandParameters": [{
- 'name': '...',
- 'responses': EDP_list
- }]
+ 'RandomVariables': [],
+ 'total_number_edp': total_EDP_num,
+ 'EngineeringDemandParameters': [{'name': '...', 'responses': EDP_list}],
}
- with open(EDP_file, 'w') as f:
+ with open(EDP_file, 'w') as f: # noqa: PLW1514, PTH123
json.dump(edp_file, f, indent=2)
-if __name__ == '__main__':
+if __name__ == '__main__':
parser = argparse.ArgumentParser()
parser.add_argument('--filenameAIM')
parser.add_argument('--filenameEVENT')
@@ -122,8 +122,14 @@ def write_EDP(AIM_file, EVENT_file, EDP_file, EDP_specs):
args = parser.parse_args()
if args.getRV:
- sys.exit(write_RV(args.filenameAIM, args.filenameEVENT,
- args.filenameEDP, args.EDPspecs))
+ sys.exit(
+ write_RV(
+ args.filenameAIM, args.filenameEVENT, args.filenameEDP, args.EDPspecs
+ )
+ )
else:
- sys.exit(write_EDP(args.filenameAIM, args.filenameEVENT,
- args.filenameEDP, args.EDPspecs))
+ sys.exit(
+ write_EDP(
+ args.filenameAIM, args.filenameEVENT, args.filenameEDP, args.EDPspecs
+ )
+ )
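# A hedged sketch of the EDP_specs file consumed by write_EDP() above, inferred
# from the keys it reads: 'locations' maps a cline id to one node per floor
# (floors 0..NumberOfStories), and 'EDP_types' maps an EDP name to the dofs
# recorded at each cline. All ids and values here are illustrative.
example_edp_specs = {
    'locations': {'1': [11, 12, 13]},
    'EDP_types': {
        'PID': {'1': [1, 2]},
        'PFA': {'1': [1, 2]},
    },
}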
diff --git a/modules/createEVENT/ASCE7_WindSpeed/ASCE7_WindSpeed.py b/modules/createEVENT/ASCE7_WindSpeed/ASCE7_WindSpeed.py
index 395f6537a..d8d888d8a 100644
--- a/modules/createEVENT/ASCE7_WindSpeed/ASCE7_WindSpeed.py
+++ b/modules/createEVENT/ASCE7_WindSpeed/ASCE7_WindSpeed.py
@@ -1,88 +1,86 @@
-from __future__ import division, print_function
-import os, sys
+import os # noqa: CPY001, D100, INP001
+import sys
+
if sys.version.startswith('2'):
- range=xrange
- string_types = basestring
+ range = xrange # noqa: A001, F821
+ string_types = basestring # noqa: F821
else:
string_types = str
-import argparse, posixpath, ntpath, json
+import argparse
+import json
+
-def write_RV(BIM_input_path, EVENT_input_path):
-
+def write_RV(BIM_input_path, EVENT_input_path): # noqa: ARG001, N802, N803, D103
# create the empty EVENT.json file
- EVENT_in = {
- "Events":[]
- }
+ EVENT_in = {'Events': []} # noqa: N806
- with open(EVENT_input_path, 'w') as f:
+ with open(EVENT_input_path, 'w') as f: # noqa: PLW1514, PTH123
json.dump(EVENT_in, f, indent=2)
- #TODO: if there are multiple events, we need to create a random variable for them
+ # TODO: if there are multiple events, we need to create a random variable for them # noqa: TD002
+
+
+def get_windspeed(BIM_input_path, EVENT_input_path, wind_database_path, severity): # noqa: N803, D103
+ sys.path.insert(0, os.getcwd()) # noqa: PTH109
-def get_windspeed(BIM_input_path, EVENT_input_path, wind_database_path, severity):
-
- sys.path.insert(0, os.getcwd())
-
# load the BIM file
- with open(BIM_input_path, 'r') as f:
- BIM_in = json.load(f)
+ with open(BIM_input_path) as f: # noqa: PLW1514, PTH123
+ BIM_in = json.load(f) # noqa: N806
# load the EVENT file
- with open(EVENT_input_path, 'r') as f:
- EVENT_in = json.load(f)
+ with open(EVENT_input_path) as f: # noqa: PLW1514, PTH123
+ EVENT_in = json.load(f) # noqa: N806
# if there is a wind database path provided
if wind_database_path is not None:
-
# then we need to load the wind data from there
- with open(wind_database_path, 'r') as f:
+ with open(wind_database_path) as f: # noqa: PLW1514, PTH123
wind_db = json.load(f)
# the location id is stored in the BIM file
- location_id = BIM_in["GeneralInformation"]["id"]
+ location_id = BIM_in['GeneralInformation']['id']
for event in wind_db:
- if event["id"] == location_id:
+ if event['id'] == location_id:
wind_speed_in = event
break
# if there is no wind database, then a single wind input file is expected
else:
-
# load the file with the wind speeds
- for event in BIM_in["Events"]:
- if ((event["EventClassification"] == "Wind" ) and
- (event["Events"][0]["type"] == "ASCE7_WindSpeed")):
-
- event_info = event["Events"][0]
- with open(event_info["fileName"], 'r') as f:
+ for event in BIM_in['Events']:
+ if (event['EventClassification'] == 'Wind') and (
+ event['Events'][0]['type'] == 'ASCE7_WindSpeed'
+ ):
+ event_info = event['Events'][0]
+ with open(event_info['fileName']) as f: # noqa: PLW1514, PTH123
wind_speed_in = json.load(f)
- event_id = wind_speed_in["id"]
+ event_id = wind_speed_in['id']
if severity is None:
- severity = event_info["severity"]
+ severity = event_info['severity']
- for wind_data in wind_speed_in["atcHazardData"]["WindData"]["datasets"]:
- if wind_data["name"] == severity:
+ for wind_data in wind_speed_in['atcHazardData']['WindData']['datasets']:
+ if wind_data['name'] == severity:
event_data = wind_data
break
event_json = {
- "type": "Wind",
- "subtype": "ASCE7_WindSpeed",
- "index": event_id,
- "peak_wind_gust_speed": event_data["data"]["value"],
- "unit": event_data["unit"]
+ 'type': 'Wind',
+ 'subtype': 'ASCE7_WindSpeed',
+ 'index': event_id,
+ 'peak_wind_gust_speed': event_data['data']['value'],
+ 'unit': event_data['unit'],
}
- EVENT_in["Events"].append(event_json)
+ EVENT_in['Events'].append(event_json)
- with open(EVENT_input_path, 'w') as f:
- json.dump(EVENT_in,f,indent=2)
+ with open(EVENT_input_path, 'w') as f: # noqa: PLW1514, PTH123
+ json.dump(EVENT_in, f, indent=2)
-if __name__ == '__main__':
+if __name__ == '__main__':
parser = argparse.ArgumentParser()
parser.add_argument('--filenameAIM')
parser.add_argument('--filenameEVENT')
@@ -94,5 +92,11 @@ def get_windspeed(BIM_input_path, EVENT_input_path, wind_database_path, severity
if args.getRV:
sys.exit(write_RV(args.filenameAIM, args.filenameEVENT))
else:
- sys.exit(get_windspeed(args.filenameAIM, args.filenameEVENT,
- args.windDatabase, args.severity))
+ sys.exit(
+ get_windspeed(
+ args.filenameAIM,
+ args.filenameEVENT,
+ args.windDatabase,
+ args.severity,
+ )
+ )
diff --git a/modules/createEVENT/CFDEvent/CFDEvent.py b/modules/createEVENT/CFDEvent/CFDEvent.py
index 74427e0b4..f3a58f36a 100644
--- a/modules/createEVENT/CFDEvent/CFDEvent.py
+++ b/modules/createEVENT/CFDEvent/CFDEvent.py
@@ -1,103 +1,88 @@
-from __future__ import print_function
-import os, sys
-import re
+import argparse # noqa: CPY001, D100, INP001
import json
-import argparse
-class FloorForces:
+
+class FloorForces: # noqa: D101
def __init__(self):
self.X = [0]
self.Y = [0]
self.Z = [0]
-def directionToDof(direction):
- """
- Converts direction to degree of freedom
- """
- directioMap = {
- "X": 1,
- "Y": 2,
- "Z": 3
- }
+
+def directionToDof(direction): # noqa: N802
+ """Converts direction to degree of freedom""" # noqa: D400, D401
+ directioMap = {'X': 1, 'Y': 2, 'Z': 3} # noqa: N806
return directioMap[direction]
-def addFloorForceToEvent(patternsArray, force, direction, floor):
- """
- Add force (one component) time series and pattern in the event file
- """
- seriesName = "WindForceSeries_" + str(floor) + direction
- patternName = "WindForcePattern_" + str(floor) + direction
+def addFloorForceToEvent(patternsArray, force, direction, floor): # noqa: ARG001, N802, N803
+ """Add force (one component) time series and pattern in the event file""" # noqa: D400
+ seriesName = 'WindForceSeries_' + str(floor) + direction # noqa: N806
+ patternName = 'WindForcePattern_' + str(floor) + direction # noqa: N806
pattern = {
- "name": patternName,
- "timeSeries": seriesName,
- "type": "WindFloorLoad",
- "floor": str(floor),
- "dof": directionToDof(direction)
+ 'name': patternName,
+ 'timeSeries': seriesName,
+ 'type': 'WindFloorLoad',
+ 'floor': str(floor),
+ 'dof': directionToDof(direction),
}
patternsArray.append(pattern)
-def writeEVENT(forces, eventFilePath):
- """
- This method writes the EVENT.json file
- """
- patternsArray = []
- windEventJson = {
- "type" : "Wind",
- "subtype": "OpenFOAM CFD Expert Event",
- "pattern": patternsArray,
- "pressure": [],
- "numSteps": len(forces[0].X),
- "units": {
- "force": "Newton",
- "length": "Meter",
- "time": "Sec"
- }
+def writeEVENT(forces, eventFilePath): # noqa: N802, N803
+ """This method writes the EVENT.json file""" # noqa: D400, D401, D404
+ patternsArray = [] # noqa: N806
+ windEventJson = { # noqa: N806
+ 'type': 'Wind',
+ 'subtype': 'OpenFOAM CFD Expert Event',
+ 'pattern': patternsArray,
+ 'pressure': [],
+ 'numSteps': len(forces[0].X),
+ 'units': {'force': 'Newton', 'length': 'Meter', 'time': 'Sec'},
}
- #Creating the event dictionary that will be used to export the EVENT json file
- eventDict = {"randomVariables":[], "Events": [windEventJson]}
+ # Creating the event dictionary that will be used to export the EVENT json file
+ eventDict = {'randomVariables': [], 'Events': [windEventJson]} # noqa: N806
- #Adding floor forces
- for floorForces in forces:
+ # Adding floor forces
+ for floorForces in forces: # noqa: N806
floor = forces.index(floorForces) + 1
- addFloorForceToEvent(patternsArray, floorForces.X, "X", floor)
- addFloorForceToEvent(patternsArray, floorForces.Y, "Y", floor)
+ addFloorForceToEvent(patternsArray, floorForces.X, 'X', floor)
+ addFloorForceToEvent(patternsArray, floorForces.Y, 'Y', floor)
- with open(eventFilePath, "w", encoding='utf-8') as eventsFile:
+ with open(eventFilePath, 'w', encoding='utf-8') as eventsFile: # noqa: PTH123, N806
json.dump(eventDict, eventsFile)
-def GetFloorsCount(BIMFilePath):
- with open(BIMFilePath,'r') as BIMFile:
- bim = json.load(BIMFile)
+def GetFloorsCount(BIMFilePath): # noqa: N802, N803, D103
+ with open(BIMFilePath) as BIMFile: # noqa: N806, PLW1514, PTH123
+ bim = json.load(BIMFile)
- return int(bim["GeneralInformation"]["stories"])
-
-if __name__ == "__main__":
+ return int(bim['GeneralInformation']['stories'])
+
+
+if __name__ == '__main__':
"""
Entry point to generate event file using CFD
"""
- #CLI parser
- parser = argparse.ArgumentParser(description="Get sample EVENT file produced by CFD")
- parser.add_argument('-b', '--filenameAIM', help="BIM File", required=True)
- parser.add_argument('-e', '--filenameEVENT', help= "Event File", required=True)
- parser.add_argument('--getRV', help= "getRV", required=False, action='store_true')
-
- #parsing arguments
+ # CLI parser
+ parser = argparse.ArgumentParser(
+ description='Get sample EVENT file produced by CFD'
+ )
+ parser.add_argument('-b', '--filenameAIM', help='BIM File', required=True)
+ parser.add_argument('-e', '--filenameEVENT', help='Event File', required=True)
+ parser.add_argument('--getRV', help='getRV', required=False, action='store_true')
+
+ # parsing arguments
arguments, unknowns = parser.parse_known_args()
- if arguments.getRV == True:
- #Read the number of floors
- floorsCount = GetFloorsCount(arguments.filenameAIM)
+ if arguments.getRV == True: # noqa: E712
+ # Read the number of floors
+ floorsCount = GetFloorsCount(arguments.filenameAIM) # noqa: N816
forces = []
- for i in range(floorsCount):
- forces.append(FloorForces())
- #write the event file
+ for i in range(floorsCount): # noqa: B007
+ forces.append(FloorForces()) # noqa: PERF401
+ # write the event file
writeEVENT(forces, arguments.filenameEVENT)
-
-
-
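# A hedged sketch of the per-floor pattern entry that addFloorForceToEvent()
# above appends to the EVENT file, shown for floor 1 in the X direction.
example_pattern = {
    'name': 'WindForcePattern_1X',
    'timeSeries': 'WindForceSeries_1X',
    'type': 'WindFloorLoad',
    'floor': '1',
    'dof': 1,  # directionToDof('X')
}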
diff --git a/modules/createEVENT/DEDM_HRP/DEDM_HRP.cpp b/modules/createEVENT/DEDM_HRP/DEDM_HRP.cpp
index 07a46a781..68ecd3b6c 100644
--- a/modules/createEVENT/DEDM_HRP/DEDM_HRP.cpp
+++ b/modules/createEVENT/DEDM_HRP/DEDM_HRP.cpp
@@ -77,7 +77,7 @@ main(int argc, char **argv) {
json_t *inputEventsArray = json_object_get(input, "Events");
if (inputEventsArray == NULL) {
- std::cerr << "FATAL ERROR - input file conatins no Events key-pair\n";
+ std::cerr << "FATAL ERROR - input file contains no Events key-pair\n";
exit(-1);
}
@@ -190,7 +190,7 @@ int addEvent(json_t *input, json_t *currentEvent, json_t *outputEvent,
// for each floor we need to modify the time step and load factor
// to reflect time step and forces .. data obtained for U=100m/s
// forces factor = windSpeed^2/100^2, time step factor = 100/windSpeed
- // and if shape == 1 (square), we need to get forces from other file and swicth as no angle > 45
+ // and if shape == 1 (square), we need to get forces from other file and switch as no angle > 45
//
json_t *dtJO = json_object_get(event, "dT");
@@ -306,7 +306,7 @@ int addEvent(json_t *input, json_t *currentEvent, json_t *outputEvent,
widthJO == NULL ||
depthJO == NULL ||
storiesJO == NULL ) {
- std::cerr << "ERROR missing Information from GeneralInformation (height, width, stories all neeed)\n";
+ std::cerr << "ERROR missing Information from GeneralInformation (height, width, stories all needed)\n";
return -1;
}
@@ -417,7 +417,7 @@ int addEvent(json_t *input, json_t *currentEvent, json_t *outputEvent,
modelHeightJO == NULL ||
modelExposureJO == NULL ||
windSpeedJO == NULL) {
- std::cerr << "ERROR missing Information from Event (modelPlan, modelHeight, exposure, windSpeed all neeed)\n";
+ std::cerr << "ERROR missing Information from Event (modelPlan, modelHeight, exposure, windSpeed all needed)\n";
return -1;
}
diff --git a/modules/createEVENT/EmptyDomainCFD/EmptyDomainCFD.py b/modules/createEVENT/EmptyDomainCFD/EmptyDomainCFD.py
index 3be25b193..1db2a3ca1 100644
--- a/modules/createEVENT/EmptyDomainCFD/EmptyDomainCFD.py
+++ b/modules/createEVENT/EmptyDomainCFD/EmptyDomainCFD.py
@@ -1,103 +1,88 @@
-from __future__ import print_function
-import os, sys
-import re
+import argparse # noqa: CPY001, D100, INP001
import json
-import argparse
-class FloorForces:
+
+class FloorForces: # noqa: D101
def __init__(self):
self.X = [0]
self.Y = [0]
self.Z = [0]
-def directionToDof(direction):
- """
- Converts direction to degree of freedom
- """
- directioMap = {
- "X": 1,
- "Y": 2,
- "Z": 3
- }
+
+def directionToDof(direction): # noqa: N802
+ """Converts direction to degree of freedom""" # noqa: D400, D401
+ directioMap = {'X': 1, 'Y': 2, 'Z': 3} # noqa: N806
return directioMap[direction]
-def addFloorForceToEvent(patternsArray, force, direction, floor):
- """
- Add force (one component) time series and pattern in the event file
- """
- seriesName = "WindForceSeries_" + str(floor) + direction
- patternName = "WindForcePattern_" + str(floor) + direction
+def addFloorForceToEvent(patternsArray, force, direction, floor): # noqa: ARG001, N802, N803
+ """Add force (one component) time series and pattern in the event file""" # noqa: D400
+ seriesName = 'WindForceSeries_' + str(floor) + direction # noqa: N806
+ patternName = 'WindForcePattern_' + str(floor) + direction # noqa: N806
pattern = {
- "name": patternName,
- "timeSeries": seriesName,
- "type": "WindFloorLoad",
- "floor": str(floor),
- "dof": directionToDof(direction)
+ 'name': patternName,
+ 'timeSeries': seriesName,
+ 'type': 'WindFloorLoad',
+ 'floor': str(floor),
+ 'dof': directionToDof(direction),
}
patternsArray.append(pattern)
-def writeEVENT(forces, eventFilePath):
- """
- This method writes the EVENT.json file
- """
- patternsArray = []
- windEventJson = {
- "type" : "Wind",
- "subtype": "IsolatedBuildingCFD",
- "pattern": patternsArray,
- "pressure": [],
- "numSteps": len(forces[0].X),
- "units": {
- "force": "Newton",
- "length": "Meter",
- "time": "Sec"
- }
+def writeEVENT(forces, eventFilePath): # noqa: N802, N803
+ """This method writes the EVENT.json file""" # noqa: D400, D401, D404
+ patternsArray = [] # noqa: N806
+ windEventJson = { # noqa: N806
+ 'type': 'Wind',
+ 'subtype': 'IsolatedBuildingCFD',
+ 'pattern': patternsArray,
+ 'pressure': [],
+ 'numSteps': len(forces[0].X),
+ 'units': {'force': 'Newton', 'length': 'Meter', 'time': 'Sec'},
}
- #Creating the event dictionary that will be used to export the EVENT json file
- eventDict = {"randomVariables":[], "Events": [windEventJson]}
+ # Creating the event dictionary that will be used to export the EVENT json file
+ eventDict = {'randomVariables': [], 'Events': [windEventJson]} # noqa: N806
- #Adding floor forces
- for floorForces in forces:
+ # Adding floor forces
+ for floorForces in forces: # noqa: N806
floor = forces.index(floorForces) + 1
- addFloorForceToEvent(patternsArray, floorForces.X, "X", floor)
- addFloorForceToEvent(patternsArray, floorForces.Y, "Y", floor)
+ addFloorForceToEvent(patternsArray, floorForces.X, 'X', floor)
+ addFloorForceToEvent(patternsArray, floorForces.Y, 'Y', floor)
- with open(eventFilePath, "w") as eventsFile:
+ with open(eventFilePath, 'w') as eventsFile: # noqa: N806, PLW1514, PTH123
json.dump(eventDict, eventsFile)
-def GetFloorsCount(BIMFilePath):
- with open(BIMFilePath,'r') as BIMFile:
- bim = json.load(BIMFile)
+def GetFloorsCount(BIMFilePath): # noqa: N802, N803, D103
+ with open(BIMFilePath) as BIMFile: # noqa: N806, PLW1514, PTH123
+ bim = json.load(BIMFile)
- return int(bim["GeneralInformation"]["stories"])
-
-if __name__ == "__main__":
+ return int(bim['GeneralInformation']['stories'])
+
+
+if __name__ == '__main__':
"""
Entry point to generate event file using CFD
"""
- #CLI parser
- parser = argparse.ArgumentParser(description="Get sample EVENT file produced by CFD")
- parser.add_argument('-b', '--filenameAIM', help="BIM File", required=True)
- parser.add_argument('-e', '--filenameEVENT', help= "Event File", required=True)
- parser.add_argument('--getRV', help= "getRV", required=False, action='store_true')
-
- #parsing arguments
+ # CLI parser
+ parser = argparse.ArgumentParser(
+ description='Get sample EVENT file produced by CFD'
+ )
+ parser.add_argument('-b', '--filenameAIM', help='BIM File', required=True)
+ parser.add_argument('-e', '--filenameEVENT', help='Event File', required=True)
+ parser.add_argument('--getRV', help='getRV', required=False, action='store_true')
+
+ # parsing arguments
arguments, unknowns = parser.parse_known_args()
- if arguments.getRV == True:
- #Read the number of floors
- floorsCount = GetFloorsCount(arguments.filenameAIM)
+ if arguments.getRV == True: # noqa: E712
+ # Read the number of floors
+ floorsCount = GetFloorsCount(arguments.filenameAIM) # noqa: N816
forces = []
- for i in range(floorsCount):
- forces.append(FloorForces())
- #write the event file
+ for i in range(floorsCount): # noqa: B007
+ forces.append(FloorForces()) # noqa: PERF401
+ # write the event file
writeEVENT(forces, arguments.filenameEVENT)
-
-
-
diff --git a/modules/createEVENT/EmptyDomainCFD/foam_file_processor.py b/modules/createEVENT/EmptyDomainCFD/foam_file_processor.py
index 5d6e7a758..202998636 100644
--- a/modules/createEVENT/EmptyDomainCFD/foam_file_processor.py
+++ b/modules/createEVENT/EmptyDomainCFD/foam_file_processor.py
@@ -1,71 +1,72 @@
-# This script contains functions for reading and writing
-# OpenFoam dictionaries and filses.
+# This script contains functions for reading and writing # noqa: CPY001, D100, INP001
+# OpenFOAM dictionaries and files.
#
-import numpy as np
import os
-def find_keyword_line(dict_lines, keyword):
-
+import numpy as np
+
+
+def find_keyword_line(dict_lines, keyword): # noqa: D103
start_line = -1
-
+
count = 0
for line in dict_lines:
- l = line.lstrip(" ")
-
+ l = line.lstrip(' ') # noqa: E741
+
if l.startswith(keyword):
start_line = count
break
-
- count += 1
+
+ count += 1 # noqa: SIM113
return start_line
-
+
+
def write_foam_field(field, file_name):
- """
- Writes a given numpy two dimensional array to OpenFOAM
- field format. It can handel the following formats:
+ """Writes a given numpy two dimensional array to OpenFOAM
+ field format. It can handle the following formats:
pointField,
vectorField,
tensorField,
symmTensorField
- """
- if os.path.exists(file_name):
- os.remove(file_name)
+ """ # noqa: D205, D400, D401
+ if os.path.exists(file_name): # noqa: PTH110
+ os.remove(file_name) # noqa: PTH107
- foam_file = open(file_name, "w+")
+ foam_file = open(file_name, 'w+') # noqa: PLW1514, PTH123, SIM115
size = np.shape(field)
- foam_file.write("{}".format(size[0]))
+ foam_file.write(f'{size[0]}')
foam_file.write('\n(')
-
+
for i in range(size[0]):
- line = "\n("
+ line = '\n('
for j in range(size[1]):
- line += " {:.6e}".format(field[i,j])
- line += ")"
+ line += f' {field[i, j]:.6e}'
+ line += ')'
foam_file.write(line)
-
- foam_file.write('\n);')
+
+ foam_file.write('\n);')
foam_file.close()
+
def write_scalar_field(field, file_name):
- """
- Writes a given one dimensional numpy array to OpenFOAM
+ """Writes a given one dimensional numpy array to OpenFOAM
scalar field format.
- """
- if os.path.exists(file_name):
- os.remove(file_name)
+ """ # noqa: D205, D401
+ if os.path.exists(file_name): # noqa: PTH110
+ os.remove(file_name) # noqa: PTH107
- foam_file = open(file_name,"w+")
+ foam_file = open(file_name, 'w+') # noqa: PLW1514, PTH123, SIM115
size = np.shape(field)
- foam_file.write("{}".format(size[0]))
+ foam_file.write(f'{size[0]}')
foam_file.write('\n(')
-
+
for i in range(size[0]):
- foam_file.write("\n {:.6e}".format(field.flatten()[i]))
-
- foam_file.write('\n);')
- foam_file.close()
\ No newline at end of file
+ foam_file.write(f'\n {field.flatten()[i]:.6e}')
+
+ foam_file.write('\n);')
+ foam_file.close()
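# A minimal usage sketch for write_foam_field() above, assuming the file name
# 'U_sample' is illustrative: a 2x3 array is written in OpenFOAM field format.
import numpy as np

sample_field = np.array([[1.0, 2.0, 3.0], [4.0, 5.0, 6.0]])
# write_foam_field(sample_field, 'U_sample') would produce:
#   2
#   (
#   ( 1.000000e+00 2.000000e+00 3.000000e+00)
#   ( 4.000000e+00 5.000000e+00 6.000000e+00)
#   );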
diff --git a/modules/createEVENT/EmptyDomainCFD/post_process_output.py b/modules/createEVENT/EmptyDomainCFD/post_process_output.py
index 49b04f4d8..f11de48cf 100644
--- a/modules/createEVENT/EmptyDomainCFD/post_process_output.py
+++ b/modules/createEVENT/EmptyDomainCFD/post_process_output.py
@@ -1,5 +1,4 @@
-# -*- coding: utf-8 -*-
-# Copyright (c) 2016-2017, The Regents of the University of California (Regents).
+# Copyright (c) 2016-2017, The Regents of the University of California (Regents). # noqa: D100, INP001
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
@@ -38,126 +37,124 @@
#
-# This script reads OpenFOAM output and plot the characteristics of the
-# approaching wind. For now, it read and plots only velocity field data and
-# pressure on predicted set of probes.
+# This script reads OpenFOAM output and plots the characteristics of the
+# approaching wind. For now, it reads and plots only velocity field data and
+# pressure on a predefined set of probes.
#
-import sys
-import os
-import subprocess
+import argparse
import json
-import stat
+import os
import shutil
from pathlib import Path
+
import numpy as np
-import matplotlib.pyplot as plt
-import matplotlib.gridspec as gridspec
-from scipy import signal
-from scipy.interpolate import interp1d
-from scipy.interpolate import UnivariateSpline
-from scipy import stats
-import pandas as pd
import plotly.graph_objects as go
from plotly.subplots import make_subplots
-import argparse
+from scipy import signal
+def readPressureProbes(fileName): # noqa: N802, N803
+ """Created on Wed May 16 14:31:42 2018
-def readPressureProbes(fileName):
- """
- Created on Wed May 16 14:31:42 2018
-
Reads pressure probe data from OpenFOAM and return the probe location, time, and the pressure
for each time step.
-
+
@author: Abiy
- """
+ """ # noqa: D400, D401
probes = []
p = []
- time = []
-
- with open(fileName, "r") as f:
+ time = []
+
+ with open(fileName) as f: # noqa: PLW1514, PTH123
for line in f:
if line.startswith('#'):
if line.startswith('# Probe'):
- line = line.replace('(','')
- line = line.replace(')','')
- line = line.split()
- probes.append([float(line[3]),float(line[4]),float(line[5])])
+ line = line.replace('(', '') # noqa: PLW2901
+ line = line.replace(')', '') # noqa: PLW2901
+ line = line.split() # noqa: PLW2901
+ probes.append([float(line[3]), float(line[4]), float(line[5])])
else:
continue
- else:
- line = line.split()
+ else:
+ line = line.split() # noqa: PLW2901
time.append(float(line[0]))
p_probe_i = np.zeros([len(probes)])
- for i in range(len(probes)):
+ for i in range(len(probes)):
p_probe_i[i] = float(line[i + 1])
p.append(p_probe_i)
-
+
probes = np.asarray(probes, dtype=np.float32)
time = np.asarray(time, dtype=np.float32)
p = np.asarray(p, dtype=np.float32)
-
+
return probes, time, p
+
def read_pressure_data(file_names):
- """
- This functions takes names of different OpenFOAM pressure measurements and connect
- them into one file removing overlaps if any. All the probes must be in the same
- location, otherwise an error might show up.
+ """This function takes the names of different OpenFOAM pressure measurement files and joins
+ them into one series, removing overlaps if any. All the probes must be in the same
+ location; otherwise an error might show up.
Parameters
----------
- *args
- List of file pashes of pressure data to be connected together.
+ file_names
+ List of file paths of pressure data to be connected together.
Returns
-------
time, pressure
Returns the pressure time and pressure data of the connected file.
- """
- no_files = len(file_names)
- connected_time = [] # Connected array of time
+
+ """ # noqa: D205, D401, D404
+ no_files = len(file_names)
+ connected_time = [] # Connected array of time
connected_p = [] # connected array of pressure.
time1 = []
- p1 = []
+ p1 = []
time2 = []
- p2 = []
- probes= []
-
- for i in range(no_files):
- probes, time2, p2 = readPressureProbes(file_names[i])
-
- if i==0:
+ p2 = []
+ probes = []
+
+ for i in range(no_files):
+ probes, time2, p2 = readPressureProbes(file_names[i])
+
+ if i == 0:
connected_time = time2
- connected_p = p2
+ connected_p = p2
else:
try:
index = np.where(time2 > time1[-1])[0][0]
- # index += 1
+ # index += 1
- except:
- # sys.exit('Fatal Error!: the pressure filese have time gap')
- index = 0 # Joint them even if they have a time gap
+ except: # noqa: E722
+ # sys.exit('Fatal Error!: the pressure files have time gap')
+ index = 0 # Join them even if they have a time gap
connected_time = np.concatenate((connected_time, time2[index:]))
connected_p = np.concatenate((connected_p, p2[index:]))
time1 = time2
- p1 = p2
+ p1 = p2 # noqa: F841
return probes, connected_time, connected_p
class PressureData:
- """
- A class that holds a pressure data and performs the following operations:
- - mean and rms pressure coefficients
- - peak pressure coefficients
- """
- def __init__(self, path, u_ref=0.0, rho=1.25, p_ref=0.0,
- start_time=None, end_time=None):
+ """A class that holds a pressure data and performs the following operations:
+ - mean and rms pressure coefficients
+ - peak pressure coefficients
+ """ # noqa: D205, D400
+
+ def __init__(
+ self,
+ path,
+ u_ref=0.0,
+ rho=1.25,
+ p_ref=0.0,
+ start_time=None,
+ end_time=None,
+ ):
self.path = path
self.u_ref = u_ref
self.p_ref = p_ref
@@ -168,375 +165,369 @@ def __init__(self, path, u_ref=0.0, rho=1.25, p_ref=0.0,
self.__set_time()
self.Nt = len(self.time)
self.T = self.time[-1]
- self.z = self.probes[:,2]
- self.y = self.probes[:,1]
- self.x = self.probes[:,0]
- self.dt = np.mean(np.diff(self.time))
+ self.z = self.probes[:, 2]
+ self.y = self.probes[:, 1]
+ self.x = self.probes[:, 0]
+ self.dt = np.mean(np.diff(self.time))
self.probe_count = np.shape(self.probes)[0]
- def __read_cfd_data (self):
- if os.path.isdir(self.path):
- print("Reading from path : %s" % (self.path))
+ def __read_cfd_data(self):
+ if os.path.isdir(self.path): # noqa: PTH112
+ print('Reading from path : %s' % (self.path)) # noqa: T201, UP031
time_names = os.listdir(self.path)
- sorted_index = np.argsort(np.float_(time_names)).tolist()
+ sorted_index = np.argsort(np.float64(time_names)).tolist()
# print(sorted_index)
# print("\tTime directories: %s" %(time_names))
- file_names = []
-
+ file_names = []
+
for i in range(len(sorted_index)):
- file_name = os.path.join(self.path, time_names[sorted_index[i]],'p')
+ file_name = os.path.join(self.path, time_names[sorted_index[i]], 'p') # noqa: PTH118
file_names.append(file_name)
-
+
# print(file_names)
self.probes, self.time, self.p = read_pressure_data(file_names)
- self.p = self.rho*np.transpose(self.p) # OpenFOAM gives p/rho
+ self.p = self.rho * np.transpose(self.p) # OpenFOAM gives p/rho
# self.p = np.transpose(self.p) # OpenFOAM gives p/rho
else:
- print("Cannot find the file path: %s" % (self.path))
-
-
- def __set_time (self):
- if(self.start_time != None):
+ print('Cannot find the file path: %s' % (self.path)) # noqa: T201, UP031
+
+ def __set_time(self):
+ if self.start_time != None: # noqa: E711
start_index = int(np.argmax(self.time > self.start_time))
self.time = self.time[start_index:]
# self.cp = self.cp[:,start_index:]
- try:
- self.p = self.p[:,start_index:]
- except:
+ try: # noqa: SIM105
+ self.p = self.p[:, start_index:]
+ except: # noqa: S110, E722
pass
-
- if(self.end_time != None):
+ if self.end_time != None: # noqa: E711
end_index = int(np.argmax(self.time > self.end_time))
self.time = self.time[:end_index]
# self.cp = self.cp[:,:end_index]
- try:
- self.p = self.p[:,:end_index]
- except:
+ try: # noqa: SIM105
+ self.p = self.p[:, :end_index]
+ except: # noqa: S110, E722
pass
-
-
+def von_karman_spectrum(f, Uav, I, L, comp=0): # noqa: N803, E741, D103
+ psd = np.zeros(len(f)) # noqa: F841
-def von_karman_spectrum(f, Uav, I, L, comp=0):
-
- psd = np.zeros(len(f))
+ if comp == 0:
+ return (
+ 4.0
+ * np.power(I * Uav, 2.0)
+ * (L / Uav)
+ / np.power(1.0 + 70.8 * np.power(f * L / Uav, 2.0), 5.0 / 6.0)
+ )
- if comp==0:
- return 4.0*np.power(I*Uav, 2.0)*(L/Uav)/np.power(1.0 + 70.8*np.power(f*L/ Uav, 2.0), 5.0 / 6.0)
+ if comp == 1 or comp == 2: # noqa: RET503, PLR1714, PLR2004
+ return (
+ 4.0
+ * np.power(I * Uav, 2.0)
+ * (L / Uav)
+ * (1.0 + 188.4 * np.power(2.0 * f * L / Uav, 2.0))
+ / np.power(1.0 + 70.8 * np.power(2.0 * f * L / Uav, 2.0), 11.0 / 6.0)
+ )
- if comp==1 or comp==2:
- return 4.0*np.power(I*Uav, 2.0)*(L/Uav)*(1.0 + 188.4*np.power(2.0*f*L/Uav, 2.0)) /np.power(1.0 + 70.8*np.power(2.0*f*L/Uav, 2.0), 11.0/6.0)
def psd(x, dt, nseg):
- """
- Calculates the power spectral density of a given signal using the welch
- method.
+ """Calculates the power spectral density of a given signal using the welch
+ method.
Parameters
----------
- x
- The time history of the signal.
+ x
+ The time history of the signal.
dt
- The time step .
+ The time step.
nseg
- The the number of segments to average the time series.
+ The number of segments to average the time series.
Returns
-------
freq, spectra
Returns the frequency and spectra of the signal
-
- """
+
+ """ # noqa: D205, D401
x_no_mean = x - np.mean(x)
- freq, spectra = signal.welch(x_no_mean, fs=1.0/dt, nperseg=len(x_no_mean)/nseg)
-
+ freq, spectra = signal.welch(
+ x_no_mean, fs=1.0 / dt, nperseg=len(x_no_mean) / nseg
+ )
+
return freq[1:], spectra[1:]
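+
+# Illustrative sketch (not part of the original patch): applying psd() to a
+# synthetic 5 Hz sine wave should place the spectral peak near 5 Hz. The
+# sampling step, record length and noise level are arbitrary demo values.
+def _example_psd_usage():
+    import numpy as np  # local import so the sketch is self-contained
+
+    dt = 0.01  # 100 Hz sampling
+    t = np.arange(0.0, 60.0, dt)  # 60 s record
+    x = np.sin(2.0 * np.pi * 5.0 * t) + 0.1 * np.random.randn(len(t))
+    freq, spectra = psd(x, dt, nseg=5)
+    return freq[np.argmax(spectra)]  # expected to be close to 5 Hz
+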
def write_open_foam_vector_field(p, file_name):
-
- """
- Writes a given vector-field (n x 3) array to OpenFOAM 'vectorField'
- format.
-
- """
- f = open(file_name,"w+")
- f.write('%d' % len(p[:,2]))
+ """Writes a given vector-field (n x 3) array to OpenFOAM 'vectorField'
+ format.
+
+ """ # noqa: D205, D401
+ f = open(file_name, 'w+') # noqa: PLW1514, PTH123, SIM115
+ f.write('%d' % len(p[:, 2]))
f.write('\n(')
- for i in range(len(p[:,2])):
- f.write('\n ({:.7e} {:.7e} {:.7e})'.format(p[i,0], p[i,1], p[i,2]))
-
- f.write('\n);')
- f.close()
+ for i in range(len(p[:, 2])):
+ f.write(f'\n ({p[i, 0]:.7e} {p[i, 1]:.7e} {p[i, 2]:.7e})')
+ f.write('\n);')
+ f.close()
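+
+# Illustrative sketch (not part of the original patch): writing a tiny (n x 3)
+# array with write_open_foam_vector_field(). The output path is a placeholder
+# used only for demonstration.
+def _example_write_vector_field():
+    import numpy as np  # local import so the sketch is self-contained
+
+    points = np.array([[0.0, 0.0, 1.0], [0.0, 0.0, 2.0]])
+    # Produces a file containing "2", then one "(x y z)" line per row.
+    write_open_foam_vector_field(points, '/tmp/demo_points')
+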
-def read_openFoam_scalar_field(file_name):
-
- """
- Reads a given vectorField OpenFOAM into numpy (n x 3) array format.
- """
+def read_openFoam_scalar_field(file_name): # noqa: N802
+ """Reads a given vectorField OpenFOAM into numpy (n x 3) array format.""" # noqa: D401
+ sField = [] # noqa: N806
- sField = []
-
- with open(file_name, "r") as f:
+ with open(file_name) as f: # noqa: PLW1514, PTH123
itrf = iter(f)
next(itrf)
for line in itrf:
- if line.startswith('(') or line.startswith(')'):
- continue
- else:
- line = line.split()
+ if line.startswith('(') or line.startswith(')'): # noqa: PIE810
+ continue
+ else: # noqa: RET507
+ line = line.split() # noqa: PLW2901
sField.append(float(line[0]))
-
- sField = np.asarray(sField, dtype=np.float32)
-
- return sField
+ sField = np.asarray(sField, dtype=np.float32) # noqa: N806
+
+ return sField # noqa: RET504
-def read_openFoam_vector_field(file_name):
-
- """
- Reads a given vectorField OpenFOAM into numpy (n x 3) array format.
- """
+def read_openFoam_vector_field(file_name): # noqa: N802
+ """Reads a given vectorField OpenFOAM into numpy (n x 3) array format.""" # noqa: D401
+ vField = [] # noqa: N806
- vField = []
-
- with open(file_name, "r") as f:
+ with open(file_name) as f: # noqa: PLW1514, PTH123
for line in f:
if line.startswith('('):
- line = line.replace('(','')
- line = line.replace(')','')
- line = line.split()
-
- if len(line) < 3:
+ line = line.replace('(', '') # noqa: PLW2901
+ line = line.replace(')', '') # noqa: PLW2901
+ line = line.split() # noqa: PLW2901
+
+ if len(line) < 3: # noqa: PLR2004
continue
-
- vField.append([float(line[0]),float(line[1]),float(line[2])])
-
- vField = np.asarray(vField, dtype=np.float32)
-
- return vField
+ vField.append([float(line[0]), float(line[1]), float(line[2])])
+ vField = np.asarray(vField, dtype=np.float32) # noqa: N806
-def read_openFoam_tensor_field(file_name):
-
- """
- Reads a given vectorField OpenFOAM into numpy (n x 3) array format.
- """
+ return vField # noqa: RET504
+
+
+def read_openFoam_tensor_field(file_name): # noqa: N802
+ """Reads a given vectorField OpenFOAM into numpy (n x 3) array format.""" # noqa: D401
+ vField = [] # noqa: N806
- vField = []
-
row_count = 9
- with open(file_name, "r") as f:
+ with open(file_name) as f: # noqa: PLW1514, PTH123
for line in f:
if line.startswith('('):
- line = line.replace('(','')
- line = line.replace(')','')
- line = line.split()
-
+ line = line.replace('(', '') # noqa: PLW2901
+ line = line.replace(')', '') # noqa: PLW2901
+ line = line.split() # noqa: PLW2901
+
if len(line) < row_count:
continue
-
+
row = np.zeros(row_count)
for i in range(row_count):
row[i] = float(line[i])
-
+
vField.append(row)
-
- vField = np.asarray(vField, dtype=np.float32)
-
- return vField
+ vField = np.asarray(vField, dtype=np.float32) # noqa: N806
-def read_openFoam_symmetric_tensor_field(file_name):
-
- """
- Reads a given vectorField OpenFOAM into numpy (n x 3) array format.
- """
+ return vField # noqa: RET504
+
+
+def read_openFoam_symmetric_tensor_field(file_name): # noqa: N802
+ """Reads a given vectorField OpenFOAM into numpy (n x 3) array format.""" # noqa: D401
+ vField = [] # noqa: N806
- vField = []
-
row_count = 6
- with open(file_name, "r") as f:
+ with open(file_name) as f: # noqa: PLW1514, PTH123
for line in f:
if line.startswith('('):
- line = line.replace('(','')
- line = line.replace(')','')
- line = line.split()
-
+ line = line.replace('(', '') # noqa: PLW2901
+ line = line.replace(')', '') # noqa: PLW2901
+ line = line.split() # noqa: PLW2901
+
if len(line) < row_count:
continue
-
+
row = np.zeros(row_count)
for i in range(row_count):
row[i] = float(line[i])
-
+
vField.append(row)
-
- vField = np.asarray(vField, dtype=np.float32)
-
- return vField
+ vField = np.asarray(vField, dtype=np.float32) # noqa: N806
+ return vField # noqa: RET504
def read_velocity_data(path):
- """
- This functions takes names of different OpenFOAM velocity measurements and connect
- them into one file removing overlaps if any. All the probes must be in the same
- location, otherwise an error might showup.
+ """This functions takes names of different OpenFOAM velocity measurements and connect
+ them into one file removing overlaps if any. All the probes must be in the same
+ location, otherwise an error might showup.
Parameters
----------
- *args
- List of file paths of velocity data to be connected together.
+ path
+ List of file paths of velocity data to be connected together.
Returns
-------
time, pressure
Returns the velocity time and velocity data of the connected file.
- """
- num_files = len(path)
- connected_time = [] # Connected array of time
- connected_U = [] # connected array of pressure.
+ """ # noqa: D205, D401, D404
+ num_files = len(path)
+ connected_time = [] # Connected array of time
+ connected_U = [] # connected array of pressure. # noqa: N806
- time1 = []
- U1 = []
+ time1 = []
+ U1 = [] # noqa: N806
time2 = []
- U2 = []
+ U2 = [] # noqa: N806
probes = []
-
- for i in range(num_files):
- probes, time2, U2 = read_velocity_probes(path[i])
- if i != 0:
+
+ for i in range(num_files):
+ probes, time2, U2 = read_velocity_probes(path[i]) # noqa: N806
+ if i != 0:
try:
index = np.where(time2 > time1[-1])[0][0]
- except:
+ except: # noqa: E722
# sys.exit('Fatal Error!: the pressure files have time gap')
- index = 0 # Join them even if they have a time gap
+ index = 0 # Join them even if they have a time gap
connected_time = np.concatenate((connected_time, time2[index:]))
- connected_U = np.concatenate((connected_U, U2[index:]))
+ connected_U = np.concatenate((connected_U, U2[index:])) # noqa: N806
else:
connected_time = time2
- connected_U = U2
+ connected_U = U2 # noqa: N806
time1 = time2
- U1 = U2
+ U1 = U2 # noqa: N806, F841
shape = np.shape(connected_U)
- U = np.zeros((shape[1], shape[2], shape[0]))
-
+ U = np.zeros((shape[1], shape[2], shape[0])) # noqa: N806
+
for i in range(shape[1]):
for j in range(shape[2]):
- U[i,j,:] = connected_U[:,i,j]
+ U[i, j, :] = connected_U[:, i, j]
return probes, connected_time, U
-def read_velocity_probes(fileName):
- """
- Created on Wed May 16 14:31:42 2018
-
- Reads velocity probe data from OpenFOAM and return the probe location, time,
+
+def read_velocity_probes(fileName): # noqa: N803
+ """Created on Wed May 16 14:31:42 2018
+
+ Reads velocity probe data from OpenFOAM and returns the probe location, time,
and the velocity vector for each time step.
- """
+ """ # noqa: D400, D401
probes = []
- U = []
- time = []
-
- with open(fileName, "r") as f:
+ U = [] # noqa: N806
+ time = []
+
+ with open(fileName) as f: # noqa: PLW1514, PTH123
for line in f:
if line.startswith('#'):
if line.startswith('# Probe'):
- line = line.replace('(','')
- line = line.replace(')','')
- line = line.split()
+ line = line.replace('(', '') # noqa: PLW2901
+ line = line.replace(')', '') # noqa: PLW2901
+ line = line.split() # noqa: PLW2901
probes.append([float(line[3]), float(line[4]), float(line[5])])
else:
continue
- else:
- line = line.replace('(','')
- line = line.replace(')','')
- line = line.split()
+ else:
+ line = line.replace('(', '') # noqa: PLW2901
+ line = line.replace(')', '') # noqa: PLW2901
+ line = line.split() # noqa: PLW2901
try:
time.append(float(line[0]))
- except:
+ except: # noqa: S112, E722
continue
- u_probe_i = np.zeros([len(probes),3])
- for i in range(len(probes)):
- u_probe_i[i,:] = [float(line[3*i + 1]), float(line[3*i + 2]), float(line[3*i + 3])]
+ u_probe_i = np.zeros([len(probes), 3])
+ for i in range(len(probes)):
+ u_probe_i[i, :] = [
+ float(line[3 * i + 1]),
+ float(line[3 * i + 2]),
+ float(line[3 * i + 3]),
+ ]
U.append(u_probe_i)
-
+
probes = np.asarray(probes, dtype=np.float32)
time = np.asarray(time, dtype=np.float32)
- U = np.asarray(U, dtype=np.float32)
+ U = np.asarray(U, dtype=np.float32) # noqa: N806
return probes, time, U
+
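+# Illustrative sketch (not part of the original patch): the probe file layout
+# read_velocity_probes() expects, written to a temporary file and parsed back.
+# The probe coordinates and velocity samples are made-up demo values.
+def _example_read_velocity_probes():
+    import tempfile  # local import so the sketch is self-contained
+
+    demo = (
+        '# Probe 0 (0 0 10)\n'
+        '#       Probe             0\n'
+        '#        Time\n'
+        '0.1 (10.2 0.1 -0.3)\n'
+        '0.2 (9.8 -0.2 0.4)\n'
+    )
+    with tempfile.NamedTemporaryFile('w', suffix='.txt', delete=False) as f:
+        f.write(demo)
+    probes, time, U = read_velocity_probes(f.name)
+    return probes.shape, U.shape  # -> (1, 3) and (2, 1, 3)
+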
def calculate_length_scale(u, uav, dt, min_corr=0.0):
-
- """
- Calculates the length scale of a velocity time history given.
-
- """
-
- u = u - np.mean(u)
-
- corr = signal.correlate(u, u, mode='full')
-
- u_std = np.std(u)
-
- corr = corr[int(len(corr)/2):]/(u_std**2*len(u))
-
- loc = np.argmax(corr < min_corr)
-
- corr = corr[:loc]
-
- L = uav*np.trapz(corr, dx=dt)
-
- return L
+ """Calculates the length scale of a velocity time history given.""" # noqa: D401
+ u = u - np.mean(u) # noqa: PLR6104
+
+ corr = signal.correlate(u, u, mode='full')
+
+ u_std = np.std(u)
+
+ corr = corr[int(len(corr) / 2) :] / (u_std**2 * len(u))
+
+ loc = np.argmax(corr < min_corr)
+
+ corr = corr[:loc]
+
+ L = uav * np.trapz(corr, dx=dt) # noqa: NPY201, N806
+
+ return L # noqa: RET504
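+
+# Illustrative sketch (not part of the original patch): estimating the length
+# scale of a synthetic first-order autoregressive signal whose integral time
+# scale is T, so the estimate should land roughly around uav * T. The values
+# of dt, T and uav are arbitrary demo numbers.
+def _example_length_scale_estimate():
+    import numpy as np  # local import so the sketch is self-contained
+
+    dt, T, uav = 0.01, 0.5, 10.0  # time step [s], time scale [s], mean speed [m/s]
+    a = np.exp(-dt / T)  # AR(1) coefficient giving an exp(-tau/T) correlation
+    rng = np.random.default_rng(0)
+    u = np.zeros(100_000)
+    for n in range(1, len(u)):
+        u[n] = a * u[n - 1] + rng.standard_normal()
+    # Expected to come out roughly around uav * T = 5 m for this signal.
+    return calculate_length_scale(u, uav, dt)
+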
-def psd(x, dt, nseg):
- """
- Calculates the power spectral density of a given signal using the welch
- method.
+
+def psd(x, dt, nseg): # noqa: F811
+ """Calculates the power spectral density of a given signal using the welch
+ method.
Parameters
----------
- x
- The time history of the signal.
+ x
+ The time history of the signal.
dt
- The time step .
+ The time step.
nseg
- The the number of segments to average the time series.
+ The number of segments to average the time series.
Returns
-------
freq, spectra
Returns the frequency and spectra of the signal
-
- """
+
+ """ # noqa: D205, D401
x_no_mean = x - np.mean(x)
- freq, spectra = signal.welch(x_no_mean, fs=1.0/dt, nperseg=len(x_no_mean)/nseg)
-
+ freq, spectra = signal.welch(
+ x_no_mean, fs=1.0 / dt, nperseg=len(x_no_mean) / nseg
+ )
+
return freq[1:], spectra[1:]
+
class VelocityData:
- """
- A class that holds a velocity data and performs the following operations:
- - mean velocity profile
- - turbulence intensity profiles
- - integral scale of turbulence profiles
- """
- def __init__(self, path,sampling_rate=400, filter_data=False, filter_freq=400,
- start_time=None, end_time=None, resample_dt = None):
+ """A class that holds a velocity data and performs the following operations:
+ - mean velocity profile
+ - turbulence intensity profiles
+ - integral scale of turbulence profiles
+ """ # noqa: D205, D400
+
+ def __init__(
+ self,
+ path,
+ sampling_rate=400,
+ filter_data=False, # noqa: FBT002
+ filter_freq=400,
+ start_time=None,
+ end_time=None,
+ resample_dt=None,
+ ):
self.path = path
self.sampling_rate = sampling_rate
self.filter_data = filter_data
@@ -545,502 +536,850 @@ def __init__(self, path,sampling_rate=400, filter_data=False, filter_freq=400,
self.end_time = end_time
self.component_count = 3
self.resample_dt = resample_dt
- self.__read_cfd_data()
+ self.__read_cfd_data()
self.__set_time()
self.Nt = len(self.time)
- self.T = self.time[-1]
- self.dt = np.mean(np.diff(self.time))
- self.f_max = 1.0/(2.0*self.dt)
+ self.T = self.time[-1]
+ self.dt = np.mean(np.diff(self.time))
+ self.f_max = 1.0 / (2.0 * self.dt)
self.probe_count = np.shape(self.probes)[0]
self.Np = self.probe_count
- self.z = self.probes[:,2]
- self.y = self.probes[:,1]
- self.x = self.probes[:,0]
+ self.z = self.probes[:, 2]
+ self.y = self.probes[:, 1]
+ self.x = self.probes[:, 0]
self.__filter_signal()
self.__calculate_all()
- def __read_cfd_data (self):
- if os.path.isdir(self.path):
- print("Reading from path : %s" % (self.path))
+ def __read_cfd_data(self):
+ if os.path.isdir(self.path): # noqa: PTH112
+ print('Reading from path : %s' % (self.path)) # noqa: T201, UP031
time_names = os.listdir(self.path)
- sorted_index = np.argsort(np.float_(time_names)).tolist()
- file_names = []
-
+ sorted_index = np.argsort(np.float64(time_names)).tolist()
+ file_names = []
+
for i in range(len(sorted_index)):
- file_name = os.path.join(self.path, time_names[sorted_index[i]], "U")
- file_names.append( file_name)
-
-
- self.probes, self.time, self.U = read_velocity_data(file_names)
-
- #Distance along the path of the profile
+ file_name = os.path.join(self.path, time_names[sorted_index[i]], 'U') # noqa: PTH118
+ file_names.append(file_name)
+
+ self.probes, self.time, self.U = read_velocity_data(file_names)
+
+ # Distance along the path of the profile
n_points = np.shape(self.probes)[0]
self.dist = np.zeros(n_points)
- for i in range(n_points-1):
- self.dist[i + 1] = self.dist[i] + np.linalg.norm(self.probes[i + 1, :] - self.probes[i, :])
-
+ for i in range(n_points - 1):
+ self.dist[i + 1] = self.dist[i] + np.linalg.norm(
+ self.probes[i + 1, :] - self.probes[i, :]
+ )
# Coefficient of variation
- cv = np.std(np.diff(self.time))/np.mean(np.diff(self.time))
-
- if cv > 1.0e-4:
+ cv = np.std(np.diff(self.time)) / np.mean(np.diff(self.time))
+
+ if cv > 1.0e-4: # noqa: PLR2004
self.__adjust_time_step()
else:
- print("Cannot find the file path: %s" % (self.path))
-
-
-
- def __adjust_time_step (self):
-
- if self.resample_dt == None:
- dt = np.mean(np.diff(self.time))
- else:
- dt = self.resample_dt
-
+ print('Cannot find the file path: %s' % (self.path)) # noqa: T201, UP031
+
+ def __adjust_time_step(self):
+ if self.resample_dt == None: # noqa: E711
+ dt = np.mean(np.diff(self.time))
+ else:
+ dt = self.resample_dt
+
time = np.arange(start=self.time[0], stop=self.time[-1], step=dt)
-
+
shape = np.shape(self.U)
-
- U = np.zeros((shape[0],shape[1],len(time)))
+
+ U = np.zeros((shape[0], shape[1], len(time))) # noqa: N806
for i in range(shape[0]):
for j in range(shape[1]):
U[i, j, :] = np.interp(time, self.time, self.U[i, j, :])
-
- self.time = time
- self.U = U
-
+ self.time = time
+ self.U = U
+
def __filter_signal(self):
if self.filter_data:
- low_pass = signal.butter(10, self.filter_freq,'lowpass', fs=self.sampling_rate, output='sos')
+ low_pass = signal.butter(
+ 10, self.filter_freq, 'lowpass', fs=self.sampling_rate, output='sos'
+ )
for i in range(self.probe_count):
for j in range(self.component_count):
- self.U[i,j,:] = signal.sosfilt(low_pass, self.U[i,j,:])
+ self.U[i, j, :] = signal.sosfilt(low_pass, self.U[i, j, :])
- def __set_time (self):
- if(self.start_time != None):
+ def __set_time(self):
+ if self.start_time != None: # noqa: E711
start_index = int(np.argmax(self.time > self.start_time))
self.time = self.time[start_index:]
- self.U = self.U[:,:,start_index:]
-
- if(self.end_time != None):
+ self.U = self.U[:, :, start_index:]
+
+ if self.end_time != None: # noqa: E711
end_index = int(np.argmax(self.time > self.end_time))
self.time = self.time[:end_index]
- self.U = self.U[:,:,:end_index]
+ self.U = self.U[:, :, :end_index]
def __calculate_all(self):
-
self.u = np.zeros((self.probe_count, self.component_count, self.Nt))
- #Calculate the mean velocity profile.
+ # Calculate the mean velocity profile.
- self.Uav = np.mean(self.U[:,0,:], axis=1)
+ self.Uav = np.mean(self.U[:, 0, :], axis=1)
+
+ # Calculate the turbulence intensity.
+ self.I = np.std(self.U, axis=2) # gets the standard deviation
+ self.Ru = np.var(self.U[:, 0, :], axis=1) # Reynolds normal stress (u component)
+ self.Rv = np.var(self.U[:, 1, :], axis=1) # Reynolds normal stress (v component)
+ self.Rw = np.var(self.U[:, 2, :], axis=1) # Reynolds normal stress (w component)
-
- #Calculate the turbulence intensity.
- self.I = np.std(self.U, axis=2) # gets the standard deviation
- self.Ru = np.var(self.U[:, 0, :], axis=1) # gets reynolds stress
- self.Rv = np.var(self.U[:, 1, :], axis=1) # gets reynolds stress
- self.Rw = np.var(self.U[:, 2, :], axis=1) # gets reynolds stress
-
for i in range(self.component_count):
- self.I[:,i] = self.I[:,i]/self.Uav
-
-
- #Calculate the length scale profiles.
+ self.I[:, i] = self.I[:, i] / self.Uav # noqa: PLR6104
+
+ # Calculate the length scale profiles.
self.L = np.zeros((self.probe_count, self.component_count))
for i in range(self.probe_count):
for j in range(self.component_count):
- self.u[i,j,:] = self.U[i,j,:] - np.mean(self.U[i,j,:])
- self.L[i,j] = calculate_length_scale(self.u[i,j,:], self.Uav[i], self.dt, 0.05)
+ self.u[i, j, :] = self.U[i, j, :] - np.mean(self.U[i, j, :])
+ self.L[i, j] = calculate_length_scale(
+ self.u[i, j, :], self.Uav[i], self.dt, 0.05
+ )
-
- #Calculate the shear stress profiles.
+ # Calculate the shear stress profiles.
self.uv_bar = np.zeros(self.Np)
self.uw_bar = np.zeros(self.Np)
-
+
for i in range(self.Np):
- self.uv_bar[i] = np.cov(self.U[i,0,:], self.U[i,1,:])[0,1]
- self.uw_bar[i] = np.cov(self.U[i,0,:], self.U[i,2,:])[0,1]
+ self.uv_bar[i] = np.cov(self.U[i, 0, :], self.U[i, 1, :])[0, 1]
+ self.uw_bar[i] = np.cov(self.U[i, 0, :], self.U[i, 2, :])[0, 1]
+
+ def get_Uav(self, z): # noqa: N802, D102
+ from scipy import interpolate # noqa: PLC0415
- def get_Uav(self, z):
- from scipy import interpolate
-
f = interpolate.interp1d(self.z, self.Uav)
-
- return f(z)
+ return f(z)
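+
+# Illustrative sketch (not part of the original patch): typical use of the
+# VelocityData class on a probe directory written by OpenFOAM, as done later
+# in plot_wind_profiles_and_spectra(). The path here is a placeholder built in
+# practice from <case>/postProcessing/<profile name>:
+#
+#     prof = VelocityData('/tmp/case/postProcessing/profile1',
+#                         start_time=None, end_time=None)
+#     prof.z, prof.Uav, prof.I, prof.L  # heights, mean speed, TI, length scales
+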
def copy_vtk_planes_and_order(input_path, output_path, field):
- """
- This code reads VTK sample plane data from OpenFOAM case directory and
- copies them into other directory with all vtks files ordered in their
- respective time sequence in one directory.
-
+ """This code reads VTK sample plane data from OpenFOAM case directory and
+ copies them into other directory with all vtks files ordered in their
+ respective time sequence in one directory.
+
input_path: path of the vtk files in the postProcessing directory
ouput_path: path to write the vtk files in order
- """
+ """ # noqa: D205, D401, D404
+ if not os.path.isdir(input_path): # noqa: PTH112
+ print(f'Cannot find the path for: {input_path}') # noqa: T201
+ return
- if not os.path.isdir(input_path):
- print("Cannot find the path for: {}".format(input_path))
- return
-
- if not os.path.isdir(output_path):
- print("Cannot find the path for: {}".format(output_path))
- return
-
-
- print("Reading from path: {}".format(input_path))
+ if not os.path.isdir(output_path): # noqa: PTH112
+ print(f'Cannot find the path for: {output_path}') # noqa: T201
+ return
+
+ print(f'Reading from path: {input_path}') # noqa: T201
time_names = os.listdir(input_path)
- times = np.float_(time_names)
+ times = np.float64(time_names)
sorted_index = np.argsort(times).tolist()
-
- n_times = len(times)
-
- print("\tNumber of time direcories: {} ".format(n_times))
- print("\tTime step: {:.4f} s".format(np.mean(np.diff(times))))
- print("\tTotal duration: {:.4f} s".format(times[sorted_index[-1]] - times[sorted_index[0]]))
-
-
+
+ n_times = len(times)
+
+ print(f'\tNumber of time directories: {n_times} ') # noqa: T201
+ print(f'\tTime step: {np.mean(np.diff(times)):.4f} s') # noqa: T201
+ print( # noqa: T201
+ f'\tTotal duration: {times[sorted_index[-1]] - times[sorted_index[0]]:.4f} s'
+ )
+
for i in range(n_times):
index = sorted_index[i]
- pathi = os.path.join(input_path, time_names[index])
+ pathi = os.path.join(input_path, time_names[index]) # noqa: PTH118
os.listdir(pathi)
-
- new_name = "{}_T{:04d}.vtk".format(field, i + 1)
+
+ new_name = f'{field}_T{i + 1:04d}.vtk'
for f in os.listdir(pathi):
- if f.endswith(".vtk"):
- new_path = os.path.join(output_path, new_name)
- old_path = os.path.join(pathi, f)
+ if f.endswith('.vtk'):
+ new_path = os.path.join(output_path, new_name) # noqa: PTH118
+ old_path = os.path.join(pathi, f) # noqa: PTH118
shutil.copyfile(old_path, new_path)
- print("Copied path: {}".format(old_path))
+ print(f'Copied path: {old_path}') # noqa: T201
+
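+# Illustrative sketch (not part of the original patch): ordering the sampled
+# VTK planes of a hypothetical case. Both directory paths are placeholders;
+# in this module they are built from the OpenFOAM case directory.
+def _example_copy_vtk_planes():
+    import os  # local import so the sketch is self-contained
+
+    in_dir = '/tmp/case/postProcessing/planeU'  # hypothetical sampled plane
+    out_dir = '/tmp/case/postProcessing/planeU_renamed'
+    os.makedirs(out_dir, exist_ok=True)
+    # Copies the plane files renamed as U_T0001.vtk, U_T0002.vtk, ... into out_dir.
+    copy_vtk_planes_and_order(in_dir, out_dir, field='U')
+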
-def plot_wind_profiles_and_spectra(case_path, output_path, prof_name):
-
- #Read JSON data
- json_path = os.path.join(case_path, "constant", "simCenter", "input", "EmptyDomainCFD.json")
- with open(json_path) as json_file:
+def plot_wind_profiles_and_spectra(case_path, output_path, prof_name): # noqa: D103
+ # Read JSON data
+ json_path = os.path.join( # noqa: PTH118
+ case_path, 'constant', 'simCenter', 'input', 'EmptyDomainCFD.json'
+ )
+ with open(json_path) as json_file: # noqa: PLW1514, PTH123
json_data = json.load(json_file)
-
+
# Returns JSON object as a dictionary
- wc_data = json_data["windCharacteristics"]
-
- ref_h = wc_data["referenceHeight"]
-
+ wc_data = json_data['windCharacteristics']
+
+ ref_h = wc_data['referenceHeight']
+
+ prof_path = os.path.join(case_path, 'postProcessing', prof_name) # noqa: PTH118
- prof_path = os.path.join(case_path, "postProcessing", prof_name)
-
prof = VelocityData(prof_path, start_time=None, end_time=None)
-
- #Create wind profile data profile z, Uav, Iu ..., Lu ...,
+ # Create wind profile data: z, Uav, Iu ..., Lu ...
prof_np = np.zeros((len(prof.z), 9))
- prof_np[:,0] = prof.z
- prof_np[:,1] = prof.Uav
- prof_np[:,2] = prof.I[:,0]
- prof_np[:,3] = prof.I[:,1]
- prof_np[:,4] = prof.I[:,2]
- prof_np[:,5] = prof.uw_bar
- prof_np[:,6] = prof.L[:,0]
- prof_np[:,7] = prof.L[:,1]
- prof_np[:,8] = prof.L[:,2]
-
-
-
- #Read the target wind profile data
- tar_path = os.path.join(case_path, "constant", "boundaryData", "inlet")
-
- tar_p = read_openFoam_vector_field(os.path.join(tar_path, "points"))
- tar_U = read_openFoam_scalar_field(os.path.join(tar_path, "U"))
- tar_R = read_openFoam_symmetric_tensor_field(os.path.join(tar_path, "R"))
- tar_L = read_openFoam_tensor_field(os.path.join(tar_path, "L"))
-
- tar_U_ref = np.interp(ref_h, tar_p[:,2], tar_U)
-
-
- tar_Iu = np.sqrt(tar_R[:, 0])/tar_U
- tar_Iv = np.sqrt(tar_R[:, 3])/tar_U
- tar_Iw = np.sqrt(tar_R[:, 5])/tar_U
+ prof_np[:, 0] = prof.z
+ prof_np[:, 1] = prof.Uav
+ prof_np[:, 2] = prof.I[:, 0]
+ prof_np[:, 3] = prof.I[:, 1]
+ prof_np[:, 4] = prof.I[:, 2]
+ prof_np[:, 5] = prof.uw_bar
+ prof_np[:, 6] = prof.L[:, 0]
+ prof_np[:, 7] = prof.L[:, 1]
+ prof_np[:, 8] = prof.L[:, 2]
+
+ # Read the target wind profile data
+ tar_path = os.path.join(case_path, 'constant', 'boundaryData', 'inlet') # noqa: PTH118
+
+ tar_p = read_openFoam_vector_field(os.path.join(tar_path, 'points')) # noqa: PTH118
+ tar_U = read_openFoam_scalar_field(os.path.join(tar_path, 'U')) # noqa: PTH118, N806
+ tar_R = read_openFoam_symmetric_tensor_field(os.path.join(tar_path, 'R')) # noqa: PTH118, N806
+ tar_L = read_openFoam_tensor_field(os.path.join(tar_path, 'L')) # noqa: PTH118, N806
+
+ tar_U_ref = np.interp(ref_h, tar_p[:, 2], tar_U) # noqa: N806, F841
+
+ tar_Iu = np.sqrt(tar_R[:, 0]) / tar_U # noqa: N806
+ tar_Iv = np.sqrt(tar_R[:, 3]) / tar_U # noqa: N806
+ tar_Iw = np.sqrt(tar_R[:, 5]) / tar_U # noqa: N806
tar_uw = tar_R[:, 2]
-
- tar_Lu = tar_L[:, 0]
- tar_Lv = tar_L[:, 3]
- tar_Lw = tar_L[:, 6]
-
- tar_I = np.zeros((3, len(tar_Iu)))
- tar_L = np.zeros((3, len(tar_Lu)))
-
- tar_I[0,:] = tar_Iu
- tar_I[1,:] = tar_Iv
- tar_I[2,:] = tar_Iw
-
- tar_L[0,:] = tar_Lu
- tar_L[1,:] = tar_Lv
- tar_L[2,:] = tar_Lw
-
-
- subplot_titles = ("Mean Velocity", "Turbulence Intensity, Iu", "Turbulence Intensity, Iv", "Turbulence Intensity, Iw",
- "Shear Stress", "Length Scale, Lu", "Length Scale, Lv", "Length Scale, Lw")
-
- fig = make_subplots(rows=2, cols=4, start_cell="top-left", subplot_titles=subplot_titles, vertical_spacing=0.15)
-
-
- fig.add_trace(go.Scatter(x=tar_U, y=tar_p[:,2], line=dict(color='black', width=3.0, dash='dot'),
- mode='lines', name='Target', ), row=1, col=1)
- fig.add_trace(go.Scatter(x=prof_np[:,1], y=prof_np[:,0], line=dict(color='firebrick', width=2.5),
- mode='lines+markers', name=prof_name, ), row=1, col=1)
-
- fig.update_xaxes(title_text="$U_{av} [m/s]$", range=[0, 1.25*np.max(prof_np[:,1])],
- showline=True, linewidth=1.5, linecolor='black',ticks='outside', row=1, col=1)
- fig.update_yaxes(title_text="$z [m]$", range=[0, 1.01*np.max(prof_np[:,0])], showline=True,
- linewidth=1.5, linecolor='black',ticks='outside', row=1, col=1)
-
+
+ tar_Lu = tar_L[:, 0] # noqa: N806
+ tar_Lv = tar_L[:, 3] # noqa: N806
+ tar_Lw = tar_L[:, 6] # noqa: N806
+
+ tar_I = np.zeros((3, len(tar_Iu))) # noqa: N806
+ tar_L = np.zeros((3, len(tar_Lu))) # noqa: N806
+
+ tar_I[0, :] = tar_Iu
+ tar_I[1, :] = tar_Iv
+ tar_I[2, :] = tar_Iw
+
+ tar_L[0, :] = tar_Lu
+ tar_L[1, :] = tar_Lv
+ tar_L[2, :] = tar_Lw
+
+ subplot_titles = (
+ 'Mean Velocity',
+ 'Turbulence Intensity, Iu',
+ 'Turbulence Intensity, Iv',
+ 'Turbulence Intensity, Iw',
+ 'Shear Stress',
+ 'Length Scale, Lu',
+ 'Length Scale, Lv',
+ 'Length Scale, Lw',
+ )
+
+ fig = make_subplots(
+ rows=2,
+ cols=4,
+ start_cell='top-left',
+ subplot_titles=subplot_titles,
+ vertical_spacing=0.15,
+ )
+
+ fig.add_trace(
+ go.Scatter(
+ x=tar_U,
+ y=tar_p[:, 2],
+ line=dict(color='black', width=3.0, dash='dot'), # noqa: C408
+ mode='lines',
+ name='Target',
+ ),
+ row=1,
+ col=1,
+ )
+ fig.add_trace(
+ go.Scatter(
+ x=prof_np[:, 1],
+ y=prof_np[:, 0],
+ line=dict(color='firebrick', width=2.5), # noqa: C408
+ mode='lines+markers',
+ name=prof_name,
+ ),
+ row=1,
+ col=1,
+ )
+
+ fig.update_xaxes(
+ title_text='$U_{av} [m/s]$',
+ range=[0, 1.25 * np.max(prof_np[:, 1])],
+ showline=True,
+ linewidth=1.5,
+ linecolor='black',
+ ticks='outside',
+ row=1,
+ col=1,
+ )
+ fig.update_yaxes(
+ title_text='$z [m]$',
+ range=[0, 1.01 * np.max(prof_np[:, 0])],
+ showline=True,
+ linewidth=1.5,
+ linecolor='black',
+ ticks='outside',
+ row=1,
+ col=1,
+ )
# Turbulence Intensity Iu
- fig.add_trace(go.Scatter(x=tar_Iu, y=tar_p[:,2], line=dict(color='black', width=3.0, dash='dot'),
- mode='lines', name='Target', ), row=1, col=2)
- fig.add_trace(go.Scatter(x=prof_np[:,2], y=prof_np[:,0], line=dict(color='firebrick', width=2.5),
- mode='lines+markers', name=prof_name, ), row=1, col=2)
- fig.update_xaxes(title_text="$I_{u}$", range=[0, 1.3*np.max(prof_np[:,2])],
- showline=True, linewidth=1.5, linecolor='black',ticks='outside', row=1, col=2)
- fig.update_yaxes(title_text="", range=[0, 1.01*np.max(prof_np[:,0])], showline=True,
- linewidth=1.5, linecolor='black',ticks='outside', row=1, col=2)
+ fig.add_trace(
+ go.Scatter(
+ x=tar_Iu,
+ y=tar_p[:, 2],
+ line=dict(color='black', width=3.0, dash='dot'), # noqa: C408
+ mode='lines',
+ name='Target',
+ ),
+ row=1,
+ col=2,
+ )
+ fig.add_trace(
+ go.Scatter(
+ x=prof_np[:, 2],
+ y=prof_np[:, 0],
+ line=dict(color='firebrick', width=2.5), # noqa: C408
+ mode='lines+markers',
+ name=prof_name,
+ ),
+ row=1,
+ col=2,
+ )
+ fig.update_xaxes(
+ title_text='$I_{u}$',
+ range=[0, 1.3 * np.max(prof_np[:, 2])],
+ showline=True,
+ linewidth=1.5,
+ linecolor='black',
+ ticks='outside',
+ row=1,
+ col=2,
+ )
+ fig.update_yaxes(
+ title_text='',
+ range=[0, 1.01 * np.max(prof_np[:, 0])],
+ showline=True,
+ linewidth=1.5,
+ linecolor='black',
+ ticks='outside',
+ row=1,
+ col=2,
+ )
# Turbulence Intensity Iv
- fig.add_trace(go.Scatter(x=tar_Iw, y=tar_p[:,2], line=dict(color='black', width=3.0, dash='dot'),
- mode='lines', name='Target', ), row=1, col=3)
- fig.add_trace(go.Scatter(x=prof_np[:,3], y=prof_np[:,0], line=dict(color='firebrick', width=2.5),
- mode='lines+markers', name=prof_name, ), row=1, col=3)
- fig.update_xaxes(title_text="$I_{v}$", range=[0, 1.3*np.max(prof_np[:,3])],
- showline=True, linewidth=1.5, linecolor='black',ticks='outside', row=1, col=3)
- fig.update_yaxes(title_text="", range=[0, 1.01*np.max(prof_np[:,0])], showline=True,
- linewidth=1.5, linecolor='black',ticks='outside', row=1, col=3)
+ fig.add_trace(
+ go.Scatter(
+ x=tar_Iv,
+ y=tar_p[:, 2],
+ line=dict(color='black', width=3.0, dash='dot'), # noqa: C408
+ mode='lines',
+ name='Target',
+ ),
+ row=1,
+ col=3,
+ )
+ fig.add_trace(
+ go.Scatter(
+ x=prof_np[:, 3],
+ y=prof_np[:, 0],
+ line=dict(color='firebrick', width=2.5), # noqa: C408
+ mode='lines+markers',
+ name=prof_name,
+ ),
+ row=1,
+ col=3,
+ )
+ fig.update_xaxes(
+ title_text='$I_{v}$',
+ range=[0, 1.3 * np.max(prof_np[:, 3])],
+ showline=True,
+ linewidth=1.5,
+ linecolor='black',
+ ticks='outside',
+ row=1,
+ col=3,
+ )
+ fig.update_yaxes(
+ title_text='',
+ range=[0, 1.01 * np.max(prof_np[:, 0])],
+ showline=True,
+ linewidth=1.5,
+ linecolor='black',
+ ticks='outside',
+ row=1,
+ col=3,
+ )
# Turbulence Intensity Iw
- fig.add_trace(go.Scatter(x=tar_Iw, y=tar_p[:,2], line=dict(color='black', width=3.0, dash='dot'),
- mode='lines', name='Target', ), row=1, col=4)
- fig.add_trace(go.Scatter(x=prof_np[:,4], y=prof_np[:,0], line=dict(color='firebrick', width=2.5),
- mode='lines+markers', name=prof_name, ), row=1, col=4)
- fig.update_xaxes(title_text="$I_{w}$", range=[0, 1.3*np.max(prof_np[:,4])],
- showline=True, linewidth=1.5, linecolor='black',ticks='outside', row=1, col=4)
- fig.update_yaxes(title_text="", range=[0, 1.01*np.max(prof_np[:,0])], showline=True,
- linewidth=1.5, linecolor='black',ticks='outside', row=1, col=4)
-
-
- # Shear Stress Profile
- fig.add_trace(go.Scatter(x=tar_uw, y=tar_p[:,2], line=dict(color='black', width=3.0, dash='dot'),
- mode='lines', name='Target', ), row=2, col=1)
- fig.add_trace(go.Scatter(x=prof_np[:,5], y=prof_np[:,0], line=dict(color='firebrick', width=2.5),
- mode='lines+markers', name=prof_name, ), row=2, col=1)
- fig.update_xaxes(title_text=r'$\overline{uw}$', range=[1.3*np.min(prof_np[:,5]), 1.5*np.max(prof_np[:,5])],
- showline=True, linewidth=1.5, linecolor='black',ticks='outside', row=2, col=1)
- fig.update_yaxes(title_text="$z [m]$", range=[0, 1.01*np.max(prof_np[:,0])], showline=True,
- linewidth=1.5, linecolor='black',ticks='outside', row=2, col=1)
-
+ fig.add_trace(
+ go.Scatter(
+ x=tar_Iw,
+ y=tar_p[:, 2],
+ line=dict(color='black', width=3.0, dash='dot'), # noqa: C408
+ mode='lines',
+ name='Target',
+ ),
+ row=1,
+ col=4,
+ )
+ fig.add_trace(
+ go.Scatter(
+ x=prof_np[:, 4],
+ y=prof_np[:, 0],
+ line=dict(color='firebrick', width=2.5), # noqa: C408
+ mode='lines+markers',
+ name=prof_name,
+ ),
+ row=1,
+ col=4,
+ )
+ fig.update_xaxes(
+ title_text='$I_{w}$',
+ range=[0, 1.3 * np.max(prof_np[:, 4])],
+ showline=True,
+ linewidth=1.5,
+ linecolor='black',
+ ticks='outside',
+ row=1,
+ col=4,
+ )
+ fig.update_yaxes(
+ title_text='',
+ range=[0, 1.01 * np.max(prof_np[:, 0])],
+ showline=True,
+ linewidth=1.5,
+ linecolor='black',
+ ticks='outside',
+ row=1,
+ col=4,
+ )
+
+ # Shear Stress Profile
+ fig.add_trace(
+ go.Scatter(
+ x=tar_uw,
+ y=tar_p[:, 2],
+ line=dict(color='black', width=3.0, dash='dot'), # noqa: C408
+ mode='lines',
+ name='Target',
+ ),
+ row=2,
+ col=1,
+ )
+ fig.add_trace(
+ go.Scatter(
+ x=prof_np[:, 5],
+ y=prof_np[:, 0],
+ line=dict(color='firebrick', width=2.5), # noqa: C408
+ mode='lines+markers',
+ name=prof_name,
+ ),
+ row=2,
+ col=1,
+ )
+ fig.update_xaxes(
+ title_text=r'$\overline{uw}$',
+ range=[1.3 * np.min(prof_np[:, 5]), 1.5 * np.max(prof_np[:, 5])],
+ showline=True,
+ linewidth=1.5,
+ linecolor='black',
+ ticks='outside',
+ row=2,
+ col=1,
+ )
+ fig.update_yaxes(
+ title_text='$z [m]$',
+ range=[0, 1.01 * np.max(prof_np[:, 0])],
+ showline=True,
+ linewidth=1.5,
+ linecolor='black',
+ ticks='outside',
+ row=2,
+ col=1,
+ )
# Length scale Lu
- fig.add_trace(go.Scatter(x=tar_Lu, y=tar_p[:,2], line=dict(color='black', width=3.0, dash='dot'),
- mode='lines', name='Target', ), row=2, col=2)
- fig.add_trace(go.Scatter(x=prof_np[:,6], y=prof_np[:,0], line=dict(color='firebrick', width=2.5),
- mode='lines+markers', name=prof_name, ), row=2, col=2)
- fig.update_xaxes(title_text="$L_{u} [m]$", range=[0, 1.5*np.max(prof_np[:,6])],
- showline=True, linewidth=1.5, linecolor='black',ticks='outside', row=2, col=2)
- fig.update_yaxes(title_text="", range=[0, 1.01*np.max(prof_np[:,0])], showline=True,
- linewidth=1.5, linecolor='black',ticks='outside', row=2, col=2)
-
+ fig.add_trace(
+ go.Scatter(
+ x=tar_Lu,
+ y=tar_p[:, 2],
+ line=dict(color='black', width=3.0, dash='dot'), # noqa: C408
+ mode='lines',
+ name='Target',
+ ),
+ row=2,
+ col=2,
+ )
+ fig.add_trace(
+ go.Scatter(
+ x=prof_np[:, 6],
+ y=prof_np[:, 0],
+ line=dict(color='firebrick', width=2.5), # noqa: C408
+ mode='lines+markers',
+ name=prof_name,
+ ),
+ row=2,
+ col=2,
+ )
+ fig.update_xaxes(
+ title_text='$L_{u} [m]$',
+ range=[0, 1.5 * np.max(prof_np[:, 6])],
+ showline=True,
+ linewidth=1.5,
+ linecolor='black',
+ ticks='outside',
+ row=2,
+ col=2,
+ )
+ fig.update_yaxes(
+ title_text='',
+ range=[0, 1.01 * np.max(prof_np[:, 0])],
+ showline=True,
+ linewidth=1.5,
+ linecolor='black',
+ ticks='outside',
+ row=2,
+ col=2,
+ )
# Length scale Lv
- fig.add_trace(go.Scatter(x=tar_Lv, y=tar_p[:,2], line=dict(color='black', width=3.0, dash='dot'),
- mode='lines', name='Target', ), row=2, col=3)
- fig.add_trace(go.Scatter(x=prof_np[:,7], y=prof_np[:,0], line=dict(color='firebrick', width=2.5),
- mode='lines+markers', name=prof_name, ), row=2, col=3)
- fig.update_xaxes(title_text="$L_{v} [m]$", range=[0, 1.5*np.max(prof_np[:,7])],
- showline=True, linewidth=1.5, linecolor='black',ticks='outside', row=2, col=3)
- fig.update_yaxes(title_text="", range=[0, 1.01*np.max(prof_np[:,0])], showline=True,
- linewidth=1.5, linecolor='black',ticks='outside', row=2, col=3)
-
+ fig.add_trace(
+ go.Scatter(
+ x=tar_Lv,
+ y=tar_p[:, 2],
+ line=dict(color='black', width=3.0, dash='dot'), # noqa: C408
+ mode='lines',
+ name='Target',
+ ),
+ row=2,
+ col=3,
+ )
+ fig.add_trace(
+ go.Scatter(
+ x=prof_np[:, 7],
+ y=prof_np[:, 0],
+ line=dict(color='firebrick', width=2.5), # noqa: C408
+ mode='lines+markers',
+ name=prof_name,
+ ),
+ row=2,
+ col=3,
+ )
+ fig.update_xaxes(
+ title_text='$L_{v} [m]$',
+ range=[0, 1.5 * np.max(prof_np[:, 7])],
+ showline=True,
+ linewidth=1.5,
+ linecolor='black',
+ ticks='outside',
+ row=2,
+ col=3,
+ )
+ fig.update_yaxes(
+ title_text='',
+ range=[0, 1.01 * np.max(prof_np[:, 0])],
+ showline=True,
+ linewidth=1.5,
+ linecolor='black',
+ ticks='outside',
+ row=2,
+ col=3,
+ )
# Length scale Lw
- fig.add_trace(go.Scatter(x=tar_Lw, y=tar_p[:,2], line=dict(color='black', width=3.0, dash='dot'),
- mode='lines', name='Target', ), row=2, col=4)
- fig.add_trace(go.Scatter(x=prof_np[:,8], y=prof_np[:,0], line=dict(color='firebrick', width=2.5),
- mode='lines+markers', name=prof_name, ), row=2, col=4)
- fig.update_xaxes(title_text="$L_{w} [m]$", range=[0, 1.5*np.max(prof_np[:,8])],
- showline=True, linewidth=1.5, linecolor='black',ticks='outside', row=2, col=4)
- fig.update_yaxes(title_text="", range=[0, 1.01*np.max(prof_np[:,0])], showline=True,
- linewidth=1.5, linecolor='black',ticks='outside', row=2, col=4)
-
-
- fig.update_layout(height=850, width=1200, title_text="",showlegend=False)
+ fig.add_trace(
+ go.Scatter(
+ x=tar_Lw,
+ y=tar_p[:, 2],
+ line=dict(color='black', width=3.0, dash='dot'), # noqa: C408
+ mode='lines',
+ name='Target',
+ ),
+ row=2,
+ col=4,
+ )
+ fig.add_trace(
+ go.Scatter(
+ x=prof_np[:, 8],
+ y=prof_np[:, 0],
+ line=dict(color='firebrick', width=2.5), # noqa: C408
+ mode='lines+markers',
+ name=prof_name,
+ ),
+ row=2,
+ col=4,
+ )
+ fig.update_xaxes(
+ title_text='$L_{w} [m]$',
+ range=[0, 1.5 * np.max(prof_np[:, 8])],
+ showline=True,
+ linewidth=1.5,
+ linecolor='black',
+ ticks='outside',
+ row=2,
+ col=4,
+ )
+ fig.update_yaxes(
+ title_text='',
+ range=[0, 1.01 * np.max(prof_np[:, 0])],
+ showline=True,
+ linewidth=1.5,
+ linecolor='black',
+ ticks='outside',
+ row=2,
+ col=4,
+ )
+
+ fig.update_layout(height=850, width=1200, title_text='', showlegend=False)
fig.show()
- fig.write_html(os.path.join(output_path, prof_name + ".html"), include_mathjax="cdn")
-
+ fig.write_html(
+ os.path.join(output_path, prof_name + '.html'), # noqa: PTH118
+ include_mathjax='cdn',
+ )
+ # Plot the spectra at four locations
- #Plot the spectra at four locations
-
- spec_h = ref_h*np.array([0.25, 0.50, 1.00, 2.00])
+ spec_h = ref_h * np.array([0.25, 0.50, 1.00, 2.00])
n_spec = len(spec_h)
nseg = 5
ncomp = 3
- ylabel = ['$fS_{u}/\sigma^2_{u}$',
- '$fS_{v}/\sigma^2_{v}$',
- '$fS_{w}/\sigma^2_{w}$']
-
+ ylabel = [
+ r'$fS_{u}/\sigma^2_{u}$',
+ r'$fS_{v}/\sigma^2_{v}$',
+ r'$fS_{w}/\sigma^2_{w}$',
+ ]
for i in range(n_spec):
- loc = np.argmin(np.abs(prof_np[:,0] - spec_h[i]))
-
- loc_tar = np.argmin(np.abs(tar_p[:,2] - spec_h[i]))
-
- subplot_titles = ("u-component", "v-component", "w-component")
- fig = make_subplots(rows=1, cols=3, start_cell="top-left", subplot_titles=subplot_titles, vertical_spacing=0.15)
-
- U_ref_prof = np.interp(spec_h[i], prof_np[:,0], prof_np[:,1])
- U_ref_tar = np.interp(spec_h[i], tar_p[:,2], tar_U)
-
- #Plot each component
+ loc = np.argmin(np.abs(prof_np[:, 0] - spec_h[i]))
+
+ loc_tar = np.argmin(np.abs(tar_p[:, 2] - spec_h[i]))
+
+ subplot_titles = ('u-component', 'v-component', 'w-component')
+ fig = make_subplots(
+ rows=1,
+ cols=3,
+ start_cell='top-left',
+ subplot_titles=subplot_titles,
+ vertical_spacing=0.15,
+ )
+
+ U_ref_prof = np.interp(spec_h[i], prof_np[:, 0], prof_np[:, 1]) # noqa: N806
+ U_ref_tar = np.interp(spec_h[i], tar_p[:, 2], tar_U) # noqa: N806
+
+ # Plot each component
for j in range(ncomp):
- freq, spec = psd(prof.u[loc, j,:], prof.dt, nseg)
-
- f_min = np.min(freq)/1.5
- f_max = 1.5*np.max(freq)
-
- u_var = np.var(prof.u[loc, j,:])
-
- spec = freq*spec/u_var
- freq = freq*spec_h[i]/U_ref_prof
-
-
- tar_Iz = tar_I[j,loc_tar]
- tar_Lz = tar_L[j,loc_tar]
-
-
- vonk_f = np.logspace(np.log10(f_min), np.log10(f_max), 200)
- vonk_psd = von_karman_spectrum(vonk_f, U_ref_tar, tar_Iz, tar_Lz, j)
-
- vonk_psd = vonk_f*vonk_psd/np.square(U_ref_tar*tar_Iz)
- vonk_f = vonk_f*spec_h[i]/U_ref_tar
-
-
- fig.add_trace(go.Scatter(x=freq, y=spec, line=dict(color='firebrick', width=1.5),
- mode='lines', name=prof_name, ), row=1, col=1+j)
- fig.add_trace(go.Scatter(x=vonk_f, y=vonk_psd, line=dict(color='black', width=3.0, dash='dot'),
- mode='lines', name='Target(von Karman)', ), row=1, col=1+j)
- fig.update_xaxes(type="log", title_text="$fz/U$",
- showline=True, linewidth=1.5, linecolor='black',ticks='outside', row=1, col=1+j)
- fig.update_yaxes(type="log", title_text=ylabel[j], showline=True,
- linewidth=1.5, linecolor='black',ticks='outside', row=1, col=1+j)
-
- fig.update_layout(height=450, width=1500, title_text="",showlegend=False)
- fig.show()
- fig.write_html(os.path.join(output_path, "spectra_" + prof_name + "_H" + str(1 + i) + ".html"), include_mathjax="cdn")
-
+ freq, spec = psd(prof.u[loc, j, :], prof.dt, nseg)
+
+ f_min = np.min(freq) / 1.5
+ f_max = 1.5 * np.max(freq)
+
+ u_var = np.var(prof.u[loc, j, :])
+ spec = freq * spec / u_var
+ freq = freq * spec_h[i] / U_ref_prof
+ tar_Iz = tar_I[j, loc_tar] # noqa: N806
+ tar_Lz = tar_L[j, loc_tar] # noqa: N806
-def plot_pressure_profile(case_path, output_path, prof_name):
+ vonk_f = np.logspace(np.log10(f_min), np.log10(f_max), 200)
+ vonk_psd = von_karman_spectrum(vonk_f, U_ref_tar, tar_Iz, tar_Lz, j)
- prof_path = os.path.join(case_path, "postProcessing", prof_name)
-
+ vonk_psd = vonk_f * vonk_psd / np.square(U_ref_tar * tar_Iz)
+ vonk_f = vonk_f * spec_h[i] / U_ref_tar
+
+ fig.add_trace(
+ go.Scatter(
+ x=freq,
+ y=spec,
+ line=dict(color='firebrick', width=1.5), # noqa: C408
+ mode='lines',
+ name=prof_name,
+ ),
+ row=1,
+ col=1 + j,
+ )
+ fig.add_trace(
+ go.Scatter(
+ x=vonk_f,
+ y=vonk_psd,
+ line=dict(color='black', width=3.0, dash='dot'), # noqa: C408
+ mode='lines',
+ name='Target(von Karman)',
+ ),
+ row=1,
+ col=1 + j,
+ )
+ fig.update_xaxes(
+ type='log',
+ title_text='$fz/U$',
+ showline=True,
+ linewidth=1.5,
+ linecolor='black',
+ ticks='outside',
+ row=1,
+ col=1 + j,
+ )
+ fig.update_yaxes(
+ type='log',
+ title_text=ylabel[j],
+ showline=True,
+ linewidth=1.5,
+ linecolor='black',
+ ticks='outside',
+ row=1,
+ col=1 + j,
+ )
+
+ fig.update_layout(height=450, width=1500, title_text='', showlegend=False)
+ fig.show()
+ fig.write_html(
+ os.path.join( # noqa: PTH118
+ output_path, 'spectra_' + prof_name + '_H' + str(1 + i) + '.html'
+ ),
+ include_mathjax='cdn',
+ )
- prof = PressureData(prof_path, start_time=1.0, end_time=None, u_ref=0.0, rho=1.25, p_ref=0.0)
+def plot_pressure_profile(case_path, output_path, prof_name): # noqa: D103
+ prof_path = os.path.join(case_path, 'postProcessing', prof_name) # noqa: PTH118
+
+ prof = PressureData(
+ prof_path, start_time=1.0, end_time=None, u_ref=0.0, rho=1.25, p_ref=0.0
+ )
std_p = np.std(prof.p, axis=1)
-
-
- subplot_titles = ("Pressure Fluctuation",)
-
- fig = make_subplots(rows=1, cols=1, start_cell="top-left", subplot_titles=subplot_titles, vertical_spacing=0.15)
+ subplot_titles = ('Pressure Fluctuation',)
+
+ fig = make_subplots(
+ rows=1,
+ cols=1,
+ start_cell='top-left',
+ subplot_titles=subplot_titles,
+ vertical_spacing=0.15,
+ )
# Plot pressure fluctuation Velocity
- fig.add_trace(go.Scatter(x=prof.x-np.min(prof.x), y=std_p, line=dict(color='firebrick', width=2.5),
- mode='lines+markers', name=prof_name, ), row=1, col=1)
-
- fig.update_xaxes(title_text="Distance from inlet (x) [m]", range=[np.min(prof.x-np.min(prof.x)), np.max(prof.x-np.min(prof.x))],
- showline=True, linewidth=1.5, linecolor='black',ticks='outside', row=1, col=1)
- fig.update_yaxes(title_text=r"Pressure R.M.S", range=[0, 1.15*np.max(std_p)], showline=True,
- linewidth=1.5, linecolor='black',ticks='outside', row=1, col=1)
-
+ fig.add_trace(
+ go.Scatter(
+ x=prof.x - np.min(prof.x),
+ y=std_p,
+ line=dict(color='firebrick', width=2.5), # noqa: C408
+ mode='lines+markers',
+ name=prof_name,
+ ),
+ row=1,
+ col=1,
+ )
+
+ fig.update_xaxes(
+ title_text='Distance from inlet (x) [m]',
+ range=[np.min(prof.x - np.min(prof.x)), np.max(prof.x - np.min(prof.x))],
+ showline=True,
+ linewidth=1.5,
+ linecolor='black',
+ ticks='outside',
+ row=1,
+ col=1,
+ )
+ fig.update_yaxes(
+ title_text=r'Pressure R.M.S',
+ range=[0, 1.15 * np.max(std_p)],
+ showline=True,
+ linewidth=1.5,
+ linecolor='black',
+ ticks='outside',
+ row=1,
+ col=1,
+ )
+
+ fig.update_layout(height=400, width=800, title_text='', showlegend=False)
+ fig.show()
+ fig.write_html(
+ os.path.join(output_path, 'pressure_' + prof_name + '.html'), # noqa: PTH118
+ include_mathjax='cdn',
+ )
- fig.update_layout(height=400, width=800, title_text="",showlegend=False)
- fig.show()
- fig.write_html(os.path.join(output_path, "pressure_" + prof_name + ".html"), include_mathjax="cdn")
-
-if __name__ == '__main__':
+if __name__ == '__main__':
""""
Entry point to read the simulation results from OpenFOAM case and post-process it.
"""
- #CLI parser
- parser = argparse.ArgumentParser(description="Get EVENT file from OpenFOAM output")
- parser.add_argument('-c', '--case', help="OpenFOAM case directory", required=True)
+ # CLI parser
+ parser = argparse.ArgumentParser(
+ description='Get EVENT file from OpenFOAM output'
+ )
+ parser.add_argument(
+ '-c', '--case', help='OpenFOAM case directory', required=True
+ )
arguments, unknowns = parser.parse_known_args()
-
- case_path = arguments.case
-
- print("Case full path: ", case_path)
+ case_path = arguments.case
+
+ print('Case full path: ', case_path) # noqa: T201
# prof_name = sys.argv[2]
- #Read JSON data
- json_path = os.path.join(case_path, "constant", "simCenter", "input", "EmptyDomainCFD.json")
- with open(json_path) as json_file:
- json_data = json.load(json_file)
-
+ # Read JSON data
+ json_path = os.path.join( # noqa: PTH118
+ case_path, 'constant', 'simCenter', 'input', 'EmptyDomainCFD.json'
+ )
+ with open(json_path) as json_file: # noqa: PLW1514, PTH123
+ json_data = json.load(json_file)
+
# Returns JSON object as a dictionary
- rm_data = json_data["resultMonitoring"]
-
+ rm_data = json_data['resultMonitoring']
+
wind_profiles = rm_data['windProfiles']
vtk_planes = rm_data['vtkPlanes']
-
- prof_output_path = os.path.join(case_path, "constant", "simCenter", "output", "windProfiles")
- #Check if it exists and remove files
- if os.path.exists(prof_output_path):
+ prof_output_path = os.path.join( # noqa: PTH118
+ case_path, 'constant', 'simCenter', 'output', 'windProfiles'
+ )
+
+ # Check if it exists and remove files
+ if os.path.exists(prof_output_path): # noqa: PTH110
shutil.rmtree(prof_output_path)
-
- #Create new path
+
+ # Create new path
Path(prof_output_path).mkdir(parents=True, exist_ok=True)
-
- #Plot velocity and pressure profiles
+
+ # Plot velocity and pressure profiles
for prof in wind_profiles:
- name = prof["name"]
- field = prof["field"]
- print(name)
- print(field)
-
- if field=="Velocity":
+ name = prof['name']
+ field = prof['field']
+ print(name) # noqa: T201
+ print(field) # noqa: T201
+
+ if field == 'Velocity':
plot_wind_profiles_and_spectra(case_path, prof_output_path, name)
-
- if field=="Pressure":
+
+ if field == 'Pressure':
plot_pressure_profile(case_path, prof_output_path, name)
-
-
+
# Copy the VTK files renamed
for pln in vtk_planes:
- name = pln["name"]
- field = pln["field"]
-
- vtk_path = os.path.join(case_path, "postProcessing", name)
- vtk_path_renamed = os.path.join(case_path, "postProcessing", name + "_renamed")
+ name = pln['name']
+ field = pln['field']
+
+ vtk_path = os.path.join(case_path, 'postProcessing', name) # noqa: PTH118
+ vtk_path_renamed = os.path.join( # noqa: PTH118
+ case_path, 'postProcessing', name + '_renamed'
+ )
Path(vtk_path_renamed).mkdir(parents=True, exist_ok=True)
copy_vtk_planes_and_order(vtk_path, vtk_path_renamed, field)
-
- #Check if it exists and remove files
- if os.path.exists(vtk_path):
- shutil.rmtree(vtk_path)
\ No newline at end of file
+
+ # Check if it exists and remove files
+ if os.path.exists(vtk_path): # noqa: PTH110
+ shutil.rmtree(vtk_path)
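+
+# Illustrative usage note (not part of the original patch): the entry point
+# above only needs the OpenFOAM case directory, e.g. (hypothetical path)
+#
+#     python post_process_output.py -c /path/to/EmptyDomainCFD_case
+#
+# It reads constant/simCenter/input/EmptyDomainCFD.json from that case and
+# writes the profile plots under constant/simCenter/output/windProfiles.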
diff --git a/modules/createEVENT/EmptyDomainCFD/setup_case.py b/modules/createEVENT/EmptyDomainCFD/setup_case.py
index ecfa93365..1c1e611ee 100644
--- a/modules/createEVENT/EmptyDomainCFD/setup_case.py
+++ b/modules/createEVENT/EmptyDomainCFD/setup_case.py
@@ -1,1091 +1,1059 @@
-"""
-This script writes BC and initial condition, and setups the OpenFoam case
+"""This script writes BC and initial condition, and setups the OpenFoam case
directory.
-"""
-import numpy as np
-import sys
-import os
+""" # noqa: CPY001, D205, D404, INP001
+
import json
-import numpy as np
+import os
+import sys
+
import foam_file_processor as foam
-from stl import mesh
+import numpy as np
-def write_block_mesh_dict(input_json_path, template_dict_path, case_path):
+def write_block_mesh_dict(input_json_path, template_dict_path, case_path): # noqa: D103
+ # Read JSON data
+ with open(input_json_path + '/EmptyDomainCFD.json') as json_file: # noqa: PLW1514, PTH123
+ json_data = json.load(json_file)
- #Read JSON data
- with open(input_json_path + "/EmptyDomainCFD.json") as json_file:
- json_data = json.load(json_file)
-
# Returns JSON object as a dictionary
- mesh_data = json_data["blockMeshParameters"]
+ mesh_data = json_data['blockMeshParameters']
geom_data = json_data['GeometricData']
- boundary_data = json_data["boundaryConditions"]
+ boundary_data = json_data['boundaryConditions']
origin = np.array(geom_data['origin'])
- scale = geom_data['geometricScale']
-
- Lx = geom_data['domainLength']
- Ly = geom_data['domainWidth']
- Lz = geom_data['domainHeight']
- Lf = geom_data['fetchLength']
-
+ scale = geom_data['geometricScale'] # noqa: F841
+
+ Lx = geom_data['domainLength'] # noqa: N806
+ Ly = geom_data['domainWidth'] # noqa: N806
+ Lz = geom_data['domainHeight'] # noqa: N806
+ Lf = geom_data['fetchLength'] # noqa: N806
+
x_cells = mesh_data['xNumCells']
y_cells = mesh_data['yNumCells']
z_cells = mesh_data['zNumCells']
-
+
x_grading = mesh_data['xGrading']
y_grading = mesh_data['yGrading']
z_grading = mesh_data['zGrading']
-
- bc_map = {"slip": 'wall', "cyclic": 'cyclic', "noSlip": 'wall',
- "symmetry": 'symmetry', "empty": 'empty', "TInf": 'patch',
- "MeanABL": 'patch', "Uniform": 'patch', "zeroPressureOutlet": 'patch',
- "roughWallFunction": 'wall',"smoothWallFunction": 'wall'}
+ bc_map = {
+ 'slip': 'wall',
+ 'cyclic': 'cyclic',
+ 'noSlip': 'wall',
+ 'symmetry': 'symmetry',
+ 'empty': 'empty',
+ 'TInf': 'patch',
+ 'MeanABL': 'patch',
+ 'Uniform': 'patch',
+ 'zeroPressureOutlet': 'patch',
+ 'roughWallFunction': 'wall',
+ 'smoothWallFunction': 'wall',
+ }
inlet_type = bc_map[boundary_data['inletBoundaryCondition']]
outlet_type = bc_map[boundary_data['outletBoundaryCondition']]
- ground_type = bc_map[boundary_data['groundBoundaryCondition']]
+ ground_type = bc_map[boundary_data['groundBoundaryCondition']]
top_type = bc_map[boundary_data['topBoundaryCondition']]
front_type = bc_map[boundary_data['sidesBoundaryCondition']]
back_type = bc_map[boundary_data['sidesBoundaryCondition']]
length_unit = json_data['lengthUnit']
-
x_min = -Lf - origin[0]
- y_min = -Ly/2.0 - origin[1]
- z_min = 0.0 - origin[2]
+ y_min = -Ly / 2.0 - origin[1]
+ z_min = 0.0 - origin[2]
x_max = x_min + Lx
y_max = y_min + Ly
z_max = z_min + Lz
- #Open the template blockMeshDict (OpenFOAM file) for manipulation
- dict_file = open(template_dict_path + "/blockMeshDictTemplate", "r")
+ # Open the template blockMeshDict (OpenFOAM file) for manipulation
+ dict_file = open(template_dict_path + '/blockMeshDictTemplate') # noqa: PLW1514, PTH123, SIM115
- #Export to OpenFOAM probe format
+ # Read the template file contents line by line
dict_lines = dict_file.readlines()
dict_file.close()
-
-
- dict_lines[17] = "\txMin\t\t{:.4f};\n".format(x_min)
- dict_lines[18] = "\tyMin\t\t{:.4f};\n".format(y_min)
- dict_lines[19] = "\tzMin\t\t{:.4f};\n".format(z_min)
- dict_lines[20] = "\txMax\t\t{:.4f};\n".format(x_max)
- dict_lines[21] = "\tyMax\t\t{:.4f};\n".format(y_max)
- dict_lines[22] = "\tzMax\t\t{:.4f};\n".format(z_max)
+ dict_lines[17] = f'\txMin\t\t{x_min:.4f};\n'
+ dict_lines[18] = f'\tyMin\t\t{y_min:.4f};\n'
+ dict_lines[19] = f'\tzMin\t\t{z_min:.4f};\n'
+ dict_lines[20] = f'\txMax\t\t{x_max:.4f};\n'
+ dict_lines[21] = f'\tyMax\t\t{y_max:.4f};\n'
+ dict_lines[22] = f'\tzMax\t\t{z_max:.4f};\n'
- dict_lines[23] = "\txCells\t\t{:d};\n".format(x_cells)
- dict_lines[24] = "\tyCells\t\t{:d};\n".format(y_cells)
- dict_lines[25] = "\tzCells\t\t{:d};\n".format(z_cells)
+ dict_lines[23] = f'\txCells\t\t{x_cells:d};\n'
+ dict_lines[24] = f'\tyCells\t\t{y_cells:d};\n'
+ dict_lines[25] = f'\tzCells\t\t{z_cells:d};\n'
- dict_lines[26] = "\txGrading\t{:.4f};\n".format(x_grading)
- dict_lines[27] = "\tyGrading\t{:.4f};\n".format(y_grading)
- dict_lines[28] = "\tzGrading\t{:.4f};\n".format(z_grading)
+ dict_lines[26] = f'\txGrading\t{x_grading:.4f};\n'
+ dict_lines[27] = f'\tyGrading\t{y_grading:.4f};\n'
+ dict_lines[28] = f'\tzGrading\t{z_grading:.4f};\n'
convert_to_meters = 1.0
- if length_unit=='m':
+ if length_unit == 'm':
convert_to_meters = 1.0
- elif length_unit=='cm':
+ elif length_unit == 'cm':
convert_to_meters = 0.01
- elif length_unit=='mm':
+ elif length_unit == 'mm':
convert_to_meters = 0.001
- elif length_unit=='ft':
+ elif length_unit == 'ft':
convert_to_meters = 0.3048
- elif length_unit=='in':
+ elif length_unit == 'in':
convert_to_meters = 0.0254
- dict_lines[31] = "convertToMeters {:.4f};\n".format(convert_to_meters)
- dict_lines[61] = " type {};\n".format(inlet_type)
- dict_lines[70] = " type {};\n".format(outlet_type)
- dict_lines[79] = " type {};\n".format(ground_type)
- dict_lines[88] = " type {};\n".format(top_type)
- dict_lines[97] = " type {};\n".format(front_type)
- dict_lines[106] = " type {};\n".format(back_type)
-
-
- write_file_name = case_path + "/system/blockMeshDict"
-
- if os.path.exists(write_file_name):
- os.remove(write_file_name)
-
- output_file = open(write_file_name, "w+")
+ dict_lines[31] = f'convertToMeters {convert_to_meters:.4f};\n'
+ dict_lines[61] = f' type {inlet_type};\n'
+ dict_lines[70] = f' type {outlet_type};\n'
+ dict_lines[79] = f' type {ground_type};\n'
+ dict_lines[88] = f' type {top_type};\n'
+ dict_lines[97] = f' type {front_type};\n'
+ dict_lines[106] = f' type {back_type};\n'
+
+ write_file_name = case_path + '/system/blockMeshDict'
+
+ if os.path.exists(write_file_name): # noqa: PTH110
+ os.remove(write_file_name) # noqa: PTH107
+
+ output_file = open(write_file_name, 'w+') # noqa: PLW1514, PTH123, SIM115
for line in dict_lines:
output_file.write(line)
output_file.close()
+def write_snappy_hex_mesh_dict(input_json_path, template_dict_path, case_path): # noqa: D103
+ # Read JSON data
+ with open(input_json_path + '/EmptyDomainCFD.json') as json_file: # noqa: PLW1514, PTH123
+ json_data = json.load(json_file)
-def write_snappy_hex_mesh_dict(input_json_path, template_dict_path, case_path):
-
- #Read JSON data
- with open(input_json_path + "/EmptyDomainCFD.json") as json_file:
- json_data = json.load(json_file)
-
# Returns JSON object as a dictionary
- mesh_data = json_data["snappyHexMeshParameters"]
+ mesh_data = json_data['snappyHexMeshParameters']
geom_data = json_data['GeometricData']
-
- Lx = geom_data['domainLength']
- Ly = geom_data['domainWidth']
- Lz = geom_data['domainHeight']
- Lf = geom_data['fetchLength']
-
+
+ Lx = geom_data['domainLength'] # noqa: N806
+ Ly = geom_data['domainWidth'] # noqa: N806
+ Lz = geom_data['domainHeight'] # noqa: N806
+ Lf = geom_data['fetchLength'] # noqa: N806
+
origin = np.array(geom_data['origin'])
-
+
num_cells_between_levels = mesh_data['numCellsBetweenLevels']
resolve_feature_angle = mesh_data['resolveFeatureAngle']
- num_processors = mesh_data['numProcessors']
-
+ num_processors = mesh_data['numProcessors'] # noqa: F841
+
refinement_boxes = mesh_data['refinementBoxes']
-
+
x_min = -Lf - origin[0]
- y_min = -Ly/2.0 - origin[1]
- z_min = 0.0 - origin[2]
+ y_min = -Ly / 2.0 - origin[1]
+ z_min = 0.0 - origin[2]
- x_max = x_min + Lx
+ x_max = x_min + Lx # noqa: F841
y_max = y_min + Ly
- z_max = z_min + Lz
-
- inside_point = [x_min + Lf/2.0, (y_min + y_max)/2.0, (z_min + z_max)/2.0]
+ z_max = z_min + Lz
+ inside_point = [x_min + Lf / 2.0, (y_min + y_max) / 2.0, (z_min + z_max) / 2.0]
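+    # A point known to lie inside the fluid region (mid-fetch, mid-width, mid-height);
+    # snappyHexMesh uses it to decide which side of the geometry to keep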
- #Open the template blockMeshDict (OpenFOAM file) for manipulation
- dict_file = open(template_dict_path + "/snappyHexMeshDictTemplate", "r")
+    # Open the template snappyHexMeshDict (OpenFOAM file) for manipulation
+ dict_file = open(template_dict_path + '/snappyHexMeshDictTemplate') # noqa: PLW1514, PTH123, SIM115
- #Export to OpenFOAM probe format
+    # Read the template file contents
dict_lines = dict_file.readlines()
dict_file.close()
-
-
- #Write 'addLayers' switch
- start_index = foam.find_keyword_line(dict_lines, "addLayers")
- dict_lines[start_index] = "addLayers\t{};\n".format("off")
-
- ###################### Edit Geometry Section ##############################
-
- #Add refinement box geometry
- start_index = foam.find_keyword_line(dict_lines, "geometry") + 2
- added_part = ""
- n_boxes = len(refinement_boxes)
- for i in range(n_boxes):
- added_part += " {}\n".format(refinement_boxes[i][0])
- added_part += " {\n"
- added_part += " type searchableBox;\n"
- added_part += " min ({:.4f} {:.4f} {:.4f});\n".format(refinement_boxes[i][2], refinement_boxes[i][3], refinement_boxes[i][4])
- added_part += " max ({:.4f} {:.4f} {:.4f});\n".format(refinement_boxes[i][5], refinement_boxes[i][6], refinement_boxes[i][7])
- added_part += " }\n"
-
- dict_lines.insert(start_index, added_part)
-
-
- ################# Edit castellatedMeshControls Section ####################
- #Write 'nCellsBetweenLevels'
- start_index = foam.find_keyword_line(dict_lines, "nCellsBetweenLevels")
- dict_lines[start_index] = " nCellsBetweenLevels {:d};\n".format(num_cells_between_levels)
+ # Write 'addLayers' switch
+ start_index = foam.find_keyword_line(dict_lines, 'addLayers')
+ dict_lines[start_index] = 'addLayers\t{};\n'.format('off')
- #Write 'resolveFeatureAngle'
- start_index = foam.find_keyword_line(dict_lines, "resolveFeatureAngle")
- dict_lines[start_index] = " resolveFeatureAngle {:d};\n".format(resolve_feature_angle)
+ # Edit Geometry Section ##############################
- #Write 'insidePoint'
- start_index = foam.find_keyword_line(dict_lines, "insidePoint")
- dict_lines[start_index] = " insidePoint ({:.4f} {:.4f} {:.4f});\n".format(inside_point[0], inside_point[1], inside_point[2])
-
- #For compatibility with OpenFOAM-9 and older
- start_index = foam.find_keyword_line(dict_lines, "locationInMesh")
- dict_lines[start_index] = " locationInMesh ({:.4f} {:.4f} {:.4f});\n".format(inside_point[0], inside_point[1], inside_point[2])
-
- #Write 'outsidePoint' on Frontera snappyHex will fail without this keyword
- start_index = foam.find_keyword_line(dict_lines, "outsidePoint")
- dict_lines[start_index] = " outsidePoint ({:.4e} {:.4e} {:.4e});\n".format(-1e-20, -1e-20, -1e-20)
+ # Add refinement box geometry
+ start_index = foam.find_keyword_line(dict_lines, 'geometry') + 2
+ added_part = ''
+ n_boxes = len(refinement_boxes)
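+    # Each refinementBoxes entry is expected to hold
+    # [name, level, x_min, y_min, z_min, x_max, y_max, z_max]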
+ for i in range(n_boxes):
+ added_part += f' {refinement_boxes[i][0]}\n'
+ added_part += ' {\n'
+ added_part += ' type searchableBox;\n'
+ added_part += f' min ({refinement_boxes[i][2]:.4f} {refinement_boxes[i][3]:.4f} {refinement_boxes[i][4]:.4f});\n'
+ added_part += f' max ({refinement_boxes[i][5]:.4f} {refinement_boxes[i][6]:.4f} {refinement_boxes[i][7]:.4f});\n'
+ added_part += ' }\n'
+ dict_lines.insert(start_index, added_part)
-
- #Add box refinements
- added_part = ""
+ # Edit castellatedMeshControls Section ####################
+
+ # Write 'nCellsBetweenLevels'
+ start_index = foam.find_keyword_line(dict_lines, 'nCellsBetweenLevels')
+ dict_lines[start_index] = (
+ f' nCellsBetweenLevels {num_cells_between_levels:d};\n'
+ )
+
+ # Write 'resolveFeatureAngle'
+ start_index = foam.find_keyword_line(dict_lines, 'resolveFeatureAngle')
+ dict_lines[start_index] = f' resolveFeatureAngle {resolve_feature_angle:d};\n'
+
+ # Write 'insidePoint'
+ start_index = foam.find_keyword_line(dict_lines, 'insidePoint')
+ dict_lines[start_index] = (
+ f' insidePoint ({inside_point[0]:.4f} {inside_point[1]:.4f} {inside_point[2]:.4f});\n'
+ )
+
+ # For compatibility with OpenFOAM-9 and older
+ start_index = foam.find_keyword_line(dict_lines, 'locationInMesh')
+ dict_lines[start_index] = (
+ f' locationInMesh ({inside_point[0]:.4f} {inside_point[1]:.4f} {inside_point[2]:.4f});\n'
+ )
+
+    # Write 'outsidePoint'; on Frontera, snappyHexMesh will fail without this keyword
+ start_index = foam.find_keyword_line(dict_lines, 'outsidePoint')
+ dict_lines[start_index] = (
+ f' outsidePoint ({-1e-20:.4e} {-1e-20:.4e} {-1e-20:.4e});\n'
+ )
+
+ # Add box refinements
+ added_part = ''
for i in range(n_boxes):
- added_part += " {}\n".format(refinement_boxes[i][0])
- added_part += " {\n"
- added_part += " mode inside;\n"
- added_part += " level {};\n".format(refinement_boxes[i][1])
- added_part += " }\n"
-
- start_index = foam.find_keyword_line(dict_lines, "refinementRegions") + 2
+ added_part += f' {refinement_boxes[i][0]}\n'
+ added_part += ' {\n'
+ added_part += ' mode inside;\n'
+ added_part += f' level {refinement_boxes[i][1]};\n'
+ added_part += ' }\n'
+
+ start_index = foam.find_keyword_line(dict_lines, 'refinementRegions') + 2
dict_lines.insert(start_index, added_part)
-
-
- #Write edited dict to file
- write_file_name = case_path + "/system/snappyHexMeshDict"
-
- if os.path.exists(write_file_name):
- os.remove(write_file_name)
-
- output_file = open(write_file_name, "w+")
+
+ # Write edited dict to file
+ write_file_name = case_path + '/system/snappyHexMeshDict'
+
+ if os.path.exists(write_file_name): # noqa: PTH110
+ os.remove(write_file_name) # noqa: PTH107
+
+ output_file = open(write_file_name, 'w+') # noqa: PLW1514, PTH123, SIM115
for line in dict_lines:
output_file.write(line)
output_file.close()
def write_boundary_data_files(input_json_path, case_path):
- """
- This functions writes wind profile files in "constant/boundaryData/inlet"
- if TInf options are used for the simulation.
- """
- #Read JSON data
- with open(input_json_path + "/EmptyDomainCFD.json") as json_file:
- json_data = json.load(json_file)
+ """This functions writes wind profile files in "constant/boundaryData/inlet"
+ if TInf options are used for the simulation.
+ """ # noqa: D205, D401, D404
+ # Read JSON data
+ with open(input_json_path + '/EmptyDomainCFD.json') as json_file: # noqa: PLW1514, PTH123
+ json_data = json.load(json_file)
# Returns JSON object as a dictionary
- boundary_data = json_data["boundaryConditions"]
+ boundary_data = json_data['boundaryConditions']
- if boundary_data['inletBoundaryCondition']=="TInf":
+ if boundary_data['inletBoundaryCondition'] == 'TInf':
geom_data = json_data['GeometricData']
- wind_profiles = np.array(boundary_data["inflowProperties"]['windProfiles'])
+ wind_profiles = np.array(boundary_data['inflowProperties']['windProfiles'])
- bd_path = case_path + "/constant/boundaryData/inlet/"
+ bd_path = case_path + '/constant/boundaryData/inlet/'
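+        # The files written below (points, U, R, L) hold the inflow profile data
+        # that the TInf (turbulentDFMInlet) inlet boundary condition reads at run time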
- #Write points file
+ # Write points file
n_pts = np.shape(wind_profiles)[0]
- points = np.zeros((n_pts, 3))
-
+ points = np.zeros((n_pts, 3))
origin = np.array(geom_data['origin'])
-
- Ly = geom_data['domainWidth']
- Lf = geom_data['fetchLength']
-
+
+ Ly = geom_data['domainWidth'] # noqa: N806
+ Lf = geom_data['fetchLength'] # noqa: N806
+
x_min = -Lf - origin[0]
- y_min = -Ly/2.0 - origin[1]
+ y_min = -Ly / 2.0 - origin[1]
y_max = y_min + Ly
- points[:,0] = x_min
- points[:,1] = (y_min + y_max)/2.0
- points[:,2] = wind_profiles[:, 0]
+ points[:, 0] = x_min
+ points[:, 1] = (y_min + y_max) / 2.0
+ points[:, 2] = wind_profiles[:, 0]
- #Shift the last element of the y coordinate
- #a bit to make planer interpolation easier
+ # Shift the last element of the y coordinate
+    # a bit to make planar interpolation easier
points[-1:, 1] = y_max
- foam.write_foam_field(points, bd_path + "points")
+ foam.write_foam_field(points, bd_path + 'points')
- #Write wind speed file as a scalar field
- foam.write_scalar_field(wind_profiles[:, 1], bd_path + "U")
+ # Write wind speed file as a scalar field
+ foam.write_scalar_field(wind_profiles[:, 1], bd_path + 'U')
- #Write Reynolds stress profile (6 columns -> it's a symmetric tensor field)
- foam.write_foam_field(wind_profiles[:, 2:8], bd_path + "R")
+ # Write Reynolds stress profile (6 columns -> it's a symmetric tensor field)
+ foam.write_foam_field(wind_profiles[:, 2:8], bd_path + 'R')
- #Write length scale file (8 columns -> it's a tensor field)
- foam.write_foam_field(wind_profiles[:, 8:17], bd_path + "L")
+ # Write length scale file (8 columns -> it's a tensor field)
+ foam.write_foam_field(wind_profiles[:, 8:17], bd_path + 'L')
-def write_U_file(input_json_path, template_dict_path, case_path):
-
- #Read JSON data
- with open(input_json_path + "/EmptyDomainCFD.json") as json_file:
- json_data = json.load(json_file)
+def write_U_file(input_json_path, template_dict_path, case_path): # noqa: N802, D103
+ # Read JSON data
+ with open(input_json_path + '/EmptyDomainCFD.json') as json_file: # noqa: PLW1514, PTH123
+ json_data = json.load(json_file)
# Returns JSON object as a dictionary
- boundary_data = json_data["boundaryConditions"]
- wind_data = json_data["windCharacteristics"]
-
-
- inlet_BC_type = boundary_data['inletBoundaryCondition']
- top_BC_type = boundary_data['topBoundaryCondition']
- sides_BC_type = boundary_data['sidesBoundaryCondition']
-
+ boundary_data = json_data['boundaryConditions']
+ wind_data = json_data['windCharacteristics']
+
+ inlet_BC_type = boundary_data['inletBoundaryCondition'] # noqa: N806
+ top_BC_type = boundary_data['topBoundaryCondition'] # noqa: N806
+ sides_BC_type = boundary_data['sidesBoundaryCondition'] # noqa: N806
+
wind_speed = wind_data['referenceWindSpeed']
building_height = wind_data['referenceHeight']
roughness_length = wind_data['aerodynamicRoughnessLength']
-
- #Open the template file (OpenFOAM file) for manipulation
- dict_file = open(template_dict_path + "/UFileTemplate", "r")
+ # Open the template file (OpenFOAM file) for manipulation
+ dict_file = open(template_dict_path + '/UFileTemplate') # noqa: PLW1514, PTH123, SIM115
dict_lines = dict_file.readlines()
dict_file.close()
-
- ##################### Internal Field #########################
- #Initialize the internal fields frow a lower velocity to avoid Courant number
- #instability when the solver starts. Now %10 of roof-height wind speed is set
- start_index = foam.find_keyword_line(dict_lines, "internalField")
+
+ # Internal Field #########################
+    # Initialize the internal fields from a lower velocity to avoid Courant number
+    # instability when the solver starts. Now 10% of the roof-height wind speed is set
+ start_index = foam.find_keyword_line(dict_lines, 'internalField')
# dict_lines[start_index] = "internalField uniform ({:.4f} 0 0);\n".format(1.0*wind_speed)
- #Set the internal field to zero to make it easy for the solver to start
- dict_lines[start_index] = "internalField uniform (0 0 0);\n"
-
-
- ###################### Inlet BC ##############################
- #Write uniform
- start_index = foam.find_keyword_line(dict_lines, "inlet") + 2
-
- if inlet_BC_type == "Uniform":
- added_part = ""
- added_part += "\t type \t fixedValue;\n"
- added_part += "\t value \t uniform ({:.4f} 0 0);\n".format(wind_speed)
-
- if inlet_BC_type == "MeanABL":
- added_part = ""
- added_part += "\t type \t atmBoundaryLayerInletVelocity;\n"
- added_part += "\t Uref \t {:.4f};\n".format(wind_speed)
- added_part += "\t Zref \t {:.4f};\n".format(building_height)
- added_part += "\t zDir \t (0.0 0.0 1.0);\n"
- added_part += "\t flowDir \t (1.0 0.0 0.0);\n"
- added_part += "\t z0 uniform \t {:.4e};\n".format(roughness_length)
- added_part += "\t zGround \t uniform 0.0;\n"
-
- if inlet_BC_type == "TInf":
- added_part = ""
- added_part += "\t type \t turbulentDFMInlet;\n"
- added_part += "\t filterType \t exponential;\n"
- added_part += "\t filterFactor \t {};\n".format(4)
- added_part += "\t value \t uniform ({:.4f} 0 0);\n".format(wind_speed)
- added_part += "\t periodicInY \t {};\n".format("true")
- added_part += "\t periodicInZ \t {};\n".format("false")
- added_part += "\t constMeanU \t {};\n".format("true")
- added_part += "\t Uref \t {:.4f};\n".format(wind_speed)
+ # Set the internal field to zero to make it easy for the solver to start
+ dict_lines[start_index] = 'internalField uniform (0 0 0);\n'
+
+ # Inlet BC ##############################
+ # Write uniform
+ start_index = foam.find_keyword_line(dict_lines, 'inlet') + 2
+
+ if inlet_BC_type == 'Uniform':
+ added_part = ''
+ added_part += '\t type \t fixedValue;\n'
+ added_part += f'\t value \t uniform ({wind_speed:.4f} 0 0);\n'
+
+ if inlet_BC_type == 'MeanABL':
+ added_part = ''
+ added_part += '\t type \t atmBoundaryLayerInletVelocity;\n'
+ added_part += f'\t Uref \t {wind_speed:.4f};\n'
+ added_part += f'\t Zref \t {building_height:.4f};\n'
+ added_part += '\t zDir \t (0.0 0.0 1.0);\n'
+ added_part += '\t flowDir \t (1.0 0.0 0.0);\n'
+ added_part += f'\t z0 uniform \t {roughness_length:.4e};\n'
+ added_part += '\t zGround \t uniform 0.0;\n'
+
+ if inlet_BC_type == 'TInf':
+ added_part = ''
+ added_part += '\t type \t turbulentDFMInlet;\n'
+ added_part += '\t filterType \t exponential;\n'
+ added_part += f'\t filterFactor \t {4};\n'
+ added_part += f'\t value \t uniform ({wind_speed:.4f} 0 0);\n'
+ added_part += '\t periodicInY \t {};\n'.format('true')
+ added_part += '\t periodicInZ \t {};\n'.format('false')
+ added_part += '\t constMeanU \t {};\n'.format('true')
+ added_part += f'\t Uref \t {wind_speed:.4f};\n'
dict_lines.insert(start_index, added_part)
- ###################### Outlet BC ##############################
-
- start_index = foam.find_keyword_line(dict_lines, "outlet") + 2
- added_part = ""
- added_part += "\t type \t inletOutlet;\n"
- added_part += "\t inletValue \t uniform (0 0 0);\n"
- added_part += "\t value \t uniform (0 0 0);\n"
+ # Outlet BC ##############################
+
+ start_index = foam.find_keyword_line(dict_lines, 'outlet') + 2
+ added_part = ''
+ added_part += '\t type \t inletOutlet;\n'
+ added_part += '\t inletValue \t uniform (0 0 0);\n'
+ added_part += '\t value \t uniform (0 0 0);\n'
dict_lines.insert(start_index, added_part)
-
- ###################### Ground BC ##############################
-
- start_index = foam.find_keyword_line(dict_lines, "ground") + 2
- added_part = ""
- added_part += "\t type \t uniformFixedValue;\n"
- added_part += "\t value \t uniform (0 0 0);\n"
- added_part += "\t uniformValue \t constant (0 0 0);\n"
-
+
+ # Ground BC ##############################
+
+ start_index = foam.find_keyword_line(dict_lines, 'ground') + 2
+ added_part = ''
+ added_part += '\t type \t uniformFixedValue;\n'
+ added_part += '\t value \t uniform (0 0 0);\n'
+ added_part += '\t uniformValue \t constant (0 0 0);\n'
+
dict_lines.insert(start_index, added_part)
-
-
- ###################### Top BC ##############################
-
- start_index = foam.find_keyword_line(dict_lines, "top") + 2
- added_part = ""
- added_part += "\t type {};\n".format(top_BC_type)
-
+
+ # Top BC ##############################
+
+ start_index = foam.find_keyword_line(dict_lines, 'top') + 2
+ added_part = ''
+ added_part += f'\t type {top_BC_type};\n'
+
dict_lines.insert(start_index, added_part)
-
- ###################### Front BC ##############################
-
- start_index = foam.find_keyword_line(dict_lines, "front") + 2
- added_part = ""
- added_part += "\t type \t {};\n".format(sides_BC_type)
-
+
+ # Front BC ##############################
+
+ start_index = foam.find_keyword_line(dict_lines, 'front') + 2
+ added_part = ''
+ added_part += f'\t type \t {sides_BC_type};\n'
+
dict_lines.insert(start_index, added_part)
-
- ###################### Back BC ##############################
-
- start_index = foam.find_keyword_line(dict_lines, "back") + 2
- added_part = ""
- added_part += "\t type {};\n".format(sides_BC_type)
-
+
+ # Back BC ##############################
+
+ start_index = foam.find_keyword_line(dict_lines, 'back') + 2
+ added_part = ''
+ added_part += f'\t type {sides_BC_type};\n'
+
dict_lines.insert(start_index, added_part)
-
- #Write edited dict to file
- write_file_name = case_path + "/0/U"
-
- if os.path.exists(write_file_name):
- os.remove(write_file_name)
-
- output_file = open(write_file_name, "w+")
+ # Write edited dict to file
+ write_file_name = case_path + '/0/U'
+
+ if os.path.exists(write_file_name): # noqa: PTH110
+ os.remove(write_file_name) # noqa: PTH107
+
+ output_file = open(write_file_name, 'w+') # noqa: PLW1514, PTH123, SIM115
for line in dict_lines:
output_file.write(line)
output_file.close()
-
-def write_p_file(input_json_path, template_dict_path, case_path):
- #Read JSON data
- with open(input_json_path + "/EmptyDomainCFD.json") as json_file:
- json_data = json.load(json_file)
+def write_p_file(input_json_path, template_dict_path, case_path): # noqa: D103
+ # Read JSON data
+ with open(input_json_path + '/EmptyDomainCFD.json') as json_file: # noqa: PLW1514, PTH123
+ json_data = json.load(json_file)
# Returns JSON object as a dictionary
- boundary_data = json_data["boundaryConditions"]
-
- sides_BC_type = boundary_data['sidesBoundaryCondition']
- top_BC_type = boundary_data['topBoundaryCondition']
+ boundary_data = json_data['boundaryConditions']
+ sides_BC_type = boundary_data['sidesBoundaryCondition'] # noqa: N806
+ top_BC_type = boundary_data['topBoundaryCondition'] # noqa: N806
- #Open the template file (OpenFOAM file) for manipulation
- dict_file = open(template_dict_path + "/pFileTemplate", "r")
+ # Open the template file (OpenFOAM file) for manipulation
+ dict_file = open(template_dict_path + '/pFileTemplate') # noqa: PLW1514, PTH123, SIM115
dict_lines = dict_file.readlines()
dict_file.close()
-
-
- #BC and initial condition
- p0 = 0.0;
+ # BC and initial condition
+ p0 = 0.0
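+    # For the incompressible solvers used here, p is the kinematic pressure (p/rho),
+    # so a zero reference value is applied at the outlet and as the initial field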
+ # Internal Field #########################
- ##################### Internal Field #########################
-
- start_index = foam.find_keyword_line(dict_lines, "internalField")
- dict_lines[start_index] = "internalField uniform {:.4f};\n".format(p0)
+ start_index = foam.find_keyword_line(dict_lines, 'internalField')
+ dict_lines[start_index] = f'internalField uniform {p0:.4f};\n'
+ # Inlet BC ##############################
+ # Write uniform
+ start_index = foam.find_keyword_line(dict_lines, 'inlet') + 2
+ added_part = ''
+ added_part += '\t type \t zeroGradient;\n'
- ###################### Inlet BC ##############################
- #Write uniform
- start_index = foam.find_keyword_line(dict_lines, "inlet") + 2
- added_part = ""
- added_part += "\t type \t zeroGradient;\n"
-
dict_lines.insert(start_index, added_part)
- ###################### Outlet BC ##############################
-
- start_index = foam.find_keyword_line(dict_lines, "outlet") + 2
- added_part = ""
- added_part += "\t type \t uniformFixedValue;\n"
- added_part += "\t uniformValue \t constant {:.4f};\n".format(p0)
-
+ # Outlet BC ##############################
+
+ start_index = foam.find_keyword_line(dict_lines, 'outlet') + 2
+ added_part = ''
+ added_part += '\t type \t uniformFixedValue;\n'
+ added_part += f'\t uniformValue \t constant {p0:.4f};\n'
+
dict_lines.insert(start_index, added_part)
-
- ###################### Ground BC ##############################
-
- start_index = foam.find_keyword_line(dict_lines, "ground") + 2
- added_part = ""
- added_part += "\t type \t zeroGradient;\n"
-
+
+ # Ground BC ##############################
+
+ start_index = foam.find_keyword_line(dict_lines, 'ground') + 2
+ added_part = ''
+ added_part += '\t type \t zeroGradient;\n'
+
dict_lines.insert(start_index, added_part)
-
-
- ###################### Top BC ##############################
-
- start_index = foam.find_keyword_line(dict_lines, "top") + 2
- added_part = ""
- added_part += "\t type \t {};\n".format(top_BC_type)
-
+
+ # Top BC ##############################
+
+ start_index = foam.find_keyword_line(dict_lines, 'top') + 2
+ added_part = ''
+ added_part += f'\t type \t {top_BC_type};\n'
+
dict_lines.insert(start_index, added_part)
-
- ###################### Front BC ##############################
-
- start_index = foam.find_keyword_line(dict_lines, "front") + 2
- added_part = ""
- added_part += "\t type \t {};\n".format(sides_BC_type)
-
+
+ # Front BC ##############################
+
+ start_index = foam.find_keyword_line(dict_lines, 'front') + 2
+ added_part = ''
+ added_part += f'\t type \t {sides_BC_type};\n'
+
dict_lines.insert(start_index, added_part)
-
- ###################### Back BC ##############################
-
- start_index = foam.find_keyword_line(dict_lines, "back") + 2
- added_part = ""
- added_part += "\t type \t {};\n".format(sides_BC_type)
-
+
+ # Back BC ##############################
+
+ start_index = foam.find_keyword_line(dict_lines, 'back') + 2
+ added_part = ''
+ added_part += f'\t type \t {sides_BC_type};\n'
+
dict_lines.insert(start_index, added_part)
-
-
- #Write edited dict to file
- write_file_name = case_path + "/0/p"
-
- if os.path.exists(write_file_name):
- os.remove(write_file_name)
-
- output_file = open(write_file_name, "w+")
+
+ # Write edited dict to file
+ write_file_name = case_path + '/0/p'
+
+ if os.path.exists(write_file_name): # noqa: PTH110
+ os.remove(write_file_name) # noqa: PTH107
+
+ output_file = open(write_file_name, 'w+') # noqa: PLW1514, PTH123, SIM115
for line in dict_lines:
output_file.write(line)
output_file.close()
-
-def write_nut_file(input_json_path, template_dict_path, case_path):
- #Read JSON data
- with open(input_json_path + "/EmptyDomainCFD.json") as json_file:
- json_data = json.load(json_file)
+
+def write_nut_file(input_json_path, template_dict_path, case_path): # noqa: D103
+ # Read JSON data
+ with open(input_json_path + '/EmptyDomainCFD.json') as json_file: # noqa: PLW1514, PTH123
+ json_data = json.load(json_file)
# Returns JSON object as a dictionary
- boundary_data = json_data["boundaryConditions"]
- wind_data = json_data["windCharacteristics"]
-
- sides_BC_type = boundary_data['sidesBoundaryCondition']
- top_BC_type = boundary_data['topBoundaryCondition']
- ground_BC_type = boundary_data['groundBoundaryCondition']
+ boundary_data = json_data['boundaryConditions']
+ wind_data = json_data['windCharacteristics']
+ sides_BC_type = boundary_data['sidesBoundaryCondition'] # noqa: N806
+ top_BC_type = boundary_data['topBoundaryCondition'] # noqa: N806
+ ground_BC_type = boundary_data['groundBoundaryCondition'] # noqa: N806
# wind_speed = wind_data['roofHeightWindSpeed']
# building_height = wind_data['buildingHeight']
roughness_length = wind_data['aerodynamicRoughnessLength']
-
- #Open the template file (OpenFOAM file) for manipulation
- dict_file = open(template_dict_path + "/nutFileTemplate", "r")
+
+ # Open the template file (OpenFOAM file) for manipulation
+ dict_file = open(template_dict_path + '/nutFileTemplate') # noqa: PLW1514, PTH123, SIM115
dict_lines = dict_file.readlines()
dict_file.close()
-
-
- #BC and initial condition
- nut0 = 0.0
-
- ##################### Internal Field #########################
-
- start_index = foam.find_keyword_line(dict_lines, "internalField")
- dict_lines[start_index] = "internalField uniform {:.4f};\n".format(nut0)
-
-
- ###################### Inlet BC ##############################
- #Write uniform
- start_index = foam.find_keyword_line(dict_lines, "inlet") + 2
- added_part = ""
- added_part += "\t type \t zeroGradient;\n"
-
+
+ # BC and initial condition
+ nut0 = 0.0
+
+ # Internal Field #########################
+
+ start_index = foam.find_keyword_line(dict_lines, 'internalField')
+ dict_lines[start_index] = f'internalField uniform {nut0:.4f};\n'
+
+ # Inlet BC ##############################
+ # Write uniform
+ start_index = foam.find_keyword_line(dict_lines, 'inlet') + 2
+ added_part = ''
+ added_part += '\t type \t zeroGradient;\n'
+
dict_lines.insert(start_index, added_part)
- ###################### Outlet BC ##############################
-
- start_index = foam.find_keyword_line(dict_lines, "outlet") + 2
- added_part = ""
- added_part += "\t type \t uniformFixedValue;\n"
- added_part += "\t uniformValue \t constant {:.4f};\n".format(nut0)
-
+ # Outlet BC ##############################
+
+ start_index = foam.find_keyword_line(dict_lines, 'outlet') + 2
+ added_part = ''
+ added_part += '\t type \t uniformFixedValue;\n'
+ added_part += f'\t uniformValue \t constant {nut0:.4f};\n'
+
dict_lines.insert(start_index, added_part)
-
- ###################### Ground BC ##############################
-
- start_index = foam.find_keyword_line(dict_lines, "ground") + 2
-
- if ground_BC_type == "noSlip":
- added_part = ""
- added_part += "\t type \t zeroGradient;\n"
-
- if ground_BC_type == "roughWallFunction":
- added_part = ""
- added_part += "\t type \t nutkAtmRoughWallFunction;\n"
- added_part += "\t z0 \t uniform {:.4e};\n".format(roughness_length)
- added_part += "\t value \t uniform 0.0;\n"
-
- if ground_BC_type == "smoothWallFunction":
- added_part = ""
- added_part += "\t type \t nutUSpaldingWallFunction;\n"
- added_part += "\t value \t uniform 0;\n"
+ # Ground BC ##############################
+
+ start_index = foam.find_keyword_line(dict_lines, 'ground') + 2
+
+ if ground_BC_type == 'noSlip':
+ added_part = ''
+ added_part += '\t type \t zeroGradient;\n'
+
+ if ground_BC_type == 'roughWallFunction':
+ added_part = ''
+ added_part += '\t type \t nutkAtmRoughWallFunction;\n'
+ added_part += f'\t z0 \t uniform {roughness_length:.4e};\n'
+ added_part += '\t value \t uniform 0.0;\n'
+
+ if ground_BC_type == 'smoothWallFunction':
+ added_part = ''
+ added_part += '\t type \t nutUSpaldingWallFunction;\n'
+ added_part += '\t value \t uniform 0;\n'
dict_lines.insert(start_index, added_part)
-
-
- ###################### Top BC ##############################
-
- start_index = foam.find_keyword_line(dict_lines, "top") + 2
- added_part = ""
- added_part += "\t type \t {};\n".format(top_BC_type)
-
+
+ # Top BC ##############################
+
+ start_index = foam.find_keyword_line(dict_lines, 'top') + 2
+ added_part = ''
+ added_part += f'\t type \t {top_BC_type};\n'
+
dict_lines.insert(start_index, added_part)
-
- ###################### Front BC ##############################
-
- start_index = foam.find_keyword_line(dict_lines, "front") + 2
- added_part = ""
- added_part += "\t type \t {};\n".format(sides_BC_type)
-
+
+ # Front BC ##############################
+
+ start_index = foam.find_keyword_line(dict_lines, 'front') + 2
+ added_part = ''
+ added_part += f'\t type \t {sides_BC_type};\n'
+
dict_lines.insert(start_index, added_part)
-
- ###################### Back BC ################################
-
- start_index = foam.find_keyword_line(dict_lines, "back") + 2
- added_part = ""
- added_part += "\t type \t {};\n".format(sides_BC_type)
-
+
+ # Back BC ################################
+
+ start_index = foam.find_keyword_line(dict_lines, 'back') + 2
+ added_part = ''
+ added_part += f'\t type \t {sides_BC_type};\n'
+
dict_lines.insert(start_index, added_part)
-
-
- #Write edited dict to file
- write_file_name = case_path + "/0/nut"
-
- if os.path.exists(write_file_name):
- os.remove(write_file_name)
-
- output_file = open(write_file_name, "w+")
+
+ # Write edited dict to file
+ write_file_name = case_path + '/0/nut'
+
+ if os.path.exists(write_file_name): # noqa: PTH110
+ os.remove(write_file_name) # noqa: PTH107
+
+ output_file = open(write_file_name, 'w+') # noqa: PLW1514, PTH123, SIM115
for line in dict_lines:
output_file.write(line)
output_file.close()
-def write_epsilon_file(input_json_path, template_dict_path, case_path):
- #Read JSON data
- with open(input_json_path + "/EmptyDomainCFD.json") as json_file:
- json_data = json.load(json_file)
+def write_epsilon_file(input_json_path, template_dict_path, case_path): # noqa: D103
+ # Read JSON data
+ with open(input_json_path + '/EmptyDomainCFD.json') as json_file: # noqa: PLW1514, PTH123
+ json_data = json.load(json_file)
# Returns JSON object as a dictionary
- boundary_data = json_data["boundaryConditions"]
- wind_data = json_data["windCharacteristics"]
-
-
- sides_BC_type = boundary_data['sidesBoundaryCondition']
- top_BC_type = boundary_data['topBoundaryCondition']
- ground_BC_type = boundary_data['groundBoundaryCondition']
+ boundary_data = json_data['boundaryConditions']
+ wind_data = json_data['windCharacteristics']
+
+ sides_BC_type = boundary_data['sidesBoundaryCondition'] # noqa: N806
+ top_BC_type = boundary_data['topBoundaryCondition'] # noqa: N806
+ ground_BC_type = boundary_data['groundBoundaryCondition'] # noqa: N806
wind_speed = wind_data['referenceWindSpeed']
building_height = wind_data['referenceHeight']
roughness_length = wind_data['aerodynamicRoughnessLength']
-
- #Open the template file (OpenFOAM file) for manipulation
- dict_file = open(template_dict_path + "/epsilonFileTemplate", "r")
+
+ # Open the template file (OpenFOAM file) for manipulation
+ dict_file = open(template_dict_path + '/epsilonFileTemplate') # noqa: PLW1514, PTH123, SIM115
dict_lines = dict_file.readlines()
dict_file.close()
-
-
- #BC and initial condition
- epsilon0 = 0.01
-
- ##################### Internal Field #########################
-
- start_index = foam.find_keyword_line(dict_lines, "internalField")
- dict_lines[start_index] = "internalField uniform {:.4f};\n".format(epsilon0)
-
-
- ###################### Inlet BC ##############################
- #Write uniform
- start_index = foam.find_keyword_line(dict_lines, "inlet") + 2
- added_part = ""
- added_part += "\t type \t atmBoundaryLayerInletEpsilon;\n"
- added_part += "\t Uref \t {:.4f};\n".format(wind_speed)
- added_part += "\t Zref \t {:.4f};\n".format(building_height)
- added_part += "\t zDir \t (0.0 0.0 1.0);\n"
- added_part += "\t flowDir \t (1.0 0.0 0.0);\n"
- added_part += "\t z0 \t uniform {:.4e};\n".format(roughness_length)
- added_part += "\t zGround \t uniform 0.0;\n"
-
+
+ # BC and initial condition
+ epsilon0 = 0.01
+
+ # Internal Field #########################
+
+ start_index = foam.find_keyword_line(dict_lines, 'internalField')
+ dict_lines[start_index] = f'internalField uniform {epsilon0:.4f};\n'
+
+ # Inlet BC ##############################
+ # Write uniform
+ start_index = foam.find_keyword_line(dict_lines, 'inlet') + 2
+ added_part = ''
+ added_part += '\t type \t atmBoundaryLayerInletEpsilon;\n'
+ added_part += f'\t Uref \t {wind_speed:.4f};\n'
+ added_part += f'\t Zref \t {building_height:.4f};\n'
+ added_part += '\t zDir \t (0.0 0.0 1.0);\n'
+ added_part += '\t flowDir \t (1.0 0.0 0.0);\n'
+ added_part += f'\t z0 \t uniform {roughness_length:.4e};\n'
+ added_part += '\t zGround \t uniform 0.0;\n'
+
dict_lines.insert(start_index, added_part)
- ###################### Outlet BC ##############################
-
- start_index = foam.find_keyword_line(dict_lines, "outlet") + 2
- added_part = ""
- added_part += "\t type \t inletOutlet;\n"
- added_part += "\t inletValue \t uniform {:.4f};\n".format(epsilon0)
- added_part += "\t value \t uniform {:.4f};\n".format(epsilon0)
-
+ # Outlet BC ##############################
+
+ start_index = foam.find_keyword_line(dict_lines, 'outlet') + 2
+ added_part = ''
+ added_part += '\t type \t inletOutlet;\n'
+ added_part += f'\t inletValue \t uniform {epsilon0:.4f};\n'
+ added_part += f'\t value \t uniform {epsilon0:.4f};\n'
+
dict_lines.insert(start_index, added_part)
-
- ###################### Ground BC ##############################
-
- start_index = foam.find_keyword_line(dict_lines, "ground") + 2
-
- if ground_BC_type == "noSlip":
- added_part = ""
- added_part += "\t type \t zeroGradient;\n"
-
- if ground_BC_type == "roughWallFunction":
- added_part = ""
- added_part += "\t type \t epsilonWallFunction;\n"
- added_part += "\t Cmu \t {:.4f};\n".format(0.09)
- added_part += "\t kappa \t {:.4f};\n".format(0.41)
- added_part += "\t E \t {:.4f};\n".format(9.8)
- added_part += "\t value \t uniform {:.4f};\n".format(epsilon0)
-
- #Note: Should be replaced with smooth wall function for epsilon,
+
+ # Ground BC ##############################
+
+ start_index = foam.find_keyword_line(dict_lines, 'ground') + 2
+
+ if ground_BC_type == 'noSlip':
+ added_part = ''
+ added_part += '\t type \t zeroGradient;\n'
+
+ if ground_BC_type == 'roughWallFunction':
+ added_part = ''
+ added_part += '\t type \t epsilonWallFunction;\n'
+ added_part += f'\t Cmu \t {0.09:.4f};\n'
+ added_part += f'\t kappa \t {0.41:.4f};\n'
+ added_part += f'\t E \t {9.8:.4f};\n'
+ added_part += f'\t value \t uniform {epsilon0:.4f};\n'
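+        # Cmu, kappa and E are the standard k-epsilon wall-function constants
+        # (0.09, von Karman constant 0.41 and roughness parameter 9.8)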
+
+ # Note: Should be replaced with smooth wall function for epsilon,
# now the same with rough wall function.
- if ground_BC_type == "smoothWallFunction":
- added_part = ""
- added_part += "\t type \t epsilonWallFunction;\n"
- added_part += "\t Cmu \t {:.4f};\n".format(0.09)
- added_part += "\t kappa \t {:.4f};\n".format(0.41)
- added_part += "\t E \t {:.4f};\n".format(9.8)
- added_part += "\t value \t uniform {:.4f};\n".format(epsilon0)
+ if ground_BC_type == 'smoothWallFunction':
+ added_part = ''
+ added_part += '\t type \t epsilonWallFunction;\n'
+ added_part += f'\t Cmu \t {0.09:.4f};\n'
+ added_part += f'\t kappa \t {0.41:.4f};\n'
+ added_part += f'\t E \t {9.8:.4f};\n'
+ added_part += f'\t value \t uniform {epsilon0:.4f};\n'
dict_lines.insert(start_index, added_part)
-
-
- ###################### Top BC ##############################
-
- start_index = foam.find_keyword_line(dict_lines, "top") + 2
- added_part = ""
- added_part += "\t type \t {};\n".format(top_BC_type)
-
+
+ # Top BC ##############################
+
+ start_index = foam.find_keyword_line(dict_lines, 'top') + 2
+ added_part = ''
+ added_part += f'\t type \t {top_BC_type};\n'
+
dict_lines.insert(start_index, added_part)
-
- ###################### Front BC ##############################
-
- start_index = foam.find_keyword_line(dict_lines, "front") + 2
- added_part = ""
- added_part += "\t type \t {};\n".format(sides_BC_type)
-
+
+ # Front BC ##############################
+
+ start_index = foam.find_keyword_line(dict_lines, 'front') + 2
+ added_part = ''
+ added_part += f'\t type \t {sides_BC_type};\n'
+
dict_lines.insert(start_index, added_part)
-
- ###################### Back BC ################################
-
- start_index = foam.find_keyword_line(dict_lines, "back") + 2
- added_part = ""
- added_part += "\t type \t {};\n".format(sides_BC_type)
-
+
+ # Back BC ################################
+
+ start_index = foam.find_keyword_line(dict_lines, 'back') + 2
+ added_part = ''
+ added_part += f'\t type \t {sides_BC_type};\n'
+
dict_lines.insert(start_index, added_part)
-
- #Write edited dict to file
- write_file_name = case_path + "/0/epsilon"
-
- if os.path.exists(write_file_name):
- os.remove(write_file_name)
-
- output_file = open(write_file_name, "w+")
+
+ # Write edited dict to file
+ write_file_name = case_path + '/0/epsilon'
+
+ if os.path.exists(write_file_name): # noqa: PTH110
+ os.remove(write_file_name) # noqa: PTH107
+
+ output_file = open(write_file_name, 'w+') # noqa: PLW1514, PTH123, SIM115
for line in dict_lines:
output_file.write(line)
output_file.close()
-def write_k_file(input_json_path, template_dict_path, case_path):
- #Read JSON data
- with open(input_json_path + "/EmptyDomainCFD.json") as json_file:
- json_data = json.load(json_file)
+def write_k_file(input_json_path, template_dict_path, case_path): # noqa: D103
+ # Read JSON data
+ with open(input_json_path + '/EmptyDomainCFD.json') as json_file: # noqa: PLW1514, PTH123
+ json_data = json.load(json_file)
# Returns JSON object as a dictionary
- boundary_data = json_data["boundaryConditions"]
- wind_data = json_data["windCharacteristics"]
-
-
- sides_BC_type = boundary_data['sidesBoundaryCondition']
- top_BC_type = boundary_data['topBoundaryCondition']
- ground_BC_type = boundary_data['groundBoundaryCondition']
+ boundary_data = json_data['boundaryConditions']
+ wind_data = json_data['windCharacteristics']
+
+ sides_BC_type = boundary_data['sidesBoundaryCondition'] # noqa: N806
+ top_BC_type = boundary_data['topBoundaryCondition'] # noqa: N806
+ ground_BC_type = boundary_data['groundBoundaryCondition'] # noqa: N806
wind_speed = wind_data['referenceWindSpeed']
building_height = wind_data['referenceHeight']
roughness_length = wind_data['aerodynamicRoughnessLength']
-
- #Open the template file (OpenFOAM file) for manipulation
- dict_file = open(template_dict_path + "/kFileTemplate", "r")
+
+ # Open the template file (OpenFOAM file) for manipulation
+ dict_file = open(template_dict_path + '/kFileTemplate') # noqa: PLW1514, PTH123, SIM115
dict_lines = dict_file.readlines()
dict_file.close()
-
-
- #BC and initial condition (you may need to scale to model scale)
+
+ # BC and initial condition (you may need to scale to model scale)
# k0 = 1.3 #not in model scale
-
- I = 0.1
- k0 = 1.5*(I*wind_speed)**2
-
- ##################### Internal Field #########################
-
- start_index = foam.find_keyword_line(dict_lines, "internalField")
- dict_lines[start_index] = "internalField \t uniform {:.4f};\n".format(k0)
-
-
- ###################### Inlet BC ##############################
- #Write uniform
- start_index = foam.find_keyword_line(dict_lines, "inlet") + 2
- added_part = ""
- added_part += "\t type \t atmBoundaryLayerInletK;\n"
- added_part += "\t Uref \t {:.4f};\n".format(wind_speed)
- added_part += "\t Zref \t {:.4f};\n".format(building_height)
- added_part += "\t zDir \t (0.0 0.0 1.0);\n"
- added_part += "\t flowDir \t (1.0 0.0 0.0);\n"
- added_part += "\t z0 \t uniform {:.4e};\n".format(roughness_length)
- added_part += "\t zGround \t uniform 0.0;\n"
-
+
+ I = 0.1 # noqa: N806, E741
+ k0 = 1.5 * (I * wind_speed) ** 2
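+    # k0 follows the isotropic-turbulence estimate k = 1.5*(I*Uref)^2,
+    # here with an assumed turbulence intensity I of 10%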
+
+ # Internal Field #########################
+
+ start_index = foam.find_keyword_line(dict_lines, 'internalField')
+ dict_lines[start_index] = f'internalField \t uniform {k0:.4f};\n'
+
+ # Inlet BC ##############################
+ # Write uniform
+ start_index = foam.find_keyword_line(dict_lines, 'inlet') + 2
+ added_part = ''
+ added_part += '\t type \t atmBoundaryLayerInletK;\n'
+ added_part += f'\t Uref \t {wind_speed:.4f};\n'
+ added_part += f'\t Zref \t {building_height:.4f};\n'
+ added_part += '\t zDir \t (0.0 0.0 1.0);\n'
+ added_part += '\t flowDir \t (1.0 0.0 0.0);\n'
+ added_part += f'\t z0 \t uniform {roughness_length:.4e};\n'
+ added_part += '\t zGround \t uniform 0.0;\n'
+
dict_lines.insert(start_index, added_part)
- ###################### Outlet BC ##############################
-
- start_index = foam.find_keyword_line(dict_lines, "outlet") + 2
- added_part = ""
- added_part += "\t type \t inletOutlet;\n"
- added_part += "\t inletValue \t uniform {:.4f};\n".format(k0)
- added_part += "\t value \t uniform {:.4f};\n".format(k0)
-
+ # Outlet BC ##############################
+
+ start_index = foam.find_keyword_line(dict_lines, 'outlet') + 2
+ added_part = ''
+ added_part += '\t type \t inletOutlet;\n'
+ added_part += f'\t inletValue \t uniform {k0:.4f};\n'
+ added_part += f'\t value \t uniform {k0:.4f};\n'
+
dict_lines.insert(start_index, added_part)
-
- ###################### Ground BC ##############################
-
- start_index = foam.find_keyword_line(dict_lines, "ground") + 2
-
- if ground_BC_type == "noSlip":
- added_part = ""
- added_part += "\t type \t zeroGradient;\n"
-
- if ground_BC_type == "smoothWallFunction":
- added_part = ""
- added_part += "\t type \t kqRWallFunction;\n"
- added_part += "\t value \t uniform {:.4f};\n".format(0.0)
-
- if ground_BC_type == "roughWallFunction":
- added_part = ""
- added_part += "\t type \t kqRWallFunction;\n"
- added_part += "\t value \t uniform {:.4f};\n".format(0.0)
+
+ # Ground BC ##############################
+
+ start_index = foam.find_keyword_line(dict_lines, 'ground') + 2
+
+ if ground_BC_type == 'noSlip':
+ added_part = ''
+ added_part += '\t type \t zeroGradient;\n'
+
+ if ground_BC_type == 'smoothWallFunction':
+ added_part = ''
+ added_part += '\t type \t kqRWallFunction;\n'
+ added_part += f'\t value \t uniform {0.0:.4f};\n'
+
+ if ground_BC_type == 'roughWallFunction':
+ added_part = ''
+ added_part += '\t type \t kqRWallFunction;\n'
+ added_part += f'\t value \t uniform {0.0:.4f};\n'
dict_lines.insert(start_index, added_part)
-
-
- ###################### Top BC ##############################
-
- start_index = foam.find_keyword_line(dict_lines, "top") + 2
- added_part = ""
- added_part += "\t type \t {};\n".format(top_BC_type)
-
+
+ # Top BC ##############################
+
+ start_index = foam.find_keyword_line(dict_lines, 'top') + 2
+ added_part = ''
+ added_part += f'\t type \t {top_BC_type};\n'
+
dict_lines.insert(start_index, added_part)
-
- ###################### Front BC ##############################
-
- start_index = foam.find_keyword_line(dict_lines, "front") + 2
- added_part = ""
- added_part += "\t type \t {};\n".format(sides_BC_type)
-
+
+ # Front BC ##############################
+
+ start_index = foam.find_keyword_line(dict_lines, 'front') + 2
+ added_part = ''
+ added_part += f'\t type \t {sides_BC_type};\n'
+
dict_lines.insert(start_index, added_part)
-
- ###################### Back BC ################################
-
- start_index = foam.find_keyword_line(dict_lines, "back") + 2
- added_part = ""
- added_part += "\t type \t {};\n".format(sides_BC_type)
-
+
+ # Back BC ################################
+
+ start_index = foam.find_keyword_line(dict_lines, 'back') + 2
+ added_part = ''
+ added_part += f'\t type \t {sides_BC_type};\n'
+
dict_lines.insert(start_index, added_part)
-
- #Write edited dict to file
- write_file_name = case_path + "/0/k"
-
- if os.path.exists(write_file_name):
- os.remove(write_file_name)
-
- output_file = open(write_file_name, "w+")
+
+ # Write edited dict to file
+ write_file_name = case_path + '/0/k'
+
+ if os.path.exists(write_file_name): # noqa: PTH110
+ os.remove(write_file_name) # noqa: PTH107
+
+ output_file = open(write_file_name, 'w+') # noqa: PLW1514, PTH123, SIM115
for line in dict_lines:
output_file.write(line)
output_file.close()
-
-
-def write_controlDict_file(input_json_path, template_dict_path, case_path):
- #Read JSON data
- with open(input_json_path + "/EmptyDomainCFD.json") as json_file:
- json_data = json.load(json_file)
+
+def write_controlDict_file(input_json_path, template_dict_path, case_path): # noqa: N802, D103
+ # Read JSON data
+ with open(input_json_path + '/EmptyDomainCFD.json') as json_file: # noqa: PLW1514, PTH123
+ json_data = json.load(json_file)
# Returns JSON object as a dictionary
- ns_data = json_data["numericalSetup"]
- rm_data = json_data["resultMonitoring"]
-
+ ns_data = json_data['numericalSetup']
+ rm_data = json_data['resultMonitoring']
+
solver_type = ns_data['solverType']
duration = ns_data['duration']
time_step = ns_data['timeStep']
max_courant_number = ns_data['maxCourantNumber']
adjust_time_step = ns_data['adjustTimeStep']
-
+
monitor_wind_profiles = rm_data['monitorWindProfile']
monitor_vtk_planes = rm_data['monitorVTKPlane']
wind_profiles = rm_data['windProfiles']
vtk_planes = rm_data['vtkPlanes']
-
- # Need to change this for
- max_delta_t = 10*time_step
-
- #Write 10 times
+ # Need to change this for
+ max_delta_t = 10 * time_step
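+    # Cap the adaptive time step at 10x the nominal step (written to maxDeltaT below)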
+
+ # Write 10 times
write_frequency = 10.0
- write_interval_time = duration/write_frequency
- write_interval_count = int(write_interval_time/time_step)
- purge_write = 3
-
- #Open the template file (OpenFOAM file) for manipulation
- dict_file = open(template_dict_path + "/controlDictTemplate", "r")
+ write_interval_time = duration / write_frequency
+ write_interval_count = int(write_interval_time / time_step)
+ purge_write = 3
+
+ # Open the template file (OpenFOAM file) for manipulation
+ dict_file = open(template_dict_path + '/controlDictTemplate') # noqa: PLW1514, PTH123, SIM115
dict_lines = dict_file.readlines()
dict_file.close()
-
- #Write application type
- start_index = foam.find_keyword_line(dict_lines, "application")
- dict_lines[start_index] = "application \t{};\n".format(solver_type)
-
- #Write end time
- start_index = foam.find_keyword_line(dict_lines, "endTime")
- dict_lines[start_index] = "endTime \t{:.6f};\n".format(duration)
-
- #Write time step time
- start_index = foam.find_keyword_line(dict_lines, "deltaT")
- dict_lines[start_index] = "deltaT \t\t{:.6f};\n".format(time_step)
-
- #Write writeControl
- start_index = foam.find_keyword_line(dict_lines, "writeControl")
- if solver_type=="pimpleFoam" and adjust_time_step:
- dict_lines[start_index] = "writeControl \t{};\n".format("adjustableRunTime")
+
+ # Write application type
+ start_index = foam.find_keyword_line(dict_lines, 'application')
+ dict_lines[start_index] = f'application \t{solver_type};\n'
+
+ # Write end time
+ start_index = foam.find_keyword_line(dict_lines, 'endTime')
+ dict_lines[start_index] = f'endTime \t{duration:.6f};\n'
+
+ # Write time step time
+ start_index = foam.find_keyword_line(dict_lines, 'deltaT')
+ dict_lines[start_index] = f'deltaT \t\t{time_step:.6f};\n'
+
+ # Write writeControl
+ start_index = foam.find_keyword_line(dict_lines, 'writeControl')
+ if solver_type == 'pimpleFoam' and adjust_time_step:
+ dict_lines[start_index] = 'writeControl \t{};\n'.format('adjustableRunTime')
else:
- dict_lines[start_index] = "writeControl \t\t{};\n".format("timeStep")
-
- #Write adjustable time step or not
- start_index = foam.find_keyword_line(dict_lines, "adjustTimeStep")
- dict_lines[start_index] = "adjustTimeStep \t\t{};\n".format("yes" if adjust_time_step else "no")
-
- #Write writeInterval
- start_index = foam.find_keyword_line(dict_lines, "writeInterval")
- if solver_type=="pimpleFoam" and adjust_time_step:
- dict_lines[start_index] = "writeInterval \t{:.6f};\n".format(write_interval_time)
+ dict_lines[start_index] = 'writeControl \t\t{};\n'.format('timeStep')
+
+ # Write adjustable time step or not
+ start_index = foam.find_keyword_line(dict_lines, 'adjustTimeStep')
+ dict_lines[start_index] = 'adjustTimeStep \t\t{};\n'.format(
+ 'yes' if adjust_time_step else 'no'
+ )
+
+ # Write writeInterval
+ start_index = foam.find_keyword_line(dict_lines, 'writeInterval')
+ if solver_type == 'pimpleFoam' and adjust_time_step:
+ dict_lines[start_index] = f'writeInterval \t{write_interval_time:.6f};\n'
else:
- dict_lines[start_index] = "writeInterval \t{};\n".format(write_interval_count)
-
- #Write maxCo
- start_index = foam.find_keyword_line(dict_lines, "maxCo")
- dict_lines[start_index] = "maxCo \t{:.2f};\n".format(max_courant_number)
-
- #Write maximum time step
- start_index = foam.find_keyword_line(dict_lines, "maxDeltaT")
- dict_lines[start_index] = "maxDeltaT \t{:.6f};\n".format(max_delta_t)
-
-
- #Write purge write interval
- start_index = foam.find_keyword_line(dict_lines, "purgeWrite")
- dict_lines[start_index] = "purgeWrite \t{};\n".format(purge_write)
-
- ########################### Function Objects ##############################
-
- #Find function object location
- start_index = foam.find_keyword_line(dict_lines, "functions") + 2
-
- #Write wind profile monitoring functionObjects
+ dict_lines[start_index] = f'writeInterval \t{write_interval_count};\n'
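+    # With adjustableRunTime the interval is a physical time in seconds;
+    # with timeStep control it is a number of time steps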
+
+ # Write maxCo
+ start_index = foam.find_keyword_line(dict_lines, 'maxCo')
+ dict_lines[start_index] = f'maxCo \t{max_courant_number:.2f};\n'
+
+ # Write maximum time step
+ start_index = foam.find_keyword_line(dict_lines, 'maxDeltaT')
+ dict_lines[start_index] = f'maxDeltaT \t{max_delta_t:.6f};\n'
+
+ # Write purge write interval
+ start_index = foam.find_keyword_line(dict_lines, 'purgeWrite')
+ dict_lines[start_index] = f'purgeWrite \t{purge_write};\n'
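+    # purgeWrite 3 keeps only the three most recently written time directories on disk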
+
+ # Function Objects ##############################
+
+ # Find function object location
+ start_index = foam.find_keyword_line(dict_lines, 'functions') + 2
+
+ # Write wind profile monitoring functionObjects
if monitor_wind_profiles:
- added_part = ""
+ added_part = ''
for prof in wind_profiles:
- added_part += " #includeFunc {}\n".format(prof["name"])
+ added_part += ' #includeFunc {}\n'.format(prof['name'])
dict_lines.insert(start_index, added_part)
-
- #Write VTK sampling sampling points
+
+    # Write VTK sampling points
if monitor_vtk_planes:
- added_part = ""
+ added_part = ''
for pln in vtk_planes:
- added_part += " #includeFunc {}\n".format(pln["name"])
+ added_part += ' #includeFunc {}\n'.format(pln['name'])
dict_lines.insert(start_index, added_part)
-
-
- #Write edited dict to file
- write_file_name = case_path + "/system/controlDict"
-
- if os.path.exists(write_file_name):
- os.remove(write_file_name)
-
- output_file = open(write_file_name, "w+")
+
+ # Write edited dict to file
+ write_file_name = case_path + '/system/controlDict'
+
+ if os.path.exists(write_file_name): # noqa: PTH110
+ os.remove(write_file_name) # noqa: PTH107
+
+ output_file = open(write_file_name, 'w+') # noqa: PLW1514, PTH123, SIM115
for line in dict_lines:
output_file.write(line)
output_file.close()
-
-def write_fvSolution_file(input_json_path, template_dict_path, case_path):
- #Read JSON data
- with open(input_json_path + "/EmptyDomainCFD.json") as json_file:
- json_data = json.load(json_file)
+
+def write_fvSolution_file(input_json_path, template_dict_path, case_path): # noqa: N802, D103
+ # Read JSON data
+ with open(input_json_path + '/EmptyDomainCFD.json') as json_file: # noqa: PLW1514, PTH123
+ json_data = json.load(json_file)
# Returns JSON object as a dictionary
- ns_data = json_data["numericalSetup"]
-
+ ns_data = json_data['numericalSetup']
+
json_file.close()
-
+
num_non_orthogonal_correctors = ns_data['numNonOrthogonalCorrectors']
num_correctors = ns_data['numCorrectors']
num_outer_correctors = ns_data['numOuterCorrectors']
-
- #Open the template file (OpenFOAM file) for manipulation
- dict_file = open(template_dict_path + "/fvSolutionTemplate", "r")
+
+ # Open the template file (OpenFOAM file) for manipulation
+ dict_file = open(template_dict_path + '/fvSolutionTemplate') # noqa: PLW1514, PTH123, SIM115
dict_lines = dict_file.readlines()
dict_file.close()
-
- #Write simpleFoam options
- start_index = foam.find_keyword_line(dict_lines, "SIMPLE") + 2
- added_part = ""
- added_part += " nNonOrthogonalCorrectors \t{};\n".format(num_non_orthogonal_correctors)
+ # Write simpleFoam options
+ start_index = foam.find_keyword_line(dict_lines, 'SIMPLE') + 2
+ added_part = ''
+ added_part += (
+ f' nNonOrthogonalCorrectors \t{num_non_orthogonal_correctors};\n'
+ )
dict_lines.insert(start_index, added_part)
-
- #Write pimpleFoam options
- start_index = foam.find_keyword_line(dict_lines, "PIMPLE") + 2
- added_part = ""
- added_part += " nOuterCorrectors \t{};\n".format(num_outer_correctors)
- added_part += " nCorrectors \t{};\n".format(num_correctors)
- added_part += " nNonOrthogonalCorrectors \t{};\n".format(num_non_orthogonal_correctors)
+ # Write pimpleFoam options
+ start_index = foam.find_keyword_line(dict_lines, 'PIMPLE') + 2
+ added_part = ''
+ added_part += f' nOuterCorrectors \t{num_outer_correctors};\n'
+ added_part += f' nCorrectors \t{num_correctors};\n'
+ added_part += (
+ f' nNonOrthogonalCorrectors \t{num_non_orthogonal_correctors};\n'
+ )
dict_lines.insert(start_index, added_part)
-
- #Write pisoFoam options
- start_index = foam.find_keyword_line(dict_lines, "PISO") + 2
- added_part = ""
- added_part += " nCorrectors \t{};\n".format(num_correctors)
- added_part += " nNonOrthogonalCorrectors \t{};\n".format(num_non_orthogonal_correctors)
+ # Write pisoFoam options
+ start_index = foam.find_keyword_line(dict_lines, 'PISO') + 2
+ added_part = ''
+ added_part += f' nCorrectors \t{num_correctors};\n'
+ added_part += (
+ f' nNonOrthogonalCorrectors \t{num_non_orthogonal_correctors};\n'
+ )
dict_lines.insert(start_index, added_part)
-
-
- #Write edited dict to file
- write_file_name = case_path + "/system/fvSolution"
-
- if os.path.exists(write_file_name):
- os.remove(write_file_name)
-
- output_file = open(write_file_name, "w+")
+
+ # Write edited dict to file
+ write_file_name = case_path + '/system/fvSolution'
+
+ if os.path.exists(write_file_name): # noqa: PTH110
+ os.remove(write_file_name) # noqa: PTH107
+
+ output_file = open(write_file_name, 'w+') # noqa: PLW1514, PTH123, SIM115
for line in dict_lines:
output_file.write(line)
- output_file.close()
-
+ output_file.close()
-def write_pressure_probes_file(input_json_path, template_dict_path, case_path):
- #Read JSON data
- with open(input_json_path + "/EmptyDomainCFD.json") as json_file:
+def write_pressure_probes_file(input_json_path, template_dict_path, case_path): # noqa: D103
+ # Read JSON data
+ with open(input_json_path + '/EmptyDomainCFD.json') as json_file: # noqa: PLW1514, PTH123
json_data = json.load(json_file)
# Returns JSON object as a dictionary
- rm_data = json_data["resultMonitoring"]
+ rm_data = json_data['resultMonitoring']
pressure_sampling_points = rm_data['pressureSamplingPoints']
pressure_write_interval = rm_data['pressureWriteInterval']
- #Open the template file (OpenFOAM file) for manipulation
- dict_file = open(template_dict_path + "/probeTemplate", "r")
+ # Open the template file (OpenFOAM file) for manipulation
+ dict_file = open(template_dict_path + '/probeTemplate') # noqa: PLW1514, PTH123, SIM115
dict_lines = dict_file.readlines()
dict_file.close()
-
-
- #Write writeInterval
- start_index = foam.find_keyword_line(dict_lines, "writeInterval")
- dict_lines[start_index] = "writeInterval \t{};\n".format(pressure_write_interval)
-
- #Write fields to be motored
- start_index = foam.find_keyword_line(dict_lines, "fields")
- dict_lines[start_index] = "fields \t\t(p);\n"
-
- start_index = foam.find_keyword_line(dict_lines, "probeLocations") + 2
-
- added_part = ""
-
+
+ # Write writeInterval
+ start_index = foam.find_keyword_line(dict_lines, 'writeInterval')
+ dict_lines[start_index] = f'writeInterval \t{pressure_write_interval};\n'
+
+    # Write fields to be monitored
+ start_index = foam.find_keyword_line(dict_lines, 'fields')
+ dict_lines[start_index] = 'fields \t\t(p);\n'
+
+ start_index = foam.find_keyword_line(dict_lines, 'probeLocations') + 2
+
+ added_part = ''
+
for i in range(len(pressure_sampling_points)):
- added_part += " ({:.6f} {:.6f} {:.6f})\n".format(pressure_sampling_points[i][0], pressure_sampling_points[i][1], pressure_sampling_points[i][2])
-
+ added_part += f' ({pressure_sampling_points[i][0]:.6f} {pressure_sampling_points[i][1]:.6f} {pressure_sampling_points[i][2]:.6f})\n'
+
dict_lines.insert(start_index, added_part)
- #Write edited dict to file
- write_file_name = case_path + "/system/pressureSamplingPoints"
-
- if os.path.exists(write_file_name):
- os.remove(write_file_name)
-
- output_file = open(write_file_name, "w+")
+ # Write edited dict to file
+ write_file_name = case_path + '/system/pressureSamplingPoints'
+
+ if os.path.exists(write_file_name): # noqa: PTH110
+ os.remove(write_file_name) # noqa: PTH107
+
+ output_file = open(write_file_name, 'w+') # noqa: PLW1514, PTH123, SIM115
for line in dict_lines:
output_file.write(line)
output_file.close()
-
-
-
-def write_wind_profiles_file(input_json_path, template_dict_path, case_path):
- #Read JSON data
- with open(input_json_path + "/EmptyDomainCFD.json") as json_file:
- json_data = json.load(json_file)
+
+def write_wind_profiles_file(input_json_path, template_dict_path, case_path): # noqa: C901, D103
+ # Read JSON data
+ with open(input_json_path + '/EmptyDomainCFD.json') as json_file: # noqa: PLW1514, PTH123
+ json_data = json.load(json_file)
# Returns JSON object as a dictionary
- rm_data = json_data["resultMonitoring"]
+ rm_data = json_data['resultMonitoring']
- ns_data = json_data["numericalSetup"]
+ ns_data = json_data['numericalSetup']
solver_type = ns_data['solverType']
time_step = ns_data['timeStep']
@@ -1093,516 +1061,506 @@ def write_wind_profiles_file(input_json_path, template_dict_path, case_path):
write_interval = rm_data['profileWriteInterval']
start_time = rm_data['profileStartTime']
- if rm_data['monitorWindProfile'] == False:
- return
-
- if len(wind_profiles)==0:
+ if rm_data['monitorWindProfile'] == False: # noqa: E712
return
- #Write dict files for wind profiles
+ if len(wind_profiles) == 0:
+ return
+
+ # Write dict files for wind profiles
for prof in wind_profiles:
- #Open the template file (OpenFOAM file) for manipulation
- dict_file = open(template_dict_path + "/probeTemplate", "r")
+ # Open the template file (OpenFOAM file) for manipulation
+ dict_file = open(template_dict_path + '/probeTemplate') # noqa: PLW1514, PTH123, SIM115
dict_lines = dict_file.readlines()
dict_file.close()
-
- #Write writeControl
- start_index = foam.find_keyword_line(dict_lines, "writeControl")
- if solver_type=="pimpleFoam":
- dict_lines[start_index] = " writeControl \t{};\n".format("adjustableRunTime")
- else:
- dict_lines[start_index] = " writeControl \t{};\n".format("timeStep")
- #Write writeInterval
- start_index = foam.find_keyword_line(dict_lines, "writeInterval")
- if solver_type=="pimpleFoam":
- dict_lines[start_index] = " writeInterval \t{:.6f};\n".format(write_interval*time_step)
+ # Write writeControl
+ start_index = foam.find_keyword_line(dict_lines, 'writeControl')
+ if solver_type == 'pimpleFoam':
+ dict_lines[start_index] = ' writeControl \t{};\n'.format(
+ 'adjustableRunTime'
+ )
else:
- dict_lines[start_index] = " writeInterval \t{};\n".format(write_interval)
-
- #Write start time for the probes
- start_index = foam.find_keyword_line(dict_lines, "timeStart")
- dict_lines[start_index] = " timeStart \t\t{:.6f};\n".format(start_time)
-
- #Write name of the profile
- name = prof["name"]
- start_index = foam.find_keyword_line(dict_lines, "profileName")
- dict_lines[start_index] = "{}\n".format(name)
-
- #Write field type
- field_type = prof["field"]
- start_index = foam.find_keyword_line(dict_lines, "fields")
-
- if field_type=="Velocity":
- dict_lines[start_index] = " fields \t\t({});\n".format("U")
- if field_type=="Pressure":
- dict_lines[start_index] = " fields \t\t({});\n".format("p")
-
- #Write point coordinates
- start_x = prof["startX"]
- start_y = prof["startY"]
- start_z = prof["startZ"]
-
- end_x = prof["endX"]
- end_y = prof["endY"]
- end_z = prof["endZ"]
- n_points = prof["nPoints"]
-
- dx = (end_x - start_x)/n_points
- dy = (end_y - start_y)/n_points
- dz = (end_z - start_z)/n_points
-
- #Write locations of the probes
- start_index = foam.find_keyword_line(dict_lines, "probeLocations") + 2
- added_part = ""
-
- for pi in range(n_points):
- added_part += " ({:.6f} {:.6f} {:.6f})\n".format(start_x + pi*dx, start_y + pi*dy, start_z + pi*dz)
-
- dict_lines.insert(start_index, added_part)
-
- #Write edited dict to file
- write_file_name = case_path + "/system/" + name
-
- if os.path.exists(write_file_name):
- os.remove(write_file_name)
-
- output_file = open(write_file_name, "w+")
+ dict_lines[start_index] = ' writeControl \t{};\n'.format('timeStep')
+
+ # Write writeInterval
+ start_index = foam.find_keyword_line(dict_lines, 'writeInterval')
+ if solver_type == 'pimpleFoam':
+ dict_lines[start_index] = (
+ f' writeInterval \t{write_interval * time_step:.6f};\n'
+ )
+ else:
+ dict_lines[start_index] = f' writeInterval \t{write_interval};\n'
+
+ # Write start time for the probes
+ start_index = foam.find_keyword_line(dict_lines, 'timeStart')
+ dict_lines[start_index] = f' timeStart \t\t{start_time:.6f};\n'
+
+ # Write name of the profile
+ name = prof['name']
+ start_index = foam.find_keyword_line(dict_lines, 'profileName')
+ dict_lines[start_index] = f'{name}\n'
+
+ # Write field type
+ field_type = prof['field']
+ start_index = foam.find_keyword_line(dict_lines, 'fields')
+
+ if field_type == 'Velocity':
+ dict_lines[start_index] = ' fields \t\t({});\n'.format('U')
+ if field_type == 'Pressure':
+ dict_lines[start_index] = ' fields \t\t({});\n'.format('p')
+
+ # Write point coordinates
+ start_x = prof['startX']
+ start_y = prof['startY']
+ start_z = prof['startZ']
+
+ end_x = prof['endX']
+ end_y = prof['endY']
+ end_z = prof['endZ']
+ n_points = prof['nPoints']
+
+ dx = (end_x - start_x) / n_points
+ dy = (end_y - start_y) / n_points
+ dz = (end_z - start_z) / n_points
+
+ # Write locations of the probes
+ start_index = foam.find_keyword_line(dict_lines, 'probeLocations') + 2
+ added_part = ''
+
+ for pi in range(n_points):
+ added_part += f' ({start_x + pi * dx:.6f} {start_y + pi * dy:.6f} {start_z + pi * dz:.6f})\n'
+
+ dict_lines.insert(start_index, added_part)
+
+ # Write edited dict to file
+ write_file_name = case_path + '/system/' + name
+
+ if os.path.exists(write_file_name): # noqa: PTH110
+ os.remove(write_file_name) # noqa: PTH107
+
+ output_file = open(write_file_name, 'w+') # noqa: PLW1514, PTH123, SIM115
for line in dict_lines:
output_file.write(line)
output_file.close()
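Note how the probe locations are laid out: the segment from (startX, startY, startZ) to (endX, endY, endZ) is divided into nPoints steps of size dx = (end - start) / n_points, so the end point itself is not sampled. A small standalone illustration with made-up values:

    start_x, end_x, n_points = 0.0, 10.0, 5
    dx = (end_x - start_x) / n_points
    probe_x = [start_x + i * dx for i in range(n_points)]  # [0.0, 2.0, 4.0, 6.0, 8.0]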
-
-def write_vtk_plane_file(input_json_path, template_dict_path, case_path):
- #Read JSON data
- with open(input_json_path + "/EmptyDomainCFD.json") as json_file:
- json_data = json.load(json_file)
+
+def write_vtk_plane_file(input_json_path, template_dict_path, case_path): # noqa: C901, D103
+ # Read JSON data
+ with open(input_json_path + '/EmptyDomainCFD.json') as json_file: # noqa: PLW1514, PTH123
+ json_data = json.load(json_file)
# Returns JSON object as a dictionary
- rm_data = json_data["resultMonitoring"]
- ns_data = json_data["numericalSetup"]
+ rm_data = json_data['resultMonitoring']
+ ns_data = json_data['numericalSetup']
solver_type = ns_data['solverType']
time_step = ns_data['timeStep']
-
vtk_planes = rm_data['vtkPlanes']
write_interval = rm_data['vtkWriteInterval']
- if rm_data['monitorVTKPlane'] == False:
- return
-
- if len(vtk_planes)==0:
+ if rm_data['monitorVTKPlane'] == False: # noqa: E712
return
- #Write dict files for wind profiles
+ if len(vtk_planes) == 0:
+ return
+
+    # Write dict files for VTK planes
for pln in vtk_planes:
- #Open the template file (OpenFOAM file) for manipulation
- dict_file = open(template_dict_path + "/vtkPlaneTemplate", "r")
+ # Open the template file (OpenFOAM file) for manipulation
+ dict_file = open(template_dict_path + '/vtkPlaneTemplate') # noqa: PLW1514, PTH123, SIM115
dict_lines = dict_file.readlines()
dict_file.close()
-
- #Write writeControl
- start_index = foam.find_keyword_line(dict_lines, "writeControl")
- if solver_type=="pimpleFoam":
- dict_lines[start_index] = " writeControl \t{};\n".format("adjustableRunTime")
- else:
- dict_lines[start_index] = " writeControl \t{};\n".format("timeStep")
- #Write writeInterval
- start_index = foam.find_keyword_line(dict_lines, "writeInterval")
- if solver_type=="pimpleFoam":
- dict_lines[start_index] = " writeInterval \t{:.6f};\n".format(write_interval*time_step)
+ # Write writeControl
+ start_index = foam.find_keyword_line(dict_lines, 'writeControl')
+ if solver_type == 'pimpleFoam':
+ dict_lines[start_index] = ' writeControl \t{};\n'.format(
+ 'adjustableRunTime'
+ )
else:
- dict_lines[start_index] = " writeInterval \t{};\n".format(write_interval)
+ dict_lines[start_index] = ' writeControl \t{};\n'.format('timeStep')
+
+ # Write writeInterval
+ start_index = foam.find_keyword_line(dict_lines, 'writeInterval')
+ if solver_type == 'pimpleFoam':
+ dict_lines[start_index] = (
+ f' writeInterval \t{write_interval * time_step:.6f};\n'
+ )
+ else:
+ dict_lines[start_index] = f' writeInterval \t{write_interval};\n'
- #Write start and end time for the section
+ # Write start and end time for the section
start_time = pln['startTime']
end_time = pln['endTime']
- start_index = foam.find_keyword_line(dict_lines, "timeStart")
- dict_lines[start_index] = " timeStart \t\t{:.6f};\n".format(start_time)
-
- start_index = foam.find_keyword_line(dict_lines, "timeEnd")
- dict_lines[start_index] = " timeEnd \t\t{:.6f};\n".format(end_time)
-
- #Write name of the profile
- name = pln["name"]
- start_index = foam.find_keyword_line(dict_lines, "planeName")
- dict_lines[start_index] = "{}\n".format(name)
-
- #Write field type
- field_type = pln["field"]
- start_index = foam.find_keyword_line(dict_lines, "fields")
-
- if field_type=="Velocity":
- dict_lines[start_index] = " fields \t\t({});\n".format("U")
- if field_type=="Pressure":
- dict_lines[start_index] = " fields \t\t({});\n".format("p")
-
- #Write normal and point coordinates
- point_x = pln["pointX"]
- point_y = pln["pointY"]
- point_z = pln["pointZ"]
-
- normal_axis = pln["normalAxis"]
-
- start_index = foam.find_keyword_line(dict_lines, "point")
- dict_lines[start_index] = "\t point\t\t({:.6f} {:.6f} {:.6f});\n".format(point_x, point_y, point_z)
-
- start_index = foam.find_keyword_line(dict_lines, "normal")
- if normal_axis=="X":
- dict_lines[start_index] = "\t normal\t\t({} {} {});\n".format(1, 0, 0)
- if normal_axis=="Y":
- dict_lines[start_index] = "\t normal\t\t({} {} {});\n".format(0, 1, 0)
- if normal_axis=="Z":
- dict_lines[start_index] = "\t normal\t\t({} {} {});\n".format(0, 0, 1)
-
- #Write edited dict to file
- write_file_name = case_path + "/system/" + name
-
- if os.path.exists(write_file_name):
- os.remove(write_file_name)
-
- output_file = open(write_file_name, "w+")
+ start_index = foam.find_keyword_line(dict_lines, 'timeStart')
+ dict_lines[start_index] = f' timeStart \t\t{start_time:.6f};\n'
+
+ start_index = foam.find_keyword_line(dict_lines, 'timeEnd')
+ dict_lines[start_index] = f' timeEnd \t\t{end_time:.6f};\n'
+
+        # Write name of the plane
+ name = pln['name']
+ start_index = foam.find_keyword_line(dict_lines, 'planeName')
+ dict_lines[start_index] = f'{name}\n'
+
+ # Write field type
+ field_type = pln['field']
+ start_index = foam.find_keyword_line(dict_lines, 'fields')
+
+ if field_type == 'Velocity':
+ dict_lines[start_index] = ' fields \t\t({});\n'.format('U')
+ if field_type == 'Pressure':
+ dict_lines[start_index] = ' fields \t\t({});\n'.format('p')
+
+ # Write normal and point coordinates
+ point_x = pln['pointX']
+ point_y = pln['pointY']
+ point_z = pln['pointZ']
+
+ normal_axis = pln['normalAxis']
+
+ start_index = foam.find_keyword_line(dict_lines, 'point')
+ dict_lines[start_index] = (
+ f'\t point\t\t({point_x:.6f} {point_y:.6f} {point_z:.6f});\n'
+ )
+
+ start_index = foam.find_keyword_line(dict_lines, 'normal')
+ if normal_axis == 'X':
+ dict_lines[start_index] = f'\t normal\t\t({1} {0} {0});\n'
+ if normal_axis == 'Y':
+ dict_lines[start_index] = f'\t normal\t\t({0} {1} {0});\n'
+ if normal_axis == 'Z':
+ dict_lines[start_index] = f'\t normal\t\t({0} {0} {1});\n'
+
+ # Write edited dict to file
+ write_file_name = case_path + '/system/' + name
+
+ if os.path.exists(write_file_name): # noqa: PTH110
+ os.remove(write_file_name) # noqa: PTH107
+
+ output_file = open(write_file_name, 'w+') # noqa: PLW1514, PTH123, SIM115
for line in dict_lines:
output_file.write(line)
output_file.close()
-
-
-def write_momentumTransport_file(input_json_path, template_dict_path, case_path):
- #Read JSON data
- with open(input_json_path + "/EmptyDomainCFD.json") as json_file:
- json_data = json.load(json_file)
+
+def write_momentumTransport_file(input_json_path, template_dict_path, case_path): # noqa: N802, D103
+ # Read JSON data
+ with open(input_json_path + '/EmptyDomainCFD.json') as json_file: # noqa: PLW1514, PTH123
+ json_data = json.load(json_file)
# Returns JSON object as a dictionary
- turb_data = json_data["turbulenceModeling"]
-
+ turb_data = json_data['turbulenceModeling']
+
simulation_type = turb_data['simulationType']
- RANS_type = turb_data['RANSModelType']
- LES_type = turb_data['LESModelType']
- DES_type = turb_data['DESModelType']
+ RANS_type = turb_data['RANSModelType'] # noqa: N806
+ LES_type = turb_data['LESModelType'] # noqa: N806
+ DES_type = turb_data['DESModelType'] # noqa: N806
-
- #Open the template file (OpenFOAM file) for manipulation
- dict_file = open(template_dict_path + "/momentumTransportTemplate", "r")
+ # Open the template file (OpenFOAM file) for manipulation
+ dict_file = open(template_dict_path + '/momentumTransportTemplate') # noqa: PLW1514, PTH123, SIM115
dict_lines = dict_file.readlines()
dict_file.close()
-
- #Write type of the simulation
- start_index = foam.find_keyword_line(dict_lines, "simulationType")
- dict_lines[start_index] = "simulationType \t{};\n".format("RAS" if simulation_type=="RANS" else simulation_type)
-
- if simulation_type=="RANS":
- #Write RANS model type
- start_index = foam.find_keyword_line(dict_lines, "RAS") + 2
- added_part = " model \t{};\n".format(RANS_type)
+
+ # Write type of the simulation
+ start_index = foam.find_keyword_line(dict_lines, 'simulationType')
+ dict_lines[start_index] = 'simulationType \t{};\n'.format(
+ 'RAS' if simulation_type == 'RANS' else simulation_type
+ )
+
+ if simulation_type == 'RANS':
+ # Write RANS model type
+ start_index = foam.find_keyword_line(dict_lines, 'RAS') + 2
+ added_part = f' model \t{RANS_type};\n'
dict_lines.insert(start_index, added_part)
-
- elif simulation_type=="LES":
- #Write LES SGS model type
- start_index = foam.find_keyword_line(dict_lines, "LES") + 2
- added_part = " model \t{};\n".format(LES_type)
+
+ elif simulation_type == 'LES':
+ # Write LES SGS model type
+ start_index = foam.find_keyword_line(dict_lines, 'LES') + 2
+ added_part = f' model \t{LES_type};\n'
dict_lines.insert(start_index, added_part)
-
- elif simulation_type=="DES":
- #Write DES model type
- start_index = foam.find_keyword_line(dict_lines, "LES") + 2
- added_part = " model \t{};\n".format(DES_type)
+
+ elif simulation_type == 'DES':
+ # Write DES model type
+ start_index = foam.find_keyword_line(dict_lines, 'LES') + 2
+ added_part = f' model \t{DES_type};\n'
dict_lines.insert(start_index, added_part)
- #Write edited dict to file
- write_file_name = case_path + "/constant/momentumTransport"
-
- if os.path.exists(write_file_name):
- os.remove(write_file_name)
-
- output_file = open(write_file_name, "w+")
+ # Write edited dict to file
+ write_file_name = case_path + '/constant/momentumTransport'
+
+ if os.path.exists(write_file_name): # noqa: PTH110
+ os.remove(write_file_name) # noqa: PTH107
+
+ output_file = open(write_file_name, 'w+') # noqa: PLW1514, PTH123, SIM115
for line in dict_lines:
output_file.write(line)
output_file.close()
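The simulationType handling above reflects OpenFOAM's naming: RANS runs are written under the 'RAS' keyword, LES and DES pass through unchanged, and the DES model name is placed inside the LES block. A compact restatement of the selection logic (values illustrative only):

    simulation_type = 'RANS'  # illustrative
    keyword = 'RAS' if simulation_type == 'RANS' else simulation_type  # -> 'RAS'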
-
-def write_physicalProperties_file(input_json_path, template_dict_path, case_path):
- #Read JSON data
- with open(input_json_path + "/EmptyDomainCFD.json") as json_file:
- json_data = json.load(json_file)
+
+def write_physicalProperties_file(input_json_path, template_dict_path, case_path): # noqa: N802, D103
+ # Read JSON data
+ with open(input_json_path + '/EmptyDomainCFD.json') as json_file: # noqa: PLW1514, PTH123
+ json_data = json.load(json_file)
# Returns JSON object as a dictionary
- wc_data = json_data["windCharacteristics"]
-
-
+ wc_data = json_data['windCharacteristics']
+
kinematic_viscosity = wc_data['kinematicViscosity']
-
- #Open the template file (OpenFOAM file) for manipulation
- dict_file = open(template_dict_path + "/physicalPropertiesTemplate", "r")
+ # Open the template file (OpenFOAM file) for manipulation
+ dict_file = open(template_dict_path + '/physicalPropertiesTemplate') # noqa: PLW1514, PTH123, SIM115
dict_lines = dict_file.readlines()
dict_file.close()
-
- #Write type of the simulation
- start_index = foam.find_keyword_line(dict_lines, "nu")
- dict_lines[start_index] = "nu\t\t[0 2 -1 0 0 0 0] {:.4e};\n".format(kinematic_viscosity)
+    # Write the kinematic viscosity
+ start_index = foam.find_keyword_line(dict_lines, 'nu')
+ dict_lines[start_index] = f'nu\t\t[0 2 -1 0 0 0 0] {kinematic_viscosity:.4e};\n'
+ # Write edited dict to file
+ write_file_name = case_path + '/constant/physicalProperties'
- #Write edited dict to file
- write_file_name = case_path + "/constant/physicalProperties"
-
- if os.path.exists(write_file_name):
- os.remove(write_file_name)
-
- output_file = open(write_file_name, "w+")
+ if os.path.exists(write_file_name): # noqa: PTH110
+ os.remove(write_file_name) # noqa: PTH107
+
+ output_file = open(write_file_name, 'w+') # noqa: PLW1514, PTH123, SIM115
for line in dict_lines:
output_file.write(line)
output_file.close()
-
-def write_transportProperties_file(input_json_path, template_dict_path, case_path):
- #Read JSON data
- with open(input_json_path + "/EmptyDomainCFD.json") as json_file:
- json_data = json.load(json_file)
+def write_transportProperties_file(input_json_path, template_dict_path, case_path): # noqa: N802, D103
+ # Read JSON data
+ with open(input_json_path + '/EmptyDomainCFD.json') as json_file: # noqa: PLW1514, PTH123
+ json_data = json.load(json_file)
# Returns JSON object as a dictionary
- wc_data = json_data["windCharacteristics"]
-
-
+ wc_data = json_data['windCharacteristics']
+
kinematic_viscosity = wc_data['kinematicViscosity']
-
- #Open the template file (OpenFOAM file) for manipulation
- dict_file = open(template_dict_path + "/transportPropertiesTemplate", "r")
+ # Open the template file (OpenFOAM file) for manipulation
+ dict_file = open(template_dict_path + '/transportPropertiesTemplate') # noqa: PLW1514, PTH123, SIM115
dict_lines = dict_file.readlines()
dict_file.close()
-
- #Write type of the simulation
- start_index = foam.find_keyword_line(dict_lines, "nu")
- dict_lines[start_index] = "nu\t\t[0 2 -1 0 0 0 0] {:.3e};\n".format(kinematic_viscosity)
+    # Write the kinematic viscosity
+ start_index = foam.find_keyword_line(dict_lines, 'nu')
+ dict_lines[start_index] = f'nu\t\t[0 2 -1 0 0 0 0] {kinematic_viscosity:.3e};\n'
+ # Write edited dict to file
+ write_file_name = case_path + '/constant/transportProperties'
- #Write edited dict to file
- write_file_name = case_path + "/constant/transportProperties"
-
- if os.path.exists(write_file_name):
- os.remove(write_file_name)
-
- output_file = open(write_file_name, "w+")
+ if os.path.exists(write_file_name): # noqa: PTH110
+ os.remove(write_file_name) # noqa: PTH107
+
+ output_file = open(write_file_name, 'w+') # noqa: PLW1514, PTH123, SIM115
for line in dict_lines:
output_file.write(line)
output_file.close()
-def write_fvSchemes_file(input_json_path, template_dict_path, case_path):
- #Read JSON data
- with open(input_json_path + "/EmptyDomainCFD.json") as json_file:
- json_data = json.load(json_file)
+def write_fvSchemes_file(input_json_path, template_dict_path, case_path): # noqa: N802, D103
+ # Read JSON data
+ with open(input_json_path + '/EmptyDomainCFD.json') as json_file: # noqa: PLW1514, PTH123
+ json_data = json.load(json_file)
# Returns JSON object as a dictionary
- turb_data = json_data["turbulenceModeling"]
-
-
+ turb_data = json_data['turbulenceModeling']
+
simulation_type = turb_data['simulationType']
-
- #Open the template file (OpenFOAM file) for manipulation
- dict_file = open(template_dict_path + "/fvSchemesTemplate{}".format(simulation_type), "r")
+ # Open the template file (OpenFOAM file) for manipulation
+ dict_file = open(template_dict_path + f'/fvSchemesTemplate{simulation_type}') # noqa: PLW1514, PTH123, SIM115
dict_lines = dict_file.readlines()
dict_file.close()
-
-
- #Write edited dict to file
- write_file_name = case_path + "/system/fvSchemes"
-
- if os.path.exists(write_file_name):
- os.remove(write_file_name)
-
- output_file = open(write_file_name, "w+")
+
+ # Write edited dict to file
+ write_file_name = case_path + '/system/fvSchemes'
+
+ if os.path.exists(write_file_name): # noqa: PTH110
+ os.remove(write_file_name) # noqa: PTH107
+
+ output_file = open(write_file_name, 'w+') # noqa: PLW1514, PTH123, SIM115
for line in dict_lines:
output_file.write(line)
- output_file.close()
-
-def write_decomposeParDict_file(input_json_path, template_dict_path, case_path):
+ output_file.close()
- #Read JSON data
- with open(input_json_path + "/EmptyDomainCFD.json") as json_file:
- json_data = json.load(json_file)
+
+def write_decomposeParDict_file(input_json_path, template_dict_path, case_path): # noqa: N802, D103
+ # Read JSON data
+ with open(input_json_path + '/EmptyDomainCFD.json') as json_file: # noqa: PLW1514, PTH123
+ json_data = json.load(json_file)
# Returns JSON object as a dictionary
- ns_data = json_data["numericalSetup"]
-
+ ns_data = json_data['numericalSetup']
+
num_processors = ns_data['numProcessors']
-
- #Open the template file (OpenFOAM file) for manipulation
- dict_file = open(template_dict_path + "/decomposeParDictTemplate", "r")
+ # Open the template file (OpenFOAM file) for manipulation
+ dict_file = open(template_dict_path + '/decomposeParDictTemplate') # noqa: PLW1514, PTH123, SIM115
dict_lines = dict_file.readlines()
dict_file.close()
-
- #Write number of sub-domains
- start_index = foam.find_keyword_line(dict_lines, "numberOfSubdomains")
- dict_lines[start_index] = "numberOfSubdomains\t{};\n".format(num_processors)
-
- #Write method of decomposition
- start_index = foam.find_keyword_line(dict_lines, "decomposer")
- dict_lines[start_index] = "decomposer\t\t{};\n".format("scotch")
-
- #Write method of decomposition for OF-V9 and lower compatability
- start_index = foam.find_keyword_line(dict_lines, "method")
- dict_lines[start_index] = "method\t\t{};\n".format("scotch")
-
-
- #Write edited dict to file
- write_file_name = case_path + "/system/decomposeParDict"
-
- if os.path.exists(write_file_name):
- os.remove(write_file_name)
-
- output_file = open(write_file_name, "w+")
+
+ # Write number of sub-domains
+ start_index = foam.find_keyword_line(dict_lines, 'numberOfSubdomains')
+ dict_lines[start_index] = f'numberOfSubdomains\t{num_processors};\n'
+
+ # Write method of decomposition
+ start_index = foam.find_keyword_line(dict_lines, 'decomposer')
+ dict_lines[start_index] = 'decomposer\t\t{};\n'.format('scotch')
+
+ # Write method of decomposition for OF-V9 and lower compatibility
+ start_index = foam.find_keyword_line(dict_lines, 'method')
+ dict_lines[start_index] = 'method\t\t{};\n'.format('scotch')
+
+ # Write edited dict to file
+ write_file_name = case_path + '/system/decomposeParDict'
+
+ if os.path.exists(write_file_name): # noqa: PTH110
+ os.remove(write_file_name) # noqa: PTH107
+
+ output_file = open(write_file_name, 'w+') # noqa: PLW1514, PTH123, SIM115
for line in dict_lines:
output_file.write(line)
- output_file.close()
-
-def write_DFSRTurbDict_file(input_json_path, template_dict_path, case_path):
-
- #Read JSON data
- with open(input_json_path + "/EmptyDomainCFD.json") as json_file:
- json_data = json.load(json_file)
-
+ output_file.close()
+
+
+def write_DFSRTurbDict_file(input_json_path, template_dict_path, case_path): # noqa: N802, D103
+ # Read JSON data
+ with open(input_json_path + '/EmptyDomainCFD.json') as json_file: # noqa: PLW1514, PTH123
+ json_data = json.load(json_file)
+
fmax = 200.0
# Returns JSON object as a dictionary
- wc_data = json_data["windCharacteristics"]
- ns_data = json_data["numericalSetup"]
-
+ wc_data = json_data['windCharacteristics']
+ ns_data = json_data['numericalSetup']
+
wind_speed = wc_data['referenceWindSpeed']
duration = ns_data['duration']
-
- #Generate a little longer duration to be safe
- duration = duration*1.010
- #Open the template file (OpenFOAM file) for manipulation
- dict_file = open(template_dict_path + "/DFSRTurbDictTemplate", "r")
+    # Use a slightly longer duration to be safe
+ duration = duration * 1.010 # noqa: PLR6104
+
+ # Open the template file (OpenFOAM file) for manipulation
+ dict_file = open(template_dict_path + '/DFSRTurbDictTemplate') # noqa: PLW1514, PTH123, SIM115
dict_lines = dict_file.readlines()
dict_file.close()
-
- #Write the end time
- start_index = foam.find_keyword_line(dict_lines, "endTime")
- dict_lines[start_index] = "endTime\t\t\t{:.4f};\n".format(duration)
-
- #Write patch name
- start_index = foam.find_keyword_line(dict_lines, "patchName")
- dict_lines[start_index] = "patchName\t\t\"{}\";\n".format("inlet")
-
- #Write cohUav
- start_index = foam.find_keyword_line(dict_lines, "cohUav")
- dict_lines[start_index] = "cohUav\t\t\t{:.4f};\n".format(wind_speed)
-
- #Write fmax
- start_index = foam.find_keyword_line(dict_lines, "fMax")
- dict_lines[start_index] = "fMax\t\t\t{:.4f};\n".format(fmax)
-
- #Write time step
- start_index = foam.find_keyword_line(dict_lines, "timeStep")
- dict_lines[start_index] = "timeStep\t\t{:.4f};\n".format(1.0/fmax)
-
- #Write edited dict to file
- write_file_name = case_path + "/constant/DFSRTurbDict"
-
- if os.path.exists(write_file_name):
- os.remove(write_file_name)
-
- output_file = open(write_file_name, "w+")
+
+ # Write the end time
+ start_index = foam.find_keyword_line(dict_lines, 'endTime')
+ dict_lines[start_index] = f'endTime\t\t\t{duration:.4f};\n'
+
+ # Write patch name
+ start_index = foam.find_keyword_line(dict_lines, 'patchName')
+ dict_lines[start_index] = 'patchName\t\t"{}";\n'.format('inlet')
+
+ # Write cohUav
+ start_index = foam.find_keyword_line(dict_lines, 'cohUav')
+ dict_lines[start_index] = f'cohUav\t\t\t{wind_speed:.4f};\n'
+
+ # Write fmax
+ start_index = foam.find_keyword_line(dict_lines, 'fMax')
+ dict_lines[start_index] = f'fMax\t\t\t{fmax:.4f};\n'
+
+ # Write time step
+ start_index = foam.find_keyword_line(dict_lines, 'timeStep')
+ dict_lines[start_index] = f'timeStep\t\t{1.0 / fmax:.4f};\n'
+
+ # Write edited dict to file
+ write_file_name = case_path + '/constant/DFSRTurbDict'
+
+ if os.path.exists(write_file_name): # noqa: PTH110
+ os.remove(write_file_name) # noqa: PTH107
+
+ output_file = open(write_file_name, 'w+') # noqa: PLW1514, PTH123, SIM115
for line in dict_lines:
output_file.write(line)
- output_file.close()
-
+ output_file.close()
+
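The DFSRTurb dictionary ties its time step to the hard-coded cutoff frequency, timeStep = 1/fMax, e.g.:

    fmax = 200.0            # hard-coded above
    time_step = 1.0 / fmax  # 0.005 written into DFSRTurbDict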
-if __name__ == '__main__':
-
+if __name__ == '__main__':
input_args = sys.argv
# Set filenames
input_json_path = sys.argv[1]
template_dict_path = sys.argv[2]
case_path = sys.argv[3]
-
-
+
# input_json_path = "/home/abiy/Documents/WE-UQ/LocalWorkDir/EmptyDomainCFD/constant/simCenter/input"
# template_dict_path = "/home/abiy/SimCenter/SourceCode/NHERI-SimCenter/SimCenterBackendApplications/applications/createEVENT/EmptyDomainCFD/templateOF10Dicts"
# case_path = "/home/abiy/Documents/WE-UQ/LocalWorkDir/EmptyDomainCFD"
-
+
# data_path = os.getcwd()
# script_path = os.path.dirname(os.path.realpath(__file__))
-
-
- #Create case director
- # set up goes here
-
-
- #Read JSON data
- with open(input_json_path + "/EmptyDomainCFD.json") as json_file:
- json_data = json.load(json_file)
+    # Create case directory
+ # set up goes here
+
+ # Read JSON data
+ with open(input_json_path + '/EmptyDomainCFD.json') as json_file: # noqa: PLW1514, PTH123
+ json_data = json.load(json_file)
# Returns JSON object as a dictionary
- turb_data = json_data["turbulenceModeling"]
-
+ turb_data = json_data['turbulenceModeling']
+
simulation_type = turb_data['simulationType']
RANS_type = turb_data['RANSModelType']
LES_type = turb_data['LESModelType']
-
- #Write blockMesh
+
+ # Write blockMesh
write_block_mesh_dict(input_json_path, template_dict_path, case_path)
- #Create and write the SnappyHexMeshDict file
+ # Create and write the SnappyHexMeshDict file
write_snappy_hex_mesh_dict(input_json_path, template_dict_path, case_path)
-
- #Write files in "0" directory
+
+ # Write files in "0" directory
write_U_file(input_json_path, template_dict_path, case_path)
write_p_file(input_json_path, template_dict_path, case_path)
write_nut_file(input_json_path, template_dict_path, case_path)
write_k_file(input_json_path, template_dict_path, case_path)
-
- if simulation_type == "RANS" and RANS_type=="kEpsilon":
+
+ if simulation_type == 'RANS' and RANS_type == 'kEpsilon':
write_epsilon_file(input_json_path, template_dict_path, case_path)
- #Write control dict
+ # Write control dict
write_controlDict_file(input_json_path, template_dict_path, case_path)
-
- #Write results to be monitored
+
+ # Write results to be monitored
write_wind_profiles_file(input_json_path, template_dict_path, case_path)
write_vtk_plane_file(input_json_path, template_dict_path, case_path)
-
- #Write fvSolution dict
+
+ # Write fvSolution dict
write_fvSolution_file(input_json_path, template_dict_path, case_path)
- #Write fvSchemes dict
+ # Write fvSchemes dict
write_fvSchemes_file(input_json_path, template_dict_path, case_path)
- #Write momentumTransport dict
+ # Write momentumTransport dict
write_momentumTransport_file(input_json_path, template_dict_path, case_path)
-
- #Write physicalProperties dict
+
+ # Write physicalProperties dict
write_physicalProperties_file(input_json_path, template_dict_path, case_path)
-
- #Write transportProperties (physicalProperties in OF-10) dict for OpenFOAM-9 and below
+
+ # Write transportProperties (physicalProperties in OF-10) dict for OpenFOAM-9 and below
write_transportProperties_file(input_json_path, template_dict_path, case_path)
-
- #Write decomposeParDict
+
+ # Write decomposeParDict
write_decomposeParDict_file(input_json_path, template_dict_path, case_path)
-
- #Write DFSRTurb dict
+
+ # Write DFSRTurb dict
# write_DFSRTurbDict_file(input_json_path, template_dict_path, case_path)
-
- #Write TInf files
+
+ # Write TInf files
write_boundary_data_files(input_json_path, case_path)
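The __main__ block expects three positional arguments, in order: the input JSON directory, the template dictionary directory, and the OpenFOAM case directory. A typical invocation (the script name and paths here are illustrative only):

    python setup_case.py /path/to/input /path/to/templateOF10Dicts /path/to/case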
diff --git a/modules/createEVENT/EmptyDomainCFD/templateOF10Dicts/CMakeLists.txt b/modules/createEVENT/EmptyDomainCFD/templateOF10Dicts/CMakeLists.txt
index 7dac1246a..e1f5c61bb 100644
--- a/modules/createEVENT/EmptyDomainCFD/templateOF10Dicts/CMakeLists.txt
+++ b/modules/createEVENT/EmptyDomainCFD/templateOF10Dicts/CMakeLists.txt
@@ -5,7 +5,7 @@ simcenter_add_file(NAME nutFileTemplate)
simcenter_add_file(NAME pFileTemplate)
simcenter_add_file(NAME epsilonFileTemplate)
-#Files in "costant" directory
+#Files in "constant" directory
simcenter_add_file(NAME physicalPropertiesTemplate)
simcenter_add_file(NAME transportPropertiesTemplate)
simcenter_add_file(NAME momentumTransportTemplate)
@@ -21,4 +21,4 @@ simcenter_add_file(NAME fvSchemesTemplateDES)
simcenter_add_file(NAME fvSchemesTemplateRANS)
simcenter_add_file(NAME fvSolutionTemplate)
simcenter_add_file(NAME probeTemplate)
-simcenter_add_file(NAME vtkPlaneTemplate)
\ No newline at end of file
+simcenter_add_file(NAME vtkPlaneTemplate)
diff --git a/modules/createEVENT/EmptyDomainCFD/templateOF10Dicts/DFSRTurbDictTemplate b/modules/createEVENT/EmptyDomainCFD/templateOF10Dicts/DFSRTurbDictTemplate
index c8b14b2c4..00f3fc64e 100644
--- a/modules/createEVENT/EmptyDomainCFD/templateOF10Dicts/DFSRTurbDictTemplate
+++ b/modules/createEVENT/EmptyDomainCFD/templateOF10Dicts/DFSRTurbDictTemplate
@@ -57,7 +57,7 @@ C (//x y z
windProfile
{
//read scaling factors for I, L
- //that varies with hieght
+ //that varies with height
adjustProfile off;
//Factors to scale turbulence intensities and length scale profiles
diff --git a/modules/createEVENT/EmptyDomainCFD/templateOF10Dicts/decomposeParDictTemplate b/modules/createEVENT/EmptyDomainCFD/templateOF10Dicts/decomposeParDictTemplate
index c93a2398e..62ed6e269 100644
--- a/modules/createEVENT/EmptyDomainCFD/templateOF10Dicts/decomposeParDictTemplate
+++ b/modules/createEVENT/EmptyDomainCFD/templateOF10Dicts/decomposeParDictTemplate
@@ -18,7 +18,7 @@ numberOfSubdomains 8;
decomposer hierarchical;
-//Needed for compatability
+//Needed for compatibility
method hierarchical;
distributor ptscotch;
diff --git a/modules/createEVENT/GeoClawOpenFOAM/AddBuildingForces.py b/modules/createEVENT/GeoClawOpenFOAM/AddBuildingForces.py
index 008bd9f0c..f932a5f86 100644
--- a/modules/createEVENT/GeoClawOpenFOAM/AddBuildingForces.py
+++ b/modules/createEVENT/GeoClawOpenFOAM/AddBuildingForces.py
@@ -1,137 +1,142 @@
-#!/usr/bin/env python
-from __future__ import print_function
-import os, sys
+#!/usr/bin/env python # noqa: CPY001, D100, EXE001
import argparse
import json
+import os
-def validateCaseDirectoryStructure(caseDir):
- """
- This method validates that the provided case directory is valid and contains the 0, constant and system directory
+
+def validateCaseDirectoryStructure(caseDir): # noqa: N802, N803
+ """This method validates that the provided case directory is valid and contains the 0, constant and system directory
It also checks that system directory contains the controlDict
- """
- if not os.path.isdir(caseDir):
+ """ # noqa: D205, D400, D401, D404
+ if not os.path.isdir(caseDir): # noqa: PTH112
return False
-
- caseDirList = os.listdir(caseDir)
- necessaryDirs = ["0", "constant", "system"]
- if any(not aDir in caseDirList for aDir in necessaryDirs):
+
+ caseDirList = os.listdir(caseDir) # noqa: N806
+ necessaryDirs = ['0', 'constant', 'system'] # noqa: N806
+ if any(aDir not in caseDirList for aDir in necessaryDirs):
return False
- controlDictPath = os.path.join(caseDir, "system/controlDict")
- if not os.path.exists(controlDictPath):
+ controlDictPath = os.path.join(caseDir, 'system/controlDict') # noqa: PTH118, N806
+ if not os.path.exists(controlDictPath): # noqa: SIM103, PTH110
return False
-
+
return True
-def findFunctionsDictionary(controlDictLines):
- """
- This method will find functions dictionary in the controlDict
- """
+
+def findFunctionsDictionary(controlDictLines): # noqa: N802, N803
+    """This method will find the functions dictionary in the controlDict"""  # noqa: D400, D401, D404
for line in controlDictLines:
- if line.startswith("functions"):
+ if line.startswith('functions'):
return (True, controlDictLines.index(line) + 2)
return [False, len(controlDictLines)]
-def writeForceDictionary(controlDictLines, lineIndex, floorsCount, patches):
- """
- This method will write the force dictionary
- """
-
- for line in ["\t\n", "\tbuildingsForces\n", "\t{\n", "\t}\n", "\n"]:
+def writeForceDictionary(controlDictLines, lineIndex, floorsCount, patches): # noqa: N802, N803
+ """This method will write the force dictionary""" # noqa: D400, D401, D404
+ for line in ['\t\n', '\tbuildingsForces\n', '\t{\n', '\t}\n', '\n']:
controlDictLines.insert(lineIndex, line)
- lineIndex += 1
-
- forceDictionary = {
- "type": "forces",
- "libs": '("libforces.so")',
- "writeControl": "timeStep",
- "writeInterval": 1,
- "patches": "({})".format(patches),
- "rho": "rhoInf",
- "log": "true",
- "rhoInf": 1,
- "CofR": "(0 0 0)",
+ lineIndex += 1 # noqa: N806
+
+ forceDictionary = { # noqa: N806
+ 'type': 'forces',
+ 'libs': '("libforces.so")',
+ 'writeControl': 'timeStep',
+ 'writeInterval': 1,
+ 'patches': f'({patches})',
+ 'rho': 'rhoInf',
+ 'log': 'true',
+ 'rhoInf': 1,
+ 'CofR': '(0 0 0)',
}
- lineIndex -= 2
+ lineIndex -= 2 # noqa: N806
for key, value in forceDictionary.items():
- controlDictLines.insert(lineIndex, "\t\t" + key + "\t" + str(value)+ ";\n")
- lineIndex += 1
+ controlDictLines.insert(lineIndex, '\t\t' + key + '\t' + str(value) + ';\n')
+ lineIndex += 1 # noqa: N806
- for line in ["\n", "\t\tbinData\n", "\t\t{\n", "\t\t}\n", "\n"]:
+ for line in ['\n', '\t\tbinData\n', '\t\t{\n', '\t\t}\n', '\n']:
controlDictLines.insert(lineIndex, line)
- lineIndex += 1
+ lineIndex += 1 # noqa: N806
- lineIndex -= 2
- binDictionary = {
- "nBin": str(floorsCount),
- "direction": '(0 0 1)',
- "cumulative": "no"
+ lineIndex -= 2 # noqa: N806
+ binDictionary = { # noqa: N806
+ 'nBin': str(floorsCount),
+ 'direction': '(0 0 1)',
+ 'cumulative': 'no',
}
for key, value in binDictionary.items():
- controlDictLines.insert(lineIndex, "\t\t\t" + key + "\t" + str(value)+ ";\n")
- lineIndex += 1
+ controlDictLines.insert(
+ lineIndex, '\t\t\t' + key + '\t' + str(value) + ';\n'
+ )
+ lineIndex += 1 # noqa: N806
-def AddBuildingsForces(floorsCount, patches):
- """
- First, we need to validate the case directory structure
- """
+
+def AddBuildingsForces(floorsCount, patches): # noqa: N802, N803
+ """First, we need to validate the case directory structure""" # noqa: D400
# if not validateCaseDirectoryStructure(caseDir):
# print("Invalid OpenFOAM Case Directory!")
# sys.exit(-1)
+ # controlDictPath = os.path.join(caseDir, "system/controlDict")
+ controlDictPath = 'system/controlDict' # noqa: N806
+ with open(controlDictPath) as controlDict: # noqa: N806, PLW1514, PTH123
+ controlDictLines = controlDict.readlines() # noqa: N806
- #controlDictPath = os.path.join(caseDir, "system/controlDict")
- controlDictPath = "system/controlDict"
- with open(controlDictPath, 'r') as controlDict:
- controlDictLines = controlDict.readlines()
-
- [isFound, lineIndex] = findFunctionsDictionary(controlDictLines)
+ [isFound, lineIndex] = findFunctionsDictionary(controlDictLines) # noqa: N806
- #If we cannot find the function dictionary, we will create one
+ # If we cannot find the function dictionary, we will create one
if not isFound:
- for line in ["\n", "functions\n", "{\n", "}\n"]:
+ for line in ['\n', 'functions\n', '{\n', '}\n']:
controlDictLines.insert(lineIndex, line)
- lineIndex += 1
-
+ lineIndex += 1 # noqa: N806
- #Now we can add the building forces
+ # Now we can add the building forces
writeForceDictionary(controlDictLines, lineIndex, floorsCount, patches)
- #Writing updated controlDict
- with open(controlDictPath, 'w') as controlDict:
+ # Writing updated controlDict
+ with open(controlDictPath, 'w') as controlDict: # noqa: N806, PLW1514, PTH123
controlDict.writelines(controlDictLines)
-def GetFloorsCount(BIMFilePath):
- with open(BIMFilePath,'r') as BIMFile:
- bim = json.load(BIMFile)
- return int(bim["GeneralInformation"]["stories"])
+def GetFloorsCount(BIMFilePath): # noqa: N802, N803, D103
+ with open(BIMFilePath) as BIMFile: # noqa: N806, PLW1514, PTH123
+ bim = json.load(BIMFile)
+
+ return int(bim['GeneralInformation']['stories'])
+
-if __name__ == "__main__":
- #CLI parser
- parser = argparse.ArgumentParser(description="Add forces postprocessing to OpenFOAM controlDict")
- #parser.add_argument('-c', '--case', help="OpenFOAM case directory", required=True)
- parser.add_argument('-f', '--floors', help= "Number of Floors", type=int, required=False)
- parser.add_argument('-b', '--bim', help= "path to BIM file", required=False)
- parser.add_argument('-p', '--patches', help= "Patches used for extracting forces on building", required=False)
+if __name__ == '__main__':
+ # CLI parser
+ parser = argparse.ArgumentParser(
+ description='Add forces postprocessing to OpenFOAM controlDict'
+ )
+ # parser.add_argument('-c', '--case', help="OpenFOAM case directory", required=True)
+ parser.add_argument(
+ '-f', '--floors', help='Number of Floors', type=int, required=False
+ )
+ parser.add_argument('-b', '--bim', help='path to BIM file', required=False)
+ parser.add_argument(
+ '-p',
+ '--patches',
+ help='Patches used for extracting forces on building',
+ required=False,
+ )
- #Parsing arguments
+ # Parsing arguments
arguments, unknowns = parser.parse_known_args()
floors = arguments.floors
if not floors:
if arguments.bim:
floors = GetFloorsCount(arguments.bim)
- else:
+ else:
floors = 1
patches = arguments.patches
if not patches:
- patches = "Building"
+ patches = 'Building'
- #Add building forces to post-processing
+ # Add building forces to post-processing
# AddBuildingsForces(arguments.case, floors, patches)
AddBuildingsForces(floors, patches)
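Based on the argparse definitions above, a typical use of this script (values illustrative) is:

    python AddBuildingForces.py --floors 3 --patches Building

When --floors is omitted but --bim is given, the story count is read from the BIM file via GetFloorsCount; with neither, it defaults to one floor and the 'Building' patch.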
diff --git a/modules/createEVENT/GeoClawOpenFOAM/GeoClaw.py b/modules/createEVENT/GeoClawOpenFOAM/GeoClaw.py
index 52f86eaa0..491aff653 100644
--- a/modules/createEVENT/GeoClawOpenFOAM/GeoClaw.py
+++ b/modules/createEVENT/GeoClawOpenFOAM/GeoClaw.py
@@ -1,28 +1,27 @@
-####################################################################
+# # noqa: INP001
# LICENSING INFORMATION
####################################################################
-"""
- LICENSE INFORMATION:
-
- Copyright (c) 2020-2030, The Regents of the University of California (Regents).
+"""LICENSE INFORMATION:
+
+Copyright (c) 2020-2030, The Regents of the University of California (Regents).
+
+All rights reserved.
- All rights reserved.
+Redistribution and use in source and binary forms, with or without modification, are permitted provided that the following conditions are met:
- Redistribution and use in source and binary forms, with or without modification, are permitted provided that the following conditions are met:
+1. Redistributions of source code must retain the above copyright notice, this
+ list of conditions and the following disclaimer.
+2. Redistributions in binary form must reproduce the above copyright notice,
+ this list of conditions and the following disclaimer in the documentation
+ and/or other materials provided with the distribution.
- 1. Redistributions of source code must retain the above copyright notice, this
- list of conditions and the following disclaimer.
- 2. Redistributions in binary form must reproduce the above copyright notice,
- this list of conditions and the following disclaimer in the documentation
- and/or other materials provided with the distribution.
+THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
- THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+The views and conclusions contained in the software and documentation are those of the authors and should not be interpreted as representing official policies, either expressed or implied, of the FreeBSD Project.
- The views and conclusions contained in the software and documentation are those of the authors and should not be interpreted as representing official policies, either expressed or implied, of the FreeBSD Project.
+REGENTS SPECIFICALLY DISCLAIMS ANY WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE. THE SOFTWARE AND ACCOMPANYING DOCUMENTATION, IF ANY, PROVIDED HEREUNDER IS PROVIDED "AS IS". REGENTS HAS NO OBLIGATION TO PROVIDE MAINTENANCE, SUPPORT, UPDATES, ENHANCEMENTS, OR MODIFICATIONS.
- REGENTS SPECIFICALLY DISCLAIMS ANY WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE. THE SOFTWARE AND ACCOMPANYING DOCUMENTATION, IF ANY, PROVIDED HEREUNDER IS PROVIDED "AS IS". REGENTS HAS NO OBLIGATION TO PROVIDE MAINTENANCE, SUPPORT, UPDATES, ENHANCEMENTS, OR MODIFICATIONS.
-
-"""
+""" # noqa: D400
####################################################################
# AUTHOR INFORMATION
####################################################################
@@ -33,50 +32,52 @@
####################################################################
# Standard python modules
import os
+
import numpy as np
# Other custom modules
from hydroUtils import hydroUtils
+
####################################################################
# OpenFOAM7 solver class
####################################################################
-class GeoClaw():
- """
- This class includes the methods related to
- coupling GeoClaw and OpenFOAM in HydroUQ
-
- Methods
- --------
- decomptext: Get all the text for the decomposeParDict
- """
+class GeoClaw:
+ """This class includes the methods related to
+ coupling GeoClaw and OpenFOAM in HydroUQ
- #############################################################
- def creategeom(self,data,path):
- '''
- Creates the geometry for bathymetry
+ Methods
+ -------
+ decomptext: Get all the text for the decomposeParDict
- Arguments
- -----------
- data: all the JSON data
- '''
+ """ # noqa: D205, D400, D404
- # Create a utilities object
- hydroutil = hydroUtils()
+ #############################################################
+ def creategeom(self, data, path):
+ """Creates the geometry for bathymetry
- # Get information about the interface
- swcfdfile = ', '.join(hydroutil.extract_element_from_json(data, ["Events","SWCFDInteFile"]))
- swcfdfilepath = os.path.join(path,swcfdfile)
- swcfdpoints = np.genfromtxt(swcfdfilepath, delimiter=',',dtype=(float, float))
+ Arguments:
+ ---------
+ data: all the JSON data
- # Add extremum to the constants file
- maxvalues = np.max(swcfdpoints,axis=0)
- minvalues = np.min(swcfdpoints,axis=0)
+ """ # noqa: D400, D401
+ # Create a utilities object
+ hydroutil = hydroUtils()
- # Points of interest
- bottompts = self.getbathy(maxvalues,minvalues,data)
+ # Get information about the interface
+ swcfdfile = ', '.join(
+ hydroutil.extract_element_from_json(data, ['Events', 'SWCFDInteFile'])
+ )
+ swcfdfilepath = os.path.join(path, swcfdfile) # noqa: PTH118
+ swcfdpoints = np.genfromtxt(
+ swcfdfilepath, delimiter=',', dtype=(float, float)
+ )
- #
+ # Add extremum to the constants file
+ maxvalues = np.max(swcfdpoints, axis=0)
+ minvalues = np.min(swcfdpoints, axis=0)
+ # Points of interest
+ bottompts = self.getbathy(maxvalues, minvalues, data) # noqa: F841
- return 0
\ No newline at end of file
+ return 0
diff --git a/modules/createEVENT/GeoClawOpenFOAM/GeoClawBathy.py b/modules/createEVENT/GeoClawOpenFOAM/GeoClawBathy.py
index cb7edd4fa..2fed354a4 100644
--- a/modules/createEVENT/GeoClawOpenFOAM/GeoClawBathy.py
+++ b/modules/createEVENT/GeoClawOpenFOAM/GeoClawBathy.py
@@ -1,28 +1,27 @@
-####################################################################
+# # noqa: INP001
# LICENSING INFORMATION
####################################################################
-"""
- LICENSE INFORMATION:
-
- Copyright (c) 2020-2030, The Regents of the University of California (Regents).
+"""LICENSE INFORMATION:
+
+Copyright (c) 2020-2030, The Regents of the University of California (Regents).
+
+All rights reserved.
- All rights reserved.
+Redistribution and use in source and binary forms, with or without modification, are permitted provided that the following conditions are met:
- Redistribution and use in source and binary forms, with or without modification, are permitted provided that the following conditions are met:
+1. Redistributions of source code must retain the above copyright notice, this
+ list of conditions and the following disclaimer.
+2. Redistributions in binary form must reproduce the above copyright notice,
+ this list of conditions and the following disclaimer in the documentation
+ and/or other materials provided with the distribution.
- 1. Redistributions of source code must retain the above copyright notice, this
- list of conditions and the following disclaimer.
- 2. Redistributions in binary form must reproduce the above copyright notice,
- this list of conditions and the following disclaimer in the documentation
- and/or other materials provided with the distribution.
+THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
- THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+The views and conclusions contained in the software and documentation are those of the authors and should not be interpreted as representing official policies, either expressed or implied, of the FreeBSD Project.
- The views and conclusions contained in the software and documentation are those of the authors and should not be interpreted as representing official policies, either expressed or implied, of the FreeBSD Project.
+REGENTS SPECIFICALLY DISCLAIMS ANY WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE. THE SOFTWARE AND ACCOMPANYING DOCUMENTATION, IF ANY, PROVIDED HEREUNDER IS PROVIDED "AS IS". REGENTS HAS NO OBLIGATION TO PROVIDE MAINTENANCE, SUPPORT, UPDATES, ENHANCEMENTS, OR MODIFICATIONS.
- REGENTS SPECIFICALLY DISCLAIMS ANY WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE. THE SOFTWARE AND ACCOMPANYING DOCUMENTATION, IF ANY, PROVIDED HEREUNDER IS PROVIDED "AS IS". REGENTS HAS NO OBLIGATION TO PROVIDE MAINTENANCE, SUPPORT, UPDATES, ENHANCEMENTS, OR MODIFICATIONS.
-
-"""
+""" # noqa: D400
####################################################################
# AUTHOR INFORMATION
####################################################################
@@ -33,35 +32,33 @@
####################################################################
# Standard python modules
-
# Other custom modules
from hydroUtils import hydroUtils
+
####################################################################
# OpenFOAM7 solver class
####################################################################
-class GeoClawBathy():
- """
- This class includes the methods related to
- reading GeoClaw bathymetry into OpenFOAM in HydroUQ
+class GeoClawBathy:
+ """This class includes the methods related to
+ reading GeoClaw bathymetry into OpenFOAM in HydroUQ
- Methods
- --------
- creategeom: Create geometry and STL files
- """
+ Methods
+ -------
+ creategeom: Create geometry and STL files
- #############################################################
- def creategeom(self,data,path):
- '''
- Creates the geometry for bathymetry
+ """ # noqa: D205, D400, D404
- Arguments
- -----------
- data: all the JSON data
- '''
+ #############################################################
+ def creategeom(self, data, path): # noqa: ARG002, PLR6301
+ """Creates the geometry for bathymetry
- # Create a utilities object
- hydroutil = hydroUtils()
+ Arguments:
+ ---------
+ data: all the JSON data
+ """ # noqa: D400, D401
+ # Create a utilities object
+ hydroutil = hydroUtils() # noqa: F841
- return 0
\ No newline at end of file
+ return 0
diff --git a/modules/createEVENT/GeoClawOpenFOAM/GeoClawOpenFOAM.py b/modules/createEVENT/GeoClawOpenFOAM/GeoClawOpenFOAM.py
index 599a3f38a..493e430e0 100644
--- a/modules/createEVENT/GeoClawOpenFOAM/GeoClawOpenFOAM.py
+++ b/modules/createEVENT/GeoClawOpenFOAM/GeoClawOpenFOAM.py
@@ -1,5 +1,4 @@
-# -*- coding: utf-8 -*-
-#
+# # noqa: INP001, D100
# Copyright (c) 2019 The Regents of the University of California
#
# This file is part of the SimCenter Backend Applications.
@@ -38,28 +37,22 @@
import argparse
-def main(inputFile,
- evtFile,
- getRV) :
-
- print("Finished GeoClawOpenFOAM application")
-if __name__ == '__main__':
+def main(inputFile, evtFile, getRV): # noqa: ARG001, N803, D103
+ print('Finished GeoClawOpenFOAM application') # noqa: T201
- #Defining the command line arguments
- parser = argparse.ArgumentParser(
- "Run the GeoClawOpenFOAM application.",
- allow_abbrev=False)
- parser = argparse.ArgumentParser()
- parser.add_argument('--filenameAIM', default=None)
- parser.add_argument('--filenameEVENT', default='NA')
- parser.add_argument('--getRV', nargs='?', const=True, default=False)
+if __name__ == '__main__':
+ # Defining the command line arguments
+ parser = argparse.ArgumentParser(
+ 'Run the GeoClawOpenFOAM application.', allow_abbrev=False
+ )
- args = parser.parse_args()
+ parser = argparse.ArgumentParser()
+ parser.add_argument('--filenameAIM', default=None)
+ parser.add_argument('--filenameEVENT', default='NA')
+ parser.add_argument('--getRV', nargs='?', const=True, default=False)
- main(inputFile = args.filenameAIM,
- evtFile = args.filenameEVENT,
- getRV = args.getRV)
+ args = parser.parse_args()
-
+ main(inputFile=args.filenameAIM, evtFile=args.filenameEVENT, getRV=args.getRV)
diff --git a/modules/createEVENT/GeoClawOpenFOAM/GetOpenFOAMEvent.py b/modules/createEVENT/GeoClawOpenFOAM/GetOpenFOAMEvent.py
index 4caed29b7..c4098273e 100644
--- a/modules/createEVENT/GeoClawOpenFOAM/GetOpenFOAMEvent.py
+++ b/modules/createEVENT/GeoClawOpenFOAM/GetOpenFOAMEvent.py
@@ -1,208 +1,202 @@
-from __future__ import print_function
-import os, sys
-import re
+import argparse # noqa: CPY001, D100, INP001
import json
-import argparse
+import os
+import re
+
-class FloorForces:
+class FloorForces: # noqa: D101
def __init__(self):
self.X = [0]
self.Y = [0]
self.Z = [0]
-
-def validateCaseDirectoryStructure(caseDir):
- """
- This method validates that the provided case directory is valid and contains the 0, constant and system directory
+
+
+def validateCaseDirectoryStructure(caseDir): # noqa: N802, N803
+ """This method validates that the provided case directory is valid and contains the 0, constant and system directory
It also checks that system directory contains the controlDict
- """
- if not os.path.isdir(caseDir):
+ """ # noqa: D205, D400, D401, D404
+ if not os.path.isdir(caseDir): # noqa: PTH112
return False
-
- caseDirList = os.listdir(caseDir)
- necessaryDirs = ["0", "constant", "system", "postProcessing"]
- if any(not aDir in caseDirList for aDir in necessaryDirs):
+
+ caseDirList = os.listdir(caseDir) # noqa: N806
+ necessaryDirs = ['0', 'constant', 'system', 'postProcessing'] # noqa: N806
+ if any(aDir not in caseDirList for aDir in necessaryDirs):
return False
- controlDictPath = os.path.join(caseDir, "system/controlDict")
- if not os.path.exists(controlDictPath):
+ controlDictPath = os.path.join(caseDir, 'system/controlDict') # noqa: PTH118, N806
+ if not os.path.exists(controlDictPath): # noqa: SIM103, PTH110
return False
-
+
return True
-def parseForceComponents(forceArray):
- """
- This method takes the OpenFOAM force array and parse into components x,y,z
- """
+
+def parseForceComponents(forceArray): # noqa: N802, N803
+    """This method takes the OpenFOAM force array and parses it into components x, y, z"""  # noqa: D400, D401, D404
components = forceArray.strip('()').split()
x = float(components[0])
y = float(components[1])
z = float(components[2])
return [x, y, z]
-def ReadOpenFOAMForces(buildingForcesPath, floorsCount, startTime):
- """
- This method will read the forces from the output files in the OpenFOAM case output (post processing)
- """
- deltaT = 0
+
+def ReadOpenFOAMForces(buildingForcesPath, floorsCount, startTime): # noqa: N802, N803
+ """This method will read the forces from the output files in the OpenFOAM case output (post processing)""" # noqa: D400, D401, D404
+ deltaT = 0 # noqa: N806
forces = []
- for i in range(floorsCount):
- forces.append(FloorForces())
- forcePattern = re.compile(r"\([0-9.e\+\-\s]+\)")
+ for i in range(floorsCount): # noqa: B007
+ forces.append(FloorForces()) # noqa: PERF401
+ forcePattern = re.compile(r'\([0-9.e\+\-\s]+\)') # noqa: N806
- with open(buildingForcesPath, 'r') as forcesFile:
- forceLines = forcesFile.readlines()
- needsDeltaT = True
+ with open(buildingForcesPath) as forcesFile: # noqa: N806, PLW1514, PTH123
+ forceLines = forcesFile.readlines() # noqa: N806
+ needsDeltaT = True # noqa: N806
for line in forceLines:
- if line.startswith("#"):
+ if line.startswith('#'):
continue
- elif(needsDeltaT):
- deltaT = float(line.split()[0])
- needsDeltaT = False
+ elif needsDeltaT: # noqa: RET507
+ deltaT = float(line.split()[0]) # noqa: N806
+ needsDeltaT = False # noqa: N806
t = float(line.split()[0])
if t > startTime:
- detectedForces = re.findall(forcePattern, line)
+ detectedForces = re.findall(forcePattern, line) # noqa: N806
for i in range(floorsCount):
# Read the different force types (pressure, viscous and porous!)
- pressureForce = detectedForces[6 * i]
- viscousForce = detectedForces[6 * i + 1]
- porousForce = detectedForces[6 * i + 2]
+ pressureForce = detectedForces[6 * i] # noqa: N806
+ viscousForce = detectedForces[6 * i + 1] # noqa: N806
+ porousForce = detectedForces[6 * i + 2] # noqa: N806
- # Parse force components
+ # Parse force components
[fprx, fpry, fprz] = parseForceComponents(pressureForce)
[fvx, fvy, fvz] = parseForceComponents(viscousForce)
[fpox, fpoy, fpoz] = parseForceComponents(porousForce)
-
+
# Aggregate forces in X, Y, Z directions
forces[i].X.append(fprx + fvx + fpox)
forces[i].Y.append(fpry + fvy + fpoy)
forces[i].Z.append(fprz + fvz + fpoz)
-
-
return [deltaT, forces]
-def directionToDof(direction):
- """
- Converts direction to degree of freedom
- """
- directioMap = {
- "X": 1,
- "Y": 2,
- "Z": 3
- }
+
+def directionToDof(direction): # noqa: N802
+ """Converts direction to degree of freedom""" # noqa: D400, D401
+ directioMap = {'X': 1, 'Y': 2, 'Z': 3} # noqa: N806
return directioMap[direction]
-def addFloorForceToEvent(timeSeriesArray, patternsArray, force, direction, floor, dT):
- """
- Add force (one component) time series and pattern in the event file
- """
- seriesName = "WaterForceSeries_" + str(floor) + direction
- timeSeries = {
- "name": seriesName,
- "dT": dT,
- "type": "Value",
- "data": force
- }
-
+def addFloorForceToEvent( # noqa: N802
+ timeSeriesArray, # noqa: N803
+ patternsArray, # noqa: N803
+ force,
+ direction,
+ floor,
+ dT, # noqa: N803
+):
+ """Add force (one component) time series and pattern in the event file""" # noqa: D400
+ seriesName = 'WaterForceSeries_' + str(floor) + direction # noqa: N806
+ timeSeries = {'name': seriesName, 'dT': dT, 'type': 'Value', 'data': force} # noqa: N806
+
timeSeriesArray.append(timeSeries)
-
- patternName = "WaterForcePattern_" + str(floor) + direction
+
+ patternName = 'WaterForcePattern_' + str(floor) + direction # noqa: N806
pattern = {
- "name": patternName,
- "timeSeries": seriesName,
- "type": "WaterFloorLoad",
- "floor": str(floor),
- "dof": directionToDof(direction)
+ 'name': patternName,
+ 'timeSeries': seriesName,
+ 'type': 'WaterFloorLoad',
+ 'floor': str(floor),
+ 'dof': directionToDof(direction),
}
patternsArray.append(pattern)
-def addFloorPressure(pressureArray, floor):
- """
- Add floor pressure in the event file
- """
- floorPressure = {
- "story":str(floor),
- "pressure":[0.0, 0.0]
- }
+
+def addFloorPressure(pressureArray, floor): # noqa: N802, N803
+ """Add floor pressure in the event file""" # noqa: D400
+ floorPressure = {'story': str(floor), 'pressure': [0.0, 0.0]} # noqa: N806
pressureArray.append(floorPressure)
-def writeEVENT(forces, deltaT):
- """
- This method writes the EVENT.json file
- """
- timeSeriesArray = []
- patternsArray = []
- pressureArray = []
- waterEventJson = {
- "type" : "Hydro",
- "subtype": "OpenFOAM CFD Hydro Event",
- "timeSeries": timeSeriesArray,
- "pattern": patternsArray,
- "pressure": pressureArray,
- "dT": deltaT,
- "numSteps": len(forces[0].X),
- "units": {
- "force": "Newton",
- "length": "Meter",
- "time": "Sec"
- }
+def writeEVENT(forces, deltaT): # noqa: N802, N803
+ """This method writes the EVENT.json file""" # noqa: D400, D401, D404
+ timeSeriesArray = [] # noqa: N806
+ patternsArray = [] # noqa: N806
+ pressureArray = [] # noqa: N806
+ waterEventJson = { # noqa: N806
+ 'type': 'Hydro',
+ 'subtype': 'OpenFOAM CFD Hydro Event',
+ 'timeSeries': timeSeriesArray,
+ 'pattern': patternsArray,
+ 'pressure': pressureArray,
+ 'dT': deltaT,
+ 'numSteps': len(forces[0].X),
+ 'units': {'force': 'Newton', 'length': 'Meter', 'time': 'Sec'},
}
- #Creating the event dictionary that will be used to export the EVENT json file
- eventDict = {"randomVariables":[], "Events": [waterEventJson]}
+ # Creating the event dictionary that will be used to export the EVENT json file
+ eventDict = {'randomVariables': [], 'Events': [waterEventJson]} # noqa: N806
- #Adding floor forces
- for floorForces in forces:
+ # Adding floor forces
+ for floorForces in forces: # noqa: N806
floor = forces.index(floorForces) + 1
- addFloorForceToEvent(timeSeriesArray, patternsArray, floorForces.X, "X", floor, deltaT)
- addFloorForceToEvent(timeSeriesArray, patternsArray, floorForces.Y, "Y", floor, deltaT)
+ addFloorForceToEvent(
+ timeSeriesArray, patternsArray, floorForces.X, 'X', floor, deltaT
+ )
+ addFloorForceToEvent(
+ timeSeriesArray, patternsArray, floorForces.Y, 'Y', floor, deltaT
+ )
addFloorPressure(pressureArray, floor)
- with open("EVENT.json", "w") as eventsFile:
+ with open('EVENT.json', 'w') as eventsFile: # noqa: N806, PLW1514, PTH123
json.dump(eventDict, eventsFile)
-def GetOpenFOAMEvent(floorsCount, startTime):
- """
- Read OpenFOAM output and generate an EVENT file for the building
- """
- forcesOutputName = "buildingsForces"
+def GetOpenFOAMEvent(floorsCount, startTime): # noqa: N802, N803
+ """Read OpenFOAM output and generate an EVENT file for the building""" # noqa: D400
+ forcesOutputName = 'buildingsForces' # noqa: N806
- if floorsCount == 1:
- buildingForcesPath = os.path.join("postProcessing", forcesOutputName, "0", "forces.dat")
+ if floorsCount == 1:
+ buildingForcesPath = os.path.join( # noqa: PTH118, N806
+ 'postProcessing', forcesOutputName, '0', 'forces.dat'
+ )
else:
- buildingForcesPath = os.path.join("postProcessing", forcesOutputName, "0", "forces_bins.dat")
+ buildingForcesPath = os.path.join( # noqa: PTH118, N806
+ 'postProcessing', forcesOutputName, '0', 'forces_bins.dat'
+ )
- [deltaT, forces] = ReadOpenFOAMForces(buildingForcesPath, floorsCount, startTime)
+ [deltaT, forces] = ReadOpenFOAMForces(buildingForcesPath, floorsCount, startTime) # noqa: N806
# Write the EVENT file
writeEVENT(forces, deltaT)
- print("OpenFOAM event is written to EVENT.json")
+ print('OpenFOAM event is written to EVENT.json') # noqa: T201
+
+
+def ReadBIM(BIMFilePath): # noqa: N802, N803, D103
+ with open(BIMFilePath) as BIMFile: # noqa: N806, PLW1514, PTH123
+ bim = json.load(BIMFile)
-def ReadBIM(BIMFilePath):
- with open(BIMFilePath,'r') as BIMFile:
- bim = json.load(BIMFile)
+ return [
+ int(bim['GeneralInformation']['stories']),
+ float(bim['Events'][0]['StartTime']),
+ ]
- return [int(bim["GeneralInformation"]["stories"]), float(bim["Events"][0]["StartTime"])]
-if __name__ == "__main__":
+if __name__ == '__main__':
"""
Entry point to read the forces from OpenFOAM case and use it for the EVENT
"""
- #CLI parser
- parser = argparse.ArgumentParser(description="Get EVENT file from OpenFOAM output")
- parser.add_argument('-b', '--bim', help= "path to BIM file", required=False)
+ # CLI parser
+ parser = argparse.ArgumentParser(
+ description='Get EVENT file from OpenFOAM output'
+ )
+ parser.add_argument('-b', '--bim', help='path to BIM file', required=False)
- #parsing arguments
+ # parsing arguments
arguments, unknowns = parser.parse_known_args()
- [floors, startTime] = ReadBIM(arguments.bim)
+ [floors, startTime] = ReadBIM(arguments.bim) # noqa: N816
- GetOpenFOAMEvent(floors, startTime)
\ No newline at end of file
+ GetOpenFOAMEvent(floors, startTime)
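For reference, a small standalone sketch (with made-up force values) of how the forcePattern regex and parseForceComponents above decompose one line of an OpenFOAM forces output file:

    import re

    force_pattern = re.compile(r'\([0-9.e\+\-\s]+\)')

    # Hypothetical forces.dat line: time followed by (pressure) (viscous) (porous) tuples.
    line = '0.05 ((1.2e+01 0.0 -3.4) (2.0e-01 0.0 0.0) (0.0 0.0 0.0))'

    def parse_force_components(force_array):
        # Same logic as parseForceComponents above: strip parentheses, split, convert to float.
        x, y, z = (float(c) for c in force_array.strip('()').split())
        return [x, y, z]

    detected = re.findall(force_pattern, line)
    # detected -> ['(1.2e+01 0.0 -3.4)', '(2.0e-01 0.0 0.0)', '(0.0 0.0 0.0)']
    print(parse_force_components(detected[0]))  # [12.0, 0.0, -3.4]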
diff --git a/modules/createEVENT/GeoClawOpenFOAM/Processor.py b/modules/createEVENT/GeoClawOpenFOAM/Processor.py
index 4f9696a63..6eefdd222 100644
--- a/modules/createEVENT/GeoClawOpenFOAM/Processor.py
+++ b/modules/createEVENT/GeoClawOpenFOAM/Processor.py
@@ -1,25 +1,24 @@
-####################################################################
+# # noqa: INP001
# LICENSING INFORMATION
####################################################################
-"""
- LICENSE INFORMATION:
+"""LICENSE INFORMATION:
- Copyright (c) 2020-2030, The Regents of the University of California (Regents).
+Copyright (c) 2020-2030, The Regents of the University of California (Regents).
- All rights reserved.
+All rights reserved.
- Redistribution and use in source and binary forms, with or without modification, are permitted provided that the following conditions are met:
+Redistribution and use in source and binary forms, with or without modification, are permitted provided that the following conditions are met:
- 1. Redistributions of source code must retain the above copyright notice, this list of conditions and the following disclaimer.
- 2. Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the following disclaimer in the documentation and/or other materials provided with the distribution.
+1. Redistributions of source code must retain the above copyright notice, this list of conditions and the following disclaimer.
+2. Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the following disclaimer in the documentation and/or other materials provided with the distribution.
- THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
- The views and conclusions contained in the software and documentation are those of the authors and should not be interpreted as representing official policies, either expressed or implied, of the FreeBSD Project.
+The views and conclusions contained in the software and documentation are those of the authors and should not be interpreted as representing official policies, either expressed or implied, of the FreeBSD Project.
- REGENTS SPECIFICALLY DISCLAIMS ANY WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE. THE SOFTWARE AND ACCOMPANYING DOCUMENTATION, IF ANY, PROVIDED HEREUNDER IS PROVIDED "AS IS". REGENTS HAS NO OBLIGATION TO PROVIDE MAINTENANCE, SUPPORT, UPDATES, ENHANCEMENTS, OR MODIFICATIONS.
+REGENTS SPECIFICALLY DISCLAIMS ANY WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE. THE SOFTWARE AND ACCOMPANYING DOCUMENTATION, IF ANY, PROVIDED HEREUNDER IS PROVIDED "AS IS". REGENTS HAS NO OBLIGATION TO PROVIDE MAINTENANCE, SUPPORT, UPDATES, ENHANCEMENTS, OR MODIFICATIONS.
-"""
+""" # noqa: D400
####################################################################
# AUTHOR INFORMATION
####################################################################
@@ -30,255 +29,326 @@
####################################################################
# Standard python modules
import argparse
+import datetime
import json
import sys
-import datetime
from pathlib import Path
# Other custom modules
from hydroUtils import hydroUtils
from openfoam7 import openfoam7
+
####################################################################
# Main function
####################################################################
-def main():
- """
- This is the primary function
-
- Objects:
- h2oparser: Parser for CLI arguments
-
- Functions:
- main(): All necessary calls are made from this routine
-
- Variables:
- fipath: Path to dakota.json
- """
-
- # Get the system argument
- # Create a parser Object
- h2oparser = argparse.ArgumentParser(description='Get the Dakota.json file')
-
- # Add the arguments
- # Path to dakota.json input file
- h2oparser.add_argument(
- '-b',
- metavar='path to input file',
- type=str,
- help='the path to input json file',
- required=True)
- # Input directory - templateDir
- h2oparser.add_argument(
- '-I',
- metavar='path to input directory',
- type=str,
- help='the path to input directory',
- required=True)
- # Library
- h2oparser.add_argument(
- '-L',
- metavar='path to library',
- type=str,
- help='the path to library',
- required=True)
- # User bin
- h2oparser.add_argument(
- '-P',
- metavar='path to user bin',
- type=str,
- help='the path to user app bin',
- required=True)
- # Input file
- h2oparser.add_argument(
- '-i',
- metavar='input file',
- type=str,
- help='input file',
- required=True)
- # Driver file
- h2oparser.add_argument(
- '-d',
- metavar='driver file',
- type=str,
- help='driver file',
- required=True)
-
- # Execute the parse_args() method
- args = h2oparser.parse_args()
-
- # Get the path
- # fipath = args.b.replace('/dakota.json','')
- fipath = Path(args.b)
- if fipath.is_file():
- fipath = fipath.parent
- fipath = str(fipath)
-
- # Open the JSON file and load all objects
- with open(args.b) as f:
- data = json.load(f)
-
- # Create a utilities object
- hydroutil = hydroUtils()
-
- # Get the project name
- projname = hydroutil.extract_element_from_json(data, ["Events","ProjectName"])
- projname = ', '.join(projname)
-
- # Initialize a log ID number
- logID = 0
-
- # Initialize the log
- hydroutil.hydrolog(projname,fipath)
-
- # Start the log file with header and time and date
- logfiletext = hydroutil.general_header()
- hydroutil.flog.write(logfiletext)
- logID += 1
- hydroutil.flog.write('%d (%s): This log has started.\n' % (logID,datetime.datetime.now()))
-
- # Get the simulation type
- simtype = ', '.join(hydroutil.extract_element_from_json(data, ["Events","SimulationType"]))
- if int(simtype) == 0:
- hydroutil.flog.write('%d (%s): No simulation type selected in EVT.\n' % (logID,datetime.datetime.now()))
- sys.exit('No simulation type selected in EVT.')
-
- # Get the solver type from the dakota file
- hydrosolver = ', '.join(hydroutil.extract_element_from_json(data, ["Events","SolverChoice"]))
-
- # Find the solver
- # 0 - OpenFoam7 (+ olaFlow)
- # 1 - OpenFoam 8 (+ olaFlow)
- # default - OpenFoam7 (+ olaFlow)
- # Create related object
- if int(hydrosolver) == 0:
- solver = openfoam7()
-
- elif int(hydrosolver) == 1:
- print('This is not yet available')
- # OpenFoam 8 + olaFlow
- # solver = openfoam8()
-
- else:
- # Default is Openfoam7 + olaFlow
- solver = openfoam7()
-
- # Call the important routines
- # Create folders and files
- ecode = solver.createfolder(data,fipath,args)
- logID += 1
- if ecode < 0:
- hydroutil.flog.write('%d (%s): Error creating folders required for EVT solver.\n' % (logID,datetime.datetime.now()))
- sys.exit('Error creating folders required for EVT solver.')
- else:
- hydroutil.flog.write('%d (%s): Folders required for EVT solver created.\n' % (logID,datetime.datetime.now()))
-
- # Create Geometry
- ecode = solver.creategeometry(data,fipath)
- logID += 1
- if ecode < 0:
- hydroutil.flog.write('%d (%s): Error creating geometry required for EVT solver.\n' % (logID,datetime.datetime.now()))
- sys.exit('Error creating geometry required for EVT solver')
- else:
- hydroutil.flog.write('%d (%s): Geometry required for EVT solver created.\n' % (logID,datetime.datetime.now()))
-
- # Create meshing
- ecode = solver.createmesh(data,fipath)
- logID += 1
- if ecode == 0:
- hydroutil.flog.write('%d (%s): Files required for EVT meshing created.\n' % (logID,datetime.datetime.now()))
- else:
- hydroutil.flog.write('%d (%s): Error in Files required for EVT meshing.\n' % (logID,datetime.datetime.now()))
-
- # Material
- ecode = solver.materials(data,fipath)
- logID += 1
- if ecode < 0:
- hydroutil.flog.write('%d (%s): Error with material parameters in EVT.\n' % (logID,datetime.datetime.now()))
- sys.exit('Error with material parameters in EVT.')
- else:
- hydroutil.flog.write('%d (%s): Files required for materials definition successfully created.\n' % (logID,datetime.datetime.now()))
-
- # Create initial condition
- ecode = solver.initial(data,fipath)
- logID += 1
- if ecode < 0:
- hydroutil.flog.write('%d (%s): Error with initial condition definition in EVT.\n' % (logID,datetime.datetime.now()))
- sys.exit('Issues with definition of initial condition in EVT')
- else:
- hydroutil.flog.write('%d (%s): Files required for initial condition definition successfully created.\n' % (logID,datetime.datetime.now()))
-
- # Create boundary condition - to do (alpha, k, omega, nut, nuTilda)
- ecode = solver.boundary(data,fipath)
- logID += 1
- if ecode < 0:
- hydroutil.flog.write('%d (%s): Error with boundary condition definition in EVT.\n' % (logID,datetime.datetime.now()))
- sys.exit('Issues with definition of boundary condition in EVT')
- else:
- hydroutil.flog.write('%d (%s): Files required for boundary condition definition successfully created.\n' % (logID,datetime.datetime.now()))
-
- # Turbulence
- ecode = solver.turbulence(data,fipath)
- logID += 1
- if ecode < 0:
- hydroutil.flog.write('%d (%s): Error with turbulence parameters in EVT.\n' % (logID,datetime.datetime.now()))
- sys.exit('Error with turbulence parameters in EVT.')
- else:
- hydroutil.flog.write('%d (%s): Files required for turbulence definition successfully created.\n' % (logID,datetime.datetime.now()))
-
- # Parallelization
- ecode = solver.parallelize(data,fipath)
- logID += 1
- if ecode < 0:
- hydroutil.flog.write('%d (%s): Error with parallelization parameters in EVT.\n' % (logID,datetime.datetime.now()))
- sys.exit('Error with parallelization parameters in EVT.')
- else:
- hydroutil.flog.write('%d (%s): Files required for parallelization successfully created.\n' % (logID,datetime.datetime.now()))
-
- # Solver settings
- ecode = solver.solve(data,fipath)
- logID += 1
- if ecode < 0:
- hydroutil.flog.write('%d (%s): Error with solver parameters in EVT.\n' % (logID,datetime.datetime.now()))
- sys.exit('Error with solver parameters in EVT.')
- else:
- hydroutil.flog.write('%d (%s): Files required for solver successfully created.\n' % (logID,datetime.datetime.now()))
-
- # Other files
- ecode = solver.others(data,fipath)
- logID += 1
- if ecode < 0:
- hydroutil.flog.write('%d (%s): Error with creating auxillary files in EVT.\n' % (logID,datetime.datetime.now()))
- sys.exit('Error with creating auxillary files in EVT.')
- else:
- hydroutil.flog.write('%d (%s): Auxillary files required successfully created.\n' % (logID,datetime.datetime.now()))
-
- # Dakota scripts
- solver.dakota(args)
-
- # Event post processing
- ecode = solver.postprocessing(data,fipath)
- logID += 1
- if ecode < 0:
- hydroutil.flog.write('%d (%s): Error with creating postprocessing files in EVT.\n' % (logID,datetime.datetime.now()))
- sys.exit('Error with creating postprocessing files in EVT.')
- else:
- hydroutil.flog.write('%d (%s): Postprocessing files required for EVT successfully created.\n' % (logID,datetime.datetime.now()))
-
- # Cleaning scripts
- solver.cleaning(args,fipath)
-
- # Write to caserun file
- caseruntext = 'echo HydroUQ complete'
- scriptfile = open('caserun.sh',"a")
- scriptfile.write(caseruntext)
- scriptfile.close()
+def main(): # noqa: C901
+ """This is the primary function
+
+ Objects:
+ h2oparser: Parser for CLI arguments
+
+ Functions:
+ main(): All necessary calls are made from this routine
+
+ Variables:
+ fipath: Path to dakota.json
+ """ # noqa: D400, D401, D404
+ # Get the system argument
+ # Create a parser Object
+ h2oparser = argparse.ArgumentParser(description='Get the Dakota.json file')
+
+ # Add the arguments
+ # Path to dakota.json input file
+ h2oparser.add_argument(
+ '-b',
+ metavar='path to input file',
+ type=str,
+ help='the path to input json file',
+ required=True,
+ )
+ # Input directory - templateDir
+ h2oparser.add_argument(
+ '-I',
+ metavar='path to input directory',
+ type=str,
+ help='the path to input directory',
+ required=True,
+ )
+ # Library
+ h2oparser.add_argument(
+ '-L',
+ metavar='path to library',
+ type=str,
+ help='the path to library',
+ required=True,
+ )
+ # User bin
+ h2oparser.add_argument(
+ '-P',
+ metavar='path to user bin',
+ type=str,
+ help='the path to user app bin',
+ required=True,
+ )
+ # Input file
+ h2oparser.add_argument(
+ '-i', metavar='input file', type=str, help='input file', required=True
+ )
+ # Driver file
+ h2oparser.add_argument(
+ '-d', metavar='driver file', type=str, help='driver file', required=True
+ )
+
+ # Execute the parse_args() method
+ args = h2oparser.parse_args()
+
+ # Get the path
+ # fipath = args.b.replace('/dakota.json','')
+ fipath = Path(args.b)
+ if fipath.is_file():
+ fipath = fipath.parent
+ fipath = str(fipath)
+
+ # Open the JSON file and load all objects
+ with open(args.b) as f: # noqa: PLW1514, PTH123
+ data = json.load(f)
+
+ # Create a utilities object
+ hydroutil = hydroUtils()
+
+ # Get the project name
+ projname = hydroutil.extract_element_from_json(data, ['Events', 'ProjectName'])
+ projname = ', '.join(projname)
+
+ # Initialize a log ID number
+ logID = 0 # noqa: N806
+
+ # Initialize the log
+ hydroutil.hydrolog(projname, fipath)
+
+ # Start the log file with header and time and date
+ logfiletext = hydroutil.general_header()
+ hydroutil.flog.write(logfiletext)
+ logID += 1 # noqa: N806
+ hydroutil.flog.write(
+ '%d (%s): This log has started.\n' % (logID, datetime.datetime.now()) # noqa: DTZ005
+ )
+
+ # Get the simulation type
+ simtype = ', '.join(
+ hydroutil.extract_element_from_json(data, ['Events', 'SimulationType'])
+ )
+ if int(simtype) == 0:
+ hydroutil.flog.write(
+ '%d (%s): No simulation type selected in EVT.\n'
+ % (logID, datetime.datetime.now()) # noqa: DTZ005
+ )
+ sys.exit('No simulation type selected in EVT.')
+
+ # Get the solver type from the dakota file
+ hydrosolver = ', '.join(
+ hydroutil.extract_element_from_json(data, ['Events', 'SolverChoice'])
+ )
+
+ # Find the solver
+ # 0 - OpenFoam7 (+ olaFlow)
+ # 1 - OpenFoam 8 (+ olaFlow)
+ # default - OpenFoam7 (+ olaFlow)
+ # Create related object
+ if int(hydrosolver) == 0:
+ solver = openfoam7()
+
+ elif int(hydrosolver) == 1:
+ print('This is not yet available') # noqa: T201
+ # OpenFoam 8 + olaFlow
+ # solver = openfoam8()
+
+ else:
+ # Default is Openfoam7 + olaFlow
+ solver = openfoam7()
+
+ # Call the important routines
+ # Create folders and files
+ ecode = solver.createfolder(data, fipath, args)
+ logID += 1 # noqa: N806
+ if ecode < 0:
+ hydroutil.flog.write(
+ '%d (%s): Error creating folders required for EVT solver.\n'
+ % (logID, datetime.datetime.now()) # noqa: DTZ005
+ )
+ sys.exit('Error creating folders required for EVT solver.')
+ else:
+ hydroutil.flog.write(
+ '%d (%s): Folders required for EVT solver created.\n'
+ % (logID, datetime.datetime.now()) # noqa: DTZ005
+ )
+
+ # Create Geometry
+ ecode = solver.creategeometry(data, fipath)
+ logID += 1 # noqa: N806
+ if ecode < 0:
+ hydroutil.flog.write(
+ '%d (%s): Error creating geometry required for EVT solver.\n'
+ % (logID, datetime.datetime.now()) # noqa: DTZ005
+ )
+ sys.exit('Error creating geometry required for EVT solver')
+ else:
+ hydroutil.flog.write(
+ '%d (%s): Geometry required for EVT solver created.\n'
+ % (logID, datetime.datetime.now()) # noqa: DTZ005
+ )
+
+ # Create meshing
+ ecode = solver.createmesh(data, fipath)
+ logID += 1 # noqa: N806
+ if ecode == 0:
+ hydroutil.flog.write(
+ '%d (%s): Files required for EVT meshing created.\n'
+ % (logID, datetime.datetime.now()) # noqa: DTZ005
+ )
+ else:
+ hydroutil.flog.write(
+ '%d (%s): Error in Files required for EVT meshing.\n'
+ % (logID, datetime.datetime.now()) # noqa: DTZ005
+ )
+
+ # Material
+ ecode = solver.materials(data, fipath)
+ logID += 1 # noqa: N806
+ if ecode < 0:
+ hydroutil.flog.write(
+ '%d (%s): Error with material parameters in EVT.\n'
+ % (logID, datetime.datetime.now()) # noqa: DTZ005
+ )
+ sys.exit('Error with material parameters in EVT.')
+ else:
+ hydroutil.flog.write(
+ '%d (%s): Files required for materials definition successfully created.\n'
+ % (logID, datetime.datetime.now()) # noqa: DTZ005
+ )
+
+ # Create initial condition
+ ecode = solver.initial(data, fipath)
+ logID += 1 # noqa: N806
+ if ecode < 0:
+ hydroutil.flog.write(
+ '%d (%s): Error with initial condition definition in EVT.\n'
+ % (logID, datetime.datetime.now()) # noqa: DTZ005
+ )
+ sys.exit('Issues with definition of initial condition in EVT')
+ else:
+ hydroutil.flog.write(
+ '%d (%s): Files required for initial condition definition successfully created.\n'
+ % (logID, datetime.datetime.now()) # noqa: DTZ005
+ )
+
+ # Create boundary condition - to do (alpha, k, omega, nut, nuTilda)
+ ecode = solver.boundary(data, fipath)
+ logID += 1 # noqa: N806
+ if ecode < 0:
+ hydroutil.flog.write(
+ '%d (%s): Error with boundary condition definition in EVT.\n'
+ % (logID, datetime.datetime.now()) # noqa: DTZ005
+ )
+ sys.exit('Issues with definition of boundary condition in EVT')
+ else:
+ hydroutil.flog.write(
+ '%d (%s): Files required for boundary condition definition successfully created.\n'
+ % (logID, datetime.datetime.now()) # noqa: DTZ005
+ )
+
+ # Turbulence
+ ecode = solver.turbulence(data, fipath)
+ logID += 1 # noqa: N806
+ if ecode < 0:
+ hydroutil.flog.write(
+ '%d (%s): Error with turbulence parameters in EVT.\n'
+ % (logID, datetime.datetime.now()) # noqa: DTZ005
+ )
+ sys.exit('Error with turbulence parameters in EVT.')
+ else:
+ hydroutil.flog.write(
+ '%d (%s): Files required for turbulence definition successfully created.\n'
+ % (logID, datetime.datetime.now()) # noqa: DTZ005
+ )
+
+ # Parallelization
+ ecode = solver.parallelize(data, fipath)
+ logID += 1 # noqa: N806
+ if ecode < 0:
+ hydroutil.flog.write(
+ '%d (%s): Error with parallelization parameters in EVT.\n'
+ % (logID, datetime.datetime.now()) # noqa: DTZ005
+ )
+ sys.exit('Error with parallelization parameters in EVT.')
+ else:
+ hydroutil.flog.write(
+ '%d (%s): Files required for parallelization successfully created.\n'
+ % (logID, datetime.datetime.now()) # noqa: DTZ005
+ )
+
+ # Solver settings
+ ecode = solver.solve(data, fipath)
+ logID += 1 # noqa: N806
+ if ecode < 0:
+ hydroutil.flog.write(
+ '%d (%s): Error with solver parameters in EVT.\n'
+ % (logID, datetime.datetime.now()) # noqa: DTZ005
+ )
+ sys.exit('Error with solver parameters in EVT.')
+ else:
+ hydroutil.flog.write(
+ '%d (%s): Files required for solver successfully created.\n'
+ % (logID, datetime.datetime.now()) # noqa: DTZ005
+ )
+
+ # Other files
+ ecode = solver.others(data, fipath)
+ logID += 1 # noqa: N806
+ if ecode < 0:
+ hydroutil.flog.write(
+ '%d (%s): Error with creating auxiliary files in EVT.\n'
+ % (logID, datetime.datetime.now()) # noqa: DTZ005
+ )
+ sys.exit('Error with creating auxiliary files in EVT.')
+ else:
+ hydroutil.flog.write(
+ '%d (%s): Auxiliary files required successfully created.\n'
+ % (logID, datetime.datetime.now()) # noqa: DTZ005
+ )
+
+ # Dakota scripts
+ solver.dakota(args)
+
+ # Event post processing
+ ecode = solver.postprocessing(data, fipath)
+ logID += 1 # noqa: N806
+ if ecode < 0:
+ hydroutil.flog.write(
+ '%d (%s): Error with creating postprocessing files in EVT.\n'
+ % (logID, datetime.datetime.now()) # noqa: DTZ005
+ )
+ sys.exit('Error with creating postprocessing files in EVT.')
+ else:
+ hydroutil.flog.write(
+ '%d (%s): Postprocessing files required for EVT successfully created.\n'
+ % (logID, datetime.datetime.now()) # noqa: DTZ005
+ )
+
+ # Cleaning scripts
+ solver.cleaning(args, fipath)
+
+ # Write to caserun file
+ caseruntext = 'echo HydroUQ complete'
+ scriptfile = open('caserun.sh', 'a') # noqa: PLW1514, PTH123, SIM115
+ scriptfile.write(caseruntext)
+ scriptfile.close()
+
####################################################################
# Primary function call
####################################################################
-if __name__ == "__main__":
-
- # Call the main routine
- main()
-
+if __name__ == '__main__':
+ # Call the main routine
+ main()
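The reformatted main() above repeats the same call-log-exit pattern for every solver stage. A hedged sketch of how that pattern could be factored into a helper (hypothetical run_step, not part of this change; it assumes the hydroutil, solver, data, and fipath objects from main()):

    import datetime
    import sys

    def run_step(hydroutil, log_id, step, ok_msg, err_msg):
        """Run one solver stage, write a timestamped log line, and exit on failure."""
        ecode = step()
        stamp = datetime.datetime.now()  # matches the module's use of local time
        if ecode < 0:
            hydroutil.flog.write('%d (%s): %s\n' % (log_id, stamp, err_msg))
            sys.exit(err_msg)
        hydroutil.flog.write('%d (%s): %s\n' % (log_id, stamp, ok_msg))
        return log_id + 1

    # Example usage inside main():
    # logID = run_step(
    #     hydroutil, logID, lambda: solver.materials(data, fipath),
    #     'Files required for materials definition successfully created.',
    #     'Error with material parameters in EVT.',
    # )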
diff --git a/modules/createEVENT/GeoClawOpenFOAM/flume.py b/modules/createEVENT/GeoClawOpenFOAM/flume.py
index 98b2d8d21..d7cd179d7 100644
--- a/modules/createEVENT/GeoClawOpenFOAM/flume.py
+++ b/modules/createEVENT/GeoClawOpenFOAM/flume.py
@@ -1,28 +1,27 @@
-####################################################################
+# # noqa: INP001
# LICENSING INFORMATION
####################################################################
-"""
- LICENSE INFORMATION:
-
- Copyright (c) 2020-2030, The Regents of the University of California (Regents).
+"""LICENSE INFORMATION:
+
+Copyright (c) 2020-2030, The Regents of the University of California (Regents).
- All rights reserved.
+All rights reserved.
- Redistribution and use in source and binary forms, with or without modification, are permitted provided that the following conditions are met:
+Redistribution and use in source and binary forms, with or without modification, are permitted provided that the following conditions are met:
- 1. Redistributions of source code must retain the above copyright notice, this
- list of conditions and the following disclaimer.
- 2. Redistributions in binary form must reproduce the above copyright notice,
- this list of conditions and the following disclaimer in the documentation
- and/or other materials provided with the distribution.
+1. Redistributions of source code must retain the above copyright notice, this
+ list of conditions and the following disclaimer.
+2. Redistributions in binary form must reproduce the above copyright notice,
+ this list of conditions and the following disclaimer in the documentation
+ and/or other materials provided with the distribution.
- THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
- The views and conclusions contained in the software and documentation are those of the authors and should not be interpreted as representing official policies, either expressed or implied, of the FreeBSD Project.
+The views and conclusions contained in the software and documentation are those of the authors and should not be interpreted as representing official policies, either expressed or implied, of the FreeBSD Project.
- REGENTS SPECIFICALLY DISCLAIMS ANY WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE. THE SOFTWARE AND ACCOMPANYING DOCUMENTATION, IF ANY, PROVIDED HEREUNDER IS PROVIDED "AS IS". REGENTS HAS NO OBLIGATION TO PROVIDE MAINTENANCE, SUPPORT, UPDATES, ENHANCEMENTS, OR MODIFICATIONS.
-
-"""
+REGENTS SPECIFICALLY DISCLAIMS ANY WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE. THE SOFTWARE AND ACCOMPANYING DOCUMENTATION, IF ANY, PROVIDED HEREUNDER IS PROVIDED "AS IS". REGENTS HAS NO OBLIGATION TO PROVIDE MAINTENANCE, SUPPORT, UPDATES, ENHANCEMENTS, OR MODIFICATIONS.
+
+""" # noqa: D400
####################################################################
# AUTHOR INFORMATION
####################################################################
@@ -33,385 +32,403 @@
####################################################################
# Standard python modules
import os
-import numpy as np
+
import meshio
-from shapely.geometry import Polygon, Point
+import numpy as np
import triangle as tr
+from shapely.geometry import Point, Polygon
# Other custom modules
-#from hydroUtils import hydroUtils
+# from hydroUtils import hydroUtils
+
####################################################################
# OpenFOAM7 solver class
####################################################################
-class flume():
- """
- This class includes the methods related to wave flume
-
- Methods
- --------
- generateflume: Create STL files for the flume
- extremedata: Get the extreme values and building information
- """
-
- #############################################################
- def generateflume(self,breadth,path):
- '''
- Creates the STL files for the flume
-
- Arguments
- -----------
- breadth: Breadth f the flume
- path: Path where dakota.json exists - where we need to put STL files
- '''
-
- # Get the triangulated flume
- extremeval = self.flumedata('FlumeData.txt')
- self.breadth = breadth
-
- # Right face
- self.right() # Right vertices
- self.npt_right = self.npt # Right triangles
- self.writeSTL("Right",self.npa_right,self.npt_right,path) # Write right STL file
-
- # Left face
- self.left() # Left vertices
- self.lefttri() # Left triangles
- self.writeSTL("Left",self.npa_left,self.npt_left,path) # Write left STL file
-
- # Front face
- self.front() # Front faces
- self.fronttri() # Front triangles
- self.writeSTL("Entry",self.npa_front,self.npt_front,path) # Write front STL file
-
- # Back face
- self.back() # Back vertices
- self.backtri() # Back triangles
- self.writeSTL("Exit",self.npa_back,self.npt_back,path) # Write back STL file
-
- # Top face
- self.top() # Top vertices
- self.toptri() # Top triangles
- self.writeSTL("Top",self.npa_top,self.npt_top,path) # Write top STL file
-
- # Bottom face
- self.bottom() # Bottom vertices
- self.bottomtri() # Bottom triangles
- self.writeSTL("Bottom",self.npa_bottom,self.npt_bottom,path) # Write bottom STL file
-
- # Return extreme values
- return extremeval
-
- #############################################################
- def flumedata(self,IpPTFile):
- '''
- Gets information about the flume to create STL files
-
- Arguments
- -----------
- IpPTFile: File with points of the flume
- '''
-
- # Get the data for the boundary
- data_boun = np.genfromtxt(IpPTFile, delimiter=',',dtype=(float, float))
-
- # Add extremum to the constants file
- maxvalues = np.max(data_boun,axis=0)
- minvalues = np.min(data_boun,axis=0)
- extremeval = np.array([minvalues[0],maxvalues[0],minvalues[1],maxvalues[1]])
-
- # Initialize segments for left and right
- segmentLR = []
-
- # Loop over all coordinates and create coordinates
- for ii in range(0,data_boun.shape[0]):
-
- # Get each of the user points
- if ii < data_boun.shape[0]-1:
- segmentLR.extend([(ii, ii+1)])
- else:
- segmentLR.extend([(ii, 0)])
-
- # Triangulate the polygon
- ALR = dict(vertices=data_boun,segments=segmentLR)
- BLR = tr.triangulate(ALR)
-
- # Get the tringles and vertices
- nm_triangle = BLR['triangles'].tolist()
- self.npt = np.asarray(nm_triangle, dtype=np.int32)
- nm_vertices = BLR['vertices'].tolist()
- self.npa = np.asarray(nm_vertices, dtype=np.float32)
-
- # Define the polygon
- mypoly = Polygon(data_boun)
-
- # Loop over all triangles to find if inside polygon
- indexes = []
- noindexes = []
- for ii in range(0,self.npt.shape[0]):
- n0 = self.npt[ii,0]
- n1 = self.npt[ii,1]
- n2 = self.npt[ii,2]
- centroidX = (1/3)*(self.npa[n0,0]+self.npa[n1,0]+self.npa[n2,0])
- centroidZ = (1/3)*(self.npa[n0,1]+self.npa[n1,1]+self.npa[n2,1])
- po = Point(centroidX,centroidZ)
- if mypoly.contains(po):
- indexes.extend([(ii)])
- else:
- noindexes.extend([(ii)])
-
- # Delete extra triangles
- self.npt = np.delete(self.npt, noindexes, axis=0)
-
- # Return extreme values
- return extremeval
-
- ####################################################################
- def right(self):
- '''
- Gets information/nodes about to create right face of the flume
-
- Arguments
- -----------
- none
- '''
-
- self.npa_right = np.zeros(shape=(self.npa.shape[0],3))
- self.npa_right[:,0] = self.npa[:,0]
- self.npa_right[:,2] = self.npa[:,1]
- self.npa_right[:,1] = -self.breadth/2
-
- ####################################################################
- def left(self):
- '''
- Gets information/nodes about to create left face of the flume
-
- Arguments
- -----------
- none
- '''
-
- self.npa_left = np.zeros(shape=(self.npa.shape[0],3))
- self.npa_left[:,0] = self.npa[:,0]
- self.npa_left[:,2] = self.npa[:,1]
- self.npa_left[:,1] = self.breadth/2
-
- ####################################################################
- def lefttri(self):
- '''
- Define triangles of the left face of the flume
-
- Arguments
- -----------
- none
- '''
-
- self.npt_left = np.array(self.npt)
- self.npt_left[:, [1, 0]] = self.npt_left[:, [0, 1]]
-
- ####################################################################
- def front(self):
- '''
- Define information/nodes of the front face of the flume
-
- Arguments
- -----------
- none
- '''
-
- self.npa_front = np.zeros(shape=(4,3))
- self.npa_front[0,:] = self.npa_right[0,:]
- self.npa_front[1,:] = self.npa_right[self.npa_right.shape[0]-1,:]
- self.npa_front[2,:] = self.npa_left[0,:]
- self.npa_front[3,:] = self.npa_left[self.npa_left.shape[0]-1,:]
-
- ####################################################################
- def fronttri(self):
- '''
- Define triangles of the front face of the flume
-
- Arguments
- -----------
- none
- '''
- self.npt_front = np.array([[0,1,2], [1,3,2]])
-
- ####################################################################
- def back(self):
- '''
- Define information/nodes of the back face of the flume
-
- Arguments
- -----------
- none
- '''
-
- self.npa_back = np.zeros(shape=(4,3))
- self.npa_back[0,:] = self.npa_right[self.npa_right.shape[0]-3,:]
- self.npa_back[1,:] = self.npa_right[self.npa_right.shape[0]-2,:]
- self.npa_back[2,:] = self.npa_left[self.npa_left.shape[0]-3,:]
- self.npa_back[3,:] = self.npa_left[self.npa_left.shape[0]-2,:]
-
- ####################################################################
- def backtri(self):
- '''
- Define triangles of the back face of the flume
-
- Arguments
- -----------
- none
- '''
-
- self.npt_back = np.array([[3,1,0], [0,2,3]])
-
- ####################################################################
- def top(self):
- '''
- Define information/nodes of the top face of the flume
-
- Arguments
- -----------
- none
- '''
-
- self.npa_top = np.zeros(shape=(4,3))
- self.npa_top[0,:] = self.npa_right[self.npa_right.shape[0]-1,:]
- self.npa_top[1,:] = self.npa_right[self.npa_right.shape[0]-2,:]
- self.npa_top[2,:] = self.npa_left[self.npa_left.shape[0]-1,:]
- self.npa_top[3,:] = self.npa_left[self.npa_left.shape[0]-2,:]
-
- ####################################################################
- def toptri(self):
- '''
- Define triangles of the top face of the flume
-
- Arguments
- -----------
- none
- '''
-
- self.npt_top = np.array([[2,0,1], [2,1,3]])
-
- ####################################################################
- def bottom(self):
- '''
- Define information/nodes of the bottom face of the flume
-
- Arguments
- -----------
- none
- '''
-
- # Create the coordinate vector
- self.npa_bottom = []
-
- # Loop over all the points
- for ii in range(0,self.npa_right.shape[0]-3):
- npa_temp1 = np.zeros(shape=(4,3))
- npa_temp2 = np.zeros(shape=(2,3))
-
- # Get the points
- if ii ==0:
- npa_temp1[0,:] = self.npa_right[ii,:]
- npa_temp1[1,:] = self.npa_left[ii,:]
- npa_temp1[2,:] = self.npa_right[ii+1,:]
- npa_temp1[3,:] = self.npa_left[ii+1,:]
- else:
- npa_temp2[0,:] = self.npa_right[ii+1,:]
- npa_temp2[1,:] = self.npa_left[ii+1,:]
-
- # Concatenate as necessary
- if ii==0:
- self.npa_bottom = npa_temp1
- else:
- self.npa_bottom = np.concatenate((self.npa_bottom,npa_temp2),axis=0)
-
- ####################################################################
- def bottomtri(self):
- '''
- Define triangles of the bottom face of the flume
-
- Arguments
- -----------
- none
- '''
-
- # Create the coordinate vector
- self.npt_bottom = []
- ntri = 2
-
- # Loop over all the points
- for ii in range(0,self.npa_right.shape[0]-3):
- npt_temp = np.zeros(shape=(2,3))
-
- # Get the triangles
- npt_temp = np.array([[0,1,2], [1,3,2]])
- npt_temp = npt_temp + ii*ntri
-
- # Concatenate as necessary
- if ii==0:
- self.npt_bottom = npt_temp
- else:
- self.npt_bottom = np.concatenate((self.npt_bottom,npt_temp),axis=0)
-
- #############################################################
- def writeSTL(self,base_filename,npa,npt,path):
- '''
- Write the STL files for each patch
-
- Arguments
- -----------
- base_filename: Patchname of the flume
- npa: List of nodes
- npt: List of triangles
- path: Location where dakota.json file exists
- '''
-
- # Create a filename
- filename = base_filename + ".stl"
- # Create the STL file
- cells = [("triangle", npt)]
- meshio.write_points_cells(filename, npa, cells)
- # Modify first and last line
- with open(filename) as f:
- lines = f.readlines()
- lines[0] = 'solid '+ base_filename + '\n'
- lines[len(lines)-1] = 'endsolid ' + base_filename + '\n'
- # Write the updated file
- with open(filename, "w") as f:
- f.writelines(lines)
- # Move the file to constant/triSurface folder
- newfilepath = os.path.join(path,'constant','triSurface',filename)
- os.replace(filename,newfilepath)
-
- #############################################################
- def extremedata(self,extreme,breadth):
- '''
- Creates the STL files for the flume
-
- Arguments
- -----------
- data: content of JSON file
- extreme: Maximum limits
- breadth: Breadth of the flume
- '''
-
- # Write the Max-Min values for the blockMesh
- BMXmin = extreme[0] - 0.25*(extreme[1] - extreme[0])
- BMXmax = extreme[1] + 0.25*(extreme[1] - extreme[0])
- BMYmin = -0.625*breadth
- BMYmax = 0.625*breadth
- BMZmin = extreme[2] - 0.25*(extreme[3] - extreme[2])
- BMZmax = extreme[3] + 0.25*(extreme[3] - extreme[2])
-
- # Write the temporary file
- filename = 'temp_geometry.txt'
- if os.path.exists(filename):
- os.remove(filename)
- tempfileID = open("temp_geometry.txt","w")
-
- # Write the extreme values to the files
- tempfileID.write(str(BMXmin)+"\n"+str(BMXmax)+"\n"+str(BMYmin)+"\n"+str(BMYmax)+"\n"+str(BMZmin)+"\n"+str(BMZmax)+"\n")
- tempfileID.close
-
- return 0
\ No newline at end of file
+class flume:
+ """This class includes the methods related to wave flume
+
+ Methods
+ -------
+ generateflume: Create STL files for the flume
+ extremedata: Get the extreme values and building information
+
+ """ # noqa: D400, D404
+
+ #############################################################
+ def generateflume(self, breadth, path):
+ """Creates the STL files for the flume
+
+ Arguments:
+ ---------
+        breadth: Breadth of the flume
+ path: Path where dakota.json exists - where we need to put STL files
+
+ """ # noqa: D400, D401
+ # Get the triangulated flume
+ extremeval = self.flumedata('FlumeData.txt')
+ self.breadth = breadth
+
+ # Right face
+ self.right() # Right vertices
+ self.npt_right = self.npt # Right triangles
+ self.writeSTL(
+ 'Right', self.npa_right, self.npt_right, path
+ ) # Write right STL file
+
+ # Left face
+ self.left() # Left vertices
+ self.lefttri() # Left triangles
+ self.writeSTL(
+ 'Left', self.npa_left, self.npt_left, path
+ ) # Write left STL file
+
+ # Front face
+ self.front() # Front faces
+ self.fronttri() # Front triangles
+ self.writeSTL(
+ 'Entry', self.npa_front, self.npt_front, path
+ ) # Write front STL file
+
+ # Back face
+ self.back() # Back vertices
+ self.backtri() # Back triangles
+ self.writeSTL(
+ 'Exit', self.npa_back, self.npt_back, path
+ ) # Write back STL file
+
+ # Top face
+ self.top() # Top vertices
+ self.toptri() # Top triangles
+ self.writeSTL('Top', self.npa_top, self.npt_top, path) # Write top STL file
+
+ # Bottom face
+ self.bottom() # Bottom vertices
+ self.bottomtri() # Bottom triangles
+ self.writeSTL(
+ 'Bottom', self.npa_bottom, self.npt_bottom, path
+ ) # Write bottom STL file
+
+ # Return extreme values
+ return extremeval
+
+ #############################################################
+ def flumedata(self, IpPTFile): # noqa: N803
+ """Gets information about the flume to create STL files
+
+ Arguments:
+ ---------
+ IpPTFile: File with points of the flume
+
+ """ # noqa: D400, D401
+ # Get the data for the boundary
+ data_boun = np.genfromtxt(IpPTFile, delimiter=',', dtype=(float, float))
+
+ # Add extremum to the constants file
+ maxvalues = np.max(data_boun, axis=0)
+ minvalues = np.min(data_boun, axis=0)
+ extremeval = np.array(
+ [minvalues[0], maxvalues[0], minvalues[1], maxvalues[1]]
+ )
+
+ # Initialize segments for left and right
+ segmentLR = [] # noqa: N806
+
+ # Loop over all coordinates and create coordinates
+ for ii in range(data_boun.shape[0]):
+ # Get each of the user points
+ if ii < data_boun.shape[0] - 1:
+ segmentLR.extend([(ii, ii + 1)])
+ else:
+ segmentLR.extend([(ii, 0)])
+
+ # Triangulate the polygon
+ ALR = dict(vertices=data_boun, segments=segmentLR) # noqa: C408, N806
+ BLR = tr.triangulate(ALR) # noqa: N806
+
+        # Get the triangles and vertices
+ nm_triangle = BLR['triangles'].tolist()
+ self.npt = np.asarray(nm_triangle, dtype=np.int32)
+ nm_vertices = BLR['vertices'].tolist()
+ self.npa = np.asarray(nm_vertices, dtype=np.float32)
+
+ # Define the polygon
+ mypoly = Polygon(data_boun)
+
+ # Loop over all triangles to find if inside polygon
+ indexes = []
+ noindexes = []
+ for ii in range(self.npt.shape[0]):
+ n0 = self.npt[ii, 0]
+ n1 = self.npt[ii, 1]
+ n2 = self.npt[ii, 2]
+ centroidX = (1 / 3) * ( # noqa: N806
+ self.npa[n0, 0] + self.npa[n1, 0] + self.npa[n2, 0]
+ )
+ centroidZ = (1 / 3) * ( # noqa: N806
+ self.npa[n0, 1] + self.npa[n1, 1] + self.npa[n2, 1]
+ )
+ po = Point(centroidX, centroidZ)
+ if mypoly.contains(po):
+ indexes.extend([(ii)])
+ else:
+ noindexes.extend([(ii)])
+
+ # Delete extra triangles
+ self.npt = np.delete(self.npt, noindexes, axis=0)
+
+ # Return extreme values
+ return extremeval
+
+ ####################################################################
+ def right(self):
+ """Gets information/nodes about to create right face of the flume
+
+ Arguments:
+ ---------
+ none
+
+ """ # noqa: D400, D401
+ self.npa_right = np.zeros(shape=(self.npa.shape[0], 3))
+ self.npa_right[:, 0] = self.npa[:, 0]
+ self.npa_right[:, 2] = self.npa[:, 1]
+ self.npa_right[:, 1] = -self.breadth / 2
+
+ ####################################################################
+ def left(self):
+ """Gets information/nodes about to create left face of the flume
+
+ Arguments:
+ ---------
+ none
+
+ """ # noqa: D400, D401
+ self.npa_left = np.zeros(shape=(self.npa.shape[0], 3))
+ self.npa_left[:, 0] = self.npa[:, 0]
+ self.npa_left[:, 2] = self.npa[:, 1]
+ self.npa_left[:, 1] = self.breadth / 2
+
+ ####################################################################
+ def lefttri(self):
+ """Define triangles of the left face of the flume
+
+ Arguments:
+ ---------
+ none
+
+ """ # noqa: D400
+ self.npt_left = np.array(self.npt)
+ self.npt_left[:, [1, 0]] = self.npt_left[:, [0, 1]]
+
+ ####################################################################
+ def front(self):
+ """Define information/nodes of the front face of the flume
+
+ Arguments:
+ ---------
+ none
+
+ """ # noqa: D400
+ self.npa_front = np.zeros(shape=(4, 3))
+ self.npa_front[0, :] = self.npa_right[0, :]
+ self.npa_front[1, :] = self.npa_right[self.npa_right.shape[0] - 1, :]
+ self.npa_front[2, :] = self.npa_left[0, :]
+ self.npa_front[3, :] = self.npa_left[self.npa_left.shape[0] - 1, :]
+
+ ####################################################################
+ def fronttri(self):
+ """Define triangles of the front face of the flume
+
+ Arguments:
+ ---------
+ none
+
+ """ # noqa: D400
+ self.npt_front = np.array([[0, 1, 2], [1, 3, 2]])
+
+ ####################################################################
+ def back(self):
+ """Define information/nodes of the back face of the flume
+
+ Arguments:
+ ---------
+ none
+
+ """ # noqa: D400
+ self.npa_back = np.zeros(shape=(4, 3))
+ self.npa_back[0, :] = self.npa_right[self.npa_right.shape[0] - 3, :]
+ self.npa_back[1, :] = self.npa_right[self.npa_right.shape[0] - 2, :]
+ self.npa_back[2, :] = self.npa_left[self.npa_left.shape[0] - 3, :]
+ self.npa_back[3, :] = self.npa_left[self.npa_left.shape[0] - 2, :]
+
+ ####################################################################
+ def backtri(self):
+ """Define triangles of the back face of the flume
+
+ Arguments:
+ ---------
+ none
+
+ """ # noqa: D400
+ self.npt_back = np.array([[3, 1, 0], [0, 2, 3]])
+
+ ####################################################################
+ def top(self):
+ """Define information/nodes of the top face of the flume
+
+ Arguments:
+ ---------
+ none
+
+ """ # noqa: D400
+ self.npa_top = np.zeros(shape=(4, 3))
+ self.npa_top[0, :] = self.npa_right[self.npa_right.shape[0] - 1, :]
+ self.npa_top[1, :] = self.npa_right[self.npa_right.shape[0] - 2, :]
+ self.npa_top[2, :] = self.npa_left[self.npa_left.shape[0] - 1, :]
+ self.npa_top[3, :] = self.npa_left[self.npa_left.shape[0] - 2, :]
+
+ ####################################################################
+ def toptri(self):
+ """Define triangles of the top face of the flume
+
+ Arguments:
+ ---------
+ none
+
+ """ # noqa: D400
+ self.npt_top = np.array([[2, 0, 1], [2, 1, 3]])
+
+ ####################################################################
+ def bottom(self):
+ """Define information/nodes of the bottom face of the flume
+
+ Arguments:
+ ---------
+ none
+
+ """ # noqa: D400
+ # Create the coordinate vector
+ self.npa_bottom = []
+
+ # Loop over all the points
+ for ii in range(self.npa_right.shape[0] - 3):
+ npa_temp1 = np.zeros(shape=(4, 3))
+ npa_temp2 = np.zeros(shape=(2, 3))
+
+ # Get the points
+ if ii == 0:
+ npa_temp1[0, :] = self.npa_right[ii, :]
+ npa_temp1[1, :] = self.npa_left[ii, :]
+ npa_temp1[2, :] = self.npa_right[ii + 1, :]
+ npa_temp1[3, :] = self.npa_left[ii + 1, :]
+ else:
+ npa_temp2[0, :] = self.npa_right[ii + 1, :]
+ npa_temp2[1, :] = self.npa_left[ii + 1, :]
+
+ # Concatenate as necessary
+ if ii == 0:
+ self.npa_bottom = npa_temp1
+ else:
+ self.npa_bottom = np.concatenate(
+ (self.npa_bottom, npa_temp2), axis=0
+ )
+
+ ####################################################################
+ def bottomtri(self):
+ """Define triangles of the bottom face of the flume
+
+ Arguments:
+ ---------
+ none
+
+ """ # noqa: D400
+ # Create the coordinate vector
+ self.npt_bottom = []
+ ntri = 2
+
+ # Loop over all the points
+ for ii in range(self.npa_right.shape[0] - 3):
+ npt_temp = np.zeros(shape=(2, 3))
+
+ # Get the triangles
+ npt_temp = np.array([[0, 1, 2], [1, 3, 2]])
+ npt_temp = npt_temp + ii * ntri # noqa: PLR6104
+
+ # Concatenate as necessary
+ if ii == 0:
+ self.npt_bottom = npt_temp
+ else:
+ self.npt_bottom = np.concatenate((self.npt_bottom, npt_temp), axis=0)
+
+ #############################################################
+ def writeSTL(self, base_filename, npa, npt, path): # noqa: N802, PLR6301
+ """Write the STL files for each patch
+
+ Arguments:
+ ---------
+ base_filename: Patchname of the flume
+ npa: List of nodes
+ npt: List of triangles
+ path: Location where dakota.json file exists
+
+ """ # noqa: D400
+ # Create a filename
+ filename = base_filename + '.stl'
+ # Create the STL file
+ cells = [('triangle', npt)]
+ meshio.write_points_cells(filename, npa, cells)
+ # Modify first and last line
+ with open(filename) as f: # noqa: PLW1514, PTH123
+ lines = f.readlines()
+ lines[0] = 'solid ' + base_filename + '\n'
+ lines[len(lines) - 1] = 'endsolid ' + base_filename + '\n'
+ # Write the updated file
+ with open(filename, 'w') as f: # noqa: PLW1514, PTH123
+ f.writelines(lines)
+ # Move the file to constant/triSurface folder
+ newfilepath = os.path.join(path, 'constant', 'triSurface', filename) # noqa: PTH118
+ os.replace(filename, newfilepath) # noqa: PTH105
+
+ #############################################################
+ def extremedata(self, extreme, breadth): # noqa: PLR6301
+ """Creates the STL files for the flume
+
+ Arguments:
+ ---------
+ extreme: Maximum limits
+ breadth: Breadth of the flume
+
+ """ # noqa: D400, D401
+ # Write the Max-Min values for the blockMesh
+ BMXmin = extreme[0] - 0.25 * (extreme[1] - extreme[0]) # noqa: N806
+ BMXmax = extreme[1] + 0.25 * (extreme[1] - extreme[0]) # noqa: N806
+ BMYmin = -0.625 * breadth # noqa: N806
+ BMYmax = 0.625 * breadth # noqa: N806
+ BMZmin = extreme[2] - 0.25 * (extreme[3] - extreme[2]) # noqa: N806
+ BMZmax = extreme[3] + 0.25 * (extreme[3] - extreme[2]) # noqa: N806
+
+ # Write the temporary file
+ filename = 'temp_geometry.txt'
+ if os.path.exists(filename): # noqa: PTH110
+ os.remove(filename) # noqa: PTH107
+ tempfileID = open('temp_geometry.txt', 'w') # noqa: N806, PLW1514, PTH123, SIM115
+
+ # Write the extreme values to the files
+ tempfileID.write(
+ str(BMXmin)
+ + '\n'
+ + str(BMXmax)
+ + '\n'
+ + str(BMYmin)
+ + '\n'
+ + str(BMYmax)
+ + '\n'
+ + str(BMZmin)
+ + '\n'
+ + str(BMZmax)
+ + '\n'
+ )
+        tempfileID.close()
+
+ return 0
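As a quick numeric check (illustrative values only), the blockMesh extents computed in extremedata() above pad the flume span by 25% in x and z and use 0.625 times the breadth on either side in y:

    # Illustrative [xmin, xmax, zmin, zmax] as returned by flumedata(), plus a breadth.
    extreme = [0.0, 80.0, -1.0, 4.5]
    breadth = 2.0

    BMXmin = extreme[0] - 0.25 * (extreme[1] - extreme[0])  # -20.0
    BMXmax = extreme[1] + 0.25 * (extreme[1] - extreme[0])  # 100.0
    BMYmin = -0.625 * breadth                               # -1.25
    BMYmax = 0.625 * breadth                                #  1.25
    BMZmin = extreme[2] - 0.25 * (extreme[3] - extreme[2])  # -2.375
    BMZmax = extreme[3] + 0.25 * (extreme[3] - extreme[2])  #  5.875
    print(BMXmin, BMXmax, BMYmin, BMYmax, BMZmin, BMZmax)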
diff --git a/modules/createEVENT/GeoClawOpenFOAM/hydroUtils.py b/modules/createEVENT/GeoClawOpenFOAM/hydroUtils.py
index 4623b28b9..2722a554f 100644
--- a/modules/createEVENT/GeoClawOpenFOAM/hydroUtils.py
+++ b/modules/createEVENT/GeoClawOpenFOAM/hydroUtils.py
@@ -1,28 +1,27 @@
-####################################################################
+# # noqa: INP001
# LICENSING INFORMATION
####################################################################
-"""
- LICENSE INFORMATION:
-
- Copyright (c) 2020-2030, The Regents of the University of California (Regents).
+"""LICENSE INFORMATION:
+
+Copyright (c) 2020-2030, The Regents of the University of California (Regents).
- All rights reserved.
+All rights reserved.
- Redistribution and use in source and binary forms, with or without modification, are permitted provided that the following conditions are met:
+Redistribution and use in source and binary forms, with or without modification, are permitted provided that the following conditions are met:
- 1. Redistributions of source code must retain the above copyright notice, this
- list of conditions and the following disclaimer.
- 2. Redistributions in binary form must reproduce the above copyright notice,
- this list of conditions and the following disclaimer in the documentation
- and/or other materials provided with the distribution.
+1. Redistributions of source code must retain the above copyright notice, this
+ list of conditions and the following disclaimer.
+2. Redistributions in binary form must reproduce the above copyright notice,
+ this list of conditions and the following disclaimer in the documentation
+ and/or other materials provided with the distribution.
- THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
- The views and conclusions contained in the software and documentation are those of the authors and should not be interpreted as representing official policies, either expressed or implied, of the FreeBSD Project.
+The views and conclusions contained in the software and documentation are those of the authors and should not be interpreted as representing official policies, either expressed or implied, of the FreeBSD Project.
- REGENTS SPECIFICALLY DISCLAIMS ANY WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE. THE SOFTWARE AND ACCOMPANYING DOCUMENTATION, IF ANY, PROVIDED HEREUNDER IS PROVIDED "AS IS". REGENTS HAS NO OBLIGATION TO PROVIDE MAINTENANCE, SUPPORT, UPDATES, ENHANCEMENTS, OR MODIFICATIONS.
-
-"""
+REGENTS SPECIFICALLY DISCLAIMS ANY WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE. THE SOFTWARE AND ACCOMPANYING DOCUMENTATION, IF ANY, PROVIDED HEREUNDER IS PROVIDED "AS IS". REGENTS HAS NO OBLIGATION TO PROVIDE MAINTENANCE, SUPPORT, UPDATES, ENHANCEMENTS, OR MODIFICATIONS.
+
+""" # noqa: D400
####################################################################
# AUTHOR INFORMATION
####################################################################
@@ -34,118 +33,113 @@
# Standard python modules
import os
+
####################################################################
# Hydro-UQ utilities class
####################################################################
-class hydroUtils():
- """
- This class includes all the general utilities that are
- required for the Hydro-UQ.
-
- Methods
- --------
- extract: Extracts an element from a nested json
- extract_element_from_json: Extracts an element from a nested json
- hydrolog: Initializes the log file
- general_header: Creates the header for the Hydro-UQ files
- """
-
- #############################################################
- def extract(self,obj,path,ind,arr):
- '''
- Extracts an element from a nested dictionary
- along a specified path and returns a list.
-
- Arguments
- -----------
- obj: A dict - input dictionary
- path: A list - list of strings that form the JSON path
- ind: An int - starting index
- arr: A list - output list
- '''
-
- key = path[ind]
- if ind + 1 < len(path):
- if isinstance(obj, dict):
- if key in obj.keys():
- self.extract(obj.get(key), path, ind + 1, arr)
- else:
- arr.append(None)
- elif isinstance(obj, list):
- if not obj:
- arr.append(None)
- else:
- for item in obj:
- self.extract(item, path, ind, arr)
- else:
- arr.append(None)
- if ind + 1 == len(path):
- if isinstance(obj, list):
- if not obj:
- arr.append(None)
- else:
- for item in obj:
- arr.append(item.get(key, None))
- elif isinstance(obj, dict):
- arr.append(obj.get(key, None))
- else:
- arr.append(None)
-
- return arr
-
- #############################################################
- def extract_element_from_json(self,obj,path):
- '''
- Extracts an element from a nested dictionary or
- a list of nested dictionaries along a specified path.
- If the input is a dictionary, a list is returned.
- If the input is a list of dictionary, a list of lists is returned.
-
- Arguments
- -----------
- obj: A list or dict - input dictionary or list of dictionaries
- path: A list - list of strings that form the path to the desired element
- '''
-
- if isinstance(obj, dict):
- return self.extract(obj, path, 0, [])
- elif isinstance(obj, list):
- outer_arr = []
- for item in obj:
- outer_arr.append(self.extract(item, path, 0, []))
- return outer_arr
-
- #############################################################
- def general_header(self):
- '''
- Used to create a general header for Hydro-UQ related files
-
- Variables
- -----------
- header: Stores the general header for the Hydro-UQ files
- '''
-
- header = """/*--------------------------*- NHERI SimCenter -*----------------------------*\
+class hydroUtils:
+ """This class includes all the general utilities that are
+ required for the Hydro-UQ.
+
+ Methods
+ -------
+ extract: Recursively extracts an element from a nested dictionary along a given path
+ extract_element_from_json: Extracts an element from a nested JSON dictionary or a list of such dictionaries
+ hydrolog: Initializes the log file
+ general_header: Creates the header for the Hydro-UQ files
+
+ """ # noqa: D205, D404
+
+ #############################################################
+ def extract(self, obj, path, ind, arr): # noqa: C901
+ """Extracts an element from a nested dictionary
+ along a specified path and returns a list.
+
+ Arguments:
+ ---------
+ obj: A dict - input dictionary
+ path: A list - list of strings that form the JSON path
+ ind: An int - starting index
+ arr: A list - output list
+
+ """ # noqa: D205, D401
+ key = path[ind]
+ if ind + 1 < len(path):
+ if isinstance(obj, dict):
+ if key in obj.keys(): # noqa: SIM118
+ self.extract(obj.get(key), path, ind + 1, arr)
+ else:
+ arr.append(None)
+ elif isinstance(obj, list):
+ if not obj:
+ arr.append(None)
+ else:
+ for item in obj:
+ self.extract(item, path, ind, arr)
+ else:
+ arr.append(None)
+ if ind + 1 == len(path):
+ if isinstance(obj, list):
+ if not obj:
+ arr.append(None)
+ else:
+ for item in obj:
+ arr.append(item.get(key, None))
+ elif isinstance(obj, dict):
+ arr.append(obj.get(key, None))
+ else:
+ arr.append(None)
+
+ return arr
+
+ #############################################################
+ def extract_element_from_json(self, obj, path):
+ """Extracts an element from a nested dictionary or
+ a list of nested dictionaries along a specified path.
+ If the input is a dictionary, a list is returned.
+ If the input is a list of dictionary, a list of lists is returned.
+
+ Arguments:
+ ---------
+ obj: A list or dict - input dictionary or list of dictionaries
+ path: A list - list of strings that form the path to the desired element
+
+ """ # noqa: D205, D401
+ if isinstance(obj, dict): # noqa: RET503
+ return self.extract(obj, path, 0, [])
+ elif isinstance(obj, list): # noqa: RET505
+ outer_arr = []
+ for item in obj:
+ outer_arr.append(self.extract(item, path, 0, [])) # noqa: PERF401
+ return outer_arr
+
+ #############################################################
+ def general_header(self): # noqa: PLR6301
+ """Used to create a general header for Hydro-UQ related files
+
+ Variables
+ -----------
+ header: Stores the general header for the Hydro-UQ files
+ """ # noqa: D400, D401
+ header = """/*--------------------------*- NHERI SimCenter -*----------------------------*\\
| | H |
| | Y | HydroUQ: Water-based Natural Hazards Modeling Application
|======| D | Website: simcenter.designsafe-ci.org/research-tools/hydro-uq
| | R | Version: 1.00
| | O |
-\*---------------------------------------------------------------------------*/ \n\n"""
-
- return header
+\\*---------------------------------------------------------------------------*/ \n\n""" # noqa: W291
- ####################################################################
- def of7header(self,OFclass,location,filename):
- '''
- Method to create a header for the input dictionaries.
+ return header # noqa: RET504
- Variables
- -----------
- header: FileID for the file being created
- '''
+ ####################################################################
+ def of7header(self, OFclass, location, filename): # noqa: N803, PLR6301
+ """Method to create a header for the input dictionaries.
- header = """/*--------------------------*- NHERI SimCenter -*----------------------------*\
+ Variables
+ -----------
+ header: FileID for the file being created
+ """ # noqa: D401
+ header = rf"""/*--------------------------*- NHERI SimCenter -*----------------------------*\
| | H |
| | Y | HydroUQ: Water-based Natural Hazards Modeling Application
|======| D | Website: simcenter.designsafe-ci.org/research-tools/hydro-uq
@@ -156,67 +150,64 @@ def of7header(self,OFclass,location,filename):
{{
version 2.0;
format ascii;
- class {};
- location "{}";
- object {};
+ class {OFclass};
+ location "{location}";
+ object {filename};
}}
-// * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * //\n\n""".format(OFclass,location,filename)
-
- return header
-
- #############################################################
- def hydrolog(self,projname,fipath):
- '''
- Used to initialize the log file for the Hydro-UQ program
-
- Arguments
- -----------
- projname: Name of the project as given by the user
- fipath: Path where the log file needs to be created
-
- Variables
- -----------
- flog: File pointer to the log file
- '''
-
- # Open a log file to write the outputs
- # Use project name for the log file
- # If no project name is specified, call it Untitled
- if projname != "":
- fname = ''.join(projname.split())+".h20log"
- else:
- fname = "Untitled.h20log"
-
- # Path to the file
- filepath = os.path.join(fipath, fname)
- self.flog = open(filepath, "w")
-
- #############################################################
- def getlist(self,data):
- '''
- Used to get the float from a list of negative string
-
- Arguments
- -----------
- userlist: Name of the project as given by the user
-
- '''
-
- # results = []
-
- # for line in data:
- # entry = []
- # for num in line.split(' '):
- # if num.replace('-', '').strip().isdigit():
- # entry.append(int(num))
- # else:
- # try:
- # entry.append(float(num))
- # except Exception:
- # pass
- # results.append(entry)
-
- data = data.replace(',',' ')
- results = [float(n) for n in data.split()]
-
- return results
\ No newline at end of file
+// * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * //\n\n""" # noqa: W291
+
+ return header # noqa: RET504
+
+ #############################################################
+ def hydrolog(self, projname, fipath):
+ """Used to initialize the log file for the Hydro-UQ program
+
+ Arguments:
+ ---------
+ projname: Name of the project as given by the user
+ fipath: Path where the log file needs to be created
+
+ Variables
+ -----------
+ flog: File pointer to the log file
+
+ """ # noqa: D400, D401
+ # Open a log file to write the outputs
+ # Use project name for the log file
+ # If no project name is specified, call it Untitled
+ if projname != '': # noqa: PLC1901
+ fname = ''.join(projname.split()) + '.h20log'
+ else:
+ fname = 'Untitled.h20log'
+
+ # Path to the file
+ filepath = os.path.join(fipath, fname) # noqa: PTH118
+ self.flog = open(filepath, 'w') # noqa: PLW1514, PTH123, SIM115
+
+ #############################################################
+ def getlist(self, data): # noqa: PLR6301
+ """Used to get the float from a list of negative string
+
+ Arguments:
+ ---------
+ userlist: Name of the project as given by the user
+
+ """ # noqa: D400, D401
+ # results = []
+
+ # for line in data:
+ # entry = []
+ # for num in line.split(' '):
+ # if num.replace('-', '').strip().isdigit():
+ # entry.append(int(num))
+ # else:
+ # try:
+ # entry.append(float(num))
+ # except Exception:
+ # pass
+ # results.append(entry)
+
+ data = data.replace(',', ' ')
+ results = [float(n) for n in data.split()]
+
+ return results # noqa: RET504
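A short usage sketch of the two utilities above; the nested dictionary is illustrative, not taken from an actual dakota.json:

    from hydroUtils import hydroUtils

    hydroutil = hydroUtils()
    data = {'Events': {'BuildData': 'Manual', 'NumBuild': '2'}}

    # Walk the nested dictionary along a path; a list is always returned,
    # with None entries when a key along the path is missing.
    print(hydroutil.extract_element_from_json(data, ['Events', 'BuildData']))  # ['Manual']
    print(hydroutil.extract_element_from_json(data, ['Events', 'Missing']))    # [None]

    # Convert a comma- or space-separated string of numbers into floats.
    print(hydroutil.getlist('1.5, -2, 3'))  # [1.5, -2.0, 3.0]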
diff --git a/modules/createEVENT/GeoClawOpenFOAM/of7Alpboundary.py b/modules/createEVENT/GeoClawOpenFOAM/of7Alpboundary.py
index abe27a220..de03da459 100644
--- a/modules/createEVENT/GeoClawOpenFOAM/of7Alpboundary.py
+++ b/modules/createEVENT/GeoClawOpenFOAM/of7Alpboundary.py
@@ -1,28 +1,27 @@
-####################################################################
+# # noqa: INP001
# LICENSING INFORMATION
####################################################################
-"""
- LICENSE INFORMATION:
-
- Copyright (c) 2020-2030, The Regents of the University of California (Regents).
+"""LICENSE INFORMATION:
+
+Copyright (c) 2020-2030, The Regents of the University of California (Regents).
+
+All rights reserved.
- All rights reserved.
+Redistribution and use in source and binary forms, with or without modification, are permitted provided that the following conditions are met:
- Redistribution and use in source and binary forms, with or without modification, are permitted provided that the following conditions are met:
+1. Redistributions of source code must retain the above copyright notice, this
+ list of conditions and the following disclaimer.
+2. Redistributions in binary form must reproduce the above copyright notice,
+ this list of conditions and the following disclaimer in the documentation
+ and/or other materials provided with the distribution.
- 1. Redistributions of source code must retain the above copyright notice, this
- list of conditions and the following disclaimer.
- 2. Redistributions in binary form must reproduce the above copyright notice,
- this list of conditions and the following disclaimer in the documentation
- and/or other materials provided with the distribution.
+THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
- THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+The views and conclusions contained in the software and documentation are those of the authors and should not be interpreted as representing official policies, either expressed or implied, of the FreeBSD Project.
- The views and conclusions contained in the software and documentation are those of the authors and should not be interpreted as representing official policies, either expressed or implied, of the FreeBSD Project.
+REGENTS SPECIFICALLY DISCLAIMS ANY WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE. THE SOFTWARE AND ACCOMPANYING DOCUMENTATION, IF ANY, PROVIDED HEREUNDER IS PROVIDED "AS IS". REGENTS HAS NO OBLIGATION TO PROVIDE MAINTENANCE, SUPPORT, UPDATES, ENHANCEMENTS, OR MODIFICATIONS.
- REGENTS SPECIFICALLY DISCLAIMS ANY WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE. THE SOFTWARE AND ACCOMPANYING DOCUMENTATION, IF ANY, PROVIDED HEREUNDER IS PROVIDED "AS IS". REGENTS HAS NO OBLIGATION TO PROVIDE MAINTENANCE, SUPPORT, UPDATES, ENHANCEMENTS, OR MODIFICATIONS.
-
-"""
+""" # noqa: D400
####################################################################
# AUTHOR INFORMATION
####################################################################
@@ -32,116 +31,113 @@
# Import all necessary modules
####################################################################
# Standard python modules
-import os
# Other custom modules
from hydroUtils import hydroUtils
+
####################################################################
# OpenFOAM7 solver class
####################################################################
-class of7Alpboundary():
- """
- This class includes the methods related to
- alpha boundary conditions for openfoam7.
-
- Methods
- --------
- Alptext: Get all the text for the p_rgh-file
- """
-
- #############################################################
- def Alptext(self,data,patches):
- '''
- Creates the necessary text for pressure bc for openfoam7
-
- Arguments
- -----------
- data: all the JSON data
- '''
-
- # Create a utilities object
- hydroutil = hydroUtils()
-
- # Get the header text for the U-file
- Alptext = self.Alpheader()
-
- # Start the outside
- Alptext = Alptext + "boundaryField\n{\n"
-
- # Loop over all patches
- for patchname in patches:
- Alptext = Alptext + "\t" + patchname + "\n"
- patch = hydroutil.extract_element_from_json(data, ["Events","PressureType_" + patchname])
- if patch == [None]:
- Alptype = -1
- else:
- Alptype = 0
- Alptext = Alptext + self.Alppatchtext(Alptype,patchname)
-
- # Check for building and other building
- Alptext = Alptext + '\tBuilding\n'
- Alptext = Alptext + self.Alppatchtext(0,'Building')
- Alptext = Alptext + '\tOtherBuilding\n'
- Alptext = Alptext + self.Alppatchtext(0,'OtherBuilding')
-
- # Close the outside
- Alptext = Alptext + "}\n\n"
-
- # Return the text for velocity BC
- return Alptext
-
- #############################################################
- def Alpheader(self):
- '''
- Creates the text for the header for pressure file
-
- Variable
- -----------
- header: Header for the p_rgh-file
- '''
-
- header = """/*--------------------------*- NHERI SimCenter -*----------------------------*\
+class of7Alpboundary:
+ """This class includes the methods related to
+ alpha boundary conditions for openfoam7.
+
+ Methods
+ -------
+ Alptext: Get all the text for the alpha.water-file
+
+ """ # noqa: D205, D404
+
+ #############################################################
+ def Alptext(self, data, patches): # noqa: N802
+ """Creates the necessary text for pressure bc for openfoam7
+
+ Arguments:
+ ---------
+ data: all the JSON data
+
+ """ # noqa: D400, D401
+ # Create a utilities object
+ hydroutil = hydroUtils()
+
+ # Get the header text for the alpha file
+ Alptext = self.Alpheader() # noqa: N806
+
+ # Start the outside
+ Alptext = Alptext + 'boundaryField\n{\n' # noqa: N806, PLR6104
+
+ # Loop over all patches
+ for patchname in patches:
+ Alptext = Alptext + '\t' + patchname + '\n' # noqa: N806
+ patch = hydroutil.extract_element_from_json(
+ data, ['Events', 'PressureType_' + patchname]
+ )
+ if patch == [None]:
+ Alptype = -1 # noqa: N806
+ else:
+ Alptype = 0 # noqa: N806
+ Alptext = Alptext + self.Alppatchtext(Alptype, patchname) # noqa: N806, PLR6104
+
+ # Check for building and other building
+ Alptext = Alptext + '\tBuilding\n' # noqa: N806, PLR6104
+ Alptext = Alptext + self.Alppatchtext(0, 'Building') # noqa: N806, PLR6104
+ Alptext = Alptext + '\tOtherBuilding\n' # noqa: N806, PLR6104
+ Alptext = Alptext + self.Alppatchtext(0, 'OtherBuilding') # noqa: N806, PLR6104
+
+ # Close the outside
+ Alptext = Alptext + '}\n\n' # noqa: N806, PLR6104
+
+ # Return the text for velocity BC
+ return Alptext # noqa: RET504
+
+ #############################################################
+ def Alpheader(self): # noqa: N802, PLR6301
+ """Creates the text for the header for pressure file
+
+ Variable
+ -----------
+ header: Header for the p_rgh-file
+ """ # noqa: D400, D401
+ header = """/*--------------------------*- NHERI SimCenter -*----------------------------*\\
| | H |
| | Y | HydroUQ: Water-based Natural Hazards Modeling Application
|======| D | Website: simcenter.designsafe-ci.org/research-tools/hydro-uq
| | R | Version: 1.00
| | O |
-\*---------------------------------------------------------------------------*/
+\\*---------------------------------------------------------------------------*/
FoamFile
{\n\tversion\t2.0;\n\tformat\tascii;\n\tclass\tvolScalarField;\n\tlocation\t"0";\n\tobject\talpha.water;\n}
-// * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * //\n\n"""
-
- header = header + "dimensions\t[0 0 0 0 0 0 0];\n\n"
- header = header + "internalField\tuniform\t0;\n\n"
-
- # Return the header for U file
- return header
-
- #############################################################
- def Alppatchtext(self,Alptype,patchname):
- '''
- Creates the text the pressure boundary condition
-
- Arguments
- -----------
- patchname: Name of the patch
-
- Variable
- -----------
- Alptext: Text for the particular patch
- '''
-
- if patchname == 'Top':
- Alptext = "\t{\n\t\t"
- Alptext = Alptext + "type\tinletOutlet;\n\t\t"
- Alptext = Alptext + "inletValue\tuniform 0;\n\t\t"
- Alptext = Alptext + "value\tuniform 0;\n\t}\n"
-
- else:
- Alptext = "\t{\n\t\t"
- Alptext = Alptext + "type\tzeroGradient;\n\t}\n"
-
-
- # Return the header for U file
- return Alptext
\ No newline at end of file
+// * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * //\n\n""" # noqa: W291
+
+ header = header + 'dimensions\t[0 0 0 0 0 0 0];\n\n' # noqa: PLR6104
+ header = header + 'internalField\tuniform\t0;\n\n' # noqa: PLR6104
+
+ # Return the header for the alpha file
+ return header # noqa: RET504
+
+ #############################################################
+ def Alppatchtext(self, Alptype, patchname): # noqa: ARG002, N802, N803, PLR6301
+ """Creates the text the pressure boundary condition
+
+ Arguments:
+ ---------
+ patchname: Name of the patch
+
+ Variable
+ -----------
+ Alptext: Text for the particular patch
+
+ """ # noqa: D400, D401
+ if patchname == 'Top':
+ Alptext = '\t{\n\t\t' # noqa: N806
+ Alptext = Alptext + 'type\tinletOutlet;\n\t\t' # noqa: N806, PLR6104
+ Alptext = Alptext + 'inletValue\tuniform 0;\n\t\t' # noqa: N806, PLR6104
+ Alptext = Alptext + 'value\tuniform 0;\n\t}\n' # noqa: N806, PLR6104
+
+ else:
+ Alptext = '\t{\n\t\t' # noqa: N806
+ Alptext = Alptext + 'type\tzeroGradient;\n\t}\n' # noqa: N806, PLR6104
+
+ # Return the patch text for the alpha file
+ return Alptext
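For reference, a sketch of the patch-level text produced by the class above (the patch name 'Entry' is only an example):

    from of7Alpboundary import of7Alpboundary

    alpha = of7Alpboundary()

    # Every patch except 'Top' gets a zero-gradient alpha condition.
    print(alpha.Alppatchtext(0, 'Entry'))
    # {
    #     type    zeroGradient;
    # }

    # 'Top' is an inletOutlet with a zero inlet value.
    print(alpha.Alppatchtext(-1, 'Top'))
    # {
    #     type    inletOutlet;
    #     inletValue    uniform 0;
    #     value    uniform 0;
    # }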
diff --git a/modules/createEVENT/GeoClawOpenFOAM/of7Building.py b/modules/createEVENT/GeoClawOpenFOAM/of7Building.py
index bf0d7f8f8..3f3956ba1 100644
--- a/modules/createEVENT/GeoClawOpenFOAM/of7Building.py
+++ b/modules/createEVENT/GeoClawOpenFOAM/of7Building.py
@@ -1,28 +1,27 @@
-####################################################################
+# # noqa: INP001
# LICENSING INFORMATION
####################################################################
-"""
- LICENSE INFORMATION:
-
- Copyright (c) 2020-2030, The Regents of the University of California (Regents).
+"""LICENSE INFORMATION:
+
+Copyright (c) 2020-2030, The Regents of the University of California (Regents).
- All rights reserved.
+All rights reserved.
- Redistribution and use in source and binary forms, with or without modification, are permitted provided that the following conditions are met:
+Redistribution and use in source and binary forms, with or without modification, are permitted provided that the following conditions are met:
- 1. Redistributions of source code must retain the above copyright notice, this
- list of conditions and the following disclaimer.
- 2. Redistributions in binary form must reproduce the above copyright notice,
- this list of conditions and the following disclaimer in the documentation
- and/or other materials provided with the distribution.
+1. Redistributions of source code must retain the above copyright notice, this
+ list of conditions and the following disclaimer.
+2. Redistributions in binary form must reproduce the above copyright notice,
+ this list of conditions and the following disclaimer in the documentation
+ and/or other materials provided with the distribution.
- THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
- The views and conclusions contained in the software and documentation are those of the authors and should not be interpreted as representing official policies, either expressed or implied, of the FreeBSD Project.
+The views and conclusions contained in the software and documentation are those of the authors and should not be interpreted as representing official policies, either expressed or implied, of the FreeBSD Project.
- REGENTS SPECIFICALLY DISCLAIMS ANY WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE. THE SOFTWARE AND ACCOMPANYING DOCUMENTATION, IF ANY, PROVIDED HEREUNDER IS PROVIDED "AS IS". REGENTS HAS NO OBLIGATION TO PROVIDE MAINTENANCE, SUPPORT, UPDATES, ENHANCEMENTS, OR MODIFICATIONS.
-
-"""
+REGENTS SPECIFICALLY DISCLAIMS ANY WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE. THE SOFTWARE AND ACCOMPANYING DOCUMENTATION, IF ANY, PROVIDED HEREUNDER IS PROVIDED "AS IS". REGENTS HAS NO OBLIGATION TO PROVIDE MAINTENANCE, SUPPORT, UPDATES, ENHANCEMENTS, OR MODIFICATIONS.
+
+""" # noqa: D400
####################################################################
# AUTHOR INFORMATION
####################################################################
@@ -33,6 +32,7 @@
####################################################################
# Standard python modules
import os
+
import meshio
import numpy as np
@@ -43,393 +43,464 @@
####################################################################
# OpenFOAM7 solver class
####################################################################
-class of7Building():
- """
- This class includes the methods related to
- creating the building for openfoam7.
-
- Methods
- --------
- buildcheck: Checks if all files required for creating the building exists
- createbuilds: Creates the STL files
- """
-
- #############################################################
- def buildcheck(self,data,path):
- '''
- Checks if all files required for creating the building exists
-
- Arguments
- -----------
- data: all the JSON data
- path: Path to where the dakota.json exists
- '''
-
- # Create a utilities object
- hydroutil = hydroUtils()
-
- # Check if a translate script exists.
- # If so delete it
- if os.path.exists('translate.sh'):
- os.remove('translate.sh')
-
- # Check for STL file
- # Get the type of building definition
- buildeftype = ', '.join(hydroutil.extract_element_from_json(data, ["Events","BuildData"]))
- if buildeftype == 'Manual':
- # Find number of buildings
- numbuild = ', '.join(hydroutil.extract_element_from_json(data, ["Events","NumBuild"]))
- if int(numbuild) > 0:
- # Number of buildings with response
- numbuildres = 0
- # Get data for each building
- for ii in range(int(numbuild)):
- builddata = ', '.join(hydroutil.extract_element_from_json(data, ["Events","BuildingTable"+str(ii)]))
- builddata = builddata.replace(',',' ')
- nums = [float(n) for n in builddata.split()]
- buildtype = nums[0]
- if int(buildtype) == -1 or int(buildtype) == 2:
- stlfile = hydroutil.extract_element_from_json(data, ["Events","BuildingSTLFile"])
- if stlfile == [None]:
- return -1
- else:
- stlfile = ', '.join(hydroutil.extract_element_from_json(data, ["Events","BuildingSTLFile"]))
- if not os.path.exists(os.path.join(path,stlfile)):
- return -1
-
- if int(buildtype) == -2 or int(buildtype) == -1:
- numbuildres += 1
- # Check GI
- depth = hydroutil.extract_element_from_json(data, ["GeneralInformation","depth"])
- if str(depth[0]) == [None]:
- return -1
-
- width = hydroutil.extract_element_from_json(data, ["GeneralInformation","width"])
- if str(width[0]) == [None]:
- return -1
-
- height = hydroutil.extract_element_from_json(data, ["GeneralInformation","height"])
- if str(height[0]) == [None]:
- return -1
-
- geninfo = hydroutil.extract_element_from_json(data, ["GeneralInformation"])
- geninfo = str(geninfo[0])
- xbuild = geninfo.partition("'location': {'latitude': ")[1].partition(", 'longitude':")[0]
- ybuild = geninfo.partition("'longitude': ")[2].partition("},")[0]
- # if not depth:
- # return -1
- # # else:
- # # depth = float(depth)
- # if not width:
- # return -1
- # # else:
- # # width = float(width)
- # if not height:
- # return -1
- # # else:
- # # height = float(height)
- if not xbuild:
- return -1
- # else:
- # xbuild = float(float)
- if not ybuild:
- return -1
- # else:
- # ybuild = float(ybuild)
-
- if numbuildres > 1:
- return -1
-
- elif buildeftype == 'Parameters':
- buildshape = ', '.join(hydroutil.extract_element_from_json(data, ["Events","BuildShape"]))
- if int(buildshape) == 0:
- return -1
- elif int(buildshape) == 1:
- stlfile = hydroutil.extract_element_from_json(data, ["Events","BuildingSTLFile"])
- if stlfile == [None]:
- return -1
- else:
- stlfile = ', '.join(hydroutil.extract_element_from_json(data, ["Events","BuildingSTLFile"]))
- if not os.path.exists(os.path.join(path,stlfile)):
- return -1
-
- # Check if building distribution selected
- buildDist = ', '.join(hydroutil.extract_element_from_json(data, ["Events","BuildDist"]))
- if int(buildDist) == 0:
- return -1
-
-
- return 0
-
- #############################################################
- def createbuilds(self,data,path):
- '''
- Creates the STL files for the buildings and move to correct location
-
- Arguments
- -----------
- data: all the JSON data
- path: Path to where the dakota.json exists
- '''
-
- # Create a utilities object
- hydroutil = hydroUtils()
-
- # Get the type of building definition
- buildeftype = ', '.join(hydroutil.extract_element_from_json(data, ["Events","BuildData"]))
- if buildeftype == 'Manual':
- self.buildmanual(data,path)
-
- elif buildeftype == 'Parameters':
- self.buildpara(data,path)
-
- return 0
-
- #############################################################
- def buildmanual(self,data,path):
- '''
- Creates the STL files for the buildings using manual data from table
-
- Arguments
- -----------
- data: all the JSON data
- path: Path to where the dakota.json exists
- '''
-
- # Create a utilities object
- hydroutil = hydroUtils()
-
- # Number of types of buildings
- numresbuild = 0
- numotherbuild = 0
-
- # Get the coordinate and dimension data
-
-
- # Find number of buildings
- numbuild = ', '.join(hydroutil.extract_element_from_json(data, ["Events","NumBuild"]))
- if int(numbuild) > 0:
- # Get data for each building
- for ii in range(int(numbuild)):
- builddata = ', '.join(hydroutil.extract_element_from_json(data, ["Events","BuildingTable"+str(ii)]))
- builddata = builddata.replace(',',' ')
- nums = [float(n) for n in builddata.split()]
- buildtype = nums[0]
-
- if int(buildtype) == -2:
- # Create a temporary file using GI information (Response)
- self.buildcubeGI(data,path)
- # Increment response buildign number
- numresbuild += 1
- elif int(buildtype) == -1:
- # Move the STL file to OF folder and change name to Building (Response)
- self.readResSTL(data,path,nums[3])
- # Increment response buildign number
- numresbuild += 1
- elif int(buildtype) == 1:
- print('no response + cuboid')
- # Create a temporary file
- # Call flume to build an STL
- # Combine all STL to building + number
- # Increment response buildign number
- numotherbuild += 1
- elif int(buildtype) == 2:
- print('no response + STL')
- # Check if STL file exists
- # Increment response buildign number
- numotherbuild += 1
-
- # Create other buildings STL if more than one exists (Join buildings)
-
- # Create the building flag
- self.buildflagadd(numresbuild,numotherbuild)
-
- #############################################################
- def buildpara(self,data,path):
- '''
- Creates the STL files for the buildings using parametrized data
-
- Arguments
- -----------
- data: all the JSON data
- path: Path to where the dakota.json exists
- '''
-
- # Create a utilities object
- hydroutil = hydroUtils()
-
- #############################################################
- def buildcubeGI(self,data,path):
- '''
- Creates the STL files for the buildings using parametrized data
-
- Arguments
- -----------
- data: all the JSON data
- path: Path to where the dakota.json exists
- '''
-
- # Create a utilities object
- hydroutil = hydroUtils()
-
- # Create the building STL file
- base_filename = 'Building'
- filename = base_filename + ".stl"
- # Define coordinates
- npa = np.array([\
- [-1, -1, -1],
- [+1, -1, -1],
- [+1, +1, -1],
- [-1, +1, -1],
- [-1, -1, +1],
- [+1, -1, +1],
- [+1, +1, +1],
- [-1, +1, +1]])
- npt = np.array([\
- [0,3,1],
- [1,3,2],
- [0,4,7],
- [0,7,3],
- [4,5,6],
- [4,6,7],
- [5,1,2],
- [5,2,6],
- [2,3,6],
- [3,7,6],
- [0,1,5],
- [0,5,4]])
- # Scaling
- npa = 0.5*npa
- npa[:,2] = 0.5+npa[:,2]
- # Temporary
- npa[:,0] = npa[:,0]*3
- npa[:,2] = npa[:,2]*1.2474
- npa[:,0] = npa[:,0] + 47
- npa[:,2] = npa[:,2] + 1.7526
-
- # Create the STL file
- cells = [("triangle", npt)]
- meshio.write_points_cells(filename, npa, cells)
- # Modify first and last line
- with open(filename) as f:
- lines = f.readlines()
- lines[0] = 'solid '+ base_filename + '\n'
- lines[len(lines)-1] = 'endsolid ' + base_filename + '\n'
- # Write the updated file
- with open(filename, "w") as f:
- f.writelines(lines)
-
- # Create the translation script
- if os.path.exists('translate.sh'):
- with open('translate.sh','a') as f:
- buildpath = os.path.join('constant','triSurface','Building.stl')
- lines = 'cp Building.stl ' + buildpath + '\n'
- f.writelines(lines)
- else:
- with open('translate.sh','w') as f:
- buildpath = os.path.join('constant','triSurface','Building.stl')
- lines = 'cp Building.stl ' + buildpath + '\n'
- f.writelines(lines)
-
- #############################################################
- def readResSTL(self,data,path,ztrans):
- '''
- Creates the STL files for the buildings using parametrized data
-
- Arguments
- -----------
- data: all the JSON data
- path: Path to where the dakota.json exists
- ztrans: Translation distance in z-direction
- '''
-
- # Create a utilities object
- hydroutil = hydroUtils()
-
- # Filename
- stlfile = ', '.join(hydroutil.extract_element_from_json(data, ["Events","BuildingSTLFile"]))
-
- # Read the stlfile
- stlfilepath = os.path.join(path,stlfile)
- print(stlfilepath)
- mesh = meshio.read(stlfilepath,file_format="stl")
-
- mesh.points[:,0] = mesh.points[:,0]/(max(abs(mesh.points[:,0])))
- mesh.points[:,1] = mesh.points[:,1]/(max(abs(mesh.points[:,1])))
- mesh.points[:,2] = mesh.points[:,2]/(max(abs(mesh.points[:,2])))
-
- # Get GI
- geninfo = hydroutil.extract_element_from_json(data, ["GeneralInformation"])
- geninfo = str(geninfo[0])
- # depth = float(geninfo.partition("'depth': ")[2].partition(", 'height':")[0])
- # width = float(geninfo.partition("'width': ")[2].partition("}")[0])
- # height = float(geninfo.partition("'height': ")[2].partition(", 'location':")[0])
- xbuild = float(geninfo.partition("'location': {'latitude': ")[2].partition(", 'longitude':")[0])
- ybuild = float(geninfo.partition("'longitude': ")[2].partition("},")[0])
- depth = hydroutil.extract_element_from_json(data, ["GeneralInformation","depth"])
- depth = float(depth[0])
- width = hydroutil.extract_element_from_json(data, ["GeneralInformation","width"])
- width = float(width[0])
- height = hydroutil.extract_element_from_json(data, ["GeneralInformation","height"])
- height = float(height[0])
-
- # Scale the STL model
- mesh.points[:,0] = mesh.points[:,0]*depth
- mesh.points[:,1] = mesh.points[:,1]*width
- mesh.points[:,2] = mesh.points[:,2]*height
-
- # Write meshfile
- meshio.write_points_cells('Building.stl', mesh.points, mesh.cells)
-
- # Modify first and last line
- with open("Building.stl") as f:
- lines = f.readlines()
- lines[0] = 'solid '+ 'Building' + '\n'
- lines[len(lines)-1] = 'endsolid ' + 'Building' + '\n'
-
- # Write the updated file
- with open('Building.stl', "w") as f:
- f.writelines(lines)
-
- # Move the file to constant/triSurface folder
- newfilepath = os.path.join(path,'constant','triSurface','Building.stl')
- os.replace('Building.stl',newfilepath)
-
- # Create the translation script
- if os.path.exists('translate.sh'):
- with open('translate.sh','a') as f:
- buildpath = os.path.join('constant','triSurface','Building.stl')
- lines = 'export FILE="' + buildpath + '"\n'
- lines = lines + 'surfaceTransformPoints -translate "(' + str(xbuild) + ' ' + str(ybuild) + ' ' + str(ztrans) +')" $FILE $FILE\n'
- f.writelines(lines)
- else:
- with open('translate.sh','w') as f:
- buildpath = os.path.join('constant','triSurface','Building.stl')
- lines = 'export FILE="' + buildpath + '"\n'
- lines = lines + 'surfaceTransformPoints -translate "(' + str(xbuild) + ' ' + str(ybuild) + ' ' + str(ztrans) + ')" $FILE $FILE\n'
- f.writelines(lines)
-
- #############################################################
- def buildflagadd(self,numresbuild,numotherbuild):
- '''
- Add building flag to temp_geometry.txt
-
- Arguments
- -----------
- numresbuild: Number of building with response
- numotherbuild: NUmber of other buildings
- '''
-
- # Get building flag
- if numresbuild == 0 and numotherbuild == 0:
- flag = 0
- elif numresbuild > 0 and numotherbuild == 0:
- flag = 1
- elif numresbuild > 0 and numotherbuild > 0:
- flag = 2
- elif numresbuild == 0 and numotherbuild > 0:
- flag = 3
-
- # Add building flag to temp file
- with open('temp_geometry.txt', "a") as f:
- f.writelines(str(flag)+'\n')
+class of7Building:
+ """This class includes the methods related to
+ creating the building for openfoam7.
+
+ Methods
+ -------
+ buildcheck: Checks if all files required for creating the building exist
+ createbuilds: Creates the STL files
+
+ """ # noqa: D205, D404
+
+ #############################################################
+ def buildcheck(self, data, path): # noqa: C901, PLR0911, PLR6301
+ """Checks if all files required for creating the building exists
+
+ Arguments:
+ ---------
+ data: all the JSON data
+ path: Path to where the dakota.json exists
+
+ """ # noqa: D400, D401
+ # Create a utilities object
+ hydroutil = hydroUtils()
+
+ # Check if a translate script exists.
+ # If so delete it
+ if os.path.exists('translate.sh'): # noqa: PTH110
+ os.remove('translate.sh') # noqa: PTH107
+
+ # Check for STL file
+ # Get the type of building definition
+ buildeftype = ', '.join(
+ hydroutil.extract_element_from_json(data, ['Events', 'BuildData'])
+ )
+ if buildeftype == 'Manual': # noqa: PLR1702
+ # Find number of buildings
+ numbuild = ', '.join(
+ hydroutil.extract_element_from_json(data, ['Events', 'NumBuild'])
+ )
+ if int(numbuild) > 0:
+ # Number of buildings with response
+ numbuildres = 0
+ # Get data for each building
+ for ii in range(int(numbuild)):
+ builddata = ', '.join(
+ hydroutil.extract_element_from_json(
+ data, ['Events', 'BuildingTable' + str(ii)]
+ )
+ )
+ builddata = builddata.replace(',', ' ')
+ nums = [float(n) for n in builddata.split()]
+ buildtype = nums[0]
+ if int(buildtype) == -1 or int(buildtype) == 2: # noqa: PLR2004
+ stlfile = hydroutil.extract_element_from_json(
+ data, ['Events', 'BuildingSTLFile']
+ )
+ if stlfile == [None]:
+ return -1
+ else: # noqa: RET505
+ stlfile = ', '.join(
+ hydroutil.extract_element_from_json(
+ data, ['Events', 'BuildingSTLFile']
+ )
+ )
+ if not os.path.exists(os.path.join(path, stlfile)): # noqa: PTH110, PTH118
+ return -1
+
+ if int(buildtype) == -2 or int(buildtype) == -1: # noqa: PLR2004
+ numbuildres += 1
+ # Check GI
+ depth = hydroutil.extract_element_from_json(
+ data, ['GeneralInformation', 'depth']
+ )
+ if str(depth[0]) == [None]:
+ return -1
+
+ width = hydroutil.extract_element_from_json(
+ data, ['GeneralInformation', 'width']
+ )
+ if str(width[0]) == [None]:
+ return -1
+
+ height = hydroutil.extract_element_from_json(
+ data, ['GeneralInformation', 'height']
+ )
+ if str(height[0]) == [None]:
+ return -1
+
+ geninfo = hydroutil.extract_element_from_json(
+ data, ['GeneralInformation']
+ )
+ geninfo = str(geninfo[0])
+ xbuild = geninfo.partition("'location': {'latitude': ")[
+ 1
+ ].partition(", 'longitude':")[0]
+ ybuild = geninfo.partition("'longitude': ")[2].partition(
+ '},'
+ )[0]
+ # if not depth:
+ # return -1
+ # # else:
+ # # depth = float(depth)
+ # if not width:
+ # return -1
+ # # else:
+ # # width = float(width)
+ # if not height:
+ # return -1
+ # # else:
+ # # height = float(height)
+ if not xbuild:
+ return -1
+ # else:
+ # xbuild = float(float)
+ if not ybuild:
+ return -1
+ # else:
+ # ybuild = float(ybuild)
+
+ if numbuildres > 1:
+ return -1
+
+ elif buildeftype == 'Parameters':
+ buildshape = ', '.join(
+ hydroutil.extract_element_from_json(data, ['Events', 'BuildShape'])
+ )
+ if int(buildshape) == 0:
+ return -1
+ elif int(buildshape) == 1: # noqa: RET505
+ stlfile = hydroutil.extract_element_from_json(
+ data, ['Events', 'BuildingSTLFile']
+ )
+ if stlfile == [None]:
+ return -1
+ else: # noqa: RET505
+ stlfile = ', '.join(
+ hydroutil.extract_element_from_json(
+ data, ['Events', 'BuildingSTLFile']
+ )
+ )
+ if not os.path.exists(os.path.join(path, stlfile)): # noqa: PTH110, PTH118
+ return -1
+
+ # Check if building distribution selected
+ buildDist = ', '.join( # noqa: N806
+ hydroutil.extract_element_from_json(data, ['Events', 'BuildDist'])
+ )
+ if int(buildDist) == 0:
+ return -1
+
+ return 0
+
+ #############################################################
+ def createbuilds(self, data, path):
+ """Creates the STL files for the buildings and move to correct location
+
+ Arguments:
+ ---------
+ data: all the JSON data
+ path: Path to where the dakota.json exists
+
+ """ # noqa: D400, D401
+ # Create a utilities object
+ hydroutil = hydroUtils()
+
+ # Get the type of building definition
+ buildeftype = ', '.join(
+ hydroutil.extract_element_from_json(data, ['Events', 'BuildData'])
+ )
+ if buildeftype == 'Manual':
+ self.buildmanual(data, path)
+
+ elif buildeftype == 'Parameters':
+ self.buildpara(data, path)
+
+ return 0
+
+ #############################################################
+ def buildmanual(self, data, path):
+ """Creates the STL files for the buildings using manual data from table
+
+ Arguments:
+ ---------
+ data: all the JSON data
+ path: Path to where the dakota.json exists
+
+ """ # noqa: D400, D401
+ # Create a utilities object
+ hydroutil = hydroUtils()
+
+ # Number of types of buildings
+ numresbuild = 0
+ numotherbuild = 0
+
+ # Get the coordinate and dimension data
+
+ # Find number of buildings
+ numbuild = ', '.join(
+ hydroutil.extract_element_from_json(data, ['Events', 'NumBuild'])
+ )
+ if int(numbuild) > 0:
+ # Get data for each building
+ for ii in range(int(numbuild)):
+ builddata = ', '.join(
+ hydroutil.extract_element_from_json(
+ data, ['Events', 'BuildingTable' + str(ii)]
+ )
+ )
+ builddata = builddata.replace(',', ' ')
+ nums = [float(n) for n in builddata.split()]
+ buildtype = nums[0]
+
+ if int(buildtype) == -2: # noqa: PLR2004
+ # Create a temporary file using GI information (Response)
+ self.buildcubeGI(data, path)
+ # Increment response building number
+ numresbuild += 1
+ elif int(buildtype) == -1:
+ # Move the STL file to OF folder and change name to Building (Response)
+ self.readResSTL(data, path, nums[3])
+ # Increment response building number
+ numresbuild += 1
+ elif int(buildtype) == 1:
+ print('no response + cuboid') # noqa: T201
+ # Create a temporary file
+ # Call flume to build an STL
+ # Combine all STL to building + number
+ # Increment other building number
+ numotherbuild += 1
+ elif int(buildtype) == 2: # noqa: PLR2004
+ print('no response + STL') # noqa: T201
+ # Check if STL file exists
+ # Increment other building number
+ numotherbuild += 1
+
+ # Create other buildings STL if more than one exists (Join buildings)
+
+ # Create the building flag
+ self.buildflagadd(numresbuild, numotherbuild)
+
+ #############################################################
+ def buildpara(self, data, path): # noqa: ARG002, PLR6301
+ """Creates the STL files for the buildings using parametrized data
+
+ Arguments:
+ ---------
+ data: all the JSON data
+ path: Path to where the dakota.json exists
+
+ """ # noqa: D400, D401
+ # Create a utilities object
+ hydroutil = hydroUtils() # noqa: F841
+
+ #############################################################
+ def buildcubeGI(self, data, path): # noqa: ARG002, N802, PLR6301
+ """Creates the STL files for the buildings using parametrized data
+
+ Arguments:
+ ---------
+ data: all the JSON data
+ path: Path to where the dakota.json exists
+
+ """ # noqa: D400, D401
+ # Create a utilities object
+ hydroutil = hydroUtils() # noqa: F841
+
+ # Create the building STL file
+ base_filename = 'Building'
+ filename = base_filename + '.stl'
+ # Define coordinates
+ npa = np.array(
+ [
+ [-1, -1, -1],
+ [+1, -1, -1],
+ [+1, +1, -1],
+ [-1, +1, -1],
+ [-1, -1, +1],
+ [+1, -1, +1],
+ [+1, +1, +1],
+ [-1, +1, +1],
+ ]
+ )
+ npt = np.array(
+ [
+ [0, 3, 1],
+ [1, 3, 2],
+ [0, 4, 7],
+ [0, 7, 3],
+ [4, 5, 6],
+ [4, 6, 7],
+ [5, 1, 2],
+ [5, 2, 6],
+ [2, 3, 6],
+ [3, 7, 6],
+ [0, 1, 5],
+ [0, 5, 4],
+ ]
+ )
+ # Scaling
+ npa = 0.5 * npa # noqa: PLR6104
+ npa[:, 2] = 0.5 + npa[:, 2] # noqa: PLR6104
+ # Temporary
+ npa[:, 0] = npa[:, 0] * 3 # noqa: PLR6104
+ npa[:, 2] = npa[:, 2] * 1.2474 # noqa: PLR6104
+ npa[:, 0] = npa[:, 0] + 47 # noqa: PLR6104
+ npa[:, 2] = npa[:, 2] + 1.7526 # noqa: PLR6104
+
+ # Create the STL file
+ cells = [('triangle', npt)]
+ meshio.write_points_cells(filename, npa, cells)
+ # Modify first and last line
+ with open(filename) as f: # noqa: PLW1514, PTH123
+ lines = f.readlines()
+ lines[0] = 'solid ' + base_filename + '\n'
+ lines[len(lines) - 1] = 'endsolid ' + base_filename + '\n'
+ # Write the updated file
+ with open(filename, 'w') as f: # noqa: PLW1514, PTH123
+ f.writelines(lines)
+
+ # Create the translation script
+ if os.path.exists('translate.sh'): # noqa: PTH110
+ with open('translate.sh', 'a') as f: # noqa: PLW1514, PTH123
+ buildpath = os.path.join('constant', 'triSurface', 'Building.stl') # noqa: PTH118
+ lines = 'cp Building.stl ' + buildpath + '\n'
+ f.writelines(lines)
+ else:
+ with open('translate.sh', 'w') as f: # noqa: PLW1514, PTH123
+ buildpath = os.path.join('constant', 'triSurface', 'Building.stl') # noqa: PTH118
+ lines = 'cp Building.stl ' + buildpath + '\n'
+ f.writelines(lines)
+
+ #############################################################
+ def readResSTL(self, data, path, ztrans): # noqa: N802, PLR6301
+ """Creates the STL files for the buildings using parametrized data
+
+ Arguments:
+ ---------
+ data: all the JSON data
+ path: Path to where the dakota.json exists
+ ztrans: Translation distance in z-direction
+
+ """ # noqa: D400, D401
+ # Create a utilities object
+ hydroutil = hydroUtils()
+
+ # Filename
+ stlfile = ', '.join(
+ hydroutil.extract_element_from_json(data, ['Events', 'BuildingSTLFile'])
+ )
+
+ # Read the stlfile
+ stlfilepath = os.path.join(path, stlfile) # noqa: PTH118
+ print(stlfilepath) # noqa: T201
+ mesh = meshio.read(stlfilepath, file_format='stl')
+
+ mesh.points[:, 0] = mesh.points[:, 0] / (max(abs(mesh.points[:, 0]))) # noqa: PLR6104
+ mesh.points[:, 1] = mesh.points[:, 1] / (max(abs(mesh.points[:, 1]))) # noqa: PLR6104
+ mesh.points[:, 2] = mesh.points[:, 2] / (max(abs(mesh.points[:, 2]))) # noqa: PLR6104
+
+ # Get GI
+ geninfo = hydroutil.extract_element_from_json(data, ['GeneralInformation'])
+ geninfo = str(geninfo[0])
+ # depth = float(geninfo.partition("'depth': ")[2].partition(", 'height':")[0])
+ # width = float(geninfo.partition("'width': ")[2].partition("}")[0])
+ # height = float(geninfo.partition("'height': ")[2].partition(", 'location':")[0])
+ xbuild = float(
+ geninfo.partition("'location': {'latitude': ")[2].partition(
+ ", 'longitude':"
+ )[0]
+ )
+ ybuild = float(geninfo.partition("'longitude': ")[2].partition('},')[0])
+ depth = hydroutil.extract_element_from_json(
+ data, ['GeneralInformation', 'depth']
+ )
+ depth = float(depth[0])
+ width = hydroutil.extract_element_from_json(
+ data, ['GeneralInformation', 'width']
+ )
+ width = float(width[0])
+ height = hydroutil.extract_element_from_json(
+ data, ['GeneralInformation', 'height']
+ )
+ height = float(height[0])
+
+ # Scale the STL model
+ mesh.points[:, 0] = mesh.points[:, 0] * depth # noqa: PLR6104
+ mesh.points[:, 1] = mesh.points[:, 1] * width # noqa: PLR6104
+ mesh.points[:, 2] = mesh.points[:, 2] * height # noqa: PLR6104
+
+ # Write meshfile
+ meshio.write_points_cells('Building.stl', mesh.points, mesh.cells)
+
+ # Modify first and last line
+ with open('Building.stl') as f: # noqa: PLW1514, PTH123
+ lines = f.readlines()
+ lines[0] = 'solid ' + 'Building' + '\n'
+ lines[len(lines) - 1] = 'endsolid ' + 'Building' + '\n'
+
+ # Write the updated file
+ with open('Building.stl', 'w') as f: # noqa: PLW1514, PTH123
+ f.writelines(lines)
+
+ # Move the file to constant/triSurface folder
+ newfilepath = os.path.join(path, 'constant', 'triSurface', 'Building.stl') # noqa: PTH118
+ os.replace('Building.stl', newfilepath) # noqa: PTH105
+
+ # Create the translation script
+ if os.path.exists('translate.sh'): # noqa: PTH110
+ with open('translate.sh', 'a') as f: # noqa: PLW1514, PTH123
+ buildpath = os.path.join('constant', 'triSurface', 'Building.stl') # noqa: PTH118
+ lines = 'export FILE="' + buildpath + '"\n'
+ lines = (
+ lines
+ + 'surfaceTransformPoints -translate "('
+ + str(xbuild)
+ + ' '
+ + str(ybuild)
+ + ' '
+ + str(ztrans)
+ + ')" $FILE $FILE\n'
+ )
+ f.writelines(lines)
+ else:
+ with open('translate.sh', 'w') as f: # noqa: PLW1514, PTH123
+ buildpath = os.path.join('constant', 'triSurface', 'Building.stl') # noqa: PTH118
+ lines = 'export FILE="' + buildpath + '"\n'
+ lines = (
+ lines
+ + 'surfaceTransformPoints -translate "('
+ + str(xbuild)
+ + ' '
+ + str(ybuild)
+ + ' '
+ + str(ztrans)
+ + ')" $FILE $FILE\n'
+ )
+ f.writelines(lines)
+
+ #############################################################
+ def buildflagadd(self, numresbuild, numotherbuild): # noqa: PLR6301
+ """Add building flag to temp_geometry.txt
+
+ Arguments:
+ ---------
+ numresbuild: Number of building with response
+ numotherbuild: Number of other buildings
+
+ """ # noqa: D400
+ # Get building flag
+ if numresbuild == 0 and numotherbuild == 0:
+ flag = 0
+ elif numresbuild > 0 and numotherbuild == 0:
+ flag = 1
+ elif numresbuild > 0 and numotherbuild > 0:
+ flag = 2
+ elif numresbuild == 0 and numotherbuild > 0:
+ flag = 3
+
+ # Add building flag to temp file
+ with open('temp_geometry.txt', 'a') as f: # noqa: PLW1514, PTH123
+ f.writelines(str(flag) + '\n')
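The hard-coded transform in buildcubeGI above places a cuboid of fixed size in the flume; a minimal numpy sketch of the same scaling (the factors are copied from the method, the interpretation in the comments is an assumption):

    import numpy as np

    # Corners of the reference cube spanning [-1, 1] in each direction
    npa = np.array(
        [[-1, -1, -1], [+1, -1, -1], [+1, +1, -1], [-1, +1, -1],
         [-1, -1, +1], [+1, -1, +1], [+1, +1, +1], [-1, +1, +1]],
        dtype=float,
    )

    npa = 0.5 * npa              # shrink to a 1 m cube centred at the origin
    npa[:, 2] = 0.5 + npa[:, 2]  # lift it so the base rests on z = 0
    npa[:, 0] = npa[:, 0] * 3        # stretch to 3 m along x
    npa[:, 2] = npa[:, 2] * 1.2474   # stretch to 1.2474 m in height
    npa[:, 0] = npa[:, 0] + 47       # shift downstream to x = 45.5 ... 48.5
    npa[:, 2] = npa[:, 2] + 1.7526   # raise the base to z = 1.7526

    print(npa[:, 0].min(), npa[:, 0].max())  # 45.5 48.5
    print(npa[:, 2].min(), npa[:, 2].max())  # 1.7526 3.0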
diff --git a/modules/createEVENT/GeoClawOpenFOAM/of7Dakota.py b/modules/createEVENT/GeoClawOpenFOAM/of7Dakota.py
index 60862f36c..27c62c985 100644
--- a/modules/createEVENT/GeoClawOpenFOAM/of7Dakota.py
+++ b/modules/createEVENT/GeoClawOpenFOAM/of7Dakota.py
@@ -1,28 +1,27 @@
-####################################################################
+# # noqa: INP001
# LICENSING INFORMATION
####################################################################
-"""
- LICENSE INFORMATION:
-
- Copyright (c) 2020-2030, The Regents of the University of California (Regents).
+"""LICENSE INFORMATION:
+
+Copyright (c) 2020-2030, The Regents of the University of California (Regents).
+
+All rights reserved.
- All rights reserved.
+Redistribution and use in source and binary forms, with or without modification, are permitted provided that the following conditions are met:
- Redistribution and use in source and binary forms, with or without modification, are permitted provided that the following conditions are met:
+1. Redistributions of source code must retain the above copyright notice, this
+ list of conditions and the following disclaimer.
+2. Redistributions in binary form must reproduce the above copyright notice,
+ this list of conditions and the following disclaimer in the documentation
+ and/or other materials provided with the distribution.
- 1. Redistributions of source code must retain the above copyright notice, this
- list of conditions and the following disclaimer.
- 2. Redistributions in binary form must reproduce the above copyright notice,
- this list of conditions and the following disclaimer in the documentation
- and/or other materials provided with the distribution.
+THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
- THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+The views and conclusions contained in the software and documentation are those of the authors and should not be interpreted as representing official policies, either expressed or implied, of the FreeBSD Project.
- The views and conclusions contained in the software and documentation are those of the authors and should not be interpreted as representing official policies, either expressed or implied, of the FreeBSD Project.
+REGENTS SPECIFICALLY DISCLAIMS ANY WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE. THE SOFTWARE AND ACCOMPANYING DOCUMENTATION, IF ANY, PROVIDED HEREUNDER IS PROVIDED "AS IS". REGENTS HAS NO OBLIGATION TO PROVIDE MAINTENANCE, SUPPORT, UPDATES, ENHANCEMENTS, OR MODIFICATIONS.
- REGENTS SPECIFICALLY DISCLAIMS ANY WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE. THE SOFTWARE AND ACCOMPANYING DOCUMENTATION, IF ANY, PROVIDED HEREUNDER IS PROVIDED "AS IS". REGENTS HAS NO OBLIGATION TO PROVIDE MAINTENANCE, SUPPORT, UPDATES, ENHANCEMENTS, OR MODIFICATIONS.
-
-"""
+""" # noqa: D400
####################################################################
# AUTHOR INFORMATION
####################################################################
@@ -35,67 +34,85 @@
import os
# Other custom modules
-from hydroUtils import hydroUtils
+
####################################################################
# OpenFOAM7 solver class
####################################################################
-class of7Dakota():
- """
- This class includes the methods related to
- dakota for openfoam7.
-
- Methods
- --------
- scripts: Generate relevant scripts
- """
-
- #############################################################
- def dakotascripts(self,args):
- '''
- Create the scripts for caserun.sh
-
- Arguments
- -----------
- data: all the JSON data
- path: Path where dakota.json file is located
- '''
-
- caseruntext = 'echo Starting Dakota preparation...\n'
- caseruntext = caseruntext + 'python3 $HYDROBRAIN/GetOpenFOAMEvent.py -b '+args.b+'\n'
-
- # Openfoam cleanup
- caseruntext = caseruntext + 'rm -fr processor*\n'
- caseruntext = caseruntext + 'rm -fr 0\n'
- caseruntext = caseruntext + 'mkdir EVTfiles\n'
- caseruntext = caseruntext + 'mv 0.org '+os.path.join('EVTfiles','0.org')+'\n'
- caseruntext = caseruntext + 'mv constant '+os.path.join('EVTfiles','constant')+'\n'
- caseruntext = caseruntext + 'mv system '+os.path.join('EVTfiles','system')+'\n'
- caseruntext = caseruntext + 'mv postProcessing '+os.path.join('EVTfiles','postProcessing')+'\n'
- caseruntext = caseruntext + 'mv *.log EVTfiles\n'
- caseruntext = caseruntext + 'mv *.stl EVTfiles\n'
- caseruntext = caseruntext + 'mv *.sh EVTfiles\n'
- caseruntext = caseruntext + 'mv *.txt EVTfiles\n'
- caseruntext = caseruntext + 'mv cdict* EVTfiles\n'
- caseruntext = caseruntext + 'tar zcBf EVTfiles.tar.gz EVTfiles\n'
- caseruntext = caseruntext + 'rm -fr EVTfiles\n\n'
-
- # Write to caserun file
- scriptfile = open('caserun.sh',"a")
- scriptfile.write(caseruntext)
- scriptfile.close()
-
- #############################################################
- def cleaning(self,args,path):
- '''
- Create the scripts for cleaning
-
- Arguments
- -----------
- args: all the arguments
- '''
-
- print('No OF cleaning')
+class of7Dakota:
+ """This class includes the methods related to
+ dakota for openfoam7.
+
+ Methods
+ -------
+ scripts: Generate relevant scripts
+
+ """ # noqa: D205, D404
+
+ #############################################################
+ def dakotascripts(self, args): # noqa: PLR6301
+ """Create the scripts for caserun.sh
+
+ Arguments:
+ ---------
+ args: all the arguments
+
+ """ # noqa: D400
+ caseruntext = 'echo Starting Dakota preparation...\n'
+ caseruntext = (
+ caseruntext
+ + 'python3 $HYDROBRAIN/GetOpenFOAMEvent.py -b '
+ + args.b
+ + '\n'
+ )
+
+ # Openfoam cleanup
+ caseruntext = caseruntext + 'rm -fr processor*\n' # noqa: PLR6104
+ caseruntext = caseruntext + 'rm -fr 0\n' # noqa: PLR6104
+ caseruntext = caseruntext + 'mkdir EVTfiles\n' # noqa: PLR6104
+ caseruntext = (
+ caseruntext + 'mv 0.org ' + os.path.join('EVTfiles', '0.org') + '\n' # noqa: PTH118
+ )
+ caseruntext = (
+ caseruntext
+ + 'mv constant '
+ + os.path.join('EVTfiles', 'constant') # noqa: PTH118
+ + '\n'
+ )
+ caseruntext = (
+ caseruntext + 'mv system ' + os.path.join('EVTfiles', 'system') + '\n' # noqa: PTH118
+ )
+ caseruntext = (
+ caseruntext
+ + 'mv postProcessing '
+ + os.path.join('EVTfiles', 'postProcessing') # noqa: PTH118
+ + '\n'
+ )
+ caseruntext = caseruntext + 'mv *.log EVTfiles\n' # noqa: PLR6104
+ caseruntext = caseruntext + 'mv *.stl EVTfiles\n' # noqa: PLR6104
+ caseruntext = caseruntext + 'mv *.sh EVTfiles\n' # noqa: PLR6104
+ caseruntext = caseruntext + 'mv *.txt EVTfiles\n' # noqa: PLR6104
+ caseruntext = caseruntext + 'mv cdict* EVTfiles\n' # noqa: PLR6104
+ caseruntext = caseruntext + 'tar zcBf EVTfiles.tar.gz EVTfiles\n' # noqa: PLR6104
+ caseruntext = caseruntext + 'rm -fr EVTfiles\n\n' # noqa: PLR6104
+
+ # Write to caserun file
+ scriptfile = open('caserun.sh', 'a') # noqa: PLW1514, PTH123, SIM115
+ scriptfile.write(caseruntext)
+ scriptfile.close()
+
+ #############################################################
+ def cleaning(self, args, path): # noqa: ARG002, PLR6301
+ """Create the scripts for cleaning
+
+ Arguments:
+ ---------
+ args: all the arguments
+
+ """ # noqa: D400
+ print('No OF cleaning') # noqa: T201
+
# # tar -c -f trial.tar $(readlink -e a b c d)
# # tar -xvf trial.tar
@@ -109,7 +126,7 @@ def cleaning(self,args,path):
# # Tar all files and folder
# caseruntext = caseruntext + 'tar -c -f Files.tar $(cdictpp cdictforce FlumeData.txt sample temp_geometry.txt translate.sh caserun.sh 0 0.org constant system postProcessing logfiles ' + path + ')\n'
-# # Remove all folders
+# # Remove all folders
# caseruntext = caseruntext + 'rm -rf ./*/' + '\n'
# # Untar
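In dakotascripts the PLR6104, SIM115, PTH123, and PLW1514 findings are suppressed with noqa markers rather than fixed, which keeps this patch purely cosmetic. For reference, a hedged sketch of what the suppressed fixes would look like for a representative subset of the script-writing code (same shell commands as above; the utf-8 encoding is an assumption, not something the patch specifies):

from pathlib import Path

caseruntext = 'echo Starting Dakota preparation...\n'
commands = [
    'rm -fr processor*',
    'rm -fr 0',
    'mkdir EVTfiles',
    'mv *.log EVTfiles',
    'tar zcBf EVTfiles.tar.gz EVTfiles',
    'rm -fr EVTfiles',
]
caseruntext += '\n'.join(commands) + '\n\n'  # PLR6104: augmented assignment

# SIM115 / PTH123 / PLW1514: context manager, pathlib, explicit encoding
with Path('caserun.sh').open('a', encoding='utf-8') as scriptfile:
    scriptfile.write(caseruntext)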
diff --git a/modules/createEVENT/GeoClawOpenFOAM/of7Decomp.py b/modules/createEVENT/GeoClawOpenFOAM/of7Decomp.py
index e289b6be2..11af41f32 100644
--- a/modules/createEVENT/GeoClawOpenFOAM/of7Decomp.py
+++ b/modules/createEVENT/GeoClawOpenFOAM/of7Decomp.py
@@ -1,28 +1,27 @@
-####################################################################
+# # noqa: INP001
# LICENSING INFORMATION
####################################################################
-"""
- LICENSE INFORMATION:
-
- Copyright (c) 2020-2030, The Regents of the University of California (Regents).
+"""LICENSE INFORMATION:
+
+Copyright (c) 2020-2030, The Regents of the University of California (Regents).
+
+All rights reserved.
- All rights reserved.
+Redistribution and use in source and binary forms, with or without modification, are permitted provided that the following conditions are met:
- Redistribution and use in source and binary forms, with or without modification, are permitted provided that the following conditions are met:
+1. Redistributions of source code must retain the above copyright notice, this
+ list of conditions and the following disclaimer.
+2. Redistributions in binary form must reproduce the above copyright notice,
+ this list of conditions and the following disclaimer in the documentation
+ and/or other materials provided with the distribution.
- 1. Redistributions of source code must retain the above copyright notice, this
- list of conditions and the following disclaimer.
- 2. Redistributions in binary form must reproduce the above copyright notice,
- this list of conditions and the following disclaimer in the documentation
- and/or other materials provided with the distribution.
+THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
- THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+The views and conclusions contained in the software and documentation are those of the authors and should not be interpreted as representing official policies, either expressed or implied, of the FreeBSD Project.
- The views and conclusions contained in the software and documentation are those of the authors and should not be interpreted as representing official policies, either expressed or implied, of the FreeBSD Project.
+REGENTS SPECIFICALLY DISCLAIMS ANY WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE. THE SOFTWARE AND ACCOMPANYING DOCUMENTATION, IF ANY, PROVIDED HEREUNDER IS PROVIDED "AS IS". REGENTS HAS NO OBLIGATION TO PROVIDE MAINTENANCE, SUPPORT, UPDATES, ENHANCEMENTS, OR MODIFICATIONS.
- REGENTS SPECIFICALLY DISCLAIMS ANY WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE. THE SOFTWARE AND ACCOMPANYING DOCUMENTATION, IF ANY, PROVIDED HEREUNDER IS PROVIDED "AS IS". REGENTS HAS NO OBLIGATION TO PROVIDE MAINTENANCE, SUPPORT, UPDATES, ENHANCEMENTS, OR MODIFICATIONS.
-
-"""
+""" # noqa: D400
####################################################################
# AUTHOR INFORMATION
####################################################################
@@ -33,113 +32,131 @@
####################################################################
# Standard python modules
-
# Other custom modules
from hydroUtils import hydroUtils
+
####################################################################
# OpenFOAM7 solver class
####################################################################
-class of7Decomp():
- """
- This class includes the methods related to
- parallelization for openfoam7.
+class of7Decomp:
+ """This class includes the methods related to
+ parallelization for openfoam7.
- Methods
- --------
- decomptext: Get all the text for the decomposeParDict
- """
+ Methods
+ -------
+ decomptext: Get all the text for the decomposeParDict
- #############################################################
- def decomptext(self,data):
- '''
- Creates the necessary files for domain decomposition for openfoam7
+ """ # noqa: D205, D404
- Arguments
- -----------
- data: all the JSON data
- '''
+ #############################################################
+ def decomptext(self, data):
+ """Creates the necessary files for domain decomposition for openfoam7
- # Create a utilities object
- hydroutil = hydroUtils()
+ Arguments:
+ ---------
+ data: all the JSON data
- # Get the header text for the U-file
- decomptext = self.decompheader()
+ """ # noqa: D400, D401
+ # Create a utilities object
+ hydroutil = hydroUtils()
- # Get number of subdomains
- subdomains = ', '.join(hydroutil.extract_element_from_json(data, ["Events","DomainDecomposition"]))
+ # Get the header text for the U-file
+ decomptext = self.decompheader()
- decomptext = decomptext + "numberOfSubdomains\t" + subdomains + ";\n\n"
+ # Get number of subdomains
+ subdomains = ', '.join(
+ hydroutil.extract_element_from_json(
+ data, ['Events', 'DomainDecomposition']
+ )
+ )
- decomptext = decomptext + "method\tscotch;\n\n"
+ decomptext = decomptext + 'numberOfSubdomains\t' + subdomains + ';\n\n'
- return decomptext
+ decomptext = decomptext + 'method\tscotch;\n\n' # noqa: PLR6104
- #############################################################
- def decompheader(self):
- '''
- Creates the text for the header
+ return decomptext # noqa: RET504
- Variable
- -----------
- header: Header for the decomposeparDict-file
- '''
+ #############################################################
+ def decompheader(self): # noqa: PLR6301
+ """Creates the text for the header
- header = """/*--------------------------*- NHERI SimCenter -*----------------------------*\
+ Variable
+ -----------
+ header: Header for the decomposeparDict-file
+ """ # noqa: D400, D401
+ header = """/*--------------------------*- NHERI SimCenter -*----------------------------*\\
| | H |
| | Y | HydroUQ: Water-based Natural Hazards Modeling Application
|======| D | Website: simcenter.designsafe-ci.org/research-tools/hydro-uq
| | R | Version: 1.00
| | O |
-\*---------------------------------------------------------------------------*/
+\\*---------------------------------------------------------------------------*/
FoamFile
{\n\tversion\t2.0;\n\tformat\tascii;\n\tclass\tdictionary;\n\tlocation\t"system";\n\tobject\tdecomposeParDict;\n}
-// * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * //\n\n"""
-
- # Return the header for U file
- return header
-
- #############################################################
- def scripts(self,data,path):
- '''
- Create the scripts for caserun.sh
-
- Arguments
- -----------
- data: all the JSON data
- path: Path where dakota.json file is located
- '''
-
- # Create a utilities object
- hydroutil = hydroUtils()
-
- # Get number of subdomains
- totalprocs = ', '.join(hydroutil.extract_element_from_json(data, ["Events","DomainDecomposition"]))
-
- # Get simulation type
- simtype = ', '.join(hydroutil.extract_element_from_json(data, ["Events","SimulationType"]))
-
- # Decompose for parallel, else serial
- if int(totalprocs) > 1:
- # Decompose the domain
- caseruntext = 'echo Decomposing domain...\n'
- caseruntext = caseruntext + 'decomposePar > decomposePar.log\n\n'
-
- # Start the CFD simulation
- caseruntext = caseruntext + 'echo Starting CFD simulation in parallel...\n'
- if int(simtype) == 4:
- caseruntext = caseruntext + 'ibrun -n ' + totalprocs + ' -o 0 olaDyMFlow -parallel > olaDyMFlow.log\n\n'
- else:
- caseruntext = caseruntext + 'ibrun -n ' + totalprocs + ' -o 0 olaFlow -parallel > olaFlow.log\n\n'
-
- else:
- caseruntext = 'echo Starting CFD simulation in serial...\n'
- if int(simtype) == 4:
- caseruntext = caseruntext + 'olaDyMFlow > olaDyMFlow.log\n\n'
- else:
- caseruntext = caseruntext + 'olaFlow > olaFlow.log\n\n'
-
- # Write to caserun file
- scriptfile = open('caserun.sh',"a")
- scriptfile.write(caseruntext)
- scriptfile.close()
\ No newline at end of file
+// * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * //\n\n""" # noqa: W291
+
+ # Return the header for U file
+ return header # noqa: RET504
+
+ #############################################################
+ def scripts(self, data, path): # noqa: ARG002, PLR6301
+ """Create the scripts for caserun.sh
+
+ Arguments:
+ ---------
+ data: all the JSON data
+ path: Path where dakota.json file is located
+
+ """ # noqa: D400
+ # Create a utilities object
+ hydroutil = hydroUtils()
+
+ # Get number of subdomains
+ totalprocs = ', '.join(
+ hydroutil.extract_element_from_json(
+ data, ['Events', 'DomainDecomposition']
+ )
+ )
+
+ # Get simulation type
+ simtype = ', '.join(
+ hydroutil.extract_element_from_json(data, ['Events', 'SimulationType'])
+ )
+
+ # Decompose for parallel, else serial
+ if int(totalprocs) > 1:
+ # Decompose the domain
+ caseruntext = 'echo Decomposing domain...\n'
+ caseruntext = caseruntext + 'decomposePar > decomposePar.log\n\n' # noqa: PLR6104
+
+ # Start the CFD simulation
+ caseruntext = ( # noqa: PLR6104
+ caseruntext + 'echo Starting CFD simulation in parallel...\n'
+ )
+ if int(simtype) == 4: # noqa: PLR2004
+ caseruntext = (
+ caseruntext
+ + 'ibrun -n '
+ + totalprocs
+ + ' -o 0 olaDyMFlow -parallel > olaDyMFlow.log\n\n'
+ )
+ else:
+ caseruntext = (
+ caseruntext
+ + 'ibrun -n '
+ + totalprocs
+ + ' -o 0 olaFlow -parallel > olaFlow.log\n\n'
+ )
+
+ else:
+ caseruntext = 'echo Starting CFD simulation in serial...\n'
+ if int(simtype) == 4: # noqa: PLR2004
+ caseruntext = caseruntext + 'olaDyMFlow > olaDyMFlow.log\n\n' # noqa: PLR6104
+ else:
+ caseruntext = caseruntext + 'olaFlow > olaFlow.log\n\n' # noqa: PLR6104
+
+ # Write to caserun file
+ scriptfile = open('caserun.sh', 'a') # noqa: PLW1514, PTH123, SIM115
+ scriptfile.write(caseruntext)
+ scriptfile.close()
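The only change inside the header string of decompheader (and, further down, alphaheader) is doubling the backslashes at the banner edges. In a regular, non-raw triple-quoted string a lone backslash at the end of a physical line is a line-continuation escape that swallows the newline, and \* is an invalid escape sequence, so the single-backslash banner does not round-trip as written. A small standalone illustration, not taken from the patch:

s_old = "ends with \
a continuation"  # the backslash + newline pair vanish: 'ends with a continuation'
s_new = "ends with \\\na real newline"  # doubled backslash keeps '\' and the newline

assert '\n' not in s_old
assert s_new.splitlines() == ['ends with \\', 'a real newline']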
diff --git a/modules/createEVENT/GeoClawOpenFOAM/of7Geometry.py b/modules/createEVENT/GeoClawOpenFOAM/of7Geometry.py
index b639574ac..dc41e46c9 100644
--- a/modules/createEVENT/GeoClawOpenFOAM/of7Geometry.py
+++ b/modules/createEVENT/GeoClawOpenFOAM/of7Geometry.py
@@ -1,28 +1,27 @@
-####################################################################
+# # noqa: INP001
# LICENSING INFORMATION
####################################################################
-"""
- LICENSE INFORMATION:
-
- Copyright (c) 2020-2030, The Regents of the University of California (Regents).
+"""LICENSE INFORMATION:
+
+Copyright (c) 2020-2030, The Regents of the University of California (Regents).
+
+All rights reserved.
- All rights reserved.
+Redistribution and use in source and binary forms, with or without modification, are permitted provided that the following conditions are met:
- Redistribution and use in source and binary forms, with or without modification, are permitted provided that the following conditions are met:
+1. Redistributions of source code must retain the above copyright notice, this
+ list of conditions and the following disclaimer.
+2. Redistributions in binary form must reproduce the above copyright notice,
+ this list of conditions and the following disclaimer in the documentation
+ and/or other materials provided with the distribution.
- 1. Redistributions of source code must retain the above copyright notice, this
- list of conditions and the following disclaimer.
- 2. Redistributions in binary form must reproduce the above copyright notice,
- this list of conditions and the following disclaimer in the documentation
- and/or other materials provided with the distribution.
+THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
- THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+The views and conclusions contained in the software and documentation are those of the authors and should not be interpreted as representing official policies, either expressed or implied, of the FreeBSD Project.
- The views and conclusions contained in the software and documentation are those of the authors and should not be interpreted as representing official policies, either expressed or implied, of the FreeBSD Project.
+REGENTS SPECIFICALLY DISCLAIMS ANY WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE. THE SOFTWARE AND ACCOMPANYING DOCUMENTATION, IF ANY, PROVIDED HEREUNDER IS PROVIDED "AS IS". REGENTS HAS NO OBLIGATION TO PROVIDE MAINTENANCE, SUPPORT, UPDATES, ENHANCEMENTS, OR MODIFICATIONS.
- REGENTS SPECIFICALLY DISCLAIMS ANY WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE. THE SOFTWARE AND ACCOMPANYING DOCUMENTATION, IF ANY, PROVIDED HEREUNDER IS PROVIDED "AS IS". REGENTS HAS NO OBLIGATION TO PROVIDE MAINTENANCE, SUPPORT, UPDATES, ENHANCEMENTS, OR MODIFICATIONS.
-
-"""
+""" # noqa: D400
####################################################################
# AUTHOR INFORMATION
####################################################################
@@ -33,273 +32,354 @@
####################################################################
# Standard python modules
import os
-import numpy as np
-# Other custom modules
-from hydroUtils import hydroUtils
+import numpy as np
from GeoClaw import GeoClaw
from GeoClawBathy import GeoClawBathy
-from userFlume import userFlume
+
+# Other custom modules
+from hydroUtils import hydroUtils
from osuFlume import osuFlume
+from userFlume import userFlume
+
####################################################################
# OpenFOAM7 solver class
####################################################################
-class of7Geometry():
- """
- This class includes the methods related to
- creating the geometry for openfoam7.
-
- Methods
- --------
- geomcheck: Checks if all files required for creating the geometry exists
- createSTL: Creates the STL files
- """
-
- #############################################################
- def geomcheck(self,data,path):
- '''
- Checks if all files required for creating the geometry exists
-
- Arguments
- -----------
- data: all the JSON data
- path: Path to where the dakota.json exists
- '''
-
- # Create a utilities object
- hydroutil = hydroUtils()
-
- # Get the simulation type
- simtype = ', '.join(hydroutil.extract_element_from_json(data, ["Events","SimulationType"]))
-
- # Simtype: Multiscale with SW solutions
- if int(simtype) == 1 or int(simtype) == 2:
-
- # Get the number of bathymetry files
- numbathy = hydroutil.extract_element_from_json(data, ["Events","NumBathymetryFiles"])
- if numbathy == [None]:
- return -1
- else:
- numbathy = ', '.join(hydroutil.extract_element_from_json(data, ["Events","NumBathymetryFiles"]))
-
- # Loop to get the name of each bathymetry file
- # Check if it exists. If not, return -1
- for ii in range(int(numbathy)):
- # Get the file name
- bathyfilename = hydroutil.extract_element_from_json(data, ["Events","BathymetryFile"+str(ii)])
- if bathyfilename == [None]:
- return -1
- else:
- bathyfilename = ', '.join(hydroutil.extract_element_from_json(data, ["Events","BathymetryFile"+str(ii)]))
- bathyfilepath = os.join.path(path,bathyfilename)
- if not os.path.isfile(bathyfilepath):
- return -1
-
- if int(simtype) == 1:
-
- # Get the number of solution files
- numsoln = hydroutil.extract_element_from_json(data, ["Events","NumSolutionFiles"])
- if numsoln == [None]:
- return -1
- else:
- numsoln = ', '.join(hydroutil.extract_element_from_json(data, ["Events","NumSolutionFiles"]))
-
- # Loop to get the name of each solution file
- # Check if it exists. If not, return -1
- for ii in range(int(numsoln)):
- # Get the file name
- solnfilename = hydroutil.extract_element_from_json(data, ["Events","SWSolutionFile"+str(ii)])
- if solnfilename == [None]:
- return -1
- else:
- solnfilename = ', '.join(hydroutil.extract_element_from_json(data, ["Events","SWSolutionFile"+str(ii)]))
- solnfilepath = os.join.path(path,solnfilename)
- if not os.path.isfile(solnfilepath):
- return -1
-
- # Check the SW-CFD interface file
- swcfdfile = hydroutil.extract_element_from_json(data, ["Events","SWCFDInteFile"])
- if swcfdfile == [None]:
- return -1
- else:
- swcfdfile = ', '.join(hydroutil.extract_element_from_json(data, ["Events","SWCFDInteFile"]))
- swcfdfilepath = os.join.path(path,swcfdfile)
- if not os.path.isfile(swcfdfilepath):
- return -1
-
- # STL file
- elif int(simtype) == 3:
-
- # Entry.stl
- entrypath = os.join.path(path,'Entry.stl')
- if not os.path.isfile(entrypath):
- return -1
-
- # Exit.stl
- exitpath = os.join.path(path,'Exit.stl')
- if not os.path.isfile(exitpath):
- return -1
-
- # Top.stl
- toppath = os.join.path(path,'Top.stl')
- if not os.path.isfile(toppath):
- return -1
-
- # Bottom.stl
- bottompath = os.join.path(path,'Bottom.stl')
- if not os.path.isfile(bottompath):
- return -1
-
- # Left.stl
- leftpath = os.join.path(path,'Left.stl')
- if not os.path.isfile(leftpath):
- return -1
-
- # Right.stl
- rightpath = os.join.path(path,'Right.stl')
- if not os.path.isfile(rightpath):
- return -1
-
- # Wave flume
- elif int(simtype) == 4:
-
- # Get the flume type
- flumetype = ', '.join(hydroutil.extract_element_from_json(data, ["Events","FlumeInfoType"]))
-
- # Using user coordinates
- if int(flumetype) == 0:
- # Get the number of segments
- numsegs = hydroutil.extract_element_from_json(data, ["Events","NumFlumeSegments"])
- if numsegs == [None]:
- return -1
- else:
- numsegs = ', '.join(hydroutil.extract_element_from_json(data, ["Events","NumFlumeSegments"]))
- if int(numsegs) < 4:
- return -1
- flumesegs = hydroutil.extract_element_from_json(data, ["Events","FlumeSegments"])
- if flumesegs == [None]:
- return -1
- # Standard flume
- elif int(flumetype) == 1:
- return 0
-
- return 0
-
- #############################################################
- def createOFSTL(self,data,path):
- '''
- Creates the STL files
-
- Arguments
- -----------
- data: all the JSON data
- path: Path to where the dakota.json exists
- '''
-
- # Create a utilities object
- hydroutil = hydroUtils()
-
- # Get the simulation type
- simtype = ', '.join(hydroutil.extract_element_from_json(data, ["Events","SimulationType"]))
-
- # Bathymetry + SW solutions
- if int(simtype) == 1:
- finalgeom = GeoClaw()
- # Create geometry (i.e. STL files) and extreme file
- ecode = finalgeom.creategeom(data,path)
- if ecode < 0:
- return -1
-
- # Bathymetry only
- elif int(simtype) == 2:
- print('Bathy')
- finalgeom = GeoClawBathy()
- # Create geometry (i.e. STL files) and extreme file
- ecode = finalgeom.creategeom(data,path)
- if ecode < 0:
- return -1
-
- elif int(simtype) == 3:
- return 0
-
- elif int(simtype) == 4:
-
- # Get the flume type
- flumetype = ', '.join(hydroutil.extract_element_from_json(data, ["Events","FlumeInfoType"]))
-
- # Using user coordinates
- if int(flumetype) == 0:
- finalgeom = userFlume()
- # Create geometry (i.e. STL files) and extreme file
- ecode = finalgeom.creategeom(data,path)
- if ecode < 0:
- return -1
-
- # Standard flume
- elif int(flumetype) == 1:
- finalgeom = osuFlume()
- # Create geometry (i.e. STL files) and extreme file
- ecode = finalgeom.creategeom(data,path)
- if ecode < 0:
- return -1
-
- return 0
-
- #############################################################
- def scripts(self,data):
- '''
- Add to caserun.sh
-
- Arguments
- -----------
- NONE
- '''
-
- # Create a utilities object
- hydroutil = hydroUtils()
-
- # Get the mesher
- mesher = ', '.join(hydroutil.extract_element_from_json(data, ["Events","MeshType"]))
-
- # Combine STL files for Hydro mesh or using mesh dict
- if int(mesher[0]) == 0 or int(mesher[0]) == 2:
- # Get building flag from temp-geometry file
- geofile = 'temp_geometry.txt'
- data_geoext = np.genfromtxt(geofile, dtype=(float))
- flag = int(data_geoext[6])
-
- # If translate file exists, use it
- if os.path.exists('translate.sh'):
- caseruntext = 'echo Translating building STL files...\n'
- caseruntext = caseruntext + 'chmod +x translate.sh\n'
- caseruntext = caseruntext + './translate.sh\n\n'
- caseruntext = caseruntext + 'echo Combining STL files for usage...\n'
- else:
- caseruntext = 'echo Combining STL files for usage...\n'
-
- # Join all paths
- entryf = os.path.join('constant','triSurface' , 'Entry.stl')
- exitf = os.path.join('constant' , 'triSurface' , 'Exit.stl')
- topf = os.path.join('constant' , 'triSurface' , 'Top.stl')
- bottomf = os.path.join('constant' , 'triSurface' , 'Bottom.stl')
- leftf = os.path.join('constant' , 'triSurface' , 'Left.stl')
- rightf = os.path.join('constant' , 'triSurface' , 'Right.stl')
- buildingf = os.path.join('constant' , 'triSurface' , 'Building.stl')
- otherbuildingf = os.path.join('constant' , 'triSurface' , 'OtherBuilding.stl')
- all01 = 'cat '+ entryf + ' ' + exitf + ' ' + topf + ' ' + bottomf + ' ' + leftf + ' ' + rightf
- full = os.path.join('constant' , 'triSurface' , 'Full.stl')
-
- # For different building cases
- if flag == 0:
- caseruntext = caseruntext + all01 + ' > ' + full + '\n\n'
- elif flag == 1:
- caseruntext = caseruntext + all01 + ' ' + buildingf + ' > ' + full + '\n\n'
- elif flag == 2:
- caseruntext = caseruntext + all01 + ' ' + buildingf + ' ' + otherbuildingf + ' > ' + full + '\n\n'
- elif flag == 3:
- caseruntext = caseruntext + all01 + ' ' + otherbuildingf + ' > ' + full + '\n\n'
- # Write to caserun file
- scriptfile = open('caserun.sh',"a")
- scriptfile.write(caseruntext)
- scriptfile.close()
\ No newline at end of file
+class of7Geometry:
+ """This class includes the methods related to
+ creating the geometry for openfoam7.
+
+ Methods
+ -------
+ geomcheck: Checks if all files required for creating the geometry exist
+ createSTL: Creates the STL files
+
+ """ # noqa: D205, D404
+
+ #############################################################
+ def geomcheck(self, data, path): # noqa: C901, PLR0911, PLR6301
+ """Checks if all files required for creating the geometry exists
+
+ Arguments:
+ ---------
+ data: all the JSON data
+ path: Path to where the dakota.json exists
+
+ """ # noqa: D400, D401
+ # Create a utilities object
+ hydroutil = hydroUtils()
+
+ # Get the simulation type
+ simtype = ', '.join(
+ hydroutil.extract_element_from_json(data, ['Events', 'SimulationType'])
+ )
+
+ # Simtype: Multiscale with SW solutions
+ if int(simtype) == 1 or int(simtype) == 2: # noqa: PLR2004
+ # Get the number of bathymetry files
+ numbathy = hydroutil.extract_element_from_json(
+ data, ['Events', 'NumBathymetryFiles']
+ )
+ if numbathy == [None]:
+ return -1
+ else: # noqa: RET505
+ numbathy = ', '.join(
+ hydroutil.extract_element_from_json(
+ data, ['Events', 'NumBathymetryFiles']
+ )
+ )
+
+ # Loop to get the name of each bathymetry file
+ # Check if it exists. If not, return -1
+ for ii in range(int(numbathy)):
+ # Get the file name
+ bathyfilename = hydroutil.extract_element_from_json(
+ data, ['Events', 'BathymetryFile' + str(ii)]
+ )
+ if bathyfilename == [None]:
+ return -1
+ else: # noqa: RET505
+ bathyfilename = ', '.join(
+ hydroutil.extract_element_from_json(
+ data, ['Events', 'BathymetryFile' + str(ii)]
+ )
+ )
+ bathyfilepath = os.path.join(path, bathyfilename) # noqa: PTH118
+ if not os.path.isfile(bathyfilepath): # noqa: PTH113
+ return -1
+
+ if int(simtype) == 1:
+ # Get the number of solution files
+ numsoln = hydroutil.extract_element_from_json(
+ data, ['Events', 'NumSolutionFiles']
+ )
+ if numsoln == [None]:
+ return -1
+ else: # noqa: RET505
+ numsoln = ', '.join(
+ hydroutil.extract_element_from_json(
+ data, ['Events', 'NumSolutionFiles']
+ )
+ )
+
+ # Loop to get the name of each solution file
+ # Check if it exists. If not, return -1
+ for ii in range(int(numsoln)):
+ # Get the file name
+ solnfilename = hydroutil.extract_element_from_json(
+ data, ['Events', 'SWSolutionFile' + str(ii)]
+ )
+ if solnfilename == [None]:
+ return -1
+ else: # noqa: RET505
+ solnfilename = ', '.join(
+ hydroutil.extract_element_from_json(
+ data, ['Events', 'SWSolutionFile' + str(ii)]
+ )
+ )
+ solnfilepath = os.path.join(path, solnfilename) # noqa: PTH118
+ if not os.path.isfile(solnfilepath): # noqa: PTH113
+ return -1
+
+ # Check the SW-CFD interface file
+ swcfdfile = hydroutil.extract_element_from_json(
+ data, ['Events', 'SWCFDInteFile']
+ )
+ if swcfdfile == [None]:
+ return -1
+ else: # noqa: RET505
+ swcfdfile = ', '.join(
+ hydroutil.extract_element_from_json(
+ data, ['Events', 'SWCFDInteFile']
+ )
+ )
+ swcfdfilepath = os.path.join(path, swcfdfile) # noqa: PTH118
+ if not os.path.isfile(swcfdfilepath): # noqa: PTH113
+ return -1
+
+ # STL file
+ elif int(simtype) == 3: # noqa: PLR2004
+ # Entry.stl
+ entrypath = os.path.join(path, 'Entry.stl') # noqa: PTH118
+ if not os.path.isfile(entrypath): # noqa: PTH113
+ return -1
+
+ # Exit.stl
+ exitpath = os.path.join(path, 'Exit.stl') # noqa: PTH118
+ if not os.path.isfile(exitpath): # noqa: PTH113
+ return -1
+
+ # Top.stl
+ toppath = os.path.join(path, 'Top.stl') # noqa: PTH118
+ if not os.path.isfile(toppath): # noqa: PTH113
+ return -1
+
+ # Bottom.stl
+ bottompath = os.path.join(path, 'Bottom.stl') # noqa: PTH118
+ if not os.path.isfile(bottompath): # noqa: PTH113
+ return -1
+
+ # Left.stl
+ leftpath = os.path.join(path, 'Left.stl') # noqa: PTH118
+ if not os.path.isfile(leftpath): # noqa: PTH113
+ return -1
+
+ # Right.stl
+ rightpath = os.path.join(path, 'Right.stl') # noqa: PTH118
+ if not os.path.isfile(rightpath): # noqa: PTH113
+ return -1
+
+ # Wave flume
+ elif int(simtype) == 4: # noqa: PLR2004
+ # Get the flume type
+ flumetype = ', '.join(
+ hydroutil.extract_element_from_json(
+ data, ['Events', 'FlumeInfoType']
+ )
+ )
+
+ # Using user coordinates
+ if int(flumetype) == 0:
+ # Get the number of segments
+ numsegs = hydroutil.extract_element_from_json(
+ data, ['Events', 'NumFlumeSegments']
+ )
+ if numsegs == [None]:
+ return -1
+ else: # noqa: RET505
+ numsegs = ', '.join(
+ hydroutil.extract_element_from_json(
+ data, ['Events', 'NumFlumeSegments']
+ )
+ )
+ if int(numsegs) < 4: # noqa: PLR2004
+ return -1
+ flumesegs = hydroutil.extract_element_from_json(
+ data, ['Events', 'FlumeSegments']
+ )
+ if flumesegs == [None]:
+ return -1
+ # Standard flume
+ elif int(flumetype) == 1:
+ return 0
+
+ return 0
+
+ #############################################################
+ def createOFSTL(self, data, path): # noqa: C901, N802, PLR6301
+ """Creates the STL files
+
+ Arguments:
+ ---------
+ data: all the JSON data
+ path: Path to where the dakota.json exists
+
+ """ # noqa: D400, D401
+ # Create a utilities object
+ hydroutil = hydroUtils()
+
+ # Get the simulation type
+ simtype = ', '.join(
+ hydroutil.extract_element_from_json(data, ['Events', 'SimulationType'])
+ )
+
+ # Bathymetry + SW solutions
+ if int(simtype) == 1:
+ finalgeom = GeoClaw()
+ # Create geometry (i.e. STL files) and extreme file
+ ecode = finalgeom.creategeom(data, path)
+ if ecode < 0:
+ return -1
+
+ # Bathymetry only
+ elif int(simtype) == 2: # noqa: PLR2004
+ print('Bathy') # noqa: T201
+ finalgeom = GeoClawBathy()
+ # Create geometry (i.e. STL files) and extreme file
+ ecode = finalgeom.creategeom(data, path)
+ if ecode < 0:
+ return -1
+
+ elif int(simtype) == 3: # noqa: PLR2004
+ return 0
+
+ elif int(simtype) == 4: # noqa: PLR2004
+ # Get the flume type
+ flumetype = ', '.join(
+ hydroutil.extract_element_from_json(
+ data, ['Events', 'FlumeInfoType']
+ )
+ )
+
+ # Using user coordinates
+ if int(flumetype) == 0:
+ finalgeom = userFlume()
+ # Create geometry (i.e. STL files) and extreme file
+ ecode = finalgeom.creategeom(data, path)
+ if ecode < 0:
+ return -1
+
+ # Standard flume
+ elif int(flumetype) == 1:
+ finalgeom = osuFlume()
+ # Create geometry (i.e. STL files) and extreme file
+ ecode = finalgeom.creategeom(data, path)
+ if ecode < 0:
+ return -1
+
+ return 0
+
+ #############################################################
+ def scripts(self, data): # noqa: PLR6301
+ """Add to caserun.sh
+
+ Arguments:
+ ---------
+ data: all the JSON data
+
+ """ # noqa: D400
+ # Create a utilities object
+ hydroutil = hydroUtils()
+
+ # Get the mesher
+ mesher = ', '.join(
+ hydroutil.extract_element_from_json(data, ['Events', 'MeshType'])
+ )
+
+ # Combine STL files for Hydro mesh or using mesh dict
+ if int(mesher[0]) == 0 or int(mesher[0]) == 2: # noqa: PLR2004
+ # Get building flag from temp-geometry file
+ geofile = 'temp_geometry.txt'
+ data_geoext = np.genfromtxt(geofile, dtype=(float))
+ flag = int(data_geoext[6])
+
+ # If translate file exists, use it
+ if os.path.exists('translate.sh'): # noqa: PTH110
+ caseruntext = 'echo Translating building STL files...\n'
+ caseruntext = caseruntext + 'chmod +x translate.sh\n' # noqa: PLR6104
+ caseruntext = caseruntext + './translate.sh\n\n' # noqa: PLR6104
+ caseruntext = caseruntext + 'echo Combining STL files for usage...\n' # noqa: PLR6104
+ else:
+ caseruntext = 'echo Combining STL files for usage...\n'
+
+ # Join all paths
+ entryf = os.path.join('constant', 'triSurface', 'Entry.stl') # noqa: PTH118
+ exitf = os.path.join('constant', 'triSurface', 'Exit.stl') # noqa: PTH118
+ topf = os.path.join('constant', 'triSurface', 'Top.stl') # noqa: PTH118
+ bottomf = os.path.join('constant', 'triSurface', 'Bottom.stl') # noqa: PTH118
+ leftf = os.path.join('constant', 'triSurface', 'Left.stl') # noqa: PTH118
+ rightf = os.path.join('constant', 'triSurface', 'Right.stl') # noqa: PTH118
+ buildingf = os.path.join('constant', 'triSurface', 'Building.stl') # noqa: PTH118
+ otherbuildingf = os.path.join( # noqa: PTH118
+ 'constant', 'triSurface', 'OtherBuilding.stl'
+ )
+ all01 = (
+ 'cat '
+ + entryf
+ + ' '
+ + exitf
+ + ' '
+ + topf
+ + ' '
+ + bottomf
+ + ' '
+ + leftf
+ + ' '
+ + rightf
+ )
+ full = os.path.join('constant', 'triSurface', 'Full.stl') # noqa: PTH118
+
+ # For different building cases
+ if flag == 0:
+ caseruntext = caseruntext + all01 + ' > ' + full + '\n\n'
+ elif flag == 1:
+ caseruntext = (
+ caseruntext + all01 + ' ' + buildingf + ' > ' + full + '\n\n'
+ )
+ elif flag == 2: # noqa: PLR2004
+ caseruntext = (
+ caseruntext
+ + all01
+ + ' '
+ + buildingf
+ + ' '
+ + otherbuildingf
+ + ' > '
+ + full
+ + '\n\n'
+ )
+ elif flag == 3: # noqa: PLR2004
+ caseruntext = (
+ caseruntext
+ + all01
+ + ' '
+ + otherbuildingf
+ + ' > '
+ + full
+ + '\n\n'
+ )
+ # Write to caserun file
+ scriptfile = open('caserun.sh', 'a') # noqa: PLW1514, PTH123, SIM115
+ scriptfile.write(caseruntext)
+ scriptfile.close()
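geomcheck and createOFSTL branch on raw SimulationType and FlumeInfoType codes, which is what the recurring noqa: PLR2004 (magic value) markers silence. A hedged sketch of the named-constant style Ruff is suggesting; the constant names are invented for illustration and are not defined anywhere in this module:

# Hypothetical labels for the SimulationType codes handled above
SIM_SW_COUPLED = 1   # bathymetry plus shallow-water solution files
SIM_BATHY_ONLY = 2   # bathymetry files only
SIM_USER_STL = 3     # user-supplied STL files
SIM_WAVE_FLUME = 4   # wave flume

simtype = 4  # would normally come from Events/SimulationType in dakota.json
if simtype == SIM_WAVE_FLUME:
    print('checking flume segment definitions')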
diff --git a/modules/createEVENT/GeoClawOpenFOAM/of7Initial.py b/modules/createEVENT/GeoClawOpenFOAM/of7Initial.py
index fc8b25676..da6e38042 100644
--- a/modules/createEVENT/GeoClawOpenFOAM/of7Initial.py
+++ b/modules/createEVENT/GeoClawOpenFOAM/of7Initial.py
@@ -1,28 +1,27 @@
-####################################################################
+# # noqa: INP001
# LICENSING INFORMATION
####################################################################
-"""
- LICENSE INFORMATION:
-
- Copyright (c) 2020-2030, The Regents of the University of California (Regents).
+"""LICENSE INFORMATION:
+
+Copyright (c) 2020-2030, The Regents of the University of California (Regents).
- All rights reserved.
+All rights reserved.
- Redistribution and use in source and binary forms, with or without modification, are permitted provided that the following conditions are met:
+Redistribution and use in source and binary forms, with or without modification, are permitted provided that the following conditions are met:
- 1. Redistributions of source code must retain the above copyright notice, this
- list of conditions and the following disclaimer.
- 2. Redistributions in binary form must reproduce the above copyright notice,
- this list of conditions and the following disclaimer in the documentation
- and/or other materials provided with the distribution.
+1. Redistributions of source code must retain the above copyright notice, this
+ list of conditions and the following disclaimer.
+2. Redistributions in binary form must reproduce the above copyright notice,
+ this list of conditions and the following disclaimer in the documentation
+ and/or other materials provided with the distribution.
- THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
- The views and conclusions contained in the software and documentation are those of the authors and should not be interpreted as representing official policies, either expressed or implied, of the FreeBSD Project.
+The views and conclusions contained in the software and documentation are those of the authors and should not be interpreted as representing official policies, either expressed or implied, of the FreeBSD Project.
- REGENTS SPECIFICALLY DISCLAIMS ANY WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE. THE SOFTWARE AND ACCOMPANYING DOCUMENTATION, IF ANY, PROVIDED HEREUNDER IS PROVIDED "AS IS". REGENTS HAS NO OBLIGATION TO PROVIDE MAINTENANCE, SUPPORT, UPDATES, ENHANCEMENTS, OR MODIFICATIONS.
-
-"""
+REGENTS SPECIFICALLY DISCLAIMS ANY WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE. THE SOFTWARE AND ACCOMPANYING DOCUMENTATION, IF ANY, PROVIDED HEREUNDER IS PROVIDED "AS IS". REGENTS HAS NO OBLIGATION TO PROVIDE MAINTENANCE, SUPPORT, UPDATES, ENHANCEMENTS, OR MODIFICATIONS.
+
+""" # noqa: D400
####################################################################
# AUTHOR INFORMATION
####################################################################
@@ -37,185 +36,257 @@
# Other custom modules
from hydroUtils import hydroUtils
+
####################################################################
# OpenFOAM7 solver class
####################################################################
-class of7Initial():
- """
- This class includes the methods related to
- initial conditions for openfoam7.
-
- Methods
- --------
- alphatext: Get all the text for the setFieldsDict
- """
-
- #############################################################
- def alphatext(self,data,fipath):
- '''
- Creates the necessary files for alpha - setFields for openfoam7
-
- Arguments
- -----------
- data: all the JSON data
- '''
-
- # Create a utilities object
- hydroutil = hydroUtils()
-
- # Get the simulation type
- simtype = ', '.join(hydroutil.extract_element_from_json(data, ["Events","SimulationType"]))
-
- # Get the header text for the U-file
- alphatext = self.alphaheader()
-
- # Read the values
- if int(simtype) == 1:
- fname = "SWAlpha.txt"
- swalphafile = os.path.join(fipath,fname)
- with open(swalphafile) as f:
- gloalpha, localalpha, x1,y1,z1,x2,y2,z2 = [float(x) for x in next(f).split(',')]
-
- alphatext = alphatext + 'defaultFieldValues\n(\n\tvolScalarFieldValue\talpha.water\t' + str(gloalpha) + '\n);\n\n'
-
- alphatext = alphatext + 'regions\n(\n'
- alphatext = alphatext + '\tboxToCell\n\t{\n\t\t'
- alphatext = alphatext + 'box\t(' + str(x1) + '\t' + str(y1) + '\t' + str(z1) + ')\t(' + str(x2) + '\t' + str(y2) + '\t' + str(z2) +');\n\n\t\t'
- alphatext = alphatext + 'fieldValues\n\t\t(\n\t\t\tvolScalarFieldValue\talpha.water\t' + str(localalpha) + '\n\t\t);\n\t}\n\n'
-
- else:
- gloalpha = ', '.join(hydroutil.extract_element_from_json(data, ["Events","InitialAlphaGlobal"]))
-
- numregs = ', '.join(hydroutil.extract_element_from_json(data, ["Events","NumAlphaRegion"]))
-
- alphatext = alphatext + 'defaultFieldValues\n(\n\tvolScalarFieldValue\talpha.water\t' + str(gloalpha) + '\n);\n\n'
-
- alphatext = alphatext + 'regions\n(\n'
-
- # Check for each alpha region
- for ii in range(int(numregs)):
-
- # Get the region
- # We dont check if region is inside the geometry
- # Should be added later on
- region = ', '.join(hydroutil.extract_element_from_json(data, ["Events","InitialAlphaRegion"+str(ii)]))
- regsegs = region.replace(',', ' ')
- # Convert the regions to list of floats
- nums = [float(n) for n in regsegs.split()]
-
- alphatext = alphatext + '\tboxToCell\n\t{\n\t\t'
- alphatext = alphatext + 'box\t(' + str(nums[0]) + '\t' + str(nums[1]) + '\t' + str(nums[2]) + ')\t(' + str(nums[3]) + '\t' + str(nums[4]) + '\t' + str(nums[5]) +');\n\n\t\t'
- alphatext = alphatext + 'fieldValues\n\t\t(\n\t\t\tvolScalarFieldValue\talpha.water\t' + str(nums[6]) + '\n\t\t);\n\t}\n\n'
-
- alphatext = alphatext + '\n);'
-
- return alphatext
-
- #############################################################
- def alphaheader(self):
- '''
- Creates the text for the header
-
- Variable
- -----------
- header: Header for the setFields-file
- '''
-
- header = """/*--------------------------*- NHERI SimCenter -*----------------------------*\
+class of7Initial:
+ """This class includes the methods related to
+ initial conditions for openfoam7.
+
+ Methods
+ -------
+ alphatext: Get all the text for the setFieldsDict
+
+ """ # noqa: D205, D404
+
+ #############################################################
+ def alphatext(self, data, fipath):
+ """Creates the necessary files for alpha - setFields for openfoam7
+
+ Arguments:
+ ---------
+ data: all the JSON data
+ fipath: Path to the input files (e.g., SWAlpha.txt)
+
+ """ # noqa: D400, D401
+ # Create a utilities object
+ hydroutil = hydroUtils()
+
+ # Get the simulation type
+ simtype = ', '.join(
+ hydroutil.extract_element_from_json(data, ['Events', 'SimulationType'])
+ )
+
+ # Get the header text for the U-file
+ alphatext = self.alphaheader()
+
+ # Read the values
+ if int(simtype) == 1:
+ fname = 'SWAlpha.txt'
+ swalphafile = os.path.join(fipath, fname) # noqa: PTH118
+ with open(swalphafile) as f: # noqa: PLW1514, PTH123
+ gloalpha, localalpha, x1, y1, z1, x2, y2, z2 = (
+ float(x) for x in next(f).split(',')
+ )
+
+ alphatext = (
+ alphatext
+ + 'defaultFieldValues\n(\n\tvolScalarFieldValue\talpha.water\t'
+ + str(gloalpha)
+ + '\n);\n\n'
+ )
+
+ alphatext = alphatext + 'regions\n(\n' # noqa: PLR6104
+ alphatext = alphatext + '\tboxToCell\n\t{\n\t\t' # noqa: PLR6104
+ alphatext = (
+ alphatext
+ + 'box\t('
+ + str(x1)
+ + '\t'
+ + str(y1)
+ + '\t'
+ + str(z1)
+ + ')\t('
+ + str(x2)
+ + '\t'
+ + str(y2)
+ + '\t'
+ + str(z2)
+ + ');\n\n\t\t'
+ )
+ alphatext = (
+ alphatext
+ + 'fieldValues\n\t\t(\n\t\t\tvolScalarFieldValue\talpha.water\t'
+ + str(localalpha)
+ + '\n\t\t);\n\t}\n\n'
+ )
+
+ else:
+ gloalpha = ', '.join(
+ hydroutil.extract_element_from_json(
+ data, ['Events', 'InitialAlphaGlobal']
+ )
+ )
+
+ numregs = ', '.join(
+ hydroutil.extract_element_from_json(
+ data, ['Events', 'NumAlphaRegion']
+ )
+ )
+
+ alphatext = (
+ alphatext
+ + 'defaultFieldValues\n(\n\tvolScalarFieldValue\talpha.water\t'
+ + str(gloalpha)
+ + '\n);\n\n'
+ )
+
+ alphatext = alphatext + 'regions\n(\n' # noqa: PLR6104
+
+ # Check for each alpha region
+ for ii in range(int(numregs)):
+ # Get the region
+ # We don't check if region is inside the geometry
+ # Should be added later on
+ region = ', '.join(
+ hydroutil.extract_element_from_json(
+ data, ['Events', 'InitialAlphaRegion' + str(ii)]
+ )
+ )
+ regsegs = region.replace(',', ' ')
+ # Convert the regions to list of floats
+ nums = [float(n) for n in regsegs.split()]
+
+ alphatext = alphatext + '\tboxToCell\n\t{\n\t\t' # noqa: PLR6104
+ alphatext = (
+ alphatext
+ + 'box\t('
+ + str(nums[0])
+ + '\t'
+ + str(nums[1])
+ + '\t'
+ + str(nums[2])
+ + ')\t('
+ + str(nums[3])
+ + '\t'
+ + str(nums[4])
+ + '\t'
+ + str(nums[5])
+ + ');\n\n\t\t'
+ )
+ alphatext = (
+ alphatext
+ + 'fieldValues\n\t\t(\n\t\t\tvolScalarFieldValue\talpha.water\t'
+ + str(nums[6])
+ + '\n\t\t);\n\t}\n\n'
+ )
+
+ alphatext = alphatext + '\n);' # noqa: PLR6104
+
+ return alphatext # noqa: RET504
+
+ #############################################################
+ def alphaheader(self): # noqa: PLR6301
+ """Creates the text for the header
+
+ Variable
+ -----------
+ header: Header for the setFields-file
+ """ # noqa: D400, D401
+ header = """/*--------------------------*- NHERI SimCenter -*----------------------------*\\
| | H |
| | Y | HydroUQ: Water-based Natural Hazards Modeling Application
|======| D | Website: simcenter.designsafe-ci.org/research-tools/hydro-uq
| | R | Version: 1.00
| | O |
-\*---------------------------------------------------------------------------*/
+\\*---------------------------------------------------------------------------*/
FoamFile
{\n\tversion\t2.0;\n\tformat\tascii;\n\tclass\tdictionary;\n\tlocation\t"system";\n\tobject\tsetFieldsDict;\n}
-// * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * //\n\n"""
-
- # Return the header for U file
- return header
-
- #############################################################
- def alphacheck(self,data,fipath):
- '''
- Checks for initial conditions for openfoam7
-
- Arguments
- -----------
- data: all the JSON data
- '''
-
- # Create a utilities object
- hydroutil = hydroUtils()
-
- # Get the simulation type
- simtype = ', '.join(hydroutil.extract_element_from_json(data, ["Events","SimulationType"]))
-
- # For SW-CFD coupling
- if simtype == 1:
-
- # Check for the file exists
- fname = "SWAlpha.txt"
- swalphafile = os.path.join(fipath,fname)
- if not os.path.exists(swalphafile):
- return -1
-
- # For all types other than the shallow water
- else:
-
- # Check global alpha value
- alphaglobal = hydroutil.extract_element_from_json(data, ["Events","InitialAlphaGlobal"])
- if alphaglobal == [None]:
- return -1
-
- # Check number of alpha region
- numreg = hydroutil.extract_element_from_json(data, ["Events","NumAlphaRegion"])
- if numreg == [None]:
- return -1
- else:
- numreg = ', '.join(hydroutil.extract_element_from_json(data, ["Events","NumAlphaRegion"]))
- if int(numreg) < 1:
- return -1
-
- # Check for each alpha region
- for ii in range(int(numreg)):
-
- # Get the region
- # We dont check if region is inside the geometry
- # Should be added later on
- region = hydroutil.extract_element_from_json(data, ["Events","InitialAlphaRegion"+str(ii)])
- if region == [None]:
- return -1
- else:
- region = ', '.join(hydroutil.extract_element_from_json(data, ["Events","InitialAlphaRegion"+str(ii)]))
- regsegs = region.replace(',', ' ')
- # Convert the regions to list of floats
- nums = [float(n) for n in regsegs.split()]
- # Check if 6 coordinates + 1 alpha number
- if len(nums) != 7:
- return -1
-
-
-
- # Return 0 if all is right
- return 0
-
- #############################################################
- def scripts(self,data,path):
- '''
- Create the scripts for caserun.sh
-
- Arguments
- -----------
- data: all the JSON data
- path: Path where dakota.json file is located
- '''
-
- # Setfields
- caseruntext = 'echo Setting fields...\n'
- caseruntext = caseruntext + 'setFields > setFields.log\n\n'
-
- # Write to caserun file
- scriptfile = open('caserun.sh',"a")
- scriptfile.write(caseruntext)
- scriptfile.close()
\ No newline at end of file
+// * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * //\n\n""" # noqa: W291
+
+ # Return the header for U file
+ return header # noqa: RET504
+
+ #############################################################
+ def alphacheck(self, data, fipath): # noqa: PLR6301
+ """Checks for initial conditions for openfoam7
+
+ Arguments:
+ ---------
+ data: all the JSON data
+ fipath: Path to the input files (e.g., SWAlpha.txt)
+
+ """ # noqa: D400, D401
+ # Create a utilities object
+ hydroutil = hydroUtils()
+
+ # Get the simulation type
+ simtype = ', '.join(
+ hydroutil.extract_element_from_json(data, ['Events', 'SimulationType'])
+ )
+
+ # For SW-CFD coupling
+ if int(simtype) == 1:
+ # Check for the file exists
+ fname = 'SWAlpha.txt'
+ swalphafile = os.path.join(fipath, fname) # noqa: PTH118
+ if not os.path.exists(swalphafile): # noqa: PTH110
+ return -1
+
+ # For all types other than the shallow water
+ else:
+ # Check global alpha value
+ alphaglobal = hydroutil.extract_element_from_json(
+ data, ['Events', 'InitialAlphaGlobal']
+ )
+ if alphaglobal == [None]:
+ return -1
+
+ # Check number of alpha region
+ numreg = hydroutil.extract_element_from_json(
+ data, ['Events', 'NumAlphaRegion']
+ )
+ if numreg == [None]:
+ return -1
+ else: # noqa: RET505
+ numreg = ', '.join(
+ hydroutil.extract_element_from_json(
+ data, ['Events', 'NumAlphaRegion']
+ )
+ )
+ if int(numreg) < 1:
+ return -1
+
+ # Check for each alpha region
+ for ii in range(int(numreg)):
+ # Get the region
+ # We don't check if region is inside the geometry
+ # Should be added later on
+ region = hydroutil.extract_element_from_json(
+ data, ['Events', 'InitialAlphaRegion' + str(ii)]
+ )
+ if region == [None]:
+ return -1
+ else: # noqa: RET505
+ region = ', '.join(
+ hydroutil.extract_element_from_json(
+ data, ['Events', 'InitialAlphaRegion' + str(ii)]
+ )
+ )
+ regsegs = region.replace(',', ' ')
+ # Convert the regions to list of floats
+ nums = [float(n) for n in regsegs.split()]
+ # Check if 6 coordinates + 1 alpha number
+ if len(nums) != 7: # noqa: PLR2004
+ return -1
+
+ # Return 0 if all is right
+ return 0
+
+ #############################################################
+ def scripts(self, data, path): # noqa: ARG002, PLR6301
+ """Create the scripts for caserun.sh
+
+ Arguments:
+ ---------
+ data: all the JSON data
+ path: Path where dakota.json file is located
+
+ """ # noqa: D400
+ # Setfields
+ caseruntext = 'echo Setting fields...\n'
+ caseruntext = caseruntext + 'setFields > setFields.log\n\n' # noqa: PLR6104
+
+ # Write to caserun file
+ scriptfile = open('caserun.sh', 'a') # noqa: PLW1514, PTH123, SIM115
+ scriptfile.write(caseruntext)
+ scriptfile.close()
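For orientation, the concatenation chain in alphatext assembles a setFieldsDict body along the lines of the sketch below; the numbers are placeholders and the snippet is reconstructed from the string literals above, not taken from an actual run:

gloalpha, localalpha = 0, 1  # global and in-box alpha.water values
x1, y1, z1, x2, y2, z2 = 0.0, 0.0, 0.0, 10.0, 2.0, 1.0  # placeholder box corners

body = (
    f'defaultFieldValues\n(\n\tvolScalarFieldValue\talpha.water\t{gloalpha}\n);\n\n'
    'regions\n(\n'
    '\tboxToCell\n\t{\n\t\t'
    f'box\t({x1}\t{y1}\t{z1})\t({x2}\t{y2}\t{z2});\n\n\t\t'
    f'fieldValues\n\t\t(\n\t\t\tvolScalarFieldValue\talpha.water\t{localalpha}\n\t\t);\n\t}}\n\n'
    '\n);'
)
print(body)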
diff --git a/modules/createEVENT/GeoClawOpenFOAM/of7Materials.py b/modules/createEVENT/GeoClawOpenFOAM/of7Materials.py
index a22e63f66..ea7a5190f 100644
--- a/modules/createEVENT/GeoClawOpenFOAM/of7Materials.py
+++ b/modules/createEVENT/GeoClawOpenFOAM/of7Materials.py
@@ -1,28 +1,27 @@
-####################################################################
+# # noqa: INP001
# LICENSING INFORMATION
####################################################################
-"""
- LICENSE INFORMATION:
-
- Copyright (c) 2020-2030, The Regents of the University of California (Regents).
+"""LICENSE INFORMATION:
+
+Copyright (c) 2020-2030, The Regents of the University of California (Regents).
- All rights reserved.
+All rights reserved.
- Redistribution and use in source and binary forms, with or without modification, are permitted provided that the following conditions are met:
+Redistribution and use in source and binary forms, with or without modification, are permitted provided that the following conditions are met:
- 1. Redistributions of source code must retain the above copyright notice, this
- list of conditions and the following disclaimer.
- 2. Redistributions in binary form must reproduce the above copyright notice,
- this list of conditions and the following disclaimer in the documentation
- and/or other materials provided with the distribution.
+1. Redistributions of source code must retain the above copyright notice, this
+ list of conditions and the following disclaimer.
+2. Redistributions in binary form must reproduce the above copyright notice,
+ this list of conditions and the following disclaimer in the documentation
+ and/or other materials provided with the distribution.
- THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
- The views and conclusions contained in the software and documentation are those of the authors and should not be interpreted as representing official policies, either expressed or implied, of the FreeBSD Project.
+The views and conclusions contained in the software and documentation are those of the authors and should not be interpreted as representing official policies, either expressed or implied, of the FreeBSD Project.
- REGENTS SPECIFICALLY DISCLAIMS ANY WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE. THE SOFTWARE AND ACCOMPANYING DOCUMENTATION, IF ANY, PROVIDED HEREUNDER IS PROVIDED "AS IS". REGENTS HAS NO OBLIGATION TO PROVIDE MAINTENANCE, SUPPORT, UPDATES, ENHANCEMENTS, OR MODIFICATIONS.
-
-"""
+REGENTS SPECIFICALLY DISCLAIMS ANY WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE. THE SOFTWARE AND ACCOMPANYING DOCUMENTATION, IF ANY, PROVIDED HEREUNDER IS PROVIDED "AS IS". REGENTS HAS NO OBLIGATION TO PROVIDE MAINTENANCE, SUPPORT, UPDATES, ENHANCEMENTS, OR MODIFICATIONS.
+
+""" # noqa: D400
####################################################################
# AUTHOR INFORMATION
####################################################################
@@ -32,147 +31,173 @@
# Import all necessary modules
####################################################################
# Standard python modules
-import os
# Other custom modules
from hydroUtils import hydroUtils
+
####################################################################
# OpenFOAM7 solver class
####################################################################
-class of7Materials():
- """
- This class includes the methods related to
- material properties for openfoam7.
-
- Methods
- --------
- mattext: Get all the text for the transportProperties
- """
-
- #############################################################
- def mattext(self,data):
- '''
- Creates the necessary files for materials for openfoam7
-
- Arguments
- -----------
- data: all the JSON data
- '''
-
- # Create a utilities object
- hydroutil = hydroUtils()
-
- # Get the header text for the U-file
- mattext = self.matheader()
-
- # Start by stating phases
- mattext = mattext + "phases (water air);\n\n"
-
- # Water phase
- # Viscosity
- nuwater = ', '.join(hydroutil.extract_element_from_json(data, ["Events","WaterViscosity"]))
- # Exponent
- nuwaterexp = ', '.join(hydroutil.extract_element_from_json(data, ["Events","WaterViscosityExp"]))
- # Density
- rhowater = ', '.join(hydroutil.extract_element_from_json(data, ["Events","WaterDensity"]))
-
- mattext = mattext + "water\n{\n"
- mattext = mattext + "\ttransportModel\tNewtonian;\n"
- mattext = mattext + "\tnu\t[0 2 -1 0 0 0 0]\t" + nuwater + "e" + nuwaterexp + ";\n"
- mattext = mattext + "\trho\t[1 -3 0 0 0 0 0]\t" + rhowater + ";\n"
- mattext = mattext + "}\n\n"
-
- # Air properties
- # Viscosity
- nuair = ', '.join(hydroutil.extract_element_from_json(data, ["Events","AirViscosity"]))
- # Exponent
- nuairexp = ', '.join(hydroutil.extract_element_from_json(data, ["Events","AirViscosityExp"]))
- # Density
- rhoair = ', '.join(hydroutil.extract_element_from_json(data, ["Events","AirDensity"]))
-
- mattext = mattext + "air\n{\n"
- mattext = mattext + "\ttransportModel\tNewtonian;\n"
- mattext = mattext + "\tnu\t[0 2 -1 0 0 0 0]\t" + nuair + "e" + nuairexp + ";\n"
- mattext = mattext + "\trho\t[1 -3 0 0 0 0 0]\t" + rhoair + ";\n"
- mattext = mattext + "}\n\n"
-
- # Surface tension between water and air
- sigma = ', '.join(hydroutil.extract_element_from_json(data, ["Events","SurfaceTension"]))
-
- mattext = mattext + "sigma\t[1 0 -2 0 0 0 0]\t"+sigma +";\n"
-
- return mattext
-
- #############################################################
- def matheader(self):
- '''
- Creates the text for the header
-
- Variable
- -----------
- header: Header for the transportProp-file
- '''
-
- header = """/*--------------------------*- NHERI SimCenter -*----------------------------*\
+class of7Materials:
+ """This class includes the methods related to
+ material properties for openfoam7.
+
+ Methods
+ -------
+ mattext: Get all the text for the transportProperties
+
+ """ # noqa: D205, D404
+
+ #############################################################
+ def mattext(self, data):
+ """Creates the necessary files for materials for openfoam7
+
+ Arguments:
+ ---------
+ data: all the JSON data
+
+ """ # noqa: D400, D401
+ # Create a utilities object
+ hydroutil = hydroUtils()
+
+ # Get the header text for the transportProperties file
+ mattext = self.matheader()
+
+ # Start by stating phases
+ mattext = mattext + 'phases (water air);\n\n' # noqa: PLR6104
+
+ # Water phase
+ # Viscosity
+ nuwater = ', '.join(
+ hydroutil.extract_element_from_json(data, ['Events', 'WaterViscosity'])
+ )
+ # Exponent
+ nuwaterexp = ', '.join(
+ hydroutil.extract_element_from_json(
+ data, ['Events', 'WaterViscosityExp']
+ )
+ )
+ # Density
+ rhowater = ', '.join(
+ hydroutil.extract_element_from_json(data, ['Events', 'WaterDensity'])
+ )
+
+ mattext = mattext + 'water\n{\n' # noqa: PLR6104
+ mattext = mattext + '\ttransportModel\tNewtonian;\n' # noqa: PLR6104
+ mattext = (
+ mattext + '\tnu\t[0 2 -1 0 0 0 0]\t' + nuwater + 'e' + nuwaterexp + ';\n'
+ )
+ mattext = mattext + '\trho\t[1 -3 0 0 0 0 0]\t' + rhowater + ';\n'
+ mattext = mattext + '}\n\n' # noqa: PLR6104
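+ # The bracketed exponents are OpenFOAM dimension sets in the order
+ # [kg m s K mol A cd], so nu above is in m^2/s and rho in kg/m^3.
+ # For example, with WaterViscosity 1, WaterViscosityExp -6 and
+ # WaterDensity 1000 (illustrative values), the block renders as:
+ #   water { transportModel Newtonian; nu [0 2 -1 0 0 0 0] 1e-6; rho [1 -3 0 0 0 0 0] 1000; }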
+
+ # Air properties
+ # Viscosity
+ nuair = ', '.join(
+ hydroutil.extract_element_from_json(data, ['Events', 'AirViscosity'])
+ )
+ # Exponent
+ nuairexp = ', '.join(
+ hydroutil.extract_element_from_json(data, ['Events', 'AirViscosityExp'])
+ )
+ # Density
+ rhoair = ', '.join(
+ hydroutil.extract_element_from_json(data, ['Events', 'AirDensity'])
+ )
+
+ mattext = mattext + 'air\n{\n' # noqa: PLR6104
+ mattext = mattext + '\ttransportModel\tNewtonian;\n' # noqa: PLR6104
+ mattext = (
+ mattext + '\tnu\t[0 2 -1 0 0 0 0]\t' + nuair + 'e' + nuairexp + ';\n'
+ )
+ mattext = mattext + '\trho\t[1 -3 0 0 0 0 0]\t' + rhoair + ';\n'
+ mattext = mattext + '}\n\n' # noqa: PLR6104
+
+ # Surface tension between water and air
+ sigma = ', '.join(
+ hydroutil.extract_element_from_json(data, ['Events', 'SurfaceTension'])
+ )
+
+ mattext = mattext + 'sigma\t[1 0 -2 0 0 0 0]\t' + sigma + ';\n'
+
+ return mattext # noqa: RET504
+
+ #############################################################
+ def matheader(self): # noqa: PLR6301
+ """Creates the text for the header
+
+ Variable
+ -----------
+ header: Header for the transportProperties file
+ """ # noqa: D400, D401
+ header = """/*--------------------------*- NHERI SimCenter -*----------------------------*\\
| | H |
| | Y | HydroUQ: Water-based Natural Hazards Modeling Application
|======| D | Website: simcenter.designsafe-ci.org/research-tools/hydro-uq
| | R | Version: 1.00
| | O |
-\*---------------------------------------------------------------------------*/
+\\*---------------------------------------------------------------------------*/
FoamFile
{\n\tversion\t2.0;\n\tformat\tascii;\n\tclass\tdictionary;\n\tlocation\t"constant";\n\tobject\ttransportProperties;\n}
-// * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * //\n\n"""
-
- # Return the header for U file
- return header
-
- #############################################################
- def matcheck(self,data):
- '''
- Checks for material properties for openfoam7
-
- Arguments
- -----------
- data: all the JSON data
- '''
-
- # Create a utilities object
- hydroutil = hydroUtils()
-
- # Check water properties
- # Viscosity
- nuwater = hydroutil.extract_element_from_json(data, ["Events","WaterViscosity"])
- if nuwater == [None]:
- return -1
- # Exponent
- nuwaterexp = hydroutil.extract_element_from_json(data, ["Events","WaterViscosityExp"])
- if nuwaterexp == [None]:
- return -1
- # Density
- rhowater = hydroutil.extract_element_from_json(data, ["Events","WaterDensity"])
- if rhowater == [None]:
- return -1
-
- # Check air properties
- # Viscosity
- nuair = hydroutil.extract_element_from_json(data, ["Events","AirViscosity"])
- if nuair == [None]:
- return -1
- # Exponent
- nuairexp = hydroutil.extract_element_from_json(data, ["Events","AirViscosityExp"])
- if nuairexp == [None]:
- return -1
- # Density
- rhoair = hydroutil.extract_element_from_json(data, ["Events","AirDensity"])
- if rhoair == [None]:
- return -1
-
- # Surface tension between water and air
- sigma = hydroutil.extract_element_from_json(data, ["Events","SurfaceTension"])
- if sigma == [None]:
- return -1
-
- # Return 0 if all is right
- return 0
\ No newline at end of file
+// * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * //\n\n""" # noqa: W291
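+ # The banner backslashes are escaped (\\) so that Python stores a
+ # literal backslash rather than interpreting an escape sequence.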
+
+ # Return the header for the transportProperties file
+ return header # noqa: RET504
+
+ #############################################################
+ def matcheck(self, data): # noqa: PLR6301
+ """Checks for material properties for openfoam7
+
+ Arguments:
+ ---------
+ data: all the JSON data
+
+ """ # noqa: D400, D401
+ # Create a utilities object
+ hydroutil = hydroUtils()
+
+ # Check water properties
+ # Viscosity
+ nuwater = hydroutil.extract_element_from_json(
+ data, ['Events', 'WaterViscosity']
+ )
+ if nuwater == [None]:
+ return -1
+ # Exponent
+ nuwaterexp = hydroutil.extract_element_from_json(
+ data, ['Events', 'WaterViscosityExp']
+ )
+ if nuwaterexp == [None]:
+ return -1
+ # Density
+ rhowater = hydroutil.extract_element_from_json(
+ data, ['Events', 'WaterDensity']
+ )
+ if rhowater == [None]:
+ return -1
+
+ # Check air properties
+ # Viscosity
+ nuair = hydroutil.extract_element_from_json(data, ['Events', 'AirViscosity'])
+ if nuair == [None]:
+ return -1
+ # Exponent
+ nuairexp = hydroutil.extract_element_from_json(
+ data, ['Events', 'AirViscosityExp']
+ )
+ if nuairexp == [None]:
+ return -1
+ # Density
+ rhoair = hydroutil.extract_element_from_json(data, ['Events', 'AirDensity'])
+ if rhoair == [None]:
+ return -1
+
+ # Surface tension between water and air
+ sigma = hydroutil.extract_element_from_json(
+ data, ['Events', 'SurfaceTension']
+ )
+ if sigma == [None]:
+ return -1
+
+ # Return 0 if all is right
+ return 0
diff --git a/modules/createEVENT/GeoClawOpenFOAM/of7Meshing.py b/modules/createEVENT/GeoClawOpenFOAM/of7Meshing.py
index 7e4f3bc24..8f852de88 100644
--- a/modules/createEVENT/GeoClawOpenFOAM/of7Meshing.py
+++ b/modules/createEVENT/GeoClawOpenFOAM/of7Meshing.py
@@ -1,28 +1,27 @@
-####################################################################
+# # noqa: INP001
# LICENSING INFORMATION
####################################################################
-"""
- LICENSE INFORMATION:
-
- Copyright (c) 2020-2030, The Regents of the University of California (Regents).
+"""LICENSE INFORMATION:
+
+Copyright (c) 2020-2030, The Regents of the University of California (Regents).
- All rights reserved.
+All rights reserved.
- Redistribution and use in source and binary forms, with or without modification, are permitted provided that the following conditions are met:
+Redistribution and use in source and binary forms, with or without modification, are permitted provided that the following conditions are met:
- 1. Redistributions of source code must retain the above copyright notice, this
- list of conditions and the following disclaimer.
- 2. Redistributions in binary form must reproduce the above copyright notice,
- this list of conditions and the following disclaimer in the documentation
- and/or other materials provided with the distribution.
+1. Redistributions of source code must retain the above copyright notice, this
+ list of conditions and the following disclaimer.
+2. Redistributions in binary form must reproduce the above copyright notice,
+ this list of conditions and the following disclaimer in the documentation
+ and/or other materials provided with the distribution.
- THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
- The views and conclusions contained in the software and documentation are those of the authors and should not be interpreted as representing official policies, either expressed or implied, of the FreeBSD Project.
+The views and conclusions contained in the software and documentation are those of the authors and should not be interpreted as representing official policies, either expressed or implied, of the FreeBSD Project.
- REGENTS SPECIFICALLY DISCLAIMS ANY WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE. THE SOFTWARE AND ACCOMPANYING DOCUMENTATION, IF ANY, PROVIDED HEREUNDER IS PROVIDED "AS IS". REGENTS HAS NO OBLIGATION TO PROVIDE MAINTENANCE, SUPPORT, UPDATES, ENHANCEMENTS, OR MODIFICATIONS.
-
-"""
+REGENTS SPECIFICALLY DISCLAIMS ANY WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE. THE SOFTWARE AND ACCOMPANYING DOCUMENTATION, IF ANY, PROVIDED HEREUNDER IS PROVIDED "AS IS". REGENTS HAS NO OBLIGATION TO PROVIDE MAINTENANCE, SUPPORT, UPDATES, ENHANCEMENTS, OR MODIFICATIONS.
+
+""" # noqa: D400
####################################################################
# AUTHOR INFORMATION
####################################################################
@@ -32,452 +31,609 @@
# Import all necessary modules
####################################################################
# Standard python modules
+import math
import os
+
import numpy as np
-import math
# Other custom modules
from hydroUtils import hydroUtils
+
####################################################################
# OpenFOAM7 solver class
####################################################################
-class of7Meshing():
- """
- This class includes the methods related to
- meshing for openfoam7.
-
- Methods
- --------
- meshcheck: Check all the meshing
- """
-
- #############################################################
- def meshcheck(self,data,fipath):
- '''
- Checks for material properties for openfoam7
-
- Arguments
- -----------
- data: all the JSON data
- '''
-
- # Create a utilities object
- hydroutil = hydroUtils()
-
- # Get mesher type
- mesher = ', '.join(hydroutil.extract_element_from_json(data, ["Events","MeshType"]))
-
- # If hydro mesher - nothing to check
- if int(mesher[0]) == 0:
- return 0
-
- # Other mesh softwares
- elif int(mesher[0]) == 1:
- meshfile = hydroutil.extract_element_from_json(data, ["Events","MeshFile"])
- if meshfile == [None]:
- return -1
- else:
- meshfile = ', '.join(hydroutil.extract_element_from_json(data, ["Events","MeshFile"]))
- meshfilepath = os.path.join(fipath,meshfile)
- if not os.path.isfile(meshfilepath):
- return -1
-
- # Mesh dictionaries
- elif int(mesher[0]) == 2:
- # Get path of bm and shm
- bmfile = os.path.join(fipath,'blockMeshDict')
- shmfile = os.path.join(fipath,'snappyHexMeshDict')
-
- # Check if both blockmeshdict or SHM do not exist
- if (not os.path.isfile(bmfile)) and (not os.path.isfile(shmfile)):
- return -1
-
- # Return 0 if all is right
- return 0
-
- #############################################################
- def meshheader(self,fileobjec):
- '''
- Creates the text for the header
-
- Variable
- -----------
- header: Header for the solver-files
- '''
-
- header = """/*--------------------------*- NHERI SimCenter -*----------------------------*\
+class of7Meshing:
+ """This class includes the methods related to
+ meshing for openfoam7.
+
+ Methods
+ -------
+ meshcheck: Check all the meshing
+
+ """ # noqa: D205, D404
+
+ #############################################################
+ def meshcheck(self, data, fipath): # noqa: PLR6301
+ """Checks the mesh parameters for openfoam7
+
+ Arguments:
+ ---------
+ data: all the JSON data
+ fipath: Path to the directory with the user-provided mesh files
+
+ """ # noqa: D400, D401
+ # Create a utilities object
+ hydroutil = hydroUtils()
+
+ # Get mesher type
+ mesher = ', '.join(
+ hydroutil.extract_element_from_json(data, ['Events', 'MeshType'])
+ )
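+ # MeshType codes used below: 0 = internal Hydro mesher (nothing to
+ # check), 1 = mesh file from an external mesher, 2 = user-supplied
+ # blockMeshDict / snappyHexMeshDict dictionaries.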
+
+ # If hydro mesher - nothing to check
+ if int(mesher[0]) == 0:
+ return 0
+
+ # Other mesh software
+ elif int(mesher[0]) == 1: # noqa: RET505
+ meshfile = hydroutil.extract_element_from_json(
+ data, ['Events', 'MeshFile']
+ )
+ if meshfile == [None]:
+ return -1
+ else: # noqa: RET505
+ meshfile = ', '.join(
+ hydroutil.extract_element_from_json(data, ['Events', 'MeshFile'])
+ )
+ meshfilepath = os.path.join(fipath, meshfile) # noqa: PTH118
+ if not os.path.isfile(meshfilepath): # noqa: PTH113
+ return -1
+
+ # Mesh dictionaries
+ elif int(mesher[0]) == 2: # noqa: PLR2004
+ # Get path of bm and shm
+ bmfile = os.path.join(fipath, 'blockMeshDict') # noqa: PTH118
+ shmfile = os.path.join(fipath, 'snappyHexMeshDict') # noqa: PTH118
+
+ # Check if both blockmeshdict or SHM do not exist
+ if (not os.path.isfile(bmfile)) and (not os.path.isfile(shmfile)): # noqa: PTH113
+ return -1
+
+ # Return 0 if all is right
+ return 0
+
+ #############################################################
+ def meshheader(self, fileobjec): # noqa: PLR6301
+ """Creates the text for the header
+
+ Variable
+ -----------
+ header: Header for the solver-files
+ """ # noqa: D400, D401
+ header = (
+ """/*--------------------------*- NHERI SimCenter -*----------------------------*\\
| | H |
| | Y | HydroUQ: Water-based Natural Hazards Modeling Application
|======| D | Website: simcenter.designsafe-ci.org/research-tools/hydro-uq
| | R | Version: 1.00
| | O |
-\*---------------------------------------------------------------------------*/
+\\*---------------------------------------------------------------------------*/
FoamFile
-{\n\tversion\t2.0;\n\tformat\tascii;\n\tclass\tdictionary;\n\tlocation\t"system";\n\tobject\t"""+fileobjec+""";\n}
+{\n\tversion\t2.0;\n\tformat\tascii;\n\tclass\tdictionary;\n\tlocation\t"system";\n\tobject\t""" # noqa: W291
+ + fileobjec
+ + """;\n}
// * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * //\n\n"""
-
- # Return the header for meshing file
- return header
-
- #############################################################
- def bmeshtext(self,data):
- '''
- Creates the necessary files for blockMeshDict for openfoam7
-
- Arguments
- -----------
- data: all the JSON data
- '''
-
- # Read the geometry data file
- data_geoext = np.genfromtxt('temp_geometry.txt', dtype=(float))
-
- # Create a utilities object
- hydroutil = hydroUtils()
- meshsize = ''.join(hydroutil.extract_element_from_json(data, ["Events","MeshSize"]))
-
- # Get the mesh sizes
- nx = 100*int(meshsize)
- if( abs(data_geoext[1] - data_geoext[0]) > 0.000001):
- ny = math.ceil(5*nx*((data_geoext[3]-data_geoext[2])/(data_geoext[1]-data_geoext[0])))
- nz = math.ceil(5*nx*((data_geoext[5]-data_geoext[4])/(data_geoext[1]-data_geoext[0])))
-
- # Get the header text for the blockMeshDict
- bmeshtext = self.meshheader('blockMeshDict')
-
- # Convert units
- bmeshtext = bmeshtext + 'convertToMeters\t1;\n\n'
-
- # Add vertices
- bmeshtext = bmeshtext + 'vertices\n(\n\t'
- bmeshtext = bmeshtext+'('+str(data_geoext[0])+'\t'+str(data_geoext[2])+'\t'+ str(data_geoext[4])+')\n\t'
- bmeshtext = bmeshtext+'('+str(data_geoext[1])+'\t'+str(data_geoext[2])+'\t'+ str(data_geoext[4])+')\n\t'
- bmeshtext = bmeshtext+'('+str(data_geoext[1])+'\t'+str(data_geoext[3])+'\t'+ str(data_geoext[4])+')\n\t'
- bmeshtext = bmeshtext+'('+str(data_geoext[0])+'\t'+str(data_geoext[3])+'\t'+ str(data_geoext[4])+')\n\t'
- bmeshtext = bmeshtext+'('+str(data_geoext[0])+'\t'+str(data_geoext[2])+'\t'+ str(data_geoext[5])+')\n\t'
- bmeshtext = bmeshtext+'('+str(data_geoext[1])+'\t'+str(data_geoext[2])+'\t'+ str(data_geoext[5])+')\n\t'
- bmeshtext = bmeshtext+'('+str(data_geoext[1])+'\t'+str(data_geoext[3])+'\t'+ str(data_geoext[5])+')\n\t'
- bmeshtext = bmeshtext+'('+str(data_geoext[0])+'\t'+str(data_geoext[3])+'\t'+ str(data_geoext[5])+')\n);\n\n'
-
- # Add blocks
- bmeshtext = bmeshtext + 'blocks\n(\n\t'
- bmeshtext = bmeshtext + 'hex (0 1 2 3 4 5 6 7) (' + str(nx) + '\t' + str(ny) + '\t' + str(nz) + ') simpleGrading (1 1 1)\n);\n\n'
-
- # Add edges
- bmeshtext = bmeshtext + 'edges\n(\n);\n\n'
-
- # Add patches
- bmeshtext = bmeshtext + 'patches\n(\n\t'
- bmeshtext = bmeshtext + 'patch maxY\n\t(\n\t\t(3 7 6 2)\n\t)\n\t'
- bmeshtext = bmeshtext + 'patch minX\n\t(\n\t\t(0 4 7 3)\n\t)\n\t'
- bmeshtext = bmeshtext + 'patch maxX\n\t(\n\t\t(2 6 5 1)\n\t)\n\t'
- bmeshtext = bmeshtext + 'patch minY\n\t(\n\t\t(1 5 4 0)\n\t)\n\t'
- bmeshtext = bmeshtext + 'patch minZ\n\t(\n\t\t(0 3 2 1)\n\t)\n\t'
- bmeshtext = bmeshtext + 'patch maxZ\n\t(\n\t\t(4 5 6 7)\n\t)\n'
- bmeshtext = bmeshtext + ');\n\n'
-
- # Add merge patch pairs
- bmeshtext = bmeshtext + 'mergePatchPairs\n(\n);\n'
-
- return bmeshtext
-
- #############################################################
- def sfetext(self):
- '''
- Creates the necessary files for new controldict for post-processing for openfoam7
-
- Arguments
- -----------
- data: all the JSON data
- '''
-
- # Read the geometry data file
- data_geoext = np.genfromtxt('temp_geometry.txt', dtype=(float))
-
- # Get the header text for the blockMeshDict
- sfetext = self.meshheader('surfaceFeatureExtractDict')
-
- # Rest of text
- stlinfo = '{\n\textractionMethod\textractFromSurface;\n'
- stlinfo = stlinfo + '\textractFromSurfaceCoeffs\n'
- stlinfo = stlinfo + '\t{includedAngle\t150;}\n'
- stlinfo = stlinfo + '\twriteObj\tyes;\n}'
- sfetext = sfetext + 'Entry.stl\n' + stlinfo + '\n\n'
- sfetext = sfetext + 'Exit.stl\n' + stlinfo + '\n\n'
- sfetext = sfetext + 'Top.stl\n' + stlinfo + '\n\n'
- sfetext = sfetext + 'Bottom.stl\n' + stlinfo + '\n\n'
- sfetext = sfetext + 'Left.stl\n' + stlinfo + '\n\n'
- sfetext = sfetext + 'Right.stl\n' + stlinfo + '\n\n'
- if int(data_geoext[6]) == 1:
- sfetext = sfetext + 'Building.stl\n' + stlinfo + '\n\n'
- elif int(data_geoext[6]) == 2:
- sfetext = sfetext + 'Building.stl\n' + stlinfo + '\n\n'
- sfetext = sfetext + 'OtherBuilding.stl\n' + stlinfo + '\n\n'
- elif int(data_geoext[6]) == 3:
- sfetext = sfetext + 'OtherBuilding.stl\n' + stlinfo + '\n\n'
-
- return sfetext
-
- #############################################################
- def shmtext(self,data):
- '''
- Creates the necessary files for new controldict for post-processing for openfoam7
-
- Arguments
- -----------
- None
- '''
-
- # Read the geometry data file
- data_geoext = np.genfromtxt('temp_geometry.txt', dtype=(float))
-
- # Create a utilities object
- hydroutil = hydroUtils()
- meshsize = ''.join(hydroutil.extract_element_from_json(data, ["Events","MeshSize"]))
-
- # Get the header text for the blockMeshDict
- shmtext = self.meshheader('snappyHexMeshDict')
-
- # Rest of text
- shmtext = shmtext + 'castellatedMesh\ttrue;\n\n'
- shmtext = shmtext + 'snap\ttrue;\n\n'
- shmtext = shmtext + 'addLayers\tfalse;\n\n'
-
- # Geometry. Definition of all surfaces.
- shmtext = shmtext + 'geometry\n{\n\t'
- shmtext = shmtext + 'Entry.stl {type triSurfaceMesh; name Entry;}\n\t'
- shmtext = shmtext + 'Exit.stl {type triSurfaceMesh; name Exit;}\n\t'
- shmtext = shmtext + 'Top.stl {type triSurfaceMesh; name Top;}\n\t'
- shmtext = shmtext + 'Bottom.stl {type triSurfaceMesh; name Bottom;}\n\t'
- shmtext = shmtext + 'Left.stl {type triSurfaceMesh; name Left;}\n\t'
- shmtext = shmtext + 'Right.stl {type triSurfaceMesh; name Right;}\n'
- if int(data_geoext[6]) == 1:
- shmtext = shmtext + '\tBuilding.stl {type triSurfaceMesh; name Building;}\n'
- elif int(data_geoext[6]) == 2:
- shmtext = shmtext + '\tBuilding.stl {type triSurfaceMesh; name Building;}\n'
- shmtext = shmtext + '\tOtherBuilding.stl {type triSurfaceMesh; name OtherBuilding;}\n'
- elif int(data_geoext[6]) == 3:
- shmtext = shmtext + '\tOtherBuilding.stl {type triSurfaceMesh; name OtherBuilding;}\n'
- shmtext = shmtext + '\tFull.stl {type triSurfaceMesh; name Full;}\n'
- shmtext = shmtext + '};\n\n'
-
- # Castellated mesh generation
- maxLocalCells = int(meshsize)*2000000
- maxGlobalCells = int(meshsize)*10000000
- shmtext = shmtext + 'castellatedMeshControls\n{\n\t'
- shmtext = shmtext + 'maxLocalCells\t' + str(maxLocalCells) + ';\n\t'
- shmtext = shmtext + 'maxGlobalCells\t' + str(maxGlobalCells) + ';\n\t'
- shmtext = shmtext + 'minRefinementCells\t10;\n\t'
- shmtext = shmtext + 'maxLoadUnbalance\t0.1;\n\t'
- shmtext = shmtext + 'nCellsBetweenLevels\t1;\n\n'
-
- # Explicit feature edge refinement
- shmtext = shmtext + '\tfeatures\n\t(\n\t\t'
- shmtext = shmtext + '{file "Entry.eMesh"; level 3;}\n\t\t'
- shmtext = shmtext + '{file "Exit.eMesh"; level 3;}\n\t\t'
- shmtext = shmtext + '{file "Top.eMesh"; level 3;}\n\t\t'
- shmtext = shmtext + '{file "Bottom.eMesh"; level 3;}\n\t\t'
- shmtext = shmtext + '{file "Left.eMesh"; level 3;}\n\t\t'
- shmtext = shmtext + '{file "Right.eMesh"; level 3;}\n'
- if int(data_geoext[6]) == 1:
- shmtext = shmtext + '\t\t{file "Building.eMesh"; level 3;}\n'
- elif int(data_geoext[6]) == 2:
- shmtext = shmtext + '\t\t{file "Building.eMesh"; level 3;}\n'
- shmtext = shmtext + '\t\t{file "OtherBuilding.eMesh"; level 3;}\n'
- elif int(data_geoext[6]) == 3:
- shmtext = shmtext + '\t\t{file "OtherBuilding.eMesh"; level 3;}\n'
- shmtext = shmtext + '\t);\n\n'
-
- # Surface based refinement
- shmtext = shmtext + '\trefinementSurfaces\n\t{\n\t\t'
- shmtext = shmtext + 'Entry {level (0 0);}\n\t\t'
- shmtext = shmtext + 'Exit {level (0 0);}\n\t\t'
- shmtext = shmtext + 'Top {level (0 0);}\n\t\t'
- shmtext = shmtext + 'Bottom {level (2 2);}\n\t\t'
- shmtext = shmtext + 'Left {level (2 2);}\n\t\t'
- shmtext = shmtext + 'Right {level (2 2);}\n'
- if int(data_geoext[6]) == 1:
- shmtext = shmtext + '\t\tBuilding {level (2 2);}\n'
- elif int(data_geoext[6]) == 2:
- shmtext = shmtext + '\t\tBuilding {level (2 2);}\n'
- shmtext = shmtext + '\t\tOtherBuilding {level (2 2);}\n'
- elif int(data_geoext[6]) == 3:
- shmtext = shmtext + '\t\tOtherBuilding {level (2 2);}\n'
- shmtext = shmtext + '\t};\n\n'
-
- # Resolve sharp angles
- shmtext = shmtext + '\tresolveFeatureAngle 80;\n\n'
-
- # Regional refinement
- # This needs to be added and corrected
- shmtext = shmtext + '\trefinementRegions\n\t{\n\t\t//Nothing here for now\n\t}\n\n'
-
- # Get the point inside the body
- px = 0.5*(data_geoext[1]+data_geoext[0])
- py = 0.5*(data_geoext[3]+data_geoext[2])
- pz = 0.5*(data_geoext[5]+data_geoext[4])
-
- # Mesh selection
- shmtext = shmtext + '\tlocationInMesh (' + str(px) + '\t' + str(py) + '\t' + str(pz) + ');\n\n'
- shmtext = shmtext + '\tallowFreeStandingZoneFaces\tfalse;\n'
- shmtext = shmtext + '}\n\n'
-
- # Snaping settings
- shmtext = shmtext + 'snapControls\n{\n\t'
- shmtext = shmtext + 'nSmoothPatch\t3;\n\t'
- shmtext = shmtext + 'tolerance\t4.0;\n\t'
- shmtext = shmtext + 'nSolveIter\t30;\n\t'
- shmtext = shmtext + 'nRelaxIter\t5;\n'
- shmtext = shmtext + '}\n\n'
-
- # Settings for layer addition
- # This is presently not being used
- shmtext = shmtext + 'addLayersControls\n{\n\t'
- shmtext = shmtext + 'relativeSizes\ttrue;\n\t'
- shmtext = shmtext + 'layers\n\t{\n\t'
- shmtext = shmtext + 'Bottom\n\t\t{nSurfaceLayers\t3;}\n\t'
- shmtext = shmtext + 'Left\n\t\t{nSurfaceLayers\t3;}\n\t'
- shmtext = shmtext + 'Right\n\t\t{nSurfaceLayers\t3;}\n\t}\n\n\t'
- shmtext = shmtext + 'expansionRatio\t1;\n\t'
- shmtext = shmtext + 'finalLayerThickness\t0.3;\n\t'
- shmtext = shmtext + 'minThickness\t0.1;\n\t'
- shmtext = shmtext + 'nGrow\t0;\n\t'
-
- # Advanced settings for layer addition
- shmtext = shmtext + 'featureAngle\t80;\n\t'
- shmtext = shmtext + 'nRelaxIter\t3;\n\t'
- shmtext = shmtext + 'nSmoothSurfaceNormals\t1;\n\t'
- shmtext = shmtext + 'nSmoothNormals\t3;\n\t'
- shmtext = shmtext + 'nSmoothThickness\t10;\n\t'
- shmtext = shmtext + 'maxFaceThicknessRatio\t0.5;\n\t'
- shmtext = shmtext + 'maxThicknessToMedialRatio\t0.3;\n\t'
- shmtext = shmtext + 'minMedianAxisAngle\t130;\n\t'
- shmtext = shmtext + 'nBufferCellsNoExtrude\t0;\n\t'
- shmtext = shmtext + 'nLayerIter\t50;\n'
- shmtext = shmtext + '}\n\n'
-
- # Mesh quality settings
- shmtext = shmtext + 'meshQualityControls\n{\n\t'
- shmtext = shmtext + 'maxNonOrtho\t180;\n\t'
- shmtext = shmtext + 'maxBoundarySkewness\t20;\n\t'
- shmtext = shmtext + 'maxInternalSkewness\t4;\n\t'
- shmtext = shmtext + 'maxConcave\t80;\n\t'
- shmtext = shmtext + 'minFlatness\t0.5;\n\t'
- shmtext = shmtext + 'minVol\t1e-13;\n\t'
- shmtext = shmtext + 'minTetQuality\t1e-30;\n\t'
- shmtext = shmtext + 'minArea\t-1;\n\t'
- shmtext = shmtext + 'minTwist\t0.02;\n\t'
- shmtext = shmtext + 'minDeterminant\t0.001;\n\t'
- shmtext = shmtext + 'minFaceWeight\t0.02;\n\t'
- shmtext = shmtext + 'minVolRatio\t0.01;\n\t'
- shmtext = shmtext + 'minTriangleTwist\t-1;\n\t'
- shmtext = shmtext + 'nSmoothScale\t4;\n\t'
- shmtext = shmtext + 'errorReduction\t0.75;\n'
- shmtext = shmtext + '}\n\n'
-
- # Advanced
- shmtext = shmtext + 'debug\t0;\n'
- shmtext = shmtext + 'mergeTolerance\t1E-6;\n'
-
- return shmtext
-
- #############################################################
- def scripts(self,data,path):
- '''
- Create the scripts for caserun.sh
-
- Arguments
- -----------
- data: all the JSON data
- path: Path where dakota.json file is located
- '''
-
- # Create a utilities object
- hydroutil = hydroUtils()
-
- # Get the mesher
- mesher = ', '.join(hydroutil.extract_element_from_json(data, ["Events","MeshType"]))
-
- # For the Hydro mesher
- if int(mesher[0]) == 0:
- caseruntext = 'echo blockMesh running...\n'
- caseruntext = caseruntext + 'blockMesh > blockMesh.log\n\n'
- # surfaceFeatureExtract
- caseruntext = caseruntext + 'echo surfaceFeatureExtract running...\n'
- caseruntext = caseruntext + 'surfaceFeatureExtract -force > sFeatureExt.log\n\n'
- # snappyHexMesh
- caseruntext = caseruntext + 'echo snappyHexMesh running...\n'
- caseruntext = caseruntext + 'snappyHexMesh > snappyHexMesh.log\n'
- # Copy polyMesh folder
- path2c = os.path.join('2','polyMesh')
- caseruntext = caseruntext + 'cp -r ' + path2c + ' constant\n'
- caseruntext = caseruntext + 'rm -fr 1 2\n\n'
-
- elif int(mesher[0]) == 1:
- # Get the mesh software
- meshsoftware = hydroutil.extract_element_from_json(data, ["Events","MeshSoftware"])
- # Get the mesh file name
- meshfile = hydroutil.extract_element_from_json(data, ["Events","MeshFile"])
- # Get the mesh file name
- caseruntext = 'Converting the mesh files...\n'
- caseruntext = caseruntext + 'MESHFILE=${inputDirectory}/templatedir/'+meshfile[0]+'\n\n'
- # Write out the appropriate commands
- if int(meshsoftware[0]) == 0:
- caseruntext = caseruntext + 'fluentMeshToFoam $MESHFILE > fluentMeshToFoam.log\n\n'
- elif int(meshsoftware[0]) == 1:
- caseruntext = caseruntext + 'ideasToFoam $MESHFILE > ideasToFoam.log\n\n'
- elif int(meshsoftware[0]) == 2:
- caseruntext = caseruntext + 'cfx4ToFoam $MESHFILE > cfx4ToFoam.log\n\n'
- elif int(meshsoftware[0]) == 3:
- caseruntext = caseruntext + 'gambitToFoam $MESHFILE > gambitToFoam.log\n\n'
- elif int(meshsoftware[0]) == 4:
- caseruntext = caseruntext + 'gmshToFoam $MESHFILE > gmshToFoam.log\n\n'
-
- elif int(mesher[0]) == 2:
- # COPY THE FILES TO THE RIGHT LOCATION
- caseruntext = 'Copying mesh dictionaries...\n'
- # blockMesh
- bmfile = os.path.join(path,'blockMeshDict')
- if os.path.isfile(bmfile):
- bmfilenew = os.path.join('system','blockMeshDict')
- caseruntext = caseruntext + 'cp ' + bmfile + ' ' + bmfilenew + '\n'
- caseruntext = caseruntext + 'echo blockMesh running...\n'
- caseruntext = caseruntext + 'blockMesh > blockMesh.log\n\n'
-
- #surfaceFeatureExtract
- sfdfile = os.path.join(path,'surfaceFeatureExtractDict')
- if os.path.isfile(sfdfile):
- sfdfilenew = os.path.join('system','surfaceFeatureExtractDict')
- caseruntext = caseruntext + 'cp ' + sfdfile + ' ' + sfdfilenew + '\n'
- caseruntext = caseruntext + 'echo surfaceFeatureExtract running...\n'
- caseruntext = caseruntext + 'surfaceFeatureExtract -force > sFeatureExt.log\n\n'
-
- # snappyHexMesh
- shmfile = os.path.join(path,'snappyHexMeshDict')
- if os.path.isfile(shmfile):
- shmfilenew = os.path.join('system','snappyHexMeshDict')
- caseruntext = caseruntext + 'cp ' + shmfile + ' ' + shmfilenew + '\n'
- caseruntext = caseruntext + 'echo snappyHexMesh running...\n'
- caseruntext = caseruntext + 'snappyHexMesh > snappyHexMesh.log\n'
- path2c = os.path.join('2','polyMesh')
- caseruntext = caseruntext + 'cp -r ' + path2c + ' constant\n'
- caseruntext = caseruntext + 'rm -fr 1 2\n'
-
- # All other items
- caseruntext = caseruntext + 'echo Checking mesh...\n'
- caseruntext = caseruntext + 'checkMesh > Meshcheck.log\n\n'
-
- # Create 0-folder
- caseruntext = caseruntext + 'echo Creating 0-folder...\n'
- caseruntext = caseruntext + 'rm -fr 0\n'
- caseruntext = caseruntext + 'cp -r 0.org 0\n\n'
-
- # Copy new force-based controldict
- caseruntext = caseruntext + 'echo Copying force-based controlDict...\n'
- caseruntext = caseruntext + 'cp cdictforce ' + os.path.join('system','controlDict') + '\n\n'
-
- # Write to caserun file
- scriptfile = open('caserun.sh',"a")
- scriptfile.write(caseruntext)
- scriptfile.close()
+ )
+
+ # Return the header for meshing file
+ return header # noqa: RET504
+
+ #############################################################
+ def bmeshtext(self, data):
+ """Creates the necessary files for blockMeshDict for openfoam7
+
+ Arguments:
+ ---------
+ data: all the JSON data
+
+ """ # noqa: D400, D401
+ # Read the geometry data file
+ data_geoext = np.genfromtxt('temp_geometry.txt', dtype=(float))
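+ # temp_geometry.txt is read as the domain extents plus a building flag:
+ # indices 0-5 = xmin, xmax, ymin, ymax, zmin, zmax and index 6 = flag
+ # (1 = building, 2 = building + other buildings, 3 = other buildings).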
+
+ # Create a utilities object
+ hydroutil = hydroUtils()
+ meshsize = ''.join(
+ hydroutil.extract_element_from_json(data, ['Events', 'MeshSize'])
+ )
+
+ # Get the mesh sizes
+ nx = 100 * int(meshsize)
+ if abs(data_geoext[1] - data_geoext[0]) > 0.000001: # noqa: PLR2004
+ ny = math.ceil(
+ 5
+ * nx
+ * (
+ (data_geoext[3] - data_geoext[2])
+ / (data_geoext[1] - data_geoext[0])
+ )
+ )
+ nz = math.ceil(
+ 5
+ * nx
+ * (
+ (data_geoext[5] - data_geoext[4])
+ / (data_geoext[1] - data_geoext[0])
+ )
+ )
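+ # nx follows directly from the user MeshSize factor, while ny and nz
+ # are scaled from nx by the domain aspect ratios (with a factor of 5);
+ # e.g. MeshSize 1 on a 10 x 2 x 1 m domain gives nx = 100,
+ # ny = ceil(5*100*0.2) = 100 and nz = ceil(5*100*0.1) = 50 (illustrative).
+ # Note that ny and nz are only assigned when the x-extent is non-zero.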
+
+ # Get the header text for the blockMeshDict
+ bmeshtext = self.meshheader('blockMeshDict')
+
+ # Convert units
+ bmeshtext = bmeshtext + 'convertToMeters\t1;\n\n' # noqa: PLR6104
+
+ # Add vertices
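+ # The eight vertices are the bounding-box corners, listed as the
+ # z-min face first and the z-max face second, in the order expected
+ # by the hex block definition further below.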
+ bmeshtext = bmeshtext + 'vertices\n(\n\t' # noqa: PLR6104
+ bmeshtext = (
+ bmeshtext
+ + '('
+ + str(data_geoext[0])
+ + '\t'
+ + str(data_geoext[2])
+ + '\t'
+ + str(data_geoext[4])
+ + ')\n\t'
+ )
+ bmeshtext = (
+ bmeshtext
+ + '('
+ + str(data_geoext[1])
+ + '\t'
+ + str(data_geoext[2])
+ + '\t'
+ + str(data_geoext[4])
+ + ')\n\t'
+ )
+ bmeshtext = (
+ bmeshtext
+ + '('
+ + str(data_geoext[1])
+ + '\t'
+ + str(data_geoext[3])
+ + '\t'
+ + str(data_geoext[4])
+ + ')\n\t'
+ )
+ bmeshtext = (
+ bmeshtext
+ + '('
+ + str(data_geoext[0])
+ + '\t'
+ + str(data_geoext[3])
+ + '\t'
+ + str(data_geoext[4])
+ + ')\n\t'
+ )
+ bmeshtext = (
+ bmeshtext
+ + '('
+ + str(data_geoext[0])
+ + '\t'
+ + str(data_geoext[2])
+ + '\t'
+ + str(data_geoext[5])
+ + ')\n\t'
+ )
+ bmeshtext = (
+ bmeshtext
+ + '('
+ + str(data_geoext[1])
+ + '\t'
+ + str(data_geoext[2])
+ + '\t'
+ + str(data_geoext[5])
+ + ')\n\t'
+ )
+ bmeshtext = (
+ bmeshtext
+ + '('
+ + str(data_geoext[1])
+ + '\t'
+ + str(data_geoext[3])
+ + '\t'
+ + str(data_geoext[5])
+ + ')\n\t'
+ )
+ bmeshtext = (
+ bmeshtext
+ + '('
+ + str(data_geoext[0])
+ + '\t'
+ + str(data_geoext[3])
+ + '\t'
+ + str(data_geoext[5])
+ + ')\n);\n\n'
+ )
+
+ # Add blocks
+ bmeshtext = bmeshtext + 'blocks\n(\n\t' # noqa: PLR6104
+ bmeshtext = (
+ bmeshtext
+ + 'hex (0 1 2 3 4 5 6 7) ('
+ + str(nx)
+ + '\t'
+ + str(ny)
+ + '\t'
+ + str(nz)
+ + ') simpleGrading (1 1 1)\n);\n\n'
+ )
+
+ # Add edges
+ bmeshtext = bmeshtext + 'edges\n(\n);\n\n' # noqa: PLR6104
+
+ # Add patches
+ bmeshtext = bmeshtext + 'patches\n(\n\t' # noqa: PLR6104
+ bmeshtext = bmeshtext + 'patch maxY\n\t(\n\t\t(3 7 6 2)\n\t)\n\t' # noqa: PLR6104
+ bmeshtext = bmeshtext + 'patch minX\n\t(\n\t\t(0 4 7 3)\n\t)\n\t' # noqa: PLR6104
+ bmeshtext = bmeshtext + 'patch maxX\n\t(\n\t\t(2 6 5 1)\n\t)\n\t' # noqa: PLR6104
+ bmeshtext = bmeshtext + 'patch minY\n\t(\n\t\t(1 5 4 0)\n\t)\n\t' # noqa: PLR6104
+ bmeshtext = bmeshtext + 'patch minZ\n\t(\n\t\t(0 3 2 1)\n\t)\n\t' # noqa: PLR6104
+ bmeshtext = bmeshtext + 'patch maxZ\n\t(\n\t\t(4 5 6 7)\n\t)\n' # noqa: PLR6104
+ bmeshtext = bmeshtext + ');\n\n' # noqa: PLR6104
+
+ # Add merge patch pairs
+ bmeshtext = bmeshtext + 'mergePatchPairs\n(\n);\n' # noqa: PLR6104
+
+ return bmeshtext # noqa: RET504
+
+ #############################################################
+ def sfetext(self):
+ """Creates the necessary text for surfaceFeatureExtractDict for openfoam7
+
+ Arguments:
+ ---------
+ None
+
+ """ # noqa: D400, D401
+ # Read the geometry data file
+ data_geoext = np.genfromtxt('temp_geometry.txt', dtype=(float))
+
+ # Get the header text for the surfaceFeatureExtractDict
+ sfetext = self.meshheader('surfaceFeatureExtractDict')
+
+ # Rest of text
+ stlinfo = '{\n\textractionMethod\textractFromSurface;\n'
+ stlinfo = stlinfo + '\textractFromSurfaceCoeffs\n' # noqa: PLR6104
+ stlinfo = stlinfo + '\t{includedAngle\t150;}\n' # noqa: PLR6104
+ stlinfo = stlinfo + '\twriteObj\tyes;\n}' # noqa: PLR6104
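+ # The same extraction settings block is reused for every boundary STL
+ # appended below.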
+ sfetext = sfetext + 'Entry.stl\n' + stlinfo + '\n\n'
+ sfetext = sfetext + 'Exit.stl\n' + stlinfo + '\n\n'
+ sfetext = sfetext + 'Top.stl\n' + stlinfo + '\n\n'
+ sfetext = sfetext + 'Bottom.stl\n' + stlinfo + '\n\n'
+ sfetext = sfetext + 'Left.stl\n' + stlinfo + '\n\n'
+ sfetext = sfetext + 'Right.stl\n' + stlinfo + '\n\n'
+ if int(data_geoext[6]) == 1:
+ sfetext = sfetext + 'Building.stl\n' + stlinfo + '\n\n'
+ elif int(data_geoext[6]) == 2: # noqa: PLR2004
+ sfetext = sfetext + 'Building.stl\n' + stlinfo + '\n\n'
+ sfetext = sfetext + 'OtherBuilding.stl\n' + stlinfo + '\n\n'
+ elif int(data_geoext[6]) == 3: # noqa: PLR2004
+ sfetext = sfetext + 'OtherBuilding.stl\n' + stlinfo + '\n\n'
+
+ return sfetext
+
+ #############################################################
+ def shmtext(self, data):
+ """Creates the necessary text for snappyHexMeshDict for openfoam7
+
+ Arguments:
+ ---------
+ data: all the JSON data
+
+ """ # noqa: D400, D401
+ # Read the geometry data file
+ data_geoext = np.genfromtxt('temp_geometry.txt', dtype=(float))
+
+ # Create a utilities object
+ hydroutil = hydroUtils()
+ meshsize = ''.join(
+ hydroutil.extract_element_from_json(data, ['Events', 'MeshSize'])
+ )
+
+ # Get the header text for the snappyHexMeshDict
+ shmtext = self.meshheader('snappyHexMeshDict')
+
+ # Rest of text
+ shmtext = shmtext + 'castellatedMesh\ttrue;\n\n' # noqa: PLR6104
+ shmtext = shmtext + 'snap\ttrue;\n\n' # noqa: PLR6104
+ shmtext = shmtext + 'addLayers\tfalse;\n\n' # noqa: PLR6104
+
+ # Geometry. Definition of all surfaces.
+ shmtext = shmtext + 'geometry\n{\n\t' # noqa: PLR6104
+ shmtext = shmtext + 'Entry.stl {type triSurfaceMesh; name Entry;}\n\t' # noqa: PLR6104
+ shmtext = shmtext + 'Exit.stl {type triSurfaceMesh; name Exit;}\n\t' # noqa: PLR6104
+ shmtext = shmtext + 'Top.stl {type triSurfaceMesh; name Top;}\n\t' # noqa: PLR6104
+ shmtext = shmtext + 'Bottom.stl {type triSurfaceMesh; name Bottom;}\n\t' # noqa: PLR6104
+ shmtext = shmtext + 'Left.stl {type triSurfaceMesh; name Left;}\n\t' # noqa: PLR6104
+ shmtext = shmtext + 'Right.stl {type triSurfaceMesh; name Right;}\n' # noqa: PLR6104
+ if int(data_geoext[6]) == 1:
+ shmtext = ( # noqa: PLR6104
+ shmtext + '\tBuilding.stl {type triSurfaceMesh; name Building;}\n'
+ )
+ elif int(data_geoext[6]) == 2: # noqa: PLR2004
+ shmtext = ( # noqa: PLR6104
+ shmtext + '\tBuilding.stl {type triSurfaceMesh; name Building;}\n'
+ )
+ shmtext = ( # noqa: PLR6104
+ shmtext
+ + '\tOtherBuilding.stl {type triSurfaceMesh; name OtherBuilding;}\n'
+ )
+ elif int(data_geoext[6]) == 3: # noqa: PLR2004
+ shmtext = ( # noqa: PLR6104
+ shmtext
+ + '\tOtherBuilding.stl {type triSurfaceMesh; name OtherBuilding;}\n'
+ )
+ shmtext = shmtext + '\tFull.stl {type triSurfaceMesh; name Full;}\n' # noqa: PLR6104
+ shmtext = shmtext + '};\n\n' # noqa: PLR6104
+
+ # Castellated mesh generation
+ maxLocalCells = int(meshsize) * 2000000 # noqa: N806
+ maxGlobalCells = int(meshsize) * 10000000 # noqa: N806
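+ # Both castellation cell budgets scale linearly with the user
+ # MeshSize factor read above.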
+ shmtext = shmtext + 'castellatedMeshControls\n{\n\t' # noqa: PLR6104
+ shmtext = shmtext + 'maxLocalCells\t' + str(maxLocalCells) + ';\n\t'
+ shmtext = shmtext + 'maxGlobalCells\t' + str(maxGlobalCells) + ';\n\t'
+ shmtext = shmtext + 'minRefinementCells\t10;\n\t' # noqa: PLR6104
+ shmtext = shmtext + 'maxLoadUnbalance\t0.1;\n\t' # noqa: PLR6104
+ shmtext = shmtext + 'nCellsBetweenLevels\t1;\n\n' # noqa: PLR6104
+
+ # Explicit feature edge refinement
+ shmtext = shmtext + '\tfeatures\n\t(\n\t\t' # noqa: PLR6104
+ shmtext = shmtext + '{file "Entry.eMesh"; level 3;}\n\t\t' # noqa: PLR6104
+ shmtext = shmtext + '{file "Exit.eMesh"; level 3;}\n\t\t' # noqa: PLR6104
+ shmtext = shmtext + '{file "Top.eMesh"; level 3;}\n\t\t' # noqa: PLR6104
+ shmtext = shmtext + '{file "Bottom.eMesh"; level 3;}\n\t\t' # noqa: PLR6104
+ shmtext = shmtext + '{file "Left.eMesh"; level 3;}\n\t\t' # noqa: PLR6104
+ shmtext = shmtext + '{file "Right.eMesh"; level 3;}\n' # noqa: PLR6104
+ if int(data_geoext[6]) == 1:
+ shmtext = shmtext + '\t\t{file "Building.eMesh"; level 3;}\n' # noqa: PLR6104
+ elif int(data_geoext[6]) == 2: # noqa: PLR2004
+ shmtext = shmtext + '\t\t{file "Building.eMesh"; level 3;}\n' # noqa: PLR6104
+ shmtext = shmtext + '\t\t{file "OtherBuilding.eMesh"; level 3;}\n' # noqa: PLR6104
+ elif int(data_geoext[6]) == 3: # noqa: PLR2004
+ shmtext = shmtext + '\t\t{file "OtherBuilding.eMesh"; level 3;}\n' # noqa: PLR6104
+ shmtext = shmtext + '\t);\n\n' # noqa: PLR6104
+
+ # Surface based refinement
+ shmtext = shmtext + '\trefinementSurfaces\n\t{\n\t\t' # noqa: PLR6104
+ shmtext = shmtext + 'Entry {level (0 0);}\n\t\t' # noqa: PLR6104
+ shmtext = shmtext + 'Exit {level (0 0);}\n\t\t' # noqa: PLR6104
+ shmtext = shmtext + 'Top {level (0 0);}\n\t\t' # noqa: PLR6104
+ shmtext = shmtext + 'Bottom {level (2 2);}\n\t\t' # noqa: PLR6104
+ shmtext = shmtext + 'Left {level (2 2);}\n\t\t' # noqa: PLR6104
+ shmtext = shmtext + 'Right {level (2 2);}\n' # noqa: PLR6104
+ if int(data_geoext[6]) == 1:
+ shmtext = shmtext + '\t\tBuilding {level (2 2);}\n' # noqa: PLR6104
+ elif int(data_geoext[6]) == 2: # noqa: PLR2004
+ shmtext = shmtext + '\t\tBuilding {level (2 2);}\n' # noqa: PLR6104
+ shmtext = shmtext + '\t\tOtherBuilding {level (2 2);}\n' # noqa: PLR6104
+ elif int(data_geoext[6]) == 3: # noqa: PLR2004
+ shmtext = shmtext + '\t\tOtherBuilding {level (2 2);}\n' # noqa: PLR6104
+ shmtext = shmtext + '\t};\n\n' # noqa: PLR6104
+
+ # Resolve sharp angles
+ shmtext = shmtext + '\tresolveFeatureAngle 80;\n\n' # noqa: PLR6104
+
+ # Regional refinement
+ # This needs to be added and corrected
+ shmtext = ( # noqa: PLR6104
+ shmtext + '\trefinementRegions\n\t{\n\t\t//Nothing here for now\n\t}\n\n'
+ )
+
+ # Get the point inside the body
+ px = 0.5 * (data_geoext[1] + data_geoext[0])
+ py = 0.5 * (data_geoext[3] + data_geoext[2])
+ pz = 0.5 * (data_geoext[5] + data_geoext[4])
+
+ # Mesh selection
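+ # locationInMesh is the domain centroid computed above; snappyHexMesh
+ # keeps the mesh region that contains this point when splitting the
+ # castellated mesh.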
+ shmtext = (
+ shmtext
+ + '\tlocationInMesh ('
+ + str(px)
+ + '\t'
+ + str(py)
+ + '\t'
+ + str(pz)
+ + ');\n\n'
+ )
+ shmtext = shmtext + '\tallowFreeStandingZoneFaces\tfalse;\n' # noqa: PLR6104
+ shmtext = shmtext + '}\n\n' # noqa: PLR6104
+
+ # Snapping settings
+ shmtext = shmtext + 'snapControls\n{\n\t' # noqa: PLR6104
+ shmtext = shmtext + 'nSmoothPatch\t3;\n\t' # noqa: PLR6104
+ shmtext = shmtext + 'tolerance\t4.0;\n\t' # noqa: PLR6104
+ shmtext = shmtext + 'nSolveIter\t30;\n\t' # noqa: PLR6104
+ shmtext = shmtext + 'nRelaxIter\t5;\n' # noqa: PLR6104
+ shmtext = shmtext + '}\n\n' # noqa: PLR6104
+
+ # Settings for layer addition
+ # This is presently not being used
+ shmtext = shmtext + 'addLayersControls\n{\n\t' # noqa: PLR6104
+ shmtext = shmtext + 'relativeSizes\ttrue;\n\t' # noqa: PLR6104
+ shmtext = shmtext + 'layers\n\t{\n\t' # noqa: PLR6104
+ shmtext = shmtext + 'Bottom\n\t\t{nSurfaceLayers\t3;}\n\t' # noqa: PLR6104
+ shmtext = shmtext + 'Left\n\t\t{nSurfaceLayers\t3;}\n\t' # noqa: PLR6104
+ shmtext = shmtext + 'Right\n\t\t{nSurfaceLayers\t3;}\n\t}\n\n\t' # noqa: PLR6104
+ shmtext = shmtext + 'expansionRatio\t1;\n\t' # noqa: PLR6104
+ shmtext = shmtext + 'finalLayerThickness\t0.3;\n\t' # noqa: PLR6104
+ shmtext = shmtext + 'minThickness\t0.1;\n\t' # noqa: PLR6104
+ shmtext = shmtext + 'nGrow\t0;\n\t' # noqa: PLR6104
+
+ # Advanced settings for layer addition
+ shmtext = shmtext + 'featureAngle\t80;\n\t' # noqa: PLR6104
+ shmtext = shmtext + 'nRelaxIter\t3;\n\t' # noqa: PLR6104
+ shmtext = shmtext + 'nSmoothSurfaceNormals\t1;\n\t' # noqa: PLR6104
+ shmtext = shmtext + 'nSmoothNormals\t3;\n\t' # noqa: PLR6104
+ shmtext = shmtext + 'nSmoothThickness\t10;\n\t' # noqa: PLR6104
+ shmtext = shmtext + 'maxFaceThicknessRatio\t0.5;\n\t' # noqa: PLR6104
+ shmtext = shmtext + 'maxThicknessToMedialRatio\t0.3;\n\t' # noqa: PLR6104
+ shmtext = shmtext + 'minMedianAxisAngle\t130;\n\t' # noqa: PLR6104
+ shmtext = shmtext + 'nBufferCellsNoExtrude\t0;\n\t' # noqa: PLR6104
+ shmtext = shmtext + 'nLayerIter\t50;\n' # noqa: PLR6104
+ shmtext = shmtext + '}\n\n' # noqa: PLR6104
+
+ # Mesh quality settings
+ shmtext = shmtext + 'meshQualityControls\n{\n\t' # noqa: PLR6104
+ shmtext = shmtext + 'maxNonOrtho\t180;\n\t' # noqa: PLR6104
+ shmtext = shmtext + 'maxBoundarySkewness\t20;\n\t' # noqa: PLR6104
+ shmtext = shmtext + 'maxInternalSkewness\t4;\n\t' # noqa: PLR6104
+ shmtext = shmtext + 'maxConcave\t80;\n\t' # noqa: PLR6104
+ shmtext = shmtext + 'minFlatness\t0.5;\n\t' # noqa: PLR6104
+ shmtext = shmtext + 'minVol\t1e-13;\n\t' # noqa: PLR6104
+ shmtext = shmtext + 'minTetQuality\t1e-30;\n\t' # noqa: PLR6104
+ shmtext = shmtext + 'minArea\t-1;\n\t' # noqa: PLR6104
+ shmtext = shmtext + 'minTwist\t0.02;\n\t' # noqa: PLR6104
+ shmtext = shmtext + 'minDeterminant\t0.001;\n\t' # noqa: PLR6104
+ shmtext = shmtext + 'minFaceWeight\t0.02;\n\t' # noqa: PLR6104
+ shmtext = shmtext + 'minVolRatio\t0.01;\n\t' # noqa: PLR6104
+ shmtext = shmtext + 'minTriangleTwist\t-1;\n\t' # noqa: PLR6104
+ shmtext = shmtext + 'nSmoothScale\t4;\n\t' # noqa: PLR6104
+ shmtext = shmtext + 'errorReduction\t0.75;\n' # noqa: PLR6104
+ shmtext = shmtext + '}\n\n' # noqa: PLR6104
+
+ # Advanced
+ shmtext = shmtext + 'debug\t0;\n' # noqa: PLR6104
+ shmtext = shmtext + 'mergeTolerance\t1E-6;\n' # noqa: PLR6104
+
+ return shmtext # noqa: RET504
+
+ #############################################################
+ def scripts(self, data, path): # noqa: C901, PLR6301
+ """Create the scripts for caserun.sh
+
+ Arguments:
+ ---------
+ data: all the JSON data
+ path: Path where dakota.json file is located
+
+ """ # noqa: D400
+ # Create a utilities object
+ hydroutil = hydroUtils()
+
+ # Get the mesher
+ mesher = ', '.join(
+ hydroutil.extract_element_from_json(data, ['Events', 'MeshType'])
+ )
+
+ # For the Hydro mesher
+ if int(mesher[0]) == 0:
+ caseruntext = 'echo blockMesh running...\n'
+ caseruntext = caseruntext + 'blockMesh > blockMesh.log\n\n' # noqa: PLR6104
+ # surfaceFeatureExtract
+ caseruntext = caseruntext + 'echo surfaceFeatureExtract running...\n' # noqa: PLR6104
+ caseruntext = ( # noqa: PLR6104
+ caseruntext + 'surfaceFeatureExtract -force > sFeatureExt.log\n\n'
+ )
+ # snappyHexMesh
+ caseruntext = caseruntext + 'echo snappyHexMesh running...\n' # noqa: PLR6104
+ caseruntext = caseruntext + 'snappyHexMesh > snappyHexMesh.log\n' # noqa: PLR6104
+ # Copy polyMesh folder
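+ # snappyHexMesh writes its castellation and snapping stages to the
+ # time folders 1 and 2; the final mesh in 2/polyMesh is copied into
+ # constant and the intermediate folders are removed.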
+ path2c = os.path.join('2', 'polyMesh') # noqa: PTH118
+ caseruntext = caseruntext + 'cp -r ' + path2c + ' constant\n'
+ caseruntext = caseruntext + 'rm -fr 1 2\n\n' # noqa: PLR6104
+
+ elif int(mesher[0]) == 1:
+ # Get the mesh software
+ meshsoftware = hydroutil.extract_element_from_json(
+ data, ['Events', 'MeshSoftware']
+ )
+ # Get the mesh file name
+ meshfile = hydroutil.extract_element_from_json(
+ data, ['Events', 'MeshFile']
+ )
+ # Build the mesh conversion commands
+ caseruntext = 'Converting the mesh files...\n'
+ caseruntext = (
+ caseruntext
+ + 'MESHFILE=${inputDirectory}/templatedir/'
+ + meshfile[0]
+ + '\n\n'
+ )
+ # Write out the appropriate commands
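+ # MeshSoftware codes: 0 = Fluent, 1 = IDEAS, 2 = CFX4, 3 = Gambit,
+ # 4 = gmsh, matching the converters invoked below.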
+ if int(meshsoftware[0]) == 0:
+ caseruntext = ( # noqa: PLR6104
+ caseruntext
+ + 'fluentMeshToFoam $MESHFILE > fluentMeshToFoam.log\n\n'
+ )
+ elif int(meshsoftware[0]) == 1:
+ caseruntext = ( # noqa: PLR6104
+ caseruntext + 'ideasToFoam $MESHFILE > ideasToFoam.log\n\n'
+ )
+ elif int(meshsoftware[0]) == 2: # noqa: PLR2004
+ caseruntext = ( # noqa: PLR6104
+ caseruntext + 'cfx4ToFoam $MESHFILE > cfx4ToFoam.log\n\n'
+ )
+ elif int(meshsoftware[0]) == 3: # noqa: PLR2004
+ caseruntext = ( # noqa: PLR6104
+ caseruntext + 'gambitToFoam $MESHFILE > gambitToFoam.log\n\n'
+ )
+ elif int(meshsoftware[0]) == 4: # noqa: PLR2004
+ caseruntext = ( # noqa: PLR6104
+ caseruntext + 'gmshToFoam $MESHFILE > gmshToFoam.log\n\n'
+ )
+
+ elif int(mesher[0]) == 2: # noqa: PLR2004
+ # COPY THE FILES TO THE RIGHT LOCATION
+ caseruntext = 'Copying mesh dictionaries...\n'
+ # blockMesh
+ bmfile = os.path.join(path, 'blockMeshDict') # noqa: PTH118
+ if os.path.isfile(bmfile): # noqa: PTH113
+ bmfilenew = os.path.join('system', 'blockMeshDict') # noqa: PTH118
+ caseruntext = caseruntext + 'cp ' + bmfile + ' ' + bmfilenew + '\n'
+ caseruntext = caseruntext + 'echo blockMesh running...\n' # noqa: PLR6104
+ caseruntext = caseruntext + 'blockMesh > blockMesh.log\n\n' # noqa: PLR6104
+
+ # surfaceFeatureExtract
+ sfdfile = os.path.join(path, 'surfaceFeatureExtractDict') # noqa: PTH118
+ if os.path.isfile(sfdfile): # noqa: PTH113
+ sfdfilenew = os.path.join('system', 'surfaceFeatureExtractDict') # noqa: PTH118
+ caseruntext = caseruntext + 'cp ' + sfdfile + ' ' + sfdfilenew + '\n'
+ caseruntext = caseruntext + 'echo surfaceFeatureExtract running...\n' # noqa: PLR6104
+ caseruntext = ( # noqa: PLR6104
+ caseruntext
+ + 'surfaceFeatureExtract -force > sFeatureExt.log\n\n'
+ )
+
+ # snappyHexMesh
+ shmfile = os.path.join(path, 'snappyHexMeshDict') # noqa: PTH118
+ if os.path.isfile(shmfile): # noqa: PTH113
+ shmfilenew = os.path.join('system', 'snappyHexMeshDict') # noqa: PTH118
+ caseruntext = caseruntext + 'cp ' + shmfile + ' ' + shmfilenew + '\n'
+ caseruntext = caseruntext + 'echo snappyHexMesh running...\n' # noqa: PLR6104
+ caseruntext = caseruntext + 'snappyHexMesh > snappyHexMesh.log\n' # noqa: PLR6104
+ path2c = os.path.join('2', 'polyMesh') # noqa: PTH118
+ caseruntext = caseruntext + 'cp -r ' + path2c + ' constant\n'
+ caseruntext = caseruntext + 'rm -fr 1 2\n' # noqa: PLR6104
+
+ # All other items
+ caseruntext = caseruntext + 'echo Checking mesh...\n' # noqa: PLR6104
+ caseruntext = caseruntext + 'checkMesh > Meshcheck.log\n\n' # noqa: PLR6104
+
+ # Create 0-folder
+ caseruntext = caseruntext + 'echo Creating 0-folder...\n' # noqa: PLR6104
+ caseruntext = caseruntext + 'rm -fr 0\n' # noqa: PLR6104
+ caseruntext = caseruntext + 'cp -r 0.org 0\n\n' # noqa: PLR6104
+
+ # Copy new force-based controldict
+ caseruntext = caseruntext + 'echo Copying force-based controlDict...\n' # noqa: PLR6104
+ caseruntext = (
+ caseruntext
+ + 'cp cdictforce '
+ + os.path.join('system', 'controlDict') # noqa: PTH118
+ + '\n\n'
+ )
+
+ # Write to caserun file
+ scriptfile = open('caserun.sh', 'a') # noqa: PLW1514, PTH123, SIM115
+ scriptfile.write(caseruntext)
+ scriptfile.close()
diff --git a/modules/createEVENT/GeoClawOpenFOAM/of7Others.py b/modules/createEVENT/GeoClawOpenFOAM/of7Others.py
index 7520bee97..e8da1252a 100644
--- a/modules/createEVENT/GeoClawOpenFOAM/of7Others.py
+++ b/modules/createEVENT/GeoClawOpenFOAM/of7Others.py
@@ -1,28 +1,27 @@
-####################################################################
+# # noqa: INP001
# LICENSING INFORMATION
####################################################################
-"""
- LICENSE INFORMATION:
-
- Copyright (c) 2020-2030, The Regents of the University of California (Regents).
+"""LICENSE INFORMATION:
+
+Copyright (c) 2020-2030, The Regents of the University of California (Regents).
+
+All rights reserved.
- All rights reserved.
+Redistribution and use in source and binary forms, with or without modification, are permitted provided that the following conditions are met:
- Redistribution and use in source and binary forms, with or without modification, are permitted provided that the following conditions are met:
+1. Redistributions of source code must retain the above copyright notice, this
+ list of conditions and the following disclaimer.
+2. Redistributions in binary form must reproduce the above copyright notice,
+ this list of conditions and the following disclaimer in the documentation
+ and/or other materials provided with the distribution.
- 1. Redistributions of source code must retain the above copyright notice, this
- list of conditions and the following disclaimer.
- 2. Redistributions in binary form must reproduce the above copyright notice,
- this list of conditions and the following disclaimer in the documentation
- and/or other materials provided with the distribution.
+THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
- THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+The views and conclusions contained in the software and documentation are those of the authors and should not be interpreted as representing official policies, either expressed or implied, of the FreeBSD Project.
- The views and conclusions contained in the software and documentation are those of the authors and should not be interpreted as representing official policies, either expressed or implied, of the FreeBSD Project.
+REGENTS SPECIFICALLY DISCLAIMS ANY WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE. THE SOFTWARE AND ACCOMPANYING DOCUMENTATION, IF ANY, PROVIDED HEREUNDER IS PROVIDED "AS IS". REGENTS HAS NO OBLIGATION TO PROVIDE MAINTENANCE, SUPPORT, UPDATES, ENHANCEMENTS, OR MODIFICATIONS.
- REGENTS SPECIFICALLY DISCLAIMS ANY WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE. THE SOFTWARE AND ACCOMPANYING DOCUMENTATION, IF ANY, PROVIDED HEREUNDER IS PROVIDED "AS IS". REGENTS HAS NO OBLIGATION TO PROVIDE MAINTENANCE, SUPPORT, UPDATES, ENHANCEMENTS, OR MODIFICATIONS.
-
-"""
+""" # noqa: D400
####################################################################
# AUTHOR INFORMATION
####################################################################
@@ -32,92 +31,113 @@
# Import all necessary modules
####################################################################
# Standard python modules
-import os
# Other custom modules
from hydroUtils import hydroUtils
+
####################################################################
# OpenFOAM7 solver class
####################################################################
-class of7Others():
- """
- This class includes the methods related to
- auxillary files for openfoam7.
-
- Methods
- --------
- gfiletext: Get all the text for the gravity file
- """
-
- #############################################################
- def othersheader(self,fileclas,fileloc,fileobjec):
- '''
- Creates the text for the header
-
- Variable
- -----------
- header: Header for the other-files
- '''
-
- header = """/*--------------------------*- NHERI SimCenter -*----------------------------*\
+class of7Others:
+ """This class includes the methods related to
+ auxiliary files for openfoam7.
+
+ Methods
+ -------
+ gfiletext: Get all the text for the gravity file
+
+ """ # noqa: D205, D404
+
+ #############################################################
+ def othersheader(self, fileclas, fileloc, fileobjec): # noqa: PLR6301
+ """Creates the text for the header
+
+ Variable
+ -----------
+ header: Header for the other-files
+ """ # noqa: D400, D401
+ header = (
+ """/*--------------------------*- NHERI SimCenter -*----------------------------*\\
| | H |
| | Y | HydroUQ: Water-based Natural Hazards Modeling Application
|======| D | Website: simcenter.designsafe-ci.org/research-tools/hydro-uq
| | R | Version: 1.00
| | O |
-\*---------------------------------------------------------------------------*/
+\\*---------------------------------------------------------------------------*/
FoamFile
-{\n\tversion\t2.0;\n\tformat\tascii;\n\tclass\t"""+fileclas+""";\n\tlocation\t""" + '"' + fileloc + """";\n\tobject\t"""+fileobjec+""";\n}
+{\n\tversion\t2.0;\n\tformat\tascii;\n\tclass\t""" # noqa: W291
+ + fileclas
+ + """;\n\tlocation\t"""
+ + '"'
+ + fileloc
+ + """";\n\tobject\t"""
+ + fileobjec
+ + """;\n}
// * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * //\n\n"""
-
- # Return the header for U file
- return header
-
- #############################################################
- def gfiletext(self,data):
- '''
- Creates the necessary text for gravity file for openfoam7
-
- Arguments
- -----------
- data: all the JSON data
- '''
-
- # Create a utilities object
- hydroutil = hydroUtils()
-
- # Initialize gravity
- gx = 0.0
- gy = 0.0
- gz = 0.0
-
- simtype = ', '.join(hydroutil.extract_element_from_json(data, ["Events","SimulationType"]))
-
- if int(simtype) == 4:
- gz = -9.81
- else:
- # Get the gravity from dakota.json file
- gravity = ', '.join(hydroutil.extract_element_from_json(data, ["Events","Gravity"]))
- # Depending on the inputs, initialize gravity in the right direction
- if int(gravity) == 11:
- gx = 9.81
- elif int(gravity) == 12:
- gy = 9.81
- elif int(gravity) == 13:
- gz = 9.81
- elif int(gravity) == 21:
- gx = -9.81
- elif int(gravity) == 22:
- gy = -9.81
- elif int(gravity) == 23:
- gz = -9.81
-
- # Get the header text for the gravity-file
- gfiletext = self.othersheader("uniformDimensionedVectorField","constant","g")
-
- # All other data
- gfiletext = gfiletext + 'dimensions\t[0 1 -2 0 0 0 0];\n'
- gfiletext = gfiletext + 'value\t(' + str(gx) + '\t' + str(gy) + '\t' + str(gz) + ');\n'
-
- return gfiletext
\ No newline at end of file
+ )
+
+ # Return the header for U file
+ return header # noqa: RET504
+
+ #############################################################
+ def gfiletext(self, data):
+ """Creates the necessary text for gravity file for openfoam7
+
+ Arguments:
+ ---------
+ data: all the JSON data
+
+ """ # noqa: D400, D401
+ # Create a utilities object
+ hydroutil = hydroUtils()
+
+ # Initialize gravity
+ gx = 0.0
+ gy = 0.0
+ gz = 0.0
+
+ simtype = ', '.join(
+ hydroutil.extract_element_from_json(data, ['Events', 'SimulationType'])
+ )
+
+ if int(simtype) == 4: # noqa: PLR2004
+ gz = -9.81
+ else:
+ # Get the gravity from dakota.json file
+ gravity = ', '.join(
+ hydroutil.extract_element_from_json(data, ['Events', 'Gravity'])
+ )
+ # Depending on the inputs, initialize gravity in the right direction
+ if int(gravity) == 11: # noqa: PLR2004
+ gx = 9.81
+ elif int(gravity) == 12: # noqa: PLR2004
+ gy = 9.81
+ elif int(gravity) == 13: # noqa: PLR2004
+ gz = 9.81
+ elif int(gravity) == 21: # noqa: PLR2004
+ gx = -9.81
+ elif int(gravity) == 22: # noqa: PLR2004
+ gy = -9.81
+ elif int(gravity) == 23: # noqa: PLR2004
+ gz = -9.81
+
+ # Get the header text for the gravity-file
+ gfiletext = self.othersheader(
+ 'uniformDimensionedVectorField', 'constant', 'g'
+ )
+
+ # All other data
+ gfiletext = gfiletext + 'dimensions\t[0 1 -2 0 0 0 0];\n' # noqa: PLR6104
+ gfiletext = (
+ gfiletext
+ + 'value\t('
+ + str(gx)
+ + '\t'
+ + str(gy)
+ + '\t'
+ + str(gz)
+ + ');\n'
+ )
+
+ return gfiletext # noqa: RET504
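
gfiletext above keeps the original if/elif chain over the Events.Gravity codes and suppresses PLR2004 (magic-value comparison) on every branch. As an illustrative restatement only, not something this diff applies, the same decision table can be written as a lookup; the tuples below are copied from the branches above, and GRAVITY_BY_CODE is a hypothetical name:

    # Sketch only: (gx, gy, gz) in m/s^2 keyed by the dakota.json gravity
    # code. Codes 11/12/13 point along +x/+y/+z, codes 21/22/23 along
    # -x/-y/-z; unknown codes fall back to zero gravity, matching the
    # initialization in gfiletext.
    GRAVITY_BY_CODE = {
        11: (9.81, 0.0, 0.0),
        12: (0.0, 9.81, 0.0),
        13: (0.0, 0.0, 9.81),
        21: (-9.81, 0.0, 0.0),
        22: (0.0, -9.81, 0.0),
        23: (0.0, 0.0, -9.81),
    }
    gx, gy, gz = GRAVITY_BY_CODE.get(int(gravity), (0.0, 0.0, 0.0))
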
diff --git a/modules/createEVENT/GeoClawOpenFOAM/of7Prboundary.py b/modules/createEVENT/GeoClawOpenFOAM/of7Prboundary.py
index e16cf6e71..f44d4ba98 100644
--- a/modules/createEVENT/GeoClawOpenFOAM/of7Prboundary.py
+++ b/modules/createEVENT/GeoClawOpenFOAM/of7Prboundary.py
@@ -1,28 +1,27 @@
-####################################################################
+# # noqa: INP001
# LICENSING INFORMATION
####################################################################
-"""
- LICENSE INFORMATION:
-
- Copyright (c) 2020-2030, The Regents of the University of California (Regents).
+"""LICENSE INFORMATION:
+
+Copyright (c) 2020-2030, The Regents of the University of California (Regents).
- All rights reserved.
+All rights reserved.
- Redistribution and use in source and binary forms, with or without modification, are permitted provided that the following conditions are met:
+Redistribution and use in source and binary forms, with or without modification, are permitted provided that the following conditions are met:
- 1. Redistributions of source code must retain the above copyright notice, this
- list of conditions and the following disclaimer.
- 2. Redistributions in binary form must reproduce the above copyright notice,
- this list of conditions and the following disclaimer in the documentation
- and/or other materials provided with the distribution.
+1. Redistributions of source code must retain the above copyright notice, this
+ list of conditions and the following disclaimer.
+2. Redistributions in binary form must reproduce the above copyright notice,
+ this list of conditions and the following disclaimer in the documentation
+ and/or other materials provided with the distribution.
- THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
- The views and conclusions contained in the software and documentation are those of the authors and should not be interpreted as representing official policies, either expressed or implied, of the FreeBSD Project.
+The views and conclusions contained in the software and documentation are those of the authors and should not be interpreted as representing official policies, either expressed or implied, of the FreeBSD Project.
- REGENTS SPECIFICALLY DISCLAIMS ANY WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE. THE SOFTWARE AND ACCOMPANYING DOCUMENTATION, IF ANY, PROVIDED HEREUNDER IS PROVIDED "AS IS". REGENTS HAS NO OBLIGATION TO PROVIDE MAINTENANCE, SUPPORT, UPDATES, ENHANCEMENTS, OR MODIFICATIONS.
-
-"""
+REGENTS SPECIFICALLY DISCLAIMS ANY WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE. THE SOFTWARE AND ACCOMPANYING DOCUMENTATION, IF ANY, PROVIDED HEREUNDER IS PROVIDED "AS IS". REGENTS HAS NO OBLIGATION TO PROVIDE MAINTENANCE, SUPPORT, UPDATES, ENHANCEMENTS, OR MODIFICATIONS.
+
+""" # noqa: D400
####################################################################
# AUTHOR INFORMATION
####################################################################
@@ -32,167 +31,181 @@
# Import all necessary modules
####################################################################
# Standard python modules
-import os
# Other custom modules
from hydroUtils import hydroUtils
+
####################################################################
# OpenFOAM7 solver class
####################################################################
-class of7Prboundary():
- """
- This class includes the methods related to
- pressure boundary conditions for openfoam7.
-
- Methods
- --------
- Prtext: Get all the text for the p_rgh-file
- """
-
- #############################################################
- def Prtext(self,data,patches):
- '''
- Creates the necessary text for pressure bc for openfoam7
-
- Arguments
- -----------
- data: all the JSON data
- '''
-
- # Create a utilities object
- hydroutil = hydroUtils()
-
- # Get the header text for the U-file
- prtext = self.Prheader()
-
- # Start the outside
- prtext = prtext + "boundaryField\n{\n"
-
- # Loop over all patches
- for patchname in patches:
- prtext = prtext + "\t" + patchname + "\n"
- patch = hydroutil.extract_element_from_json(data, ["Events","PressureType_" + patchname])
- if patch == [None]:
- prtype = -1
- else:
- prtype = ', '.join(hydroutil.extract_element_from_json(data, ["Events","PressureType_" + patchname]))
- prtext = prtext + self.Prpatchtext(data,prtype,patchname)
-
- # Check for building and other building
- prtext = prtext + '\tBuilding\n'
- prtext = prtext + self.Prpatchtext(data,'201','Building')
- prtext = prtext + '\tOtherBuilding\n'
- prtext = prtext + self.Prpatchtext(data,'201','OtherBuilding')
-
- # Close the outside
- prtext = prtext + "}\n\n"
-
- # Return the text for velocity BC
- return prtext
-
- #############################################################
- def Prheader(self):
- '''
- Creates the text for the header for pressure file
-
- Variable
- -----------
- header: Header for the p_rgh-file
- '''
-
- header = """/*--------------------------*- NHERI SimCenter -*----------------------------*\
+class of7Prboundary:
+ """This class includes the methods related to
+ pressure boundary conditions for openfoam7.
+
+ Methods
+ -------
+ Prtext: Get all the text for the p_rgh-file
+
+ """ # noqa: D205, D404
+
+ #############################################################
+ def Prtext(self, data, patches): # noqa: N802
+ """Creates the necessary text for pressure bc for openfoam7
+
+ Arguments:
+ ---------
+ data: all the JSON data
+
+ """ # noqa: D400, D401
+ # Create a utilities object
+ hydroutil = hydroUtils()
+
+ # Get the header text for the U-file
+ prtext = self.Prheader()
+
+ # Start the outside
+ prtext = prtext + 'boundaryField\n{\n' # noqa: PLR6104
+
+ # Loop over all patches
+ for patchname in patches:
+ prtext = prtext + '\t' + patchname + '\n'
+ patch = hydroutil.extract_element_from_json(
+ data, ['Events', 'PressureType_' + patchname]
+ )
+ if patch == [None]:
+ prtype = -1
+ else:
+ prtype = ', '.join(
+ hydroutil.extract_element_from_json(
+ data, ['Events', 'PressureType_' + patchname]
+ )
+ )
+ prtext = prtext + self.Prpatchtext(data, prtype, patchname) # noqa: PLR6104
+
+ # Check for building and other building
+ prtext = prtext + '\tBuilding\n' # noqa: PLR6104
+ prtext = prtext + self.Prpatchtext(data, '201', 'Building') # noqa: PLR6104
+ prtext = prtext + '\tOtherBuilding\n' # noqa: PLR6104
+ prtext = prtext + self.Prpatchtext(data, '201', 'OtherBuilding') # noqa: PLR6104
+
+ # Close the outside
+ prtext = prtext + '}\n\n' # noqa: PLR6104
+
+ # Return the text for velocity BC
+ return prtext # noqa: RET504
+
+ #############################################################
+ def Prheader(self): # noqa: N802, PLR6301
+ """Creates the text for the header for pressure file
+
+ Variable
+ -----------
+ header: Header for the p_rgh-file
+ """ # noqa: D400, D401
+ header = """/*--------------------------*- NHERI SimCenter -*----------------------------*\\
| | H |
| | Y | HydroUQ: Water-based Natural Hazards Modeling Application
|======| D | Website: simcenter.designsafe-ci.org/research-tools/hydro-uq
| | R | Version: 1.00
| | O |
-\*---------------------------------------------------------------------------*/
+\\*---------------------------------------------------------------------------*/
FoamFile
{\n\tversion\t2.0;\n\tformat\tascii;\n\tclass\tvolScalarField;\n\tlocation\t"0";\n\tobject\tp_rgh;\n}
-// * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * //\n\n"""
-
- header = header + "dimensions\t[1 -1 -2 0 0 0 0];\n\n"
- header = header + "internalField\tuniform\t0;\n\n"
-
- # Return the header for U file
- return header
-
- #############################################################
- def Prpatchtext(self,data,Prtype,patchname):
- '''
- Creates the text the pressure boundary condition
-
- Arguments
- -----------
- Prtype: Type of velocity b.c
- patchname: Name of the patch
-
- Variable
- -----------
- Prtext: Text for the particular patch
- '''
-
- # Create a utilities object
- hydroutil = hydroUtils()
-
- # Inlet types
- # For each type, get the text
- if int(Prtype) == 0:
- # Default for velocity type
- # inlet = fixedFluxPressure
- # wall/outlet = zeroGradient
- # Empty = Empty
- Upatch = hydroutil.extract_element_from_json(data, ["Events","VelocityType_" + patchname])
- if Upatch == [None]:
- Utype = -1
- else:
- Utype = ', '.join(hydroutil.extract_element_from_json(data, ["Events","VelocityType_" + patchname]))
-
- if (int(Utype) > 100) and (int(Utype) < 200):
- Prtype2 = "102"
- elif (int(Utype) > 200) and (int(Utype) < 300):
- Prtype2 = "202"
- elif int(Utype) > 300:
- Prtype2 = "201"
- else:
- Prtype2 = "-1"
- else:
- Prtype2 = Prtype
-
- # Test for different pressure types
- if int(Prtype2) == 101:
- # fixedValue
- # Get the pressure values
- pres = hydroutil.extract_element_from_json(data, ["Events","Pressure_"+patchname])
- if pres == [None]:
- pr = 0.0
- else:
- presvals = ', '.join(hydroutil.extract_element_from_json(data, ["Events","Pressure_"+patchname]))
- pr = float(presvals)
- # Get the text
- Prtext = "\t{\n\t\t"
- Prtext = Prtext + "type\tfixedValue;\n\t\t"
- Prtext = Prtext + "value\t" + str(pr) + ";\n"
- Prtext = Prtext + "\t}\n"
- elif int(Prtype2) == 102:
- # fixedFluxPressure
- Prtext = "\t{\n\t\t"
- Prtext = Prtext + "type\tfixedFluxPressure;\n\t\t"
- Prtext = Prtext + "value\tuniform 0;\n\t}\n"
- elif int(Prtype2) == 201:
- # Outlet zero gradient
- Prtext = "\t{\n\t\t"
- Prtext = Prtext + "type\tzeroGradient;\n\t}\n"
- elif int(Prtype2) == 202:
- Prtext = "\t{\n\t\t"
- Prtext = Prtext + "type\tfixedValue;\n\t\t"
- Prtext = Prtext + "value\t0;\n"
- Prtext = Prtext + "\t}\n"
- else:
- # Default: Empty
- Prtext = "\t{\n\t\t"
- Prtext = Prtext + "type\tempty;\n\t}\n"
-
- # Return the header for U file
- return Prtext
\ No newline at end of file
+// * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * //\n\n""" # noqa: W291
+
+ header = header + 'dimensions\t[1 -1 -2 0 0 0 0];\n\n' # noqa: PLR6104
+ header = header + 'internalField\tuniform\t0;\n\n' # noqa: PLR6104
+
+ # Return the header for U file
+ return header # noqa: RET504
+
+ #############################################################
+ def Prpatchtext(self, data, Prtype, patchname): # noqa: C901, N802, N803, PLR6301
+        """Creates the text for the pressure boundary condition
+
+ Arguments:
+ ---------
+        Prtype: Type of pressure b.c
+ patchname: Name of the patch
+
+ Variable
+ -----------
+ Prtext: Text for the particular patch
+
+ """ # noqa: D400, D401
+ # Create a utilities object
+ hydroutil = hydroUtils()
+
+ # Inlet types
+ # For each type, get the text
+ if int(Prtype) == 0:
+ # Default for velocity type
+ # inlet = fixedFluxPressure
+ # wall/outlet = zeroGradient
+ # Empty = Empty
+ Upatch = hydroutil.extract_element_from_json( # noqa: N806
+ data, ['Events', 'VelocityType_' + patchname]
+ )
+ if Upatch == [None]:
+ Utype = -1 # noqa: N806
+ else:
+ Utype = ', '.join( # noqa: N806
+ hydroutil.extract_element_from_json(
+ data, ['Events', 'VelocityType_' + patchname]
+ )
+ )
+
+ if (int(Utype) > 100) and (int(Utype) < 200): # noqa: PLR2004
+ Prtype2 = '102' # noqa: N806
+ elif (int(Utype) > 200) and (int(Utype) < 300): # noqa: PLR2004
+ Prtype2 = '202' # noqa: N806
+ elif int(Utype) > 300: # noqa: PLR2004
+ Prtype2 = '201' # noqa: N806
+ else:
+ Prtype2 = '-1' # noqa: N806
+ else:
+ Prtype2 = Prtype # noqa: N806
+
+ # Test for different pressure types
+ if int(Prtype2) == 101: # noqa: PLR2004
+ # fixedValue
+ # Get the pressure values
+ pres = hydroutil.extract_element_from_json(
+ data, ['Events', 'Pressure_' + patchname]
+ )
+ if pres == [None]:
+ pr = 0.0
+ else:
+ presvals = ', '.join(
+ hydroutil.extract_element_from_json(
+ data, ['Events', 'Pressure_' + patchname]
+ )
+ )
+ pr = float(presvals)
+ # Get the text
+ Prtext = '\t{\n\t\t' # noqa: N806
+ Prtext = Prtext + 'type\tfixedValue;\n\t\t' # noqa: N806, PLR6104
+ Prtext = Prtext + 'value\t' + str(pr) + ';\n' # noqa: N806
+ Prtext = Prtext + '\t}\n' # noqa: N806, PLR6104
+ elif int(Prtype2) == 102: # noqa: PLR2004
+ # fixedFluxPressure
+ Prtext = '\t{\n\t\t' # noqa: N806
+ Prtext = Prtext + 'type\tfixedFluxPressure;\n\t\t' # noqa: N806, PLR6104
+ Prtext = Prtext + 'value\tuniform 0;\n\t}\n' # noqa: N806, PLR6104
+ elif int(Prtype2) == 201: # noqa: PLR2004
+ # Outlet zero gradient
+ Prtext = '\t{\n\t\t' # noqa: N806
+ Prtext = Prtext + 'type\tzeroGradient;\n\t}\n' # noqa: N806, PLR6104
+ elif int(Prtype2) == 202: # noqa: PLR2004
+ Prtext = '\t{\n\t\t' # noqa: N806
+ Prtext = Prtext + 'type\tfixedValue;\n\t\t' # noqa: N806, PLR6104
+ Prtext = Prtext + 'value\t0;\n' # noqa: N806, PLR6104
+ Prtext = Prtext + '\t}\n' # noqa: N806, PLR6104
+ else:
+ # Default: Empty
+ Prtext = '\t{\n\t\t' # noqa: N806
+ Prtext = Prtext + 'type\tempty;\n\t}\n' # noqa: N806, PLR6104
+
+ # Return the header for U file
+ return Prtext
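
Prpatchtext above maps integer pressure and velocity type codes to OpenFOAM boundary entries for p_rgh, with PLR2004 suppressing the magic-value warnings and N806 covering the capitalized local names. Its branch logic can be summarized in a small hypothetical helper (pressure_bc_keyword is not part of the module, only an illustration of the decision table):

    # Sketch only: velocity types 101-199 imply code 102 (fixedFluxPressure),
    # 201-299 imply code 202 (fixedValue 0), anything above 300 implies
    # code 201 (zeroGradient); explicit pressure codes map directly and
    # every other code ends up as an 'empty' patch.
    def pressure_bc_keyword(prtype2: int) -> str:
        return {
            101: 'fixedValue',         # user-supplied pressure
            102: 'fixedFluxPressure',  # value uniform 0
            201: 'zeroGradient',
            202: 'fixedValue',         # value fixed at 0
        }.get(prtype2, 'empty')
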
diff --git a/modules/createEVENT/GeoClawOpenFOAM/of7Process.py b/modules/createEVENT/GeoClawOpenFOAM/of7Process.py
index 54ae95176..72682f23d 100644
--- a/modules/createEVENT/GeoClawOpenFOAM/of7Process.py
+++ b/modules/createEVENT/GeoClawOpenFOAM/of7Process.py
@@ -1,28 +1,27 @@
-####################################################################
+# # noqa: INP001
# LICENSING INFORMATION
####################################################################
-"""
- LICENSE INFORMATION:
-
- Copyright (c) 2020-2030, The Regents of the University of California (Regents).
+"""LICENSE INFORMATION:
+
+Copyright (c) 2020-2030, The Regents of the University of California (Regents).
+
+All rights reserved.
- All rights reserved.
+Redistribution and use in source and binary forms, with or without modification, are permitted provided that the following conditions are met:
- Redistribution and use in source and binary forms, with or without modification, are permitted provided that the following conditions are met:
+1. Redistributions of source code must retain the above copyright notice, this
+ list of conditions and the following disclaimer.
+2. Redistributions in binary form must reproduce the above copyright notice,
+ this list of conditions and the following disclaimer in the documentation
+ and/or other materials provided with the distribution.
- 1. Redistributions of source code must retain the above copyright notice, this
- list of conditions and the following disclaimer.
- 2. Redistributions in binary form must reproduce the above copyright notice,
- this list of conditions and the following disclaimer in the documentation
- and/or other materials provided with the distribution.
+THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
- THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+The views and conclusions contained in the software and documentation are those of the authors and should not be interpreted as representing official policies, either expressed or implied, of the FreeBSD Project.
- The views and conclusions contained in the software and documentation are those of the authors and should not be interpreted as representing official policies, either expressed or implied, of the FreeBSD Project.
+REGENTS SPECIFICALLY DISCLAIMS ANY WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE. THE SOFTWARE AND ACCOMPANYING DOCUMENTATION, IF ANY, PROVIDED HEREUNDER IS PROVIDED "AS IS". REGENTS HAS NO OBLIGATION TO PROVIDE MAINTENANCE, SUPPORT, UPDATES, ENHANCEMENTS, OR MODIFICATIONS.
- REGENTS SPECIFICALLY DISCLAIMS ANY WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE. THE SOFTWARE AND ACCOMPANYING DOCUMENTATION, IF ANY, PROVIDED HEREUNDER IS PROVIDED "AS IS". REGENTS HAS NO OBLIGATION TO PROVIDE MAINTENANCE, SUPPORT, UPDATES, ENHANCEMENTS, OR MODIFICATIONS.
-
-"""
+""" # noqa: D400
####################################################################
# AUTHOR INFORMATION
####################################################################
@@ -33,275 +32,348 @@
####################################################################
# Standard python modules
import os
+
import numpy as np
# Other custom modules
from hydroUtils import hydroUtils
from of7Solve import of7Solve
+
####################################################################
# OpenFOAM7 solver class
####################################################################
-class of7Process():
- """
- This class includes the methods related to
- post-processing for openfoam7.
-
- Methods
- --------
- pprocesstext: Get all the text for the post-processing
- """
-
- #############################################################
- def pprocesstext(self,data,path):
- '''
- Creates the necessary files for post-processing for openfoam7
-
- Arguments
- -----------
- data: all the JSON data
- '''
-
- # Create a utilities object
- hydroutil = hydroUtils()
- solver = of7Solve()
-
- # Point data from file
- pprocessfile = ', '.join(hydroutil.extract_element_from_json(data, ["Events","PProcessFile"]))
- pprocesspath = os.path.join(path,pprocessfile)
- pp_data = np.genfromtxt(pprocesspath, delimiter=',')
- num_points = np.shape(pp_data)[0]
- ptext = '\t\t(\n'
- for ii in range(num_points):
- ptext = ptext + '\t\t\t(' + str(pp_data[ii,0]) + '\t' + str(pp_data[ii,1]) + '\t' + str(pp_data[ii,2]) + ')\n'
- ptext = ptext + '\t\t);\n'
-
- # Fields required
- value = 0
- pprocessV = hydroutil.extract_element_from_json(data, ["Events","PPVelocity"])
- if pprocessV != [None]:
- pprocessV = ', '.join(hydroutil.extract_element_from_json(data, ["Events","PPVelocity"]))
- if pprocessV == 'Yes':
- value += 1
- pprocessP = hydroutil.extract_element_from_json(data, ["Events","PPPressure"])
- if pprocessP != [None]:
- pprocessP = ', '.join(hydroutil.extract_element_from_json(data, ["Events","PPPressure"]))
- if pprocessP == 'Yes':
- value += 2
- if value == 1:
- fieldtext = '(U)'
- elif value == 2:
- fieldtext = '(p_rgh)'
- else:
- fieldtext = '(U p_rgh)'
-
- # Get the header text for the U-file
- sampletext = solver.solverheader("sample")
-
- # Other information
- sampletext = sampletext + '\ntype sets;\n'
- sampletext = sampletext + 'libs\t("libsampling.so");\n\n'
- sampletext = sampletext + 'interpolationScheme\tcellPoint;\n\n'
- sampletext = sampletext + 'setFormat\traw;\n\n'
- sampletext = sampletext + 'sets\n(\n\tdata\n\t{\n'
- sampletext = sampletext + '\t\ttype\tpoints;\n'
- sampletext = sampletext + '\t\tpoints\n'
- sampletext = sampletext + ptext
- sampletext = sampletext + '\t\tordered\tyes;\n'
- sampletext = sampletext + '\t\taxis\tx;\n'
- sampletext = sampletext + '\t}\n'
- sampletext = sampletext + ');\n\n'
- sampletext = sampletext + 'fields\t' + fieldtext + ';\n'
-
- return sampletext
-
- #############################################################
- def pprocesscdict(self,data,path):
- '''
- Creates the necessary files for new controldict for post-processing for openfoam7
-
- Arguments
- -----------
- data: all the JSON data
- '''
-
- # Create a utilities object
- hydroutil = hydroUtils()
- solver = of7Solve()
-
- # Get the header text for the U-file
- cdicttext = solver.solverheader("controlDict")
-
- # Get the simulation type: Solver
- simtype = ', '.join(hydroutil.extract_element_from_json(data, ["Events","SimulationType"]))
- if int(simtype) == 4:
- cdicttext = cdicttext + '\napplication \t olaDyMFlow;\n\n'
- else:
- cdicttext = cdicttext + '\napplication \t olaFlow;\n\n'
-
- # Check restart situation and give start time
- restart = ', '.join(hydroutil.extract_element_from_json(data, ["Events","Restart"]))
- if restart == "Yes":
- cdicttext = cdicttext + 'startFrom \t latestTime;\n\n'
- elif restart == "No":
- # Start time
- startT = ', '.join(hydroutil.extract_element_from_json(data, ["Events","StartTime"]))
- cdicttext = cdicttext + 'startFrom \t startTime;\n\n'
- cdicttext = cdicttext + 'startTime \t' + startT + ';\n\n'
-
- # End time
- endT = ', '.join(hydroutil.extract_element_from_json(data, ["Events","EndTime"]))
- cdicttext = cdicttext + 'stopAt \t endTime;\n\n'
- cdicttext = cdicttext + 'endTime \t' + endT + ';\n\n'
-
- # Time interval
- deltaT = ', '.join(hydroutil.extract_element_from_json(data, ["Events","TimeInterval"]))
- cdicttext = cdicttext + 'deltaT \t' + deltaT + ';\n\n'
-
- # Write control
- cdicttext = cdicttext + 'writeControl \t adjustableRunTime;\n\n'
-
- # Write interval
- writeT = ', '.join(hydroutil.extract_element_from_json(data, ["Events","WriteInterval"]))
- cdicttext = cdicttext + 'writeInterval \t' + writeT + ';\n\n'
-
- # All others
- cdicttext = cdicttext + 'purgeWrite \t 0;\n\n'
- cdicttext = cdicttext + 'writeFormat \t ascii;\n\n'
- cdicttext = cdicttext + 'writePrecision \t 6;\n\n'
- cdicttext = cdicttext + 'writeCompression \t uncompressed;\n\n'
- cdicttext = cdicttext + 'timeFormat \t general;\n\n'
- cdicttext = cdicttext + 'timePrecision \t 6;\n\n'
- cdicttext = cdicttext + 'runTimeModifiable \t yes;\n\n'
- cdicttext = cdicttext + 'adjustTimeStep \t yes;\n\n'
- cdicttext = cdicttext + 'maxCo \t 1.0;\n\n'
- cdicttext = cdicttext + 'maxAlphaCo \t 1.0;\n\n'
- cdicttext = cdicttext + 'maxDeltaT \t 1;\n\n'
-
- # Point data from file
- pprocessfile = ', '.join(hydroutil.extract_element_from_json(data, ["Events","PProcessFile"]))
- pprocesspath = os.path.join(path,pprocessfile)
- pp_data = np.genfromtxt(pprocesspath, delimiter=',')
- num_points = np.shape(pp_data)[0]
- ptext = '\t\t\t\t(\n'
- for ii in range(num_points):
- ptext = ptext + '\t\t\t\t\t(' + str(pp_data[ii,0]) + '\t' + str(pp_data[ii,1]) + '\t' + str(pp_data[ii,2]) + ')\n'
- ptext = ptext + '\t\t\t\t);\n'
-
- # Fields required
- value = 0
- pprocessV = hydroutil.extract_element_from_json(data, ["Events","PPVelocity"])
- if pprocessV != [None]:
- pprocessV = ', '.join(hydroutil.extract_element_from_json(data, ["Events","PPVelocity"]))
- if pprocessV == 'Yes':
- value += 1
- pprocessP = hydroutil.extract_element_from_json(data, ["Events","PPPressure"])
- if pprocessP != [None]:
- pprocessP = ', '.join(hydroutil.extract_element_from_json(data, ["Events","PPPressure"]))
- if pprocessP == 'Yes':
- value += 2
- if value == 1:
- fieldtext = '(U)'
- elif value == 2:
- fieldtext = '(p_rgh)'
- else:
- fieldtext = '(U p_rgh)'
-
- # Get the library data
- cdicttext = cdicttext + 'function\n{\n\tlinesample\n\t{\n'
- cdicttext = cdicttext + '\t\ttype\tsets;\n'
- cdicttext = cdicttext + '\t\tfunctionObjectLibs\t("libsampling.so");\n'
- cdicttext = cdicttext + '\t\twriteControl\ttimeStep;\n'
- cdicttext = cdicttext + '\t\toutputInterval\t1;\n'
- cdicttext = cdicttext + '\t\tinterpolationScheme\tcellPoint;\n'
- cdicttext = cdicttext + '\t\tsetFormat\traw;\n\n'
- cdicttext = cdicttext + '\t\tsets\n\t\t(\n'
- cdicttext = cdicttext + '\t\t\tdata\n\t\t\t{\n'
- cdicttext = cdicttext + '\t\t\t\ttype\tpoints;\n'
- cdicttext = cdicttext + '\t\t\t\tpoints\n'
- cdicttext = cdicttext + ptext
- cdicttext = cdicttext + '\t\t\t\tordered\tyes;\n'
- cdicttext = cdicttext + '\t\t\t\taxis\tx;\n'
- cdicttext = cdicttext + '\t\t\t}\n\t\t);\n'
- cdicttext = cdicttext + '\t\tfields\t' + fieldtext + ';\n'
- cdicttext = cdicttext + '\t}\n}'
-
- return cdicttext
-
- #############################################################
- def scripts(self,data,path):
- '''
- Creates the necessary postprocessing in scripts
-
- Arguments
- -----------
- data: all the JSON data
- '''
-
- # Create a utilities object
- hydroutil = hydroUtils()
-
- pprocess = hydroutil.extract_element_from_json(data, ["Events","Postprocessing"])
- if pprocess == [None]:
- return 0
- else:
- pprocess = ', '.join(hydroutil.extract_element_from_json(data, ["Events","Postprocessing"]))
- if pprocess == 'No':
- caseruntext = 'echo no postprocessing for EVT\n'
- elif pprocess == 'Yes':
- caseruntext = 'echo postprocessing for EVT\n'
- # Reconstruct case
- caseruntext = caseruntext + 'reconstructPar > reconstruct.log \n'
- # Move new controlDict
- cdictpppath = os.path.join('system','controlDict')
- caseruntext = caseruntext + 'cp cdictpp ' + cdictpppath + '\n'
- # Move the wavemakerfile (if exists)
- if os.path.exists(os.path.join('constant','wavemakerMovement.txt')):
- caseruntext = caseruntext + 'mkdir extras\n'
- wavepath = os.path.join('constant','wavemakerMovement.txt')
- wavepathnew = os.path.join('extras','wavemakerMovement.txt')
- caseruntext = caseruntext + 'mv ' + wavepath + ' ' + wavepathnew + '\n'
- # Copy sample file
- caseruntext = caseruntext + 'cp sample ' + os.path.join('system','sample') + '\n'
- # Start the postprocessing
- caseruntext = caseruntext + 'postProcess -func sample \n\n'
-
- # Write to caserun file
- scriptfile = open('caserun.sh',"a")
- scriptfile.write(caseruntext)
- scriptfile.close()
-
- #############################################################
- def pprocesscheck(self,data,path):
- '''
- Checks for material properties for openfoam7
-
- Arguments
- -----------
- data: all the JSON data
- '''
-
- # Create a utilities object
- hydroutil = hydroUtils()
-
- # Find if pprocess is required
- pprocess = ', '.join(hydroutil.extract_element_from_json(data, ["Events","Postprocessing"]))
-
- if pprocess == 'No':
- return 0
- else:
- pprocessV = ', '.join(hydroutil.extract_element_from_json(data, ["Events","PPVelocity"]))
- pprocessP = ', '.join(hydroutil.extract_element_from_json(data, ["Events","PPPressure"]))
- if pprocessV == 'Yes' or pprocessP == 'Yes':
- pprocessfile = hydroutil.extract_element_from_json(data, ["Events","PProcessFile"])
- if pprocessfile == [None]:
- return -1
- else:
- pprocessfile = ', '.join(hydroutil.extract_element_from_json(data, ["Events","PProcessFile"]))
- if not os.path.exists(os.path.join(path,pprocessfile)):
- return -1
- else:
- return 0
-
- # Return 0 if all is right
- return 1
\ No newline at end of file
+class of7Process:
+ """This class includes the methods related to
+ post-processing for openfoam7.
+
+ Methods
+ -------
+ pprocesstext: Get all the text for the post-processing
+
+ """ # noqa: D205, D404
+
+ #############################################################
+ def pprocesstext(self, data, path): # noqa: PLR6301
+ """Creates the necessary files for post-processing for openfoam7
+
+ Arguments:
+ ---------
+ data: all the JSON data
+
+ """ # noqa: D400, D401
+ # Create a utilities object
+ hydroutil = hydroUtils()
+ solver = of7Solve()
+
+ # Point data from file
+ pprocessfile = ', '.join(
+ hydroutil.extract_element_from_json(data, ['Events', 'PProcessFile'])
+ )
+ pprocesspath = os.path.join(path, pprocessfile) # noqa: PTH118
+ pp_data = np.genfromtxt(pprocesspath, delimiter=',')
+ num_points = np.shape(pp_data)[0]
+ ptext = '\t\t(\n'
+ for ii in range(num_points):
+ ptext = (
+ ptext
+ + '\t\t\t('
+ + str(pp_data[ii, 0])
+ + '\t'
+ + str(pp_data[ii, 1])
+ + '\t'
+ + str(pp_data[ii, 2])
+ + ')\n'
+ )
+ ptext = ptext + '\t\t);\n' # noqa: PLR6104
+
+ # Fields required
+ value = 0
+ pprocessV = hydroutil.extract_element_from_json( # noqa: N806
+ data, ['Events', 'PPVelocity']
+ )
+ if pprocessV != [None]:
+ pprocessV = ', '.join( # noqa: N806
+ hydroutil.extract_element_from_json(data, ['Events', 'PPVelocity'])
+ )
+ if pprocessV == 'Yes':
+ value += 1
+ pprocessP = hydroutil.extract_element_from_json( # noqa: N806
+ data, ['Events', 'PPPressure']
+ )
+ if pprocessP != [None]:
+ pprocessP = ', '.join( # noqa: N806
+ hydroutil.extract_element_from_json(data, ['Events', 'PPPressure'])
+ )
+ if pprocessP == 'Yes':
+ value += 2
+ if value == 1:
+ fieldtext = '(U)'
+ elif value == 2: # noqa: PLR2004
+ fieldtext = '(p_rgh)'
+ else:
+ fieldtext = '(U p_rgh)'
+
+ # Get the header text for the U-file
+ sampletext = solver.solverheader('sample')
+
+ # Other information
+ sampletext = sampletext + '\ntype sets;\n' # noqa: PLR6104
+ sampletext = sampletext + 'libs\t("libsampling.so");\n\n' # noqa: PLR6104
+ sampletext = sampletext + 'interpolationScheme\tcellPoint;\n\n' # noqa: PLR6104
+ sampletext = sampletext + 'setFormat\traw;\n\n' # noqa: PLR6104
+ sampletext = sampletext + 'sets\n(\n\tdata\n\t{\n' # noqa: PLR6104
+ sampletext = sampletext + '\t\ttype\tpoints;\n' # noqa: PLR6104
+ sampletext = sampletext + '\t\tpoints\n' # noqa: PLR6104
+ sampletext = sampletext + ptext # noqa: PLR6104
+ sampletext = sampletext + '\t\tordered\tyes;\n' # noqa: PLR6104
+ sampletext = sampletext + '\t\taxis\tx;\n' # noqa: PLR6104
+ sampletext = sampletext + '\t}\n' # noqa: PLR6104
+ sampletext = sampletext + ');\n\n' # noqa: PLR6104
+ sampletext = sampletext + 'fields\t' + fieldtext + ';\n'
+
+ return sampletext # noqa: RET504
+
+ #############################################################
+ def pprocesscdict(self, data, path): # noqa: C901, PLR6301
+ """Creates the necessary files for new controldict for post-processing for openfoam7
+
+ Arguments:
+ ---------
+ data: all the JSON data
+
+ """ # noqa: D400, D401
+ # Create a utilities object
+ hydroutil = hydroUtils()
+ solver = of7Solve()
+
+ # Get the header text for the U-file
+ cdicttext = solver.solverheader('controlDict')
+
+ # Get the simulation type: Solver
+ simtype = ', '.join(
+ hydroutil.extract_element_from_json(data, ['Events', 'SimulationType'])
+ )
+ if int(simtype) == 4: # noqa: PLR2004
+ cdicttext = cdicttext + '\napplication \t olaDyMFlow;\n\n' # noqa: PLR6104
+ else:
+ cdicttext = cdicttext + '\napplication \t olaFlow;\n\n' # noqa: PLR6104
+
+ # Check restart situation and give start time
+ restart = ', '.join(
+ hydroutil.extract_element_from_json(data, ['Events', 'Restart'])
+ )
+ if restart == 'Yes':
+ cdicttext = cdicttext + 'startFrom \t latestTime;\n\n' # noqa: PLR6104
+ elif restart == 'No':
+ # Start time
+ startT = ', '.join( # noqa: N806
+ hydroutil.extract_element_from_json(data, ['Events', 'StartTime'])
+ )
+ cdicttext = cdicttext + 'startFrom \t startTime;\n\n' # noqa: PLR6104
+ cdicttext = cdicttext + 'startTime \t' + startT + ';\n\n'
+
+ # End time
+ endT = ', '.join( # noqa: N806
+ hydroutil.extract_element_from_json(data, ['Events', 'EndTime'])
+ )
+ cdicttext = cdicttext + 'stopAt \t endTime;\n\n' # noqa: PLR6104
+ cdicttext = cdicttext + 'endTime \t' + endT + ';\n\n'
+
+ # Time interval
+ deltaT = ', '.join( # noqa: N806
+ hydroutil.extract_element_from_json(data, ['Events', 'TimeInterval'])
+ )
+ cdicttext = cdicttext + 'deltaT \t' + deltaT + ';\n\n'
+
+ # Write control
+ cdicttext = cdicttext + 'writeControl \t adjustableRunTime;\n\n' # noqa: PLR6104
+
+ # Write interval
+ writeT = ', '.join( # noqa: N806
+ hydroutil.extract_element_from_json(data, ['Events', 'WriteInterval'])
+ )
+ cdicttext = cdicttext + 'writeInterval \t' + writeT + ';\n\n'
+
+ # All others
+ cdicttext = cdicttext + 'purgeWrite \t 0;\n\n' # noqa: PLR6104
+ cdicttext = cdicttext + 'writeFormat \t ascii;\n\n' # noqa: PLR6104
+ cdicttext = cdicttext + 'writePrecision \t 6;\n\n' # noqa: PLR6104
+ cdicttext = cdicttext + 'writeCompression \t uncompressed;\n\n' # noqa: PLR6104
+ cdicttext = cdicttext + 'timeFormat \t general;\n\n' # noqa: PLR6104
+ cdicttext = cdicttext + 'timePrecision \t 6;\n\n' # noqa: PLR6104
+ cdicttext = cdicttext + 'runTimeModifiable \t yes;\n\n' # noqa: PLR6104
+ cdicttext = cdicttext + 'adjustTimeStep \t yes;\n\n' # noqa: PLR6104
+ cdicttext = cdicttext + 'maxCo \t 1.0;\n\n' # noqa: PLR6104
+ cdicttext = cdicttext + 'maxAlphaCo \t 1.0;\n\n' # noqa: PLR6104
+ cdicttext = cdicttext + 'maxDeltaT \t 1;\n\n' # noqa: PLR6104
+
+ # Point data from file
+ pprocessfile = ', '.join(
+ hydroutil.extract_element_from_json(data, ['Events', 'PProcessFile'])
+ )
+ pprocesspath = os.path.join(path, pprocessfile) # noqa: PTH118
+ pp_data = np.genfromtxt(pprocesspath, delimiter=',')
+ num_points = np.shape(pp_data)[0]
+ ptext = '\t\t\t\t(\n'
+ for ii in range(num_points):
+ ptext = (
+ ptext
+ + '\t\t\t\t\t('
+ + str(pp_data[ii, 0])
+ + '\t'
+ + str(pp_data[ii, 1])
+ + '\t'
+ + str(pp_data[ii, 2])
+ + ')\n'
+ )
+ ptext = ptext + '\t\t\t\t);\n' # noqa: PLR6104
+
+ # Fields required
+ value = 0
+ pprocessV = hydroutil.extract_element_from_json( # noqa: N806
+ data, ['Events', 'PPVelocity']
+ )
+ if pprocessV != [None]:
+ pprocessV = ', '.join( # noqa: N806
+ hydroutil.extract_element_from_json(data, ['Events', 'PPVelocity'])
+ )
+ if pprocessV == 'Yes':
+ value += 1
+ pprocessP = hydroutil.extract_element_from_json( # noqa: N806
+ data, ['Events', 'PPPressure']
+ )
+ if pprocessP != [None]:
+ pprocessP = ', '.join( # noqa: N806
+ hydroutil.extract_element_from_json(data, ['Events', 'PPPressure'])
+ )
+ if pprocessP == 'Yes':
+ value += 2
+ if value == 1:
+ fieldtext = '(U)'
+ elif value == 2: # noqa: PLR2004
+ fieldtext = '(p_rgh)'
+ else:
+ fieldtext = '(U p_rgh)'
+
+ # Get the library data
+ cdicttext = cdicttext + 'function\n{\n\tlinesample\n\t{\n' # noqa: PLR6104
+ cdicttext = cdicttext + '\t\ttype\tsets;\n' # noqa: PLR6104
+ cdicttext = cdicttext + '\t\tfunctionObjectLibs\t("libsampling.so");\n' # noqa: PLR6104
+ cdicttext = cdicttext + '\t\twriteControl\ttimeStep;\n' # noqa: PLR6104
+ cdicttext = cdicttext + '\t\toutputInterval\t1;\n' # noqa: PLR6104
+ cdicttext = cdicttext + '\t\tinterpolationScheme\tcellPoint;\n' # noqa: PLR6104
+ cdicttext = cdicttext + '\t\tsetFormat\traw;\n\n' # noqa: PLR6104
+ cdicttext = cdicttext + '\t\tsets\n\t\t(\n' # noqa: PLR6104
+ cdicttext = cdicttext + '\t\t\tdata\n\t\t\t{\n' # noqa: PLR6104
+ cdicttext = cdicttext + '\t\t\t\ttype\tpoints;\n' # noqa: PLR6104
+ cdicttext = cdicttext + '\t\t\t\tpoints\n' # noqa: PLR6104
+ cdicttext = cdicttext + ptext # noqa: PLR6104
+ cdicttext = cdicttext + '\t\t\t\tordered\tyes;\n' # noqa: PLR6104
+ cdicttext = cdicttext + '\t\t\t\taxis\tx;\n' # noqa: PLR6104
+ cdicttext = cdicttext + '\t\t\t}\n\t\t);\n' # noqa: PLR6104
+ cdicttext = cdicttext + '\t\tfields\t' + fieldtext + ';\n'
+ cdicttext = cdicttext + '\t}\n}' # noqa: PLR6104
+
+ return cdicttext # noqa: RET504
+
+ #############################################################
+ def scripts(self, data, path): # noqa: ARG002, PLR6301
+ """Creates the necessary postprocessing in scripts
+
+ Arguments:
+ ---------
+ data: all the JSON data
+
+ """ # noqa: D400, D401
+ # Create a utilities object
+ hydroutil = hydroUtils()
+
+ pprocess = hydroutil.extract_element_from_json(
+ data, ['Events', 'Postprocessing']
+ )
+ if pprocess == [None]:
+ return 0
+ else: # noqa: RET505
+ pprocess = ', '.join(
+ hydroutil.extract_element_from_json(
+ data, ['Events', 'Postprocessing']
+ )
+ )
+ if pprocess == 'No':
+ caseruntext = 'echo no postprocessing for EVT\n'
+ elif pprocess == 'Yes':
+ caseruntext = 'echo postprocessing for EVT\n'
+ # Reconstruct case
+ caseruntext = caseruntext + 'reconstructPar > reconstruct.log \n' # noqa: PLR6104
+ # Move new controlDict
+ cdictpppath = os.path.join('system', 'controlDict') # noqa: PTH118
+ caseruntext = caseruntext + 'cp cdictpp ' + cdictpppath + '\n'
+ # Move the wavemakerfile (if exists)
+ if os.path.exists(os.path.join('constant', 'wavemakerMovement.txt')): # noqa: PTH110, PTH118
+ caseruntext = caseruntext + 'mkdir extras\n' # noqa: PLR6104
+ wavepath = os.path.join('constant', 'wavemakerMovement.txt') # noqa: PTH118
+ wavepathnew = os.path.join('extras', 'wavemakerMovement.txt') # noqa: PTH118
+ caseruntext = (
+ caseruntext + 'mv ' + wavepath + ' ' + wavepathnew + '\n'
+ )
+ # Copy sample file
+ caseruntext = (
+ caseruntext
+ + 'cp sample '
+ + os.path.join('system', 'sample') # noqa: PTH118
+ + '\n'
+ )
+ # Start the postprocessing
+ caseruntext = caseruntext + 'postProcess -func sample \n\n' # noqa: PLR6104
+
+ # Write to caserun file
+ scriptfile = open('caserun.sh', 'a') # noqa: PLW1514, PTH123, SIM115
+ scriptfile.write(caseruntext)
+ scriptfile.close() # noqa: RET503
+
+ #############################################################
+ def pprocesscheck(self, data, path): # noqa: PLR6301
+        """Checks the post-processing inputs for openfoam7
+
+ Arguments:
+ ---------
+ data: all the JSON data
+
+ """ # noqa: D400, D401
+ # Create a utilities object
+ hydroutil = hydroUtils()
+
+ # Find if pprocess is required
+ pprocess = ', '.join(
+ hydroutil.extract_element_from_json(data, ['Events', 'Postprocessing'])
+ )
+
+ if pprocess == 'No':
+ return 0
+ else: # noqa: RET505
+ pprocessV = ', '.join( # noqa: N806
+ hydroutil.extract_element_from_json(data, ['Events', 'PPVelocity'])
+ )
+ pprocessP = ', '.join( # noqa: N806
+ hydroutil.extract_element_from_json(data, ['Events', 'PPPressure'])
+ )
+ if pprocessV == 'Yes' or pprocessP == 'Yes':
+ pprocessfile = hydroutil.extract_element_from_json(
+ data, ['Events', 'PProcessFile']
+ )
+ if pprocessfile == [None]:
+ return -1
+ else: # noqa: RET505
+ pprocessfile = ', '.join(
+ hydroutil.extract_element_from_json(
+ data, ['Events', 'PProcessFile']
+ )
+ )
+ if not os.path.exists(os.path.join(path, pprocessfile)): # noqa: PTH110, PTH118
+ return -1
+ else:
+ return 0
+
+ # Return 0 if all is right
+ return 1
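
The PTH110 and PTH118 suppressions scattered through of7Process.py mark the os.path.exists and os.path.join calls that ruff's flake8-use-pathlib rules would otherwise rewrite. The diff keeps the os.path API; as a sketch only, the pathlib spelling of the post-processing file check in pprocesscheck would be roughly:

    from pathlib import Path

    # Sketch only, not applied by this change: PTH118 (os.path.join)
    # becomes the / operator and PTH110 (os.path.exists) becomes
    # Path.exists(), reusing the same path and pprocessfile values.
    pprocesspath = Path(path) / pprocessfile
    if not pprocesspath.exists():
        ...  # same outcome as the "return -1" branch above
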
diff --git a/modules/createEVENT/GeoClawOpenFOAM/of7PtDboundary.py b/modules/createEVENT/GeoClawOpenFOAM/of7PtDboundary.py
index 4e4ed09f5..72a5a8615 100644
--- a/modules/createEVENT/GeoClawOpenFOAM/of7PtDboundary.py
+++ b/modules/createEVENT/GeoClawOpenFOAM/of7PtDboundary.py
@@ -1,28 +1,27 @@
-####################################################################
+# # noqa: INP001
# LICENSING INFORMATION
####################################################################
-"""
- LICENSE INFORMATION:
-
- Copyright (c) 2020-2030, The Regents of the University of California (Regents).
+"""LICENSE INFORMATION:
+
+Copyright (c) 2020-2030, The Regents of the University of California (Regents).
- All rights reserved.
+All rights reserved.
- Redistribution and use in source and binary forms, with or without modification, are permitted provided that the following conditions are met:
+Redistribution and use in source and binary forms, with or without modification, are permitted provided that the following conditions are met:
- 1. Redistributions of source code must retain the above copyright notice, this
- list of conditions and the following disclaimer.
- 2. Redistributions in binary form must reproduce the above copyright notice,
- this list of conditions and the following disclaimer in the documentation
- and/or other materials provided with the distribution.
+1. Redistributions of source code must retain the above copyright notice, this
+ list of conditions and the following disclaimer.
+2. Redistributions in binary form must reproduce the above copyright notice,
+ this list of conditions and the following disclaimer in the documentation
+ and/or other materials provided with the distribution.
- THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
- The views and conclusions contained in the software and documentation are those of the authors and should not be interpreted as representing official policies, either expressed or implied, of the FreeBSD Project.
+The views and conclusions contained in the software and documentation are those of the authors and should not be interpreted as representing official policies, either expressed or implied, of the FreeBSD Project.
- REGENTS SPECIFICALLY DISCLAIMS ANY WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE. THE SOFTWARE AND ACCOMPANYING DOCUMENTATION, IF ANY, PROVIDED HEREUNDER IS PROVIDED "AS IS". REGENTS HAS NO OBLIGATION TO PROVIDE MAINTENANCE, SUPPORT, UPDATES, ENHANCEMENTS, OR MODIFICATIONS.
-
-"""
+REGENTS SPECIFICALLY DISCLAIMS ANY WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE. THE SOFTWARE AND ACCOMPANYING DOCUMENTATION, IF ANY, PROVIDED HEREUNDER IS PROVIDED "AS IS". REGENTS HAS NO OBLIGATION TO PROVIDE MAINTENANCE, SUPPORT, UPDATES, ENHANCEMENTS, OR MODIFICATIONS.
+
+""" # noqa: D400
####################################################################
# AUTHOR INFORMATION
####################################################################
@@ -32,235 +31,240 @@
# Import all necessary modules
####################################################################
# Standard python modules
-import os
# Other custom modules
from hydroUtils import hydroUtils
+
####################################################################
# OpenFOAM7 solver class
####################################################################
-class of7PtDboundary():
- """
- This class includes the methods related to
- point displacement boundary conditions for openfoam7.
-
- Methods
- --------
- PDtext: Get all the text for the pointDisplacement-file
- """
-
- # #############################################################
- # def PtDtext(self,data,fipath,patches):
- # '''
- # Creates the necessary text for point displacement for openfoam7
-
- # Arguments
- # -----------
- # data: all the JSON data
- # patches: List of boundary patches
- # fipath: Path where the dakota.json file exists
- # '''
-
- # # Create a utilities object
- # hydroutil = hydroUtils()
-
- # # Number of moving walls
- # numMovWall = 0
-
- # # Loop over all patches
- # for patchname in patches:
- # # Get the type of velocity bc
- # patch = hydroutil.extract_element_from_json(data, ["Events","VelocityType_" + patchname])
- # if patch == [None]:
- # Utype = -1
- # else:
- # Utype = ', '.join(hydroutil.extract_element_from_json(data, ["Events","VelocityType_" + patchname]))
-
- # # If any moving walls (103 - 104)
- # if (int(Utype) == 103) or (int(Utype) == 104):
- # print(patchname)
-
- #############################################################
- def PtDcheck(self,data,patches):
- '''
- Checks if a point displacement for openfoam7 is required
-
- Arguments
- -----------
- data: all the JSON data
- patches: List of boundary patches
- '''
-
- # Create a utilities object
- hydroutil = hydroUtils()
-
- # Number of moving walls
- numMovWall = 0
-
- # Loop over all patches
- for patchname in patches:
- # Get the type of velocity bc
- patch = hydroutil.extract_element_from_json(data, ["Events","VelocityType_" + patchname])
- if patch == [None]:
- Utype = -1
- else:
- Utype = ', '.join(hydroutil.extract_element_from_json(data, ["Events","VelocityType_" + patchname]))
-
- # If any moving walls (103 - 104)
- if (int(Utype) == 103) or (int(Utype) == 104):
- numMovWall += 1
- if numMovWall > 0:
- return 1
-
- if numMovWall == 0:
- return 0
- else:
- return 1
-
- #############################################################
- def PtDtext(self,data,fipath,patches):
- '''
- Create text for point displacement for openfoam7
-
- Arguments
- -----------
- data: all the JSON data
- patches: List of boundary patches
- '''
-
- # Create a utilities object
- hydroutil = hydroUtils()
-
- # Get the header text for the U-file
- ptdtext = self.PtDheader()
-
- # Start the outside
- ptdtext = ptdtext + "boundaryField\n{\n"
-
- # Loop over all patch
- for patchname in patches:
- ptdtext = ptdtext + "\t" + patchname + "\n"
- # Get the type of velocity bc
- patch = hydroutil.extract_element_from_json(data, ["Events","VelocityType_" + patchname])
- if patch == [None]:
- Utype = -1
- else:
- Utype = ', '.join(hydroutil.extract_element_from_json(data, ["Events","VelocityType_" + patchname]))
-
- ptdtext = ptdtext + self.PtDpatchtext(data,Utype,patchname,fipath)
-
- # Check for building and other building
- ptdtext = ptdtext + '\tBuilding\n'
- ptdtext = ptdtext + self.PtDpatchtext(data,'301','Building',fipath)
- ptdtext = ptdtext + '\tOtherBuilding\n'
- ptdtext = ptdtext + self.PtDpatchtext(data,'301','OtherBuilding',fipath)
-
- # Close the outside
- ptdtext = ptdtext + "}\n\n"
-
- # Return the text for pointDisplacement
- return ptdtext
-
- #############################################################
- def PtDheader(self):
- '''
- Creates the text for the header
-
- Variable
- -----------
- header: Header for the pointDisplacement-file
- '''
-
- header = """/*--------------------------*- NHERI SimCenter -*----------------------------*\
+class of7PtDboundary:
+ """This class includes the methods related to
+ point displacement boundary conditions for openfoam7.
+
+ Methods
+ -------
+    PtDtext: Get all the text for the pointDisplacement-file
+
+ """ # noqa: D205, D404
+
+ # #############################################################
+ # def PtDtext(self,data,fipath,patches):
+ # '''
+ # Creates the necessary text for point displacement for openfoam7
+
+ # Arguments
+ # -----------
+ # data: all the JSON data
+ # patches: List of boundary patches
+ # fipath: Path where the dakota.json file exists
+ # '''
+
+ # # Create a utilities object
+ # hydroutil = hydroUtils()
+
+ # # Number of moving walls
+ # numMovWall = 0
+
+ # # Loop over all patches
+ # for patchname in patches:
+ # # Get the type of velocity bc
+ # patch = hydroutil.extract_element_from_json(data, ["Events","VelocityType_" + patchname])
+ # if patch == [None]:
+ # Utype = -1
+ # else:
+ # Utype = ', '.join(hydroutil.extract_element_from_json(data, ["Events","VelocityType_" + patchname]))
+
+ # # If any moving walls (103 - 104)
+ # if (int(Utype) == 103) or (int(Utype) == 104):
+ # print(patchname)
+
+ #############################################################
+ def PtDcheck(self, data, patches): # noqa: N802, PLR6301
+ """Checks if a point displacement for openfoam7 is required
+
+ Arguments:
+ ---------
+ data: all the JSON data
+ patches: List of boundary patches
+
+ """ # noqa: D400, D401
+ # Create a utilities object
+ hydroutil = hydroUtils()
+
+ # Number of moving walls
+ numMovWall = 0 # noqa: N806
+
+ # Loop over all patches
+ for patchname in patches:
+ # Get the type of velocity bc
+ patch = hydroutil.extract_element_from_json(
+ data, ['Events', 'VelocityType_' + patchname]
+ )
+ if patch == [None]:
+ Utype = -1 # noqa: N806
+ else:
+ Utype = ', '.join( # noqa: N806
+ hydroutil.extract_element_from_json(
+ data, ['Events', 'VelocityType_' + patchname]
+ )
+ )
+
+ # If any moving walls (103 - 104)
+ if (int(Utype) == 103) or (int(Utype) == 104): # noqa: PLR2004
+ numMovWall += 1 # noqa: N806
+ if numMovWall > 0:
+ return 1
+
+ if numMovWall == 0:
+ return 0
+ else: # noqa: RET505
+ return 1
+
+ #############################################################
+ def PtDtext(self, data, fipath, patches): # noqa: N802
+        """Creates the text for point displacement for openfoam7
+
+ Arguments:
+ ---------
+ data: all the JSON data
+ patches: List of boundary patches
+
+ """ # noqa: D400
+ # Create a utilities object
+ hydroutil = hydroUtils()
+
+ # Get the header text for the U-file
+ ptdtext = self.PtDheader()
+
+ # Start the outside
+ ptdtext = ptdtext + 'boundaryField\n{\n' # noqa: PLR6104
+
+        # Loop over all patches
+ for patchname in patches:
+ ptdtext = ptdtext + '\t' + patchname + '\n'
+ # Get the type of velocity bc
+ patch = hydroutil.extract_element_from_json(
+ data, ['Events', 'VelocityType_' + patchname]
+ )
+ if patch == [None]:
+ Utype = -1 # noqa: N806
+ else:
+ Utype = ', '.join( # noqa: N806
+ hydroutil.extract_element_from_json(
+ data, ['Events', 'VelocityType_' + patchname]
+ )
+ )
+
+ ptdtext = ptdtext + self.PtDpatchtext(data, Utype, patchname, fipath) # noqa: PLR6104
+
+ # Check for building and other building
+ ptdtext = ptdtext + '\tBuilding\n' # noqa: PLR6104
+ ptdtext = ptdtext + self.PtDpatchtext(data, '301', 'Building', fipath) # noqa: PLR6104
+ ptdtext = ptdtext + '\tOtherBuilding\n' # noqa: PLR6104
+ ptdtext = ptdtext + self.PtDpatchtext(data, '301', 'OtherBuilding', fipath) # noqa: PLR6104
+
+ # Close the outside
+ ptdtext = ptdtext + '}\n\n' # noqa: PLR6104
+
+ # Return the text for pointDisplacement
+ return ptdtext # noqa: RET504
+
+ #############################################################
+ def PtDheader(self): # noqa: N802, PLR6301
+ """Creates the text for the header
+
+ Variable
+ -----------
+ header: Header for the pointDisplacement-file
+ """ # noqa: D400, D401
+ header = """/*--------------------------*- NHERI SimCenter -*----------------------------*\\
| | H |
| | Y | HydroUQ: Water-based Natural Hazards Modeling Application
|======| D | Website: simcenter.designsafe-ci.org/research-tools/hydro-uq
| | R | Version: 1.00
| | O |
-\*---------------------------------------------------------------------------*/
+\\*---------------------------------------------------------------------------*/
FoamFile
{\n\tversion\t2.0;\n\tformat\tascii;\n\tclass\tpointVectorField;\n\tlocation\t"0.01";\n\tobject\tpointDisplacement;\n}
-// * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * //\n\n"""
-
- header = header + "dimensions\t[0 1 0 0 0 0 0];\n\n"
- header = header + "internalField\tuniform (0 0 0);\n\n"
-
- # Return the header for U file
- return header
-
-
- #############################################################
- def PtDpatchtext(self,data,Utype,patchname,fipath):
- '''
- Creates the text the pointDisplacement boundary condition
-
- Arguments
- -----------
- data: All the json data
- Utype: Type of velocity b.c
- patchname: Name of the patch
- fipath: Path to where dakota.json file exists
-
- Variable
- -----------
- PtDtext: Text for the particular patch
- '''
-
- # Get hte normal of the patch
- normal = self.getNormal(patchname)
-
- # For each patch / type provide the text
- # Moving walls
- if (int(Utype) == 103) or (int(Utype) == 104):
- PtDtext = "\t{\n\t\t"
- PtDtext = PtDtext + "type\twavemakerMovement;\n\t\t"
- PtDtext = PtDtext + "wavemakerDictName\twavemakerMovementDict;\n\t\t"
- PtDtext = PtDtext + "value\tuniform (0 0 0);\n"
- PtDtext = PtDtext + "\t}\n"
-
- elif int(Utype) > 300:
- PtDtext = "\t{\n\t\t"
- PtDtext = PtDtext + "type\tfixedNormalSlip;\n\t\t"
- PtDtext = PtDtext + "n\t("+normal+");\n\t\t"
- PtDtext = PtDtext + "value\tuniform (0 0 0);\n"
- PtDtext = PtDtext + "\t}\n"
-
- elif (int(Utype) > 200) and (int(Utype) < 300):
- PtDtext = "\t{\n\t\t"
- PtDtext = PtDtext + "type\tfixedValue;\n\t\t"
- PtDtext = PtDtext + "value\tuniform (0 0 0);\n"
- PtDtext = PtDtext + "\t}\n"
-
- else:
- PtDtext = "\t{\n\t\t"
- PtDtext = PtDtext + "type\tfixedValue;\n\t\t"
- PtDtext = PtDtext + "value\tuniform (0 0 0);\n"
- PtDtext = PtDtext + "\t}\n"
-
- return PtDtext
-
- #############################################################
- def getNormal(self,patchname):
- '''
- Get the normal to the patch
-
- Arguments
- -----------
- patchname: Name of the patch
-
- Variable
- -----------
- normal: Normal to the patch
- '''
-
- if (patchname == "Entry") or (patchname == "Exit"):
- normal = "1 0 0"
- elif (patchname == "Left") or (patchname == "Right"):
- normal = "0 1 0"
- elif (patchname == "Bottom") or (patchname == "Top"):
- normal = "0 0 1"
- elif (patchname == "Building") or (patchname == "OtherBuilding"):
- normal = "1 0 0"
-
- return normal
\ No newline at end of file
+// * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * //\n\n""" # noqa: W291
+
+ header = header + 'dimensions\t[0 1 0 0 0 0 0];\n\n' # noqa: PLR6104
+ header = header + 'internalField\tuniform (0 0 0);\n\n' # noqa: PLR6104
+
+ # Return the header for U file
+ return header # noqa: RET504
+
+ #############################################################
+ def PtDpatchtext(self, data, Utype, patchname, fipath): # noqa: ARG002, N802, N803
+        """Creates the text for the pointDisplacement boundary condition
+
+ Arguments:
+ ---------
+ data: All the json data
+ Utype: Type of velocity b.c
+ patchname: Name of the patch
+ fipath: Path to where dakota.json file exists
+
+ Variable
+ -----------
+ PtDtext: Text for the particular patch
+
+ """ # noqa: D400, D401
+ # Get the normal of the patch
+ normal = self.getNormal(patchname)
+
+ # For each patch / type provide the text
+ # Moving walls
+ if (int(Utype) == 103) or (int(Utype) == 104): # noqa: PLR2004
+ PtDtext = '\t{\n\t\t' # noqa: N806
+ PtDtext = PtDtext + 'type\twavemakerMovement;\n\t\t' # noqa: N806, PLR6104
+ PtDtext = PtDtext + 'wavemakerDictName\twavemakerMovementDict;\n\t\t' # noqa: N806, PLR6104
+ PtDtext = PtDtext + 'value\tuniform (0 0 0);\n' # noqa: N806, PLR6104
+ PtDtext = PtDtext + '\t}\n' # noqa: N806, PLR6104
+
+ elif int(Utype) > 300: # noqa: PLR2004
+ PtDtext = '\t{\n\t\t' # noqa: N806
+ PtDtext = PtDtext + 'type\tfixedNormalSlip;\n\t\t' # noqa: N806, PLR6104
+ PtDtext = PtDtext + 'n\t(' + normal + ');\n\t\t' # noqa: N806
+ PtDtext = PtDtext + 'value\tuniform (0 0 0);\n' # noqa: N806, PLR6104
+ PtDtext = PtDtext + '\t}\n' # noqa: N806, PLR6104
+
+ elif (int(Utype) > 200) and (int(Utype) < 300): # noqa: PLR2004
+ PtDtext = '\t{\n\t\t' # noqa: N806
+ PtDtext = PtDtext + 'type\tfixedValue;\n\t\t' # noqa: N806, PLR6104
+ PtDtext = PtDtext + 'value\tuniform (0 0 0);\n' # noqa: N806, PLR6104
+ PtDtext = PtDtext + '\t}\n' # noqa: N806, PLR6104
+
+ else:
+ PtDtext = '\t{\n\t\t' # noqa: N806
+ PtDtext = PtDtext + 'type\tfixedValue;\n\t\t' # noqa: N806, PLR6104
+ PtDtext = PtDtext + 'value\tuniform (0 0 0);\n' # noqa: N806, PLR6104
+ PtDtext = PtDtext + '\t}\n' # noqa: N806, PLR6104
+
+ return PtDtext
+
+ #############################################################
+ def getNormal(self, patchname): # noqa: N802, PLR6301
+ """Get the normal to the patch
+
+ Arguments:
+ ---------
+ patchname: Name of the patch
+
+ Variable
+ -----------
+ normal: Normal to the patch
+
+ """ # noqa: D400
+ if (patchname == 'Entry') or (patchname == 'Exit'): # noqa: PLR1714
+ normal = '1 0 0'
+ elif (patchname == 'Left') or (patchname == 'Right'): # noqa: PLR1714
+ normal = '0 1 0'
+ elif (patchname == 'Bottom') or (patchname == 'Top'): # noqa: PLR1714
+ normal = '0 0 1'
+ elif (patchname == 'Building') or (patchname == 'OtherBuilding'): # noqa: PLR1714
+ normal = '1 0 0'
+
+ return normal
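+
+    # Illustrative note (not part of the original module): getNormal only
+    # recognises the flume patches and the two building patches, e.g.
+    #
+    #   of7PtDboundary().getNormal('Left')  # returns '0 1 0'
+    #   of7PtDboundary().getNormal('Top')   # returns '0 0 1'
+    #
+    # Any other patch name leaves `normal` unassigned and would raise an
+    # UnboundLocalError, so callers are assumed to pass only these names.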
diff --git a/modules/createEVENT/GeoClawOpenFOAM/of7Solve.py b/modules/createEVENT/GeoClawOpenFOAM/of7Solve.py
index acbd76d95..02ca4fc12 100644
--- a/modules/createEVENT/GeoClawOpenFOAM/of7Solve.py
+++ b/modules/createEVENT/GeoClawOpenFOAM/of7Solve.py
@@ -1,28 +1,27 @@
-####################################################################
+# # noqa: INP001
# LICENSING INFORMATION
####################################################################
-"""
- LICENSE INFORMATION:
-
- Copyright (c) 2020-2030, The Regents of the University of California (Regents).
+"""LICENSE INFORMATION:
+
+Copyright (c) 2020-2030, The Regents of the University of California (Regents).
+
+All rights reserved.
- All rights reserved.
+Redistribution and use in source and binary forms, with or without modification, are permitted provided that the following conditions are met:
- Redistribution and use in source and binary forms, with or without modification, are permitted provided that the following conditions are met:
+1. Redistributions of source code must retain the above copyright notice, this
+ list of conditions and the following disclaimer.
+2. Redistributions in binary form must reproduce the above copyright notice,
+ this list of conditions and the following disclaimer in the documentation
+ and/or other materials provided with the distribution.
- 1. Redistributions of source code must retain the above copyright notice, this
- list of conditions and the following disclaimer.
- 2. Redistributions in binary form must reproduce the above copyright notice,
- this list of conditions and the following disclaimer in the documentation
- and/or other materials provided with the distribution.
+THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
- THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+The views and conclusions contained in the software and documentation are those of the authors and should not be interpreted as representing official policies, either expressed or implied, of the FreeBSD Project.
- The views and conclusions contained in the software and documentation are those of the authors and should not be interpreted as representing official policies, either expressed or implied, of the FreeBSD Project.
+REGENTS SPECIFICALLY DISCLAIMS ANY WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE. THE SOFTWARE AND ACCOMPANYING DOCUMENTATION, IF ANY, PROVIDED HEREUNDER IS PROVIDED "AS IS". REGENTS HAS NO OBLIGATION TO PROVIDE MAINTENANCE, SUPPORT, UPDATES, ENHANCEMENTS, OR MODIFICATIONS.
- REGENTS SPECIFICALLY DISCLAIMS ANY WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE. THE SOFTWARE AND ACCOMPANYING DOCUMENTATION, IF ANY, PROVIDED HEREUNDER IS PROVIDED "AS IS". REGENTS HAS NO OBLIGATION TO PROVIDE MAINTENANCE, SUPPORT, UPDATES, ENHANCEMENTS, OR MODIFICATIONS.
-
-"""
+""" # noqa: D400
####################################################################
# AUTHOR INFORMATION
####################################################################
@@ -36,398 +35,458 @@
# Other custom modules
from hydroUtils import hydroUtils
+
####################################################################
# OpenFOAM7 solver class
####################################################################
-class of7Solve():
- """
- This class includes the methods related to
- solver for openfoam7.
-
- Methods
- --------
- fvSchemetext: Get all the text for the fvSchemes
- """
-
- #############################################################
- def solverheader(self,fileobjec):
- '''
- Creates the text for the header
-
- Variable
- -----------
- header: Header for the solver-files
- '''
-
- header = """/*--------------------------*- NHERI SimCenter -*----------------------------*\
+class of7Solve:
+ """This class includes the methods related to
+ solver for openfoam7.
+
+ Methods
+ -------
+ fvSchemetext: Get all the text for the fvSchemes
+
+ """ # noqa: D205, D404
+
+ #############################################################
+ def solverheader(self, fileobjec): # noqa: PLR6301
+ """Creates the text for the header
+
+ Variable
+ -----------
+ header: Header for the solver-files
+ """ # noqa: D400, D401
+ header = (
+ """/*--------------------------*- NHERI SimCenter -*----------------------------*\\
| | H |
| | Y | HydroUQ: Water-based Natural Hazards Modeling Application
|======| D | Website: simcenter.designsafe-ci.org/research-tools/hydro-uq
| | R | Version: 1.00
| | O |
-\*---------------------------------------------------------------------------*/
+\\*---------------------------------------------------------------------------*/
FoamFile
-{\n\tversion\t2.0;\n\tformat\tascii;\n\tclass\tdictionary;\n\tlocation\t"system";\n\tobject\t"""+fileobjec+""";\n}
+{\n\tversion\t2.0;\n\tformat\tascii;\n\tclass\tdictionary;\n\tlocation\t"system";\n\tobject\t""" # noqa: W291
+ + fileobjec
+ + """;\n}
// * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * //\n\n"""
-
- # Return the header for U file
- return header
-
- #############################################################
- def fvSchemetext(self,data):
- '''
- Creates the necessary text for fvSchemes for openfoam7
-
- Arguments
- -----------
- data: all the JSON data
- '''
-
- # Get the header text for the U-file
- fvSchemetext = self.solverheader("fvSchemes")
-
- # Add all other items
- # ddt
- fvSchemetext = fvSchemetext + 'ddtSchemes\n{\n\tdefault\tEuler;\n}\n\n'
-
- # grad
- fvSchemetext = fvSchemetext + 'gradSchemes\n{\n\tdefault\tGauss linear;\n}\n'
-
- # div
- fvSchemetext = fvSchemetext + '\ndivSchemes\n{\n\t'
- fvSchemetext = fvSchemetext + 'div(rhoPhi,U)\tGauss limitedLinearV 1;\n\t'
- fvSchemetext = fvSchemetext + 'div(U)\tGauss linear;\n\t'
- fvSchemetext = fvSchemetext + 'div((rhoPhi|interpolate(porosity)),U)\tGauss limitedLinearV 1;\n\t'
- fvSchemetext = fvSchemetext + 'div(rhoPhiPor,UPor)\tGauss limitedLinearV 1;\n\t'
- fvSchemetext = fvSchemetext + 'div(rhoPhi,UPor)\tGauss limitedLinearV 1;\n\t'
- fvSchemetext = fvSchemetext + 'div(rhoPhiPor,U)\tGauss limitedLinearV 1;\n\t'
- fvSchemetext = fvSchemetext + 'div(phi,alpha)\tGauss vanLeer;\n\t'
- fvSchemetext = fvSchemetext + 'div(phirb,alpha)\tGauss interfaceCompression;\n\t'
- fvSchemetext = fvSchemetext + 'div((muEff*dev(T(grad(U)))))\tGauss linear;\n\t'
- fvSchemetext = fvSchemetext + 'div(phi,k)\tGauss upwind;\n\t'
- fvSchemetext = fvSchemetext + 'div(phi,epsilon)\tGauss upwind;\n\t'
- fvSchemetext = fvSchemetext + 'div((phi|interpolate(porosity)),k)\tGauss upwind;\n\t'
- fvSchemetext = fvSchemetext + 'div((phi*interpolate(rho)),k)\tGauss upwind;\n\t'
- fvSchemetext = fvSchemetext + 'div((phi|interpolate(porosity)),epsilon)\tGauss upwind;\n\t'
- fvSchemetext = fvSchemetext + 'div(phi,omega)\tGauss upwind;\n\t'
- fvSchemetext = fvSchemetext + 'div((phi|interpolate(porosity)),omega)\tGauss upwind;\n\t'
- fvSchemetext = fvSchemetext + 'div((phi*interpolate(rho)),omega)\tGauss upwind;\n\t'
- fvSchemetext = fvSchemetext + 'div((phi*interpolate(rho)),epsilon)\tGauss upwind;\n'
- fvSchemetext = fvSchemetext + '}\n\n'
-
- # Laplacian
- fvSchemetext = fvSchemetext + 'laplacianSchemes\n{\n\tdefault\tGauss linear corrected;\n}\n\n'
-
- # interpolation
- fvSchemetext = fvSchemetext + 'interpolationSchemes\n{\n\tdefault\tlinear;\n}\n\n'
-
- # snGrad
- fvSchemetext = fvSchemetext + 'snGradSchemes\n{\n\tdefault\tcorrected;\n}\n\n'
-
- # flux
- fvSchemetext = fvSchemetext + 'fluxRequired\n{\n\t'
- fvSchemetext = fvSchemetext + 'default\tno;\n\t'
- fvSchemetext = fvSchemetext + 'p_rgh;\n\t'
- fvSchemetext = fvSchemetext + 'pcorr;\n\t'
- fvSchemetext = fvSchemetext + 'alpha.water;\n'
- fvSchemetext = fvSchemetext + '}\n'
-
- return fvSchemetext
-
- #############################################################
- def fvSolntext(self,data):
- '''
- Creates the necessary text for fvSolution for openfoam7
-
- Arguments
- -----------
- data: all the JSON data
- '''
-
- # Create a utilities object
- hydroutil = hydroUtils()
-
- # Get the simulation type
- simtype = ', '.join(hydroutil.extract_element_from_json(data, ["Events","SimulationType"]))
-
- # Get the turbulence model
- turb = ', '.join(hydroutil.extract_element_from_json(data, ["Events","TurbulenceModel"]))
-
- # Get the header text for the U-file
- fvSolntext = self.solverheader("fvSolution")
-
- # Other data
- fvSolntext = fvSolntext + 'solvers\n{\n\t'
-
- # solvers: alpha
- fvSolntext = fvSolntext + '"alpha.water.*"\n\t{\n\t\t'
- fvSolntext = fvSolntext + 'nAlphaCorr\t1;\n\t\t'
- fvSolntext = fvSolntext + 'nAlphaSubCycles\t2;\n\t\t'
- fvSolntext = fvSolntext + 'alphaOuterCorrectors\tyes;\n\t\t'
- fvSolntext = fvSolntext + 'cAlpha\t1;\n\t\t'
- fvSolntext = fvSolntext + 'MULESCorr\tno;\n\t\t'
- fvSolntext = fvSolntext + 'nLimiterIter\t3;\n\t\t'
- fvSolntext = fvSolntext + 'solver\tsmoothSolver;\n\t\t'
- fvSolntext = fvSolntext + 'smoother\tsymGaussSeidel;\n\t\t'
- fvSolntext = fvSolntext + 'tolerance\t1e-08;\n\t\t'
- fvSolntext = fvSolntext + 'relTol\t0;\n\t}\n\n\t'
-
- # solvers: pcorr
- fvSolntext = fvSolntext + '"pcorr.*"\n\t{\n\t\t'
- fvSolntext = fvSolntext + 'solver\tPCG;\n\t\t'
- fvSolntext = fvSolntext + 'preconditioner\tDIC;\n\t\t'
- fvSolntext = fvSolntext + 'tolerance\t1e-05;\n\t\t'
- fvSolntext = fvSolntext + 'relTol\t0;\n\t}\n\n\t'
-
- # solvers: pcorrFinal
- fvSolntext = fvSolntext + 'pcorrFinal\n\t{\n\t\t'
- fvSolntext = fvSolntext + '$pcorr;\n\t\t'
- fvSolntext = fvSolntext + 'relTol\t0;\n\t}\n\n\t'
-
- # solvers: p_rgh
- fvSolntext = fvSolntext + 'p_rgh\n\t{\n\t\t'
- fvSolntext = fvSolntext + 'solver\tPCG;\n\t\t'
- fvSolntext = fvSolntext + 'preconditioner\tDIC;\n\t\t'
- fvSolntext = fvSolntext + 'tolerance\t1e-07;\n\t\t'
- fvSolntext = fvSolntext + 'relTol\t0.05;\n\t}\n\n\t'
-
- # solvers: p_rghFinal
- fvSolntext = fvSolntext + 'p_rghFinal\n\t{\n\t\t'
- fvSolntext = fvSolntext + '$p_rgh;\n\t\t'
- fvSolntext = fvSolntext + 'relTol\t0;\n\t}\n\n\t'
-
- # solvers: U
- fvSolntext = fvSolntext + 'U\n\t{\n\t\t'
- fvSolntext = fvSolntext + 'solver\tsmoothSolver;\n\t\t'
- fvSolntext = fvSolntext + 'smoother\tsymGaussSeidel;\n\t\t'
- fvSolntext = fvSolntext + 'tolerance\t1e-06;\n\t\t'
- fvSolntext = fvSolntext + 'relTol\t0;\n\t}\n'
-
- # Turbulece variables (if exist)
- if (int(turb) == 1) or (int(turb) == 2):
- fvSolntext = fvSolntext + '\n\t'
- fvSolntext = fvSolntext + '"(k|epsilon|omega|B|nuTilda).*"\n\t{\n\t\t'
- fvSolntext = fvSolntext + 'solver\tsmoothSolver;\n\t\t'
- fvSolntext = fvSolntext + 'smoother\tsymGaussSeidel;\n\t\t'
- fvSolntext = fvSolntext + 'tolerance\t1e-08;\n\t\t'
- fvSolntext = fvSolntext + 'relTol\t0;\n\t}\n'
-
- # solvers: cellDisplacement (for flume)
- if int(simtype) == 4:
- # solvers: cellDisplacement (for flume)
- fvSolntext = fvSolntext + '\n\t'
- fvSolntext = fvSolntext + 'cellDisplacement\n\t{\n\t\t'
- fvSolntext = fvSolntext + 'solver\tGAMG;\n\t\t'
- fvSolntext = fvSolntext + 'tolerance\t1e-05;\n\t\t'
- fvSolntext = fvSolntext + 'relTol\t0;\n\t\t'
- fvSolntext = fvSolntext + 'smoother\tGaussSeidel;\n\t\t'
- fvSolntext = fvSolntext + 'cacheAgglomeration\tfalse;\n\t\t'
- fvSolntext = fvSolntext + 'nCellsInCoarsestLevel\t10;\n\t\t'
- fvSolntext = fvSolntext + 'agglomerator\tfaceAreaPair;\n\t\t'
- fvSolntext = fvSolntext + 'mergeLevels\t1;\n\t}\n\n\t'
-
- # solvers: cellDisplacementFinal(for flume)
- fvSolntext = fvSolntext + 'cellDisplacementFinal\n\t{\n\t\t'
- fvSolntext = fvSolntext + '$cellDisplacement;\n\t\t'
- fvSolntext = fvSolntext + 'relTol\t0;\n\t}\n'
-
- # Close solvers
- fvSolntext = fvSolntext + '}\n\n'
-
- # PIMPLE
- fvSolntext = fvSolntext + 'PIMPLE\n{\n\t'
- fvSolntext = fvSolntext + 'momentumPredictor\tno;\n\t'
- fvSolntext = fvSolntext + 'nOuterCorrectors\t1;\n\t'
- fvSolntext = fvSolntext + 'nCorrectors\t3;\n\t'
- fvSolntext = fvSolntext + 'nNonOrthogonalCorrectors\t0;\n}\n\n'
-
- # Relaxation factors
- fvSolntext = fvSolntext + 'relaxationFactors\n{\n\t'
- fvSolntext = fvSolntext + 'fields\n\t{\n\t}\n\t'
- fvSolntext = fvSolntext + 'equations\n\t{\n\t\t".*"\t1;\n\t}\n}'
-
- return fvSolntext
-
- #############################################################
- def cdicttext(self,data):
- '''
- Creates the necessary text for controlDict for openfoam7
-
- Arguments
- -----------
- data: all the JSON data
- '''
-
- # Create a utilities object
- hydroutil = hydroUtils()
-
- # Get the header text for the U-file
- cdicttext = self.solverheader("controlDict")
-
- # Get the simulation type: Solver
- simtype = ', '.join(hydroutil.extract_element_from_json(data, ["Events","SimulationType"]))
- if int(simtype) == 4:
- cdicttext = cdicttext + '\napplication \t olaDyMFlow;\n\n'
- else:
- cdicttext = cdicttext + '\napplication \t olaFlow;\n\n'
-
- # Check restart situation and give start time
- restart = ', '.join(hydroutil.extract_element_from_json(data, ["Events","Restart"]))
- if restart == "Yes":
- cdicttext = cdicttext + 'startFrom \t latestTime;\n\n'
- elif restart == "No":
- # Start time
- startT = ', '.join(hydroutil.extract_element_from_json(data, ["Events","StartTime"]))
- cdicttext = cdicttext + 'startFrom \t startTime;\n\n'
- cdicttext = cdicttext + 'startTime \t' + startT + ';\n\n'
-
- # End time
- endT = ', '.join(hydroutil.extract_element_from_json(data, ["Events","EndTime"]))
- cdicttext = cdicttext + 'stopAt \t endTime;\n\n'
- cdicttext = cdicttext + 'endTime \t' + endT + ';\n\n'
-
- # Time interval (modified file needs to be made later)
- cdicttext = cdicttext + 'deltaT \t 1;\n\n'
-
- # Write control
- cdicttext = cdicttext + 'writeControl \t adjustableRunTime;\n\n'
-
- # Write interval (modified file needs to be made later)
- cdicttext = cdicttext + 'writeInterval \t 1;\n\n'
-
- # All others
- cdicttext = cdicttext + 'purgeWrite \t 0;\n\n'
- cdicttext = cdicttext + 'writeFormat \t ascii;\n\n'
- cdicttext = cdicttext + 'writePrecision \t 6;\n\n'
- cdicttext = cdicttext + 'writeCompression \t uncompressed;\n\n'
- cdicttext = cdicttext + 'timeFormat \t general;\n\n'
- cdicttext = cdicttext + 'timePrecision \t 6;\n\n'
- cdicttext = cdicttext + 'runTimeModifiable \t yes;\n\n'
- cdicttext = cdicttext + 'adjustTimeStep \t yes;\n\n'
- cdicttext = cdicttext + 'maxCo \t 1.0;\n\n'
- cdicttext = cdicttext + 'maxAlphaCo \t 1.0;\n\n'
- cdicttext = cdicttext + 'maxDeltaT \t 1;\n\n'
-
- return cdicttext
-
- #############################################################
- def cdictcheck(self,data):
- '''
- Creates the check for controlDict for openfoam7
-
- Arguments
- -----------
- data: all the JSON data
- '''
-
- # Create a utilities object
- hydroutil = hydroUtils()
-
- # Start time
- startT = hydroutil.extract_element_from_json(data, ["Events","StartTime"])
- if startT == [None]:
- return -1
-
- # End time
- endT = hydroutil.extract_element_from_json(data, ["Events","EndTime"])
- if endT == [None]:
- return -1
-
- # deltaT
- deltaT = hydroutil.extract_element_from_json(data, ["Events","TimeInterval"])
- if deltaT == [None]:
- return -1
-
- # WriteT
- writeT = hydroutil.extract_element_from_json(data, ["Events","WriteInterval"])
- if writeT == [None]:
- return -1
-
- # Return 0 if all available
- return 0
-
- #############################################################
- def cdictFtext(self,data):
- '''
- Creates the necessary text for controlDict for openfoam7
- This is used for force computation with Dakota
-
- Arguments
- -----------
- data: all the JSON data
- '''
-
- # Create a utilities object
- hydroutil = hydroUtils()
-
- # Get the header text for the U-file
- cdicttext = self.solverheader("controlDict")
-
- # Get the simulation type: Solver
- simtype = ', '.join(hydroutil.extract_element_from_json(data, ["Events","SimulationType"]))
- if int(simtype) == 4:
- cdicttext = cdicttext + '\napplication \t olaDyMFlow;\n\n'
- else:
- cdicttext = cdicttext + '\napplication \t olaFlow;\n\n'
-
- # Check restart situation and give start time
- restart = ', '.join(hydroutil.extract_element_from_json(data, ["Events","Restart"]))
- if restart == "Yes":
- cdicttext = cdicttext + 'startFrom \t latestTime;\n\n'
- elif restart == "No":
- # Start time
- startT = ', '.join(hydroutil.extract_element_from_json(data, ["Events","StartTime"]))
- cdicttext = cdicttext + 'startFrom \t startTime;\n\n'
- cdicttext = cdicttext + 'startTime \t' + startT + ';\n\n'
-
- # End time
- endT = ', '.join(hydroutil.extract_element_from_json(data, ["Events","EndTime"]))
- cdicttext = cdicttext + 'stopAt \t endTime;\n\n'
- cdicttext = cdicttext + 'endTime \t' + endT + ';\n\n'
-
- # Time interval
- deltaT = ', '.join(hydroutil.extract_element_from_json(data, ["Events","TimeInterval"]))
- cdicttext = cdicttext + 'deltaT \t' + deltaT + ';\n\n'
-
- # Write control
- cdicttext = cdicttext + 'writeControl \t adjustableRunTime;\n\n'
-
- # Write interval
- writeT = ', '.join(hydroutil.extract_element_from_json(data, ["Events","WriteInterval"]))
- cdicttext = cdicttext + 'writeInterval \t' + writeT + ';\n\n'
-
- # All others
- cdicttext = cdicttext + 'purgeWrite \t 0;\n\n'
- cdicttext = cdicttext + 'writeFormat \t ascii;\n\n'
- cdicttext = cdicttext + 'writePrecision \t 6;\n\n'
- cdicttext = cdicttext + 'writeCompression \t uncompressed;\n\n'
- cdicttext = cdicttext + 'timeFormat \t general;\n\n'
- cdicttext = cdicttext + 'timePrecision \t 6;\n\n'
- cdicttext = cdicttext + 'runTimeModifiable \t yes;\n\n'
- cdicttext = cdicttext + 'adjustTimeStep \t yes;\n\n'
- cdicttext = cdicttext + 'maxCo \t 1.0;\n\n'
- cdicttext = cdicttext + 'maxAlphaCo \t 1.0;\n\n'
- cdicttext = cdicttext + 'maxDeltaT \t 1;\n\n'
-
- # Function for building
- cdicttext = cdicttext + 'functions\n{\n\t'
- cdicttext = cdicttext + 'buildingsForces\n\t{\n\t\t'
- cdicttext = cdicttext + 'type\tforces;\n\t\t'
- cdicttext = cdicttext + 'functionObjectLibs\t("libforces.so");\n\t\t'
- cdicttext = cdicttext + 'writeControl\ttimeStep;\n\t\t'
- cdicttext = cdicttext + 'writeInterval\t1;\n\t\t'
- cdicttext = cdicttext + 'patches\t("Building");\n\t\t' # This needs to be changed to Building
- cdicttext = cdicttext + 'rho\trhoInf;\n\t\t'
- cdicttext = cdicttext + 'log\ttrue;\n\t\t'
- cdicttext = cdicttext + 'rhoInf\t1;\n\t\t'
- cdicttext = cdicttext + 'CofR\t(0 0 0);\n\t\t'
-
- # Get the number of stories
- stories = hydroutil.extract_element_from_json(data, ["GeneralInformation","stories"])
-
- cdicttext = cdicttext + 'binData\n\t\t{\n\t\t\t'
- cdicttext = cdicttext + 'nBin\t'+str(stories[0])+';\n\t\t\t'
- cdicttext = cdicttext + 'direction\t(1 0 0);\n\t\t\t'
- cdicttext = cdicttext + 'cumulative\tno;\n\t\t}\n\t}\n}'
-
- return cdicttext
\ No newline at end of file
+ )
+
+ # Return the header for U file
+ return header # noqa: RET504
+
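+    # Illustrative usage sketch (not part of the original module): the string
+    # passed to solverheader becomes the FoamFile `object` entry, so building
+    # the fvSolution header might look like:
+    #
+    #   hdr = of7Solve().solverheader('fvSolution')
+    #   # hdr starts with the SimCenter banner and contains
+    #   # location "system"; object fvSolution;
+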
+ #############################################################
+ def fvSchemetext(self, data): # noqa: ARG002, N802
+ """Creates the necessary text for fvSchemes for openfoam7
+
+ Arguments:
+ ---------
+ data: all the JSON data
+
+ """ # noqa: D400, D401
+ # Get the header text for the U-file
+ fvSchemetext = self.solverheader('fvSchemes') # noqa: N806
+
+ # Add all other items
+ # ddt
+ fvSchemetext = fvSchemetext + 'ddtSchemes\n{\n\tdefault\tEuler;\n}\n\n' # noqa: N806, PLR6104
+
+ # grad
+ fvSchemetext = fvSchemetext + 'gradSchemes\n{\n\tdefault\tGauss linear;\n}\n' # noqa: N806, PLR6104
+
+ # div
+ fvSchemetext = fvSchemetext + '\ndivSchemes\n{\n\t' # noqa: N806, PLR6104
+ fvSchemetext = fvSchemetext + 'div(rhoPhi,U)\tGauss limitedLinearV 1;\n\t' # noqa: N806, PLR6104
+ fvSchemetext = fvSchemetext + 'div(U)\tGauss linear;\n\t' # noqa: N806, PLR6104
+ fvSchemetext = ( # noqa: N806, PLR6104
+ fvSchemetext
+ + 'div((rhoPhi|interpolate(porosity)),U)\tGauss limitedLinearV 1;\n\t'
+ )
+ fvSchemetext = ( # noqa: N806, PLR6104
+ fvSchemetext + 'div(rhoPhiPor,UPor)\tGauss limitedLinearV 1;\n\t'
+ )
+ fvSchemetext = fvSchemetext + 'div(rhoPhi,UPor)\tGauss limitedLinearV 1;\n\t' # noqa: N806, PLR6104
+ fvSchemetext = fvSchemetext + 'div(rhoPhiPor,U)\tGauss limitedLinearV 1;\n\t' # noqa: N806, PLR6104
+ fvSchemetext = fvSchemetext + 'div(phi,alpha)\tGauss vanLeer;\n\t' # noqa: N806, PLR6104
+ fvSchemetext = ( # noqa: N806, PLR6104
+ fvSchemetext + 'div(phirb,alpha)\tGauss interfaceCompression;\n\t'
+ )
+ fvSchemetext = ( # noqa: N806, PLR6104
+ fvSchemetext + 'div((muEff*dev(T(grad(U)))))\tGauss linear;\n\t'
+ )
+ fvSchemetext = fvSchemetext + 'div(phi,k)\tGauss upwind;\n\t' # noqa: N806, PLR6104
+ fvSchemetext = fvSchemetext + 'div(phi,epsilon)\tGauss upwind;\n\t' # noqa: N806, PLR6104
+ fvSchemetext = ( # noqa: N806, PLR6104
+ fvSchemetext + 'div((phi|interpolate(porosity)),k)\tGauss upwind;\n\t'
+ )
+ fvSchemetext = ( # noqa: N806, PLR6104
+ fvSchemetext + 'div((phi*interpolate(rho)),k)\tGauss upwind;\n\t'
+ )
+ fvSchemetext = ( # noqa: N806, PLR6104
+ fvSchemetext
+ + 'div((phi|interpolate(porosity)),epsilon)\tGauss upwind;\n\t'
+ )
+ fvSchemetext = fvSchemetext + 'div(phi,omega)\tGauss upwind;\n\t' # noqa: N806, PLR6104
+ fvSchemetext = ( # noqa: N806, PLR6104
+ fvSchemetext
+ + 'div((phi|interpolate(porosity)),omega)\tGauss upwind;\n\t'
+ )
+ fvSchemetext = ( # noqa: N806, PLR6104
+ fvSchemetext + 'div((phi*interpolate(rho)),omega)\tGauss upwind;\n\t'
+ )
+ fvSchemetext = ( # noqa: N806, PLR6104
+ fvSchemetext + 'div((phi*interpolate(rho)),epsilon)\tGauss upwind;\n'
+ )
+ fvSchemetext = fvSchemetext + '}\n\n' # noqa: N806, PLR6104
+
+ # Laplacian
+ fvSchemetext = ( # noqa: N806, PLR6104
+ fvSchemetext
+ + 'laplacianSchemes\n{\n\tdefault\tGauss linear corrected;\n}\n\n'
+ )
+
+ # interpolation
+ fvSchemetext = ( # noqa: N806, PLR6104
+ fvSchemetext + 'interpolationSchemes\n{\n\tdefault\tlinear;\n}\n\n'
+ )
+
+ # snGrad
+ fvSchemetext = ( # noqa: N806, PLR6104
+ fvSchemetext + 'snGradSchemes\n{\n\tdefault\tcorrected;\n}\n\n'
+ )
+
+ # flux
+ fvSchemetext = fvSchemetext + 'fluxRequired\n{\n\t' # noqa: N806, PLR6104
+ fvSchemetext = fvSchemetext + 'default\tno;\n\t' # noqa: N806, PLR6104
+ fvSchemetext = fvSchemetext + 'p_rgh;\n\t' # noqa: N806, PLR6104
+ fvSchemetext = fvSchemetext + 'pcorr;\n\t' # noqa: N806, PLR6104
+ fvSchemetext = fvSchemetext + 'alpha.water;\n' # noqa: N806, PLR6104
+ fvSchemetext = fvSchemetext + '}\n' # noqa: N806, PLR6104
+
+ return fvSchemetext # noqa: RET504
+
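+    # Illustrative note (not part of the original module): the concatenation
+    # above renders to a standard OpenFOAM fvSchemes dictionary whose first
+    # entries expand roughly to:
+    #
+    #   ddtSchemes
+    #   {
+    #       default Euler;
+    #   }
+    #   gradSchemes
+    #   {
+    #       default Gauss linear;
+    #   }
+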
+ #############################################################
+ def fvSolntext(self, data): # noqa: N802
+ """Creates the necessary text for fvSolution for openfoam7
+
+ Arguments:
+ ---------
+ data: all the JSON data
+
+ """ # noqa: D400, D401
+ # Create a utilities object
+ hydroutil = hydroUtils()
+
+ # Get the simulation type
+ simtype = ', '.join(
+ hydroutil.extract_element_from_json(data, ['Events', 'SimulationType'])
+ )
+
+ # Get the turbulence model
+ turb = ', '.join(
+ hydroutil.extract_element_from_json(data, ['Events', 'TurbulenceModel'])
+ )
+
+ # Get the header text for the U-file
+ fvSolntext = self.solverheader('fvSolution') # noqa: N806
+
+ # Other data
+ fvSolntext = fvSolntext + 'solvers\n{\n\t' # noqa: N806, PLR6104
+
+ # solvers: alpha
+ fvSolntext = fvSolntext + '"alpha.water.*"\n\t{\n\t\t' # noqa: N806, PLR6104
+ fvSolntext = fvSolntext + 'nAlphaCorr\t1;\n\t\t' # noqa: N806, PLR6104
+ fvSolntext = fvSolntext + 'nAlphaSubCycles\t2;\n\t\t' # noqa: N806, PLR6104
+ fvSolntext = fvSolntext + 'alphaOuterCorrectors\tyes;\n\t\t' # noqa: N806, PLR6104
+ fvSolntext = fvSolntext + 'cAlpha\t1;\n\t\t' # noqa: N806, PLR6104
+ fvSolntext = fvSolntext + 'MULESCorr\tno;\n\t\t' # noqa: N806, PLR6104
+ fvSolntext = fvSolntext + 'nLimiterIter\t3;\n\t\t' # noqa: N806, PLR6104
+ fvSolntext = fvSolntext + 'solver\tsmoothSolver;\n\t\t' # noqa: N806, PLR6104
+ fvSolntext = fvSolntext + 'smoother\tsymGaussSeidel;\n\t\t' # noqa: N806, PLR6104
+ fvSolntext = fvSolntext + 'tolerance\t1e-08;\n\t\t' # noqa: N806, PLR6104
+ fvSolntext = fvSolntext + 'relTol\t0;\n\t}\n\n\t' # noqa: N806, PLR6104
+
+ # solvers: pcorr
+ fvSolntext = fvSolntext + '"pcorr.*"\n\t{\n\t\t' # noqa: N806, PLR6104
+ fvSolntext = fvSolntext + 'solver\tPCG;\n\t\t' # noqa: N806, PLR6104
+ fvSolntext = fvSolntext + 'preconditioner\tDIC;\n\t\t' # noqa: N806, PLR6104
+ fvSolntext = fvSolntext + 'tolerance\t1e-05;\n\t\t' # noqa: N806, PLR6104
+ fvSolntext = fvSolntext + 'relTol\t0;\n\t}\n\n\t' # noqa: N806, PLR6104
+
+ # solvers: pcorrFinal
+ fvSolntext = fvSolntext + 'pcorrFinal\n\t{\n\t\t' # noqa: N806, PLR6104
+ fvSolntext = fvSolntext + '$pcorr;\n\t\t' # noqa: N806, PLR6104
+ fvSolntext = fvSolntext + 'relTol\t0;\n\t}\n\n\t' # noqa: N806, PLR6104
+
+ # solvers: p_rgh
+ fvSolntext = fvSolntext + 'p_rgh\n\t{\n\t\t' # noqa: N806, PLR6104
+ fvSolntext = fvSolntext + 'solver\tPCG;\n\t\t' # noqa: N806, PLR6104
+ fvSolntext = fvSolntext + 'preconditioner\tDIC;\n\t\t' # noqa: N806, PLR6104
+ fvSolntext = fvSolntext + 'tolerance\t1e-07;\n\t\t' # noqa: N806, PLR6104
+ fvSolntext = fvSolntext + 'relTol\t0.05;\n\t}\n\n\t' # noqa: N806, PLR6104
+
+ # solvers: p_rghFinal
+ fvSolntext = fvSolntext + 'p_rghFinal\n\t{\n\t\t' # noqa: N806, PLR6104
+ fvSolntext = fvSolntext + '$p_rgh;\n\t\t' # noqa: N806, PLR6104
+ fvSolntext = fvSolntext + 'relTol\t0;\n\t}\n\n\t' # noqa: N806, PLR6104
+
+ # solvers: U
+ fvSolntext = fvSolntext + 'U\n\t{\n\t\t' # noqa: N806, PLR6104
+ fvSolntext = fvSolntext + 'solver\tsmoothSolver;\n\t\t' # noqa: N806, PLR6104
+ fvSolntext = fvSolntext + 'smoother\tsymGaussSeidel;\n\t\t' # noqa: N806, PLR6104
+ fvSolntext = fvSolntext + 'tolerance\t1e-06;\n\t\t' # noqa: N806, PLR6104
+ fvSolntext = fvSolntext + 'relTol\t0;\n\t}\n' # noqa: N806, PLR6104
+
+        # Turbulence variables (if they exist)
+ if (int(turb) == 1) or (int(turb) == 2): # noqa: PLR2004
+ fvSolntext = fvSolntext + '\n\t' # noqa: N806, PLR6104
+ fvSolntext = fvSolntext + '"(k|epsilon|omega|B|nuTilda).*"\n\t{\n\t\t' # noqa: N806, PLR6104
+ fvSolntext = fvSolntext + 'solver\tsmoothSolver;\n\t\t' # noqa: N806, PLR6104
+ fvSolntext = fvSolntext + 'smoother\tsymGaussSeidel;\n\t\t' # noqa: N806, PLR6104
+ fvSolntext = fvSolntext + 'tolerance\t1e-08;\n\t\t' # noqa: N806, PLR6104
+ fvSolntext = fvSolntext + 'relTol\t0;\n\t}\n' # noqa: N806, PLR6104
+
+ # solvers: cellDisplacement (for flume)
+ if int(simtype) == 4: # noqa: PLR2004
+ # solvers: cellDisplacement (for flume)
+ fvSolntext = fvSolntext + '\n\t' # noqa: N806, PLR6104
+ fvSolntext = fvSolntext + 'cellDisplacement\n\t{\n\t\t' # noqa: N806, PLR6104
+ fvSolntext = fvSolntext + 'solver\tGAMG;\n\t\t' # noqa: N806, PLR6104
+ fvSolntext = fvSolntext + 'tolerance\t1e-05;\n\t\t' # noqa: N806, PLR6104
+ fvSolntext = fvSolntext + 'relTol\t0;\n\t\t' # noqa: N806, PLR6104
+ fvSolntext = fvSolntext + 'smoother\tGaussSeidel;\n\t\t' # noqa: N806, PLR6104
+ fvSolntext = fvSolntext + 'cacheAgglomeration\tfalse;\n\t\t' # noqa: N806, PLR6104
+ fvSolntext = fvSolntext + 'nCellsInCoarsestLevel\t10;\n\t\t' # noqa: N806, PLR6104
+ fvSolntext = fvSolntext + 'agglomerator\tfaceAreaPair;\n\t\t' # noqa: N806, PLR6104
+ fvSolntext = fvSolntext + 'mergeLevels\t1;\n\t}\n\n\t' # noqa: N806, PLR6104
+
+ # solvers: cellDisplacementFinal(for flume)
+ fvSolntext = fvSolntext + 'cellDisplacementFinal\n\t{\n\t\t' # noqa: N806, PLR6104
+ fvSolntext = fvSolntext + '$cellDisplacement;\n\t\t' # noqa: N806, PLR6104
+ fvSolntext = fvSolntext + 'relTol\t0;\n\t}\n' # noqa: N806, PLR6104
+
+ # Close solvers
+ fvSolntext = fvSolntext + '}\n\n' # noqa: N806, PLR6104
+
+ # PIMPLE
+ fvSolntext = fvSolntext + 'PIMPLE\n{\n\t' # noqa: N806, PLR6104
+ fvSolntext = fvSolntext + 'momentumPredictor\tno;\n\t' # noqa: N806, PLR6104
+ fvSolntext = fvSolntext + 'nOuterCorrectors\t1;\n\t' # noqa: N806, PLR6104
+ fvSolntext = fvSolntext + 'nCorrectors\t3;\n\t' # noqa: N806, PLR6104
+ fvSolntext = fvSolntext + 'nNonOrthogonalCorrectors\t0;\n}\n\n' # noqa: N806, PLR6104
+
+ # Relaxation factors
+ fvSolntext = fvSolntext + 'relaxationFactors\n{\n\t' # noqa: N806, PLR6104
+ fvSolntext = fvSolntext + 'fields\n\t{\n\t}\n\t' # noqa: N806, PLR6104
+ fvSolntext = fvSolntext + 'equations\n\t{\n\t\t".*"\t1;\n\t}\n}' # noqa: N806, PLR6104
+
+ return fvSolntext # noqa: RET504
+
+ #############################################################
+ def cdicttext(self, data):
+ """Creates the necessary text for controlDict for openfoam7
+
+ Arguments:
+ ---------
+ data: all the JSON data
+
+ """ # noqa: D400, D401
+ # Create a utilities object
+ hydroutil = hydroUtils()
+
+ # Get the header text for the U-file
+ cdicttext = self.solverheader('controlDict')
+
+ # Get the simulation type: Solver
+ simtype = ', '.join(
+ hydroutil.extract_element_from_json(data, ['Events', 'SimulationType'])
+ )
+ if int(simtype) == 4: # noqa: PLR2004
+ cdicttext = cdicttext + '\napplication \t olaDyMFlow;\n\n' # noqa: PLR6104
+ else:
+ cdicttext = cdicttext + '\napplication \t olaFlow;\n\n' # noqa: PLR6104
+
+ # Check restart situation and give start time
+ restart = ', '.join(
+ hydroutil.extract_element_from_json(data, ['Events', 'Restart'])
+ )
+ if restart == 'Yes':
+ cdicttext = cdicttext + 'startFrom \t latestTime;\n\n' # noqa: PLR6104
+ elif restart == 'No':
+ # Start time
+ startT = ', '.join( # noqa: N806
+ hydroutil.extract_element_from_json(data, ['Events', 'StartTime'])
+ )
+ cdicttext = cdicttext + 'startFrom \t startTime;\n\n' # noqa: PLR6104
+ cdicttext = cdicttext + 'startTime \t' + startT + ';\n\n'
+
+ # End time
+ endT = ', '.join( # noqa: N806
+ hydroutil.extract_element_from_json(data, ['Events', 'EndTime'])
+ )
+ cdicttext = cdicttext + 'stopAt \t endTime;\n\n' # noqa: PLR6104
+ cdicttext = cdicttext + 'endTime \t' + endT + ';\n\n'
+
+ # Time interval (modified file needs to be made later)
+ cdicttext = cdicttext + 'deltaT \t 1;\n\n' # noqa: PLR6104
+
+ # Write control
+ cdicttext = cdicttext + 'writeControl \t adjustableRunTime;\n\n' # noqa: PLR6104
+
+ # Write interval (modified file needs to be made later)
+ cdicttext = cdicttext + 'writeInterval \t 1;\n\n' # noqa: PLR6104
+
+ # All others
+ cdicttext = cdicttext + 'purgeWrite \t 0;\n\n' # noqa: PLR6104
+ cdicttext = cdicttext + 'writeFormat \t ascii;\n\n' # noqa: PLR6104
+ cdicttext = cdicttext + 'writePrecision \t 6;\n\n' # noqa: PLR6104
+ cdicttext = cdicttext + 'writeCompression \t uncompressed;\n\n' # noqa: PLR6104
+ cdicttext = cdicttext + 'timeFormat \t general;\n\n' # noqa: PLR6104
+ cdicttext = cdicttext + 'timePrecision \t 6;\n\n' # noqa: PLR6104
+ cdicttext = cdicttext + 'runTimeModifiable \t yes;\n\n' # noqa: PLR6104
+ cdicttext = cdicttext + 'adjustTimeStep \t yes;\n\n' # noqa: PLR6104
+ cdicttext = cdicttext + 'maxCo \t 1.0;\n\n' # noqa: PLR6104
+ cdicttext = cdicttext + 'maxAlphaCo \t 1.0;\n\n' # noqa: PLR6104
+ cdicttext = cdicttext + 'maxDeltaT \t 1;\n\n' # noqa: PLR6104
+
+ return cdicttext # noqa: RET504
+
+ #############################################################
+ def cdictcheck(self, data): # noqa: PLR6301
+ """Creates the check for controlDict for openfoam7
+
+ Arguments:
+ ---------
+ data: all the JSON data
+
+ """ # noqa: D400, D401
+ # Create a utilities object
+ hydroutil = hydroUtils()
+
+ # Start time
+ startT = hydroutil.extract_element_from_json(data, ['Events', 'StartTime']) # noqa: N806
+ if startT == [None]:
+ return -1
+
+ # End time
+ endT = hydroutil.extract_element_from_json(data, ['Events', 'EndTime']) # noqa: N806
+ if endT == [None]:
+ return -1
+
+ # deltaT
+ deltaT = hydroutil.extract_element_from_json( # noqa: N806
+ data, ['Events', 'TimeInterval']
+ )
+ if deltaT == [None]:
+ return -1
+
+ # WriteT
+ writeT = hydroutil.extract_element_from_json( # noqa: N806
+ data, ['Events', 'WriteInterval']
+ )
+ if writeT == [None]:
+ return -1
+
+ # Return 0 if all available
+ return 0
+
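+    # Illustrative usage sketch (not part of the original module): cdictcheck
+    # returns -1 if any of StartTime, EndTime, TimeInterval or WriteInterval
+    # is missing from the Events data and 0 when all four are present, e.g.
+    #
+    #   if of7Solve().cdictcheck(data) < 0:
+    #       raise ValueError('controlDict inputs are incomplete')  # hypothetical handling
+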
+ #############################################################
+ def cdictFtext(self, data): # noqa: N802
+ """Creates the necessary text for controlDict for openfoam7
+ This is used for force computation with Dakota
+
+ Arguments:
+ ---------
+ data: all the JSON data
+
+ """ # noqa: D205, D400, D401
+ # Create a utilities object
+ hydroutil = hydroUtils()
+
+ # Get the header text for the U-file
+ cdicttext = self.solverheader('controlDict')
+
+ # Get the simulation type: Solver
+ simtype = ', '.join(
+ hydroutil.extract_element_from_json(data, ['Events', 'SimulationType'])
+ )
+ if int(simtype) == 4: # noqa: PLR2004
+ cdicttext = cdicttext + '\napplication \t olaDyMFlow;\n\n' # noqa: PLR6104
+ else:
+ cdicttext = cdicttext + '\napplication \t olaFlow;\n\n' # noqa: PLR6104
+
+ # Check restart situation and give start time
+ restart = ', '.join(
+ hydroutil.extract_element_from_json(data, ['Events', 'Restart'])
+ )
+ if restart == 'Yes':
+ cdicttext = cdicttext + 'startFrom \t latestTime;\n\n' # noqa: PLR6104
+ elif restart == 'No':
+ # Start time
+ startT = ', '.join( # noqa: N806
+ hydroutil.extract_element_from_json(data, ['Events', 'StartTime'])
+ )
+ cdicttext = cdicttext + 'startFrom \t startTime;\n\n' # noqa: PLR6104
+ cdicttext = cdicttext + 'startTime \t' + startT + ';\n\n'
+
+ # End time
+ endT = ', '.join( # noqa: N806
+ hydroutil.extract_element_from_json(data, ['Events', 'EndTime'])
+ )
+ cdicttext = cdicttext + 'stopAt \t endTime;\n\n' # noqa: PLR6104
+ cdicttext = cdicttext + 'endTime \t' + endT + ';\n\n'
+
+ # Time interval
+ deltaT = ', '.join( # noqa: N806
+ hydroutil.extract_element_from_json(data, ['Events', 'TimeInterval'])
+ )
+ cdicttext = cdicttext + 'deltaT \t' + deltaT + ';\n\n'
+
+ # Write control
+ cdicttext = cdicttext + 'writeControl \t adjustableRunTime;\n\n' # noqa: PLR6104
+
+ # Write interval
+ writeT = ', '.join( # noqa: N806
+ hydroutil.extract_element_from_json(data, ['Events', 'WriteInterval'])
+ )
+ cdicttext = cdicttext + 'writeInterval \t' + writeT + ';\n\n'
+
+ # All others
+ cdicttext = cdicttext + 'purgeWrite \t 0;\n\n' # noqa: PLR6104
+ cdicttext = cdicttext + 'writeFormat \t ascii;\n\n' # noqa: PLR6104
+ cdicttext = cdicttext + 'writePrecision \t 6;\n\n' # noqa: PLR6104
+ cdicttext = cdicttext + 'writeCompression \t uncompressed;\n\n' # noqa: PLR6104
+ cdicttext = cdicttext + 'timeFormat \t general;\n\n' # noqa: PLR6104
+ cdicttext = cdicttext + 'timePrecision \t 6;\n\n' # noqa: PLR6104
+ cdicttext = cdicttext + 'runTimeModifiable \t yes;\n\n' # noqa: PLR6104
+ cdicttext = cdicttext + 'adjustTimeStep \t yes;\n\n' # noqa: PLR6104
+ cdicttext = cdicttext + 'maxCo \t 1.0;\n\n' # noqa: PLR6104
+ cdicttext = cdicttext + 'maxAlphaCo \t 1.0;\n\n' # noqa: PLR6104
+ cdicttext = cdicttext + 'maxDeltaT \t 1;\n\n' # noqa: PLR6104
+
+ # Function for building
+ cdicttext = cdicttext + 'functions\n{\n\t' # noqa: PLR6104
+ cdicttext = cdicttext + 'buildingsForces\n\t{\n\t\t' # noqa: PLR6104
+ cdicttext = cdicttext + 'type\tforces;\n\t\t' # noqa: PLR6104
+ cdicttext = cdicttext + 'functionObjectLibs\t("libforces.so");\n\t\t' # noqa: PLR6104
+ cdicttext = cdicttext + 'writeControl\ttimeStep;\n\t\t' # noqa: PLR6104
+ cdicttext = cdicttext + 'writeInterval\t1;\n\t\t' # noqa: PLR6104
+ cdicttext = ( # noqa: PLR6104
+ cdicttext + 'patches\t("Building");\n\t\t'
+ ) # This needs to be changed to Building
+ cdicttext = cdicttext + 'rho\trhoInf;\n\t\t' # noqa: PLR6104
+ cdicttext = cdicttext + 'log\ttrue;\n\t\t' # noqa: PLR6104
+ cdicttext = cdicttext + 'rhoInf\t1;\n\t\t' # noqa: PLR6104
+ cdicttext = cdicttext + 'CofR\t(0 0 0);\n\t\t' # noqa: PLR6104
+
+ # Get the number of stories
+ stories = hydroutil.extract_element_from_json(
+ data, ['GeneralInformation', 'stories']
+ )
+
+ cdicttext = cdicttext + 'binData\n\t\t{\n\t\t\t' # noqa: PLR6104
+ cdicttext = cdicttext + 'nBin\t' + str(stories[0]) + ';\n\t\t\t'
+ cdicttext = cdicttext + 'direction\t(1 0 0);\n\t\t\t' # noqa: PLR6104
+ cdicttext = cdicttext + 'cumulative\tno;\n\t\t}\n\t}\n}' # noqa: PLR6104
+
+ return cdicttext # noqa: RET504
diff --git a/modules/createEVENT/GeoClawOpenFOAM/of7Turbulence.py b/modules/createEVENT/GeoClawOpenFOAM/of7Turbulence.py
index c7bd3f272..d3e42227f 100644
--- a/modules/createEVENT/GeoClawOpenFOAM/of7Turbulence.py
+++ b/modules/createEVENT/GeoClawOpenFOAM/of7Turbulence.py
@@ -1,28 +1,27 @@
-####################################################################
+# # noqa: INP001
# LICENSING INFORMATION
####################################################################
-"""
- LICENSE INFORMATION:
-
- Copyright (c) 2020-2030, The Regents of the University of California (Regents).
+"""LICENSE INFORMATION:
+
+Copyright (c) 2020-2030, The Regents of the University of California (Regents).
+
+All rights reserved.
- All rights reserved.
+Redistribution and use in source and binary forms, with or without modification, are permitted provided that the following conditions are met:
- Redistribution and use in source and binary forms, with or without modification, are permitted provided that the following conditions are met:
+1. Redistributions of source code must retain the above copyright notice, this
+ list of conditions and the following disclaimer.
+2. Redistributions in binary form must reproduce the above copyright notice,
+ this list of conditions and the following disclaimer in the documentation
+ and/or other materials provided with the distribution.
- 1. Redistributions of source code must retain the above copyright notice, this
- list of conditions and the following disclaimer.
- 2. Redistributions in binary form must reproduce the above copyright notice,
- this list of conditions and the following disclaimer in the documentation
- and/or other materials provided with the distribution.
+THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
- THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+The views and conclusions contained in the software and documentation are those of the authors and should not be interpreted as representing official policies, either expressed or implied, of the FreeBSD Project.
- The views and conclusions contained in the software and documentation are those of the authors and should not be interpreted as representing official policies, either expressed or implied, of the FreeBSD Project.
+REGENTS SPECIFICALLY DISCLAIMS ANY WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE. THE SOFTWARE AND ACCOMPANYING DOCUMENTATION, IF ANY, PROVIDED HEREUNDER IS PROVIDED "AS IS". REGENTS HAS NO OBLIGATION TO PROVIDE MAINTENANCE, SUPPORT, UPDATES, ENHANCEMENTS, OR MODIFICATIONS.
- REGENTS SPECIFICALLY DISCLAIMS ANY WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE. THE SOFTWARE AND ACCOMPANYING DOCUMENTATION, IF ANY, PROVIDED HEREUNDER IS PROVIDED "AS IS". REGENTS HAS NO OBLIGATION TO PROVIDE MAINTENANCE, SUPPORT, UPDATES, ENHANCEMENTS, OR MODIFICATIONS.
-
-"""
+""" # noqa: D400
####################################################################
# AUTHOR INFORMATION
####################################################################
@@ -33,80 +32,78 @@
####################################################################
# Standard python modules
-
# Other custom modules
from hydroUtils import hydroUtils
+
####################################################################
# OpenFOAM7 solver class
####################################################################
-class of7Turbulence():
- """
- This class includes the methods related to
- turbulence for openfoam7.
-
- Methods
- --------
- decomptext: Get all the text for the decomposeParDict
- """
-
- #############################################################
- def turbtext(self,data):
- '''
- Creates the necessary files for turbulenceDict for openfoam7
-
- Arguments
- -----------
- data: all the JSON data
- '''
-
- # Create a utilities object
- hydroutil = hydroUtils()
-
- # Get the header text for the U-file
- turbtext = self.turbheader()
-
- # Get the type of turbulence model
- turbmodel = ', '.join(hydroutil.extract_element_from_json(data, ["Events","TurbulenceModel"]))
-
- if int(turbmodel) == 0:
- turbtext = turbtext + '\nsimulationType\tlaminar;\n'
- elif int(turbmodel) == 1:
- turbtext = turbtext + 'simulationType\tRAS;\n\n'
- turbtext = turbtext + 'RAS\n{\n'
- turbtext = turbtext + '\tRASModel\tkEpsilon;\n'
- turbtext = turbtext + '\tturbulence\ton;\n'
- turbtext = turbtext + '\tprintCoeffs\ton;\n}\n'
- elif int(turbmodel) == 2:
- turbtext = turbtext + 'simulationType\tRAS;\n\n'
- turbtext = turbtext + 'RAS\n{\n'
- turbtext = turbtext + '\tRASModel\tkOmegaSST;\n'
- turbtext = turbtext + '\tturbulence\ton;\n'
- turbtext = turbtext + '\tprintCoeffs\ton;\n}\n'
-
- return turbtext
-
- #############################################################
- def turbheader(self):
- '''
- Creates the text for the header
-
- Variable
- -----------
- header: Header for the turbulence properties-file
- '''
-
- header = """/*--------------------------*- NHERI SimCenter -*----------------------------*\
+class of7Turbulence:
+ """This class includes the methods related to
+ turbulence for openfoam7.
+
+ Methods
+ -------
+ decomptext: Get all the text for the decomposeParDict
+
+ """ # noqa: D205, D404
+
+ #############################################################
+ def turbtext(self, data):
+        """Creates the necessary text for turbulenceProperties for openfoam7
+
+ Arguments:
+ ---------
+ data: all the JSON data
+
+ """ # noqa: D400, D401
+ # Create a utilities object
+ hydroutil = hydroUtils()
+
+ # Get the header text for the U-file
+ turbtext = self.turbheader()
+
+ # Get the type of turbulence model
+ turbmodel = ', '.join(
+ hydroutil.extract_element_from_json(data, ['Events', 'TurbulenceModel'])
+ )
+
+ if int(turbmodel) == 0:
+ turbtext = turbtext + '\nsimulationType\tlaminar;\n' # noqa: PLR6104
+ elif int(turbmodel) == 1:
+ turbtext = turbtext + 'simulationType\tRAS;\n\n' # noqa: PLR6104
+ turbtext = turbtext + 'RAS\n{\n' # noqa: PLR6104
+ turbtext = turbtext + '\tRASModel\tkEpsilon;\n' # noqa: PLR6104
+ turbtext = turbtext + '\tturbulence\ton;\n' # noqa: PLR6104
+ turbtext = turbtext + '\tprintCoeffs\ton;\n}\n' # noqa: PLR6104
+ elif int(turbmodel) == 2: # noqa: PLR2004
+ turbtext = turbtext + 'simulationType\tRAS;\n\n' # noqa: PLR6104
+ turbtext = turbtext + 'RAS\n{\n' # noqa: PLR6104
+ turbtext = turbtext + '\tRASModel\tkOmegaSST;\n' # noqa: PLR6104
+ turbtext = turbtext + '\tturbulence\ton;\n' # noqa: PLR6104
+ turbtext = turbtext + '\tprintCoeffs\ton;\n}\n' # noqa: PLR6104
+
+ return turbtext
+
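+    # Illustrative note (not part of the original module): TurbulenceModel 0
+    # selects laminar flow, 1 the kEpsilon RAS model and 2 kOmegaSST, so for
+    # model 1 the generated turbulenceProperties dictionary reads roughly:
+    #
+    #   simulationType  RAS;
+    #   RAS
+    #   {
+    #       RASModel    kEpsilon;
+    #       turbulence  on;
+    #       printCoeffs on;
+    #   }
+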
+ #############################################################
+ def turbheader(self): # noqa: PLR6301
+ """Creates the text for the header
+
+ Variable
+ -----------
+ header: Header for the turbulence properties-file
+ """ # noqa: D400, D401
+ header = """/*--------------------------*- NHERI SimCenter -*----------------------------*\\
| | H |
| | Y | HydroUQ: Water-based Natural Hazards Modeling Application
|======| D | Website: simcenter.designsafe-ci.org/research-tools/hydro-uq
| | R | Version: 1.00
| | O |
-\*---------------------------------------------------------------------------*/
+\\*---------------------------------------------------------------------------*/
FoamFile
{\n\tversion\t2.0;\n\tformat\tascii;\n\tclass\tdictionary;\n\tlocation\t"constant";\n\tobject\tturbulenceProperties;\n}
-// * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * //\n\n"""
-
- # Return the header for U file
- return header
+// * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * //\n\n""" # noqa: W291
+ # Return the header for U file
+ return header # noqa: RET504
diff --git a/modules/createEVENT/GeoClawOpenFOAM/of7Uboundary.py b/modules/createEVENT/GeoClawOpenFOAM/of7Uboundary.py
index ee8fe7de2..d2f95e84d 100644
--- a/modules/createEVENT/GeoClawOpenFOAM/of7Uboundary.py
+++ b/modules/createEVENT/GeoClawOpenFOAM/of7Uboundary.py
@@ -1,28 +1,27 @@
-####################################################################
+# # noqa: INP001
# LICENSING INFORMATION
####################################################################
-"""
- LICENSE INFORMATION:
-
- Copyright (c) 2020-2030, The Regents of the University of California (Regents).
+"""LICENSE INFORMATION:
+
+Copyright (c) 2020-2030, The Regents of the University of California (Regents).
- All rights reserved.
+All rights reserved.
- Redistribution and use in source and binary forms, with or without modification, are permitted provided that the following conditions are met:
+Redistribution and use in source and binary forms, with or without modification, are permitted provided that the following conditions are met:
- 1. Redistributions of source code must retain the above copyright notice, this
- list of conditions and the following disclaimer.
- 2. Redistributions in binary form must reproduce the above copyright notice,
- this list of conditions and the following disclaimer in the documentation
- and/or other materials provided with the distribution.
+1. Redistributions of source code must retain the above copyright notice, this
+ list of conditions and the following disclaimer.
+2. Redistributions in binary form must reproduce the above copyright notice,
+ this list of conditions and the following disclaimer in the documentation
+ and/or other materials provided with the distribution.
- THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
- The views and conclusions contained in the software and documentation are those of the authors and should not be interpreted as representing official policies, either expressed or implied, of the FreeBSD Project.
+The views and conclusions contained in the software and documentation are those of the authors and should not be interpreted as representing official policies, either expressed or implied, of the FreeBSD Project.
- REGENTS SPECIFICALLY DISCLAIMS ANY WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE. THE SOFTWARE AND ACCOMPANYING DOCUMENTATION, IF ANY, PROVIDED HEREUNDER IS PROVIDED "AS IS". REGENTS HAS NO OBLIGATION TO PROVIDE MAINTENANCE, SUPPORT, UPDATES, ENHANCEMENTS, OR MODIFICATIONS.
-
-"""
+REGENTS SPECIFICALLY DISCLAIMS ANY WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE. THE SOFTWARE AND ACCOMPANYING DOCUMENTATION, IF ANY, PROVIDED HEREUNDER IS PROVIDED "AS IS". REGENTS HAS NO OBLIGATION TO PROVIDE MAINTENANCE, SUPPORT, UPDATES, ENHANCEMENTS, OR MODIFICATIONS.
+
+""" # noqa: D400
####################################################################
# AUTHOR INFORMATION
####################################################################
@@ -37,518 +36,606 @@
# Other custom modules
from hydroUtils import hydroUtils
+
####################################################################
# OpenFOAM7 solver class
####################################################################
-class of7Uboundary():
- """
- This class includes the methods related to
- velocity boundary conditions for openfoam7.
-
- Methods
- --------
- Utext: Get s all the text for the U-file
- """
-
- #############################################################
- def Utext(self,data,fipath,patches):
- '''
- Creates the necessary folders for openfoam7
-
- Arguments
- -----------
- data: all the JSON data
- patches: List of boundary patches
- fipath: Path where the dakota.json file exists
- '''
-
- # Create a utilities object
- hydroutil = hydroUtils()
-
- # Number of moving walls
- numMovWall = 0
-
- # Get the header text for the U-file
- utext = self.Uheader()
-
- # Start the outside
- utext = utext + "boundaryField\n{\n"
-
- # Loop over all patches
- for patchname in patches:
- utext = utext + "\t" + patchname + "\n"
- patch = hydroutil.extract_element_from_json(data, ["Events","VelocityType_" + patchname])
- if patch == [None]:
- Utype = -1
- else:
- Utype = ', '.join(hydroutil.extract_element_from_json(data, ["Events","VelocityType_" + patchname]))
- if int(Utype) == 103 or int(Utype) == 104:
- numMovWall += 1
- utext = utext + self.Upatchtext(data,Utype,patchname,fipath,numMovWall)
-
- # Check for building and other building
- utext = utext + '\tBuilding\n'
- utext = utext + self.Upatchtext(data,'301','Building',fipath,numMovWall)
- utext = utext + '\tOtherBuilding\n'
- utext = utext + self.Upatchtext(data,'301','OtherBuilding',fipath,numMovWall)
-
- # Close the outside
- utext = utext + "}\n\n"
-
- # Return the text for velocity BC
- return utext
-
- #############################################################
- def Uheader(self):
- '''
- Creates the text for the header
-
- Variable
- -----------
- header: Header for the U-file
- '''
-
- header = """/*--------------------------*- NHERI SimCenter -*----------------------------*\
+class of7Uboundary:
+ """This class includes the methods related to
+ velocity boundary conditions for openfoam7.
+
+ Methods
+ -------
+ Utext: Gets all the text for the U-file
+
+ """ # noqa: D205, D404
+
+ #############################################################
+ def Utext(self, data, fipath, patches): # noqa: N802
+ """Creates the necessary folders for openfoam7
+
+ Arguments:
+ ---------
+ data: all the JSON data
+ patches: List of boundary patches
+ fipath: Path where the dakota.json file exists
+
+ """ # noqa: D400, D401
+ # Create a utilities object
+ hydroutil = hydroUtils()
+
+ # Number of moving walls
+ numMovWall = 0 # noqa: N806
+
+ # Get the header text for the U-file
+ utext = self.Uheader()
+
+ # Start the outside
+ utext = utext + 'boundaryField\n{\n' # noqa: PLR6104
+
+ # Loop over all patches
+ for patchname in patches:
+ utext = utext + '\t' + patchname + '\n'
+ patch = hydroutil.extract_element_from_json(
+ data, ['Events', 'VelocityType_' + patchname]
+ )
+ if patch == [None]:
+ Utype = -1 # noqa: N806
+ else:
+ Utype = ', '.join( # noqa: N806
+ hydroutil.extract_element_from_json(
+ data, ['Events', 'VelocityType_' + patchname]
+ )
+ )
+ if int(Utype) == 103 or int(Utype) == 104: # noqa: PLR2004
+ numMovWall += 1 # noqa: N806
+ utext = utext + self.Upatchtext( # noqa: PLR6104
+ data, Utype, patchname, fipath, numMovWall
+ )
+
+ # Check for building and other building
+ utext = utext + '\tBuilding\n' # noqa: PLR6104
+ utext = utext + self.Upatchtext(data, '301', 'Building', fipath, numMovWall) # noqa: PLR6104
+ utext = utext + '\tOtherBuilding\n' # noqa: PLR6104
+ utext = utext + self.Upatchtext( # noqa: PLR6104
+ data, '301', 'OtherBuilding', fipath, numMovWall
+ )
+
+ # Close the outside
+ utext = utext + '}\n\n' # noqa: PLR6104
+
+ # Return the text for velocity BC
+ return utext # noqa: RET504
+
+ #############################################################
+ def Uheader(self): # noqa: N802, PLR6301
+ """Creates the text for the header
+
+ Variable
+ -----------
+ header: Header for the U-file
+ """ # noqa: D400, D401
+ header = """/*--------------------------*- NHERI SimCenter -*----------------------------*\\
| | H |
| | Y | HydroUQ: Water-based Natural Hazards Modeling Application
|======| D | Website: simcenter.designsafe-ci.org/research-tools/hydro-uq
| | R | Version: 1.00
| | O |
-\*---------------------------------------------------------------------------*/
+\\*---------------------------------------------------------------------------*/
FoamFile
{\n\tversion\t2.0;\n\tformat\tascii;\n\tclass\tvolVectorField;\n\tlocation\t"0";\n\tobject\tU;\n}
-// * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * //\n\n"""
-
- header = header + "dimensions\t[0 1 -1 0 0 0 0];\n\n"
- header = header + "internalField\tuniform (0 0 0);\n\n"
-
- # Return the header for U file
- return header
-
- #############################################################
- def Upatchtext(self,data,Utype,patchname,fipath,numMovWall):
- '''
- Creates the text the velocity boundary condition
-
- Arguments
- -----------
- data: All the json data
- Utype: Type of velocity b.c
- patchname: Name of the patch
- fipath: Path to where dakota.json file exists
-
- Variable
- -----------
- Utext: Text for the particular patch
- '''
-
- # Create a utilities object
- hydroutil = hydroUtils()
-
- # Inlet types
- # For each type, get the text
- if int(Utype) == 101:
- # SW solutions (1)
- Utext = "\t{\n\t\t"
- Utext = Utext + "type\ttimeVaryingMappedFixedValue;\n\t\t"
- Utext = Utext + "offset\t(0 0 0);\n\t\t"
- Utext = Utext + "setAverage\toff;\n"
- Utext = Utext + "\t}\n"
-
- elif int(Utype) == 102:
- # Inlet: constant velocity
- # Get the velocity values
- velo = hydroutil.extract_element_from_json(data, ["Events","Velocity_"+patchname])
- if velo == [None]:
- vx = 0.0
- vy = 0.0
- vz = 0.0
- else:
- velvals = ', '.join(hydroutil.extract_element_from_json(data, ["Events","Velocity_"+patchname]))
- velvals = velvals.replace(',',' ')
- vels = [float(vi) for vi in velvals.split()]
- vx = vels[0]
- vy = vels[1]
- vz = vels[2]
-
- # Get the text
- Utext = "\t{\n\t\t"
- Utext = Utext + "type\tfixedValue;\n\t\t"
- Utext = Utext + "value\t(" + str(vx) +"\t"+ str(vy) +"\t"+ str(vz) +");\n\t}\n"
-
- elif int(Utype) == 103:
- # Inlet Moving wall (OSU flume)
- Utext = "\t{\n\t\t"
- Utext = Utext + "type\tmovingWallVelocity;\n\t\t"
- Utext = Utext + "value\tuniform (0 0 0);\n\t}\n"
- # Create the files required
- # Moving wall file
- # Get the displacement and waterheight file name
- dispfilename = hydroutil.extract_element_from_json(data, ["Events","OSUMovingWallDisp_"+patchname])
- if dispfilename != [None]:
- dispfilename = ', '.join(hydroutil.extract_element_from_json(data, ["Events","OSUMovingWallDisp_"+patchname]))
- dispfilepath = os.path.join(fipath,dispfilename)
- if os.path.exists(dispfilepath):
- heightfilename = hydroutil.extract_element_from_json(data, ["Events","OSUMovingWallHeight_"+patchname])
- if heightfilename != [None]:
- heightfilename = ', '.join(hydroutil.extract_element_from_json(data, ["Events","OSUMovingWallHeight_"+patchname]))
- heightfilepath = os.path.join(fipath,heightfilename)
- if not os.path.exists(heightfilepath):
- heightfilepath = "None"
- else:
- heightfilepath = "None"
- # Wave maker text file
- self.OSUwavemakerText(fipath,dispfilepath,heightfilepath,numMovWall)
- # Wavemakermovement dictionary
- self.of7wavemakerdict(fipath)
- # Dynamic mesh dictionary
- self.of7dynamicMeshdict(fipath)
-
- elif int(Utype) == 104:
- # Inlet Moving wall (Gen flume)
- Utext = "\t{\n\t\t"
- Utext = Utext + "type\tmovingWallVelocity;\n\t\t"
- Utext = Utext + "value\tuniform (0 0 0);\n\t}\n"
- # Create the files required
- # Moving wall file
- # Get the displacement and waterheight file name
- # # Get the displacement and waterheight file name
- dispfilename = hydroutil.extract_element_from_json(data, ["Events","MovingWallDisp_"+patchname])
- if dispfilename != [None]:
- dispfilename = ', '.join(hydroutil.extract_element_from_json(data, ["Events","MovingWallDisp_"+patchname]))
- dispfilepath = os.path.join(fipath,dispfilename)
- if os.path.exists(dispfilepath):
- heightfilename = hydroutil.extract_element_from_json(data, ["Events","MovingWallHeight_"+patchname])
- if heightfilename != [None]:
- heightfilename = ', '.join(hydroutil.extract_element_from_json(data, ["Events","MovingWallHeight_"+patchname]))
- heightfilepath = os.path.join(fipath,heightfilename)
- if not os.path.exists(heightfilepath):
- heightfilepath = "None"
- else:
- heightfilepath = "None"
- # Wave maker text file
- self.GenwavemakerText(fipath,dispfilepath,heightfilepath,numMovWall)
- # Wavemakermovement dictionary
- self.of7wavemakerdict(fipath)
- # Dynamic mesh dictionary
- self.of7dynamicMeshdict(fipath)
-
- elif int(Utype) == 201:
- # Outlet zero gradient
- Utext = "\t{\n\t\t"
- Utext = Utext + "type\tzeroGradient;\n\t}\n"
-
- elif int(Utype) == 202:
- # Outlet: inletOutlet
- # Get the velocity values
- velo = hydroutil.extract_element_from_json(data, ["Events","Velocity_"+patchname])
- if velo == [None]:
- vx = 0.0
- vy = 0.0
- vz = 0.0
- else:
- velvals = ', '.join(hydroutil.extract_element_from_json(data, ["Events","Velocity_"+patchname]))
- velvals = velvals.replace(',',' ')
- vels = [float(vi) for vi in velvals.split()]
- vx = vels[0]
- vy = vels[1]
- vz = vels[2]
-
- # Get the text
- Utext = "\t{\n\t\t"
- Utext = Utext + "type\tinletOutlet;\n\t\t"
- Utext = Utext + "inletValue\tuniform (" + str(vx) +"\t"+ str(vy) +"\t"+ str(vz) +");\n\t\t"
- Utext = Utext + "value\tuniform (" + str(vx) +"\t"+ str(vy) +"\t"+ str(vz) +");\n"
- Utext = Utext + "\t}\n"
-
- elif int(Utype) == 301:
- # Wall: noSlip
- Utext = "\t{\n\t\t"
- Utext = Utext + "type\tnoSlip;\n\t}\n"
-
- else:
- # Default: Empty
- Utext = "\t{\n\t\t"
- Utext = Utext + "type\tempty;\n\t}\n"
-
- # Return the header for U file
- return Utext
-
- #############################################################
- def Uchecks(self,data,fipath,patches):
- '''
- Creates the data files required for the OSU moving wall
-
- Arguments
- -----------
- data: All the data from JSON file
- fipath: Path to the dakota.json file location
- patches: List of patches
- '''
-
- # Create a utilities object
- hydroutil = hydroUtils()
-
- # Number of moving walls
- numMovWall = 0
-
- # Loop over all patches
- for patchname in patches:
- # Get the type of velocity bc
- patch = hydroutil.extract_element_from_json(data, ["Events","VelocityType_" + patchname])
- if patch == [None]:
- Utype = -1
- else:
- Utype = ', '.join(hydroutil.extract_element_from_json(data, ["Events","VelocityType_" + patchname]))
-
- # Checks for different U-types
- if int(Utype) == 103:
- # Checking for multiple moving walls
- numMovWall += 1
- if numMovWall > 1:
- return -1
-
- # Check for existing moving wall files
- dispfilename = hydroutil.extract_element_from_json(data, ["Events","OSUMovingWallDisp_" + patchname])
- if dispfilename == [None]:
- return -1
- else:
- dispfilename = ', '.join(hydroutil.extract_element_from_json(data, ["Events","OSUMovingWallDisp_" + patchname]))
- pathF = os.path.join(fipath,dispfilename)
- if not os.path.exists(pathF):
- return -1
-
- elif int(Utype) == 104:
- # Checking for multiple moving walls
- numMovWall += 1
- if numMovWall > 1:
- return -1
-
- # Check for existing moving wall files
- dispfilename = hydroutil.extract_element_from_json(data, ["Events","MovingWallDisp_" + patchname])
- if dispfilename == [None]:
- return -1
- else:
- dispfilename = ', '.join(hydroutil.extract_element_from_json(data, ["Events","MovingWallDisp_" + patchname]))
- pathF = os.path.join(fipath,dispfilename)
- if not os.path.exists(pathF):
- return -1
-
- # If all checks passes
- return 0
-
- #############################################################
- def of7wavemakerdict(self,fipath):
- '''
- Creates the wavemaker dictionary for the moving wall
-
- Arguments
- -----------
- fipath: Path to the dakota.json file location
- '''
-
- # Create a utilities object
- hydroutil = hydroUtils()
-
- # Get the file ID
- filepath = os.path.join(fipath,"constant","wavemakerMovementDict")
- fileID = open(filepath, "w")
- # Header
- header = hydroutil.of7header("dictionary","constant","wavemakerMovementDict")
- fileID.write(header)
- # Other data
- fileID.write('\nreread\tfalse;\n\n')
- fileID.write('#include\t"wavemakerMovement.txt"\n')
- # Close the file
- fileID.close()
-
- #############################################################
- def of7dynamicMeshdict(self,fipath):
- '''
- Creates the dynamic mesh dictionary for the moving wall
-
- Arguments
- -----------
- fipath: Path to the dakota.json file location
- '''
-
- # Create a utilities object
- hydroutil = hydroUtils()
-
- # Get the file ID
- filepath = os.path.join(fipath,"constant","dynamicMeshDict")
- fileID = open(filepath, "w")
- # Header
- header = hydroutil.of7header("dictionary","constant","dynamicMeshDict")
- fileID.write(header)
- # Other data
- fileID.write('\ndynamicFvMesh\tdynamicMotionSolverFvMesh;\n\n')
- fileID.write('motionSolverLibs\t("libfvMotionSolvers.so");\n\n')
- fileID.write('solver\tdisplacementLaplacian;\n\n')
- fileID.write('displacementLaplacianCoeffs\n{\n\tdiffusivity uniform;\n}\n');
- # Close the file
- fileID.close()
-
- #############################################################
- def OSUwavemakerText(self,fipath,dispfilepath,heightfilepath,numMovWall):
- '''
- Creates the wavemaker text file for the OSU moving wall
-
- Arguments
- -----------
- fipath: Path to the dakota.json file location
- '''
-
- # Get the file ID
- filepath = os.path.join(fipath,"constant","wavemakerMovement.txt")
- fileID = open(filepath, "w")
-
- # Start writing the file
- fileID.write('wavemakerType\tPiston;\n')
- fileID.write('tSmooth\t1.5;\n')
- fileID.write('genAbs\t0;\n\n')
-
- # Create the wavemaker movement file
- # Get the frequency of the wavemaker
- frequency = 0
- waterdepth = 0
- filewm = open(dispfilepath,'r')
- Lines = filewm.readlines()
- count = 0
- for line in Lines:
- count += 1
- if count == 37:
- stra=line.replace('% SampleRate: ','')
- stra2=stra.replace(' Hz','')
- frequency = 1/float(stra2)
- break
- count = 0
- for line in Lines:
- count += 1
- if count == 61:
- stra=line.replace('% StillWaterDepth: ','')
- waterdepth = float(stra)
- break
-
- # Count the number of lines
- countlines = 0
- with open(dispfilepath) as fdisp:
- for line2 in fdisp:
- if line2.strip():
- countlines += 1
- countlines = countlines - 72
-
- # Create the timeseries
- time = 0
- fileID.write('timeSeries\t'+str(countlines)+'(\n')
- for ii in range(countlines):
- fileID.write(str(time)+'\n')
- time = time + frequency
- fileID.write(');\n\n')
-
- # Create the paddle position
- fileID.write('paddlePosition 1(\n'+str(countlines)+'(\n')
- count = 0
- for line in Lines:
- count += 1
- if count > 72:
- if line != "\n":
- data = float(line)
- fileID.write(str(data)+'\n')
- fileID.write(')\n);\n\n')
-
- # Write the paddle Eta
- if heightfilepath != "None":
- # Write the paddle Eta
- fileID.write('paddleEta 1(\n'+str(countlines)+'(\n')
- filewmg = open(heightfilepath,'r')
- Lines2 = filewmg.readlines()
- count = 0
- for line in Lines2:
- count += 1
- if count > 72:
- if line != "\n":
- data = float(line)+waterdepth
- fileID.write(str(data)+'\n')
- fileID.write(')\n);')
-
-
- #############################################################
- def GenwavemakerText(self,fipath,dispfilepath,heightfilepath,numMovWall):
- '''
- Creates the wavemaker text file for a general moving wall
-
- Arguments
- -----------
- fipath: Path to the dakota.json file location
- '''
-
- # Get the file ID
- filepath = os.path.join(fipath,"constant","wavemakerMovement.txt")
- fileID = open(filepath, "w")
-
- # Start writing the file
- fileID.write('wavemakerType\tPiston;\n')
- fileID.write('tSmooth\t1.5;\n')
- fileID.write('genAbs\t0;\n\n')
-
- # Create the wavemaker movement file
- # Get the frequency of the wavemaker
- filewm = open(dispfilepath,'r')
- Lines = filewm.readlines()
- count = 0
- for line in Lines:
- count += 1
- if count == 1:
- frequency = float(line)
- break
-
- # Count the number of lines
- countlines = 0
- with open(dispfilepath) as fdisp:
- for line2 in fdisp:
- if line2.strip():
- countlines += 1
- countlines = countlines - 1
-
- # Create the timeseries
- time = 0
- fileID.write('timeSeries\t'+str(countlines)+'(\n')
- for ii in range(countlines):
- fileID.write(str(time)+'\n')
- time = time + frequency
- fileID.write(');\n\n')
-
- # Create the paddle position
- fileID.write('paddlePosition 1(\n'+str(countlines)+'(\n')
- count = 0
- for line in Lines:
- count += 1
- if count > 1:
- if line != "\n":
- data = float(line)
- fileID.write(str(data)+'\n')
- fileID.write(')\n);\n\n')
-
- # Get the water depth and paddle eta
- if heightfilepath != "None":
- # Get the height
- filewmg = open(heightfilepath,'r')
- Lines2 = filewmg.readlines()
- count = 0
- for line in Lines2:
- count += 1
- if count == 1:
- waterdepth = float(line)
- break
-
- # Get the paddle eta
- fileID.write('paddleEta 1(\n'+str(countlines)+'(\n')
- count = 0
- for line in Lines2:
- count += 1
- if count > 1:
- if line != "\n":
- data = float(line)+waterdepth
- fileID.write(str(data)+'\n')
- fileID.write(')\n);')
+// * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * //\n\n""" # noqa: W291
+
+ header = header + 'dimensions\t[0 1 -1 0 0 0 0];\n\n' # noqa: PLR6104
+ header = header + 'internalField\tuniform (0 0 0);\n\n' # noqa: PLR6104
+
+ # Return the header for U file
+ return header # noqa: RET504
+
+ #############################################################
+ def Upatchtext(self, data, Utype, patchname, fipath, numMovWall): # noqa: C901, N802, N803
+ """Creates the text the velocity boundary condition
+
+ Arguments:
+ ---------
+ data: All the json data
+ Utype: Type of velocity boundary condition
+ patchname: Name of the patch
+ fipath: Path to where dakota.json file exists
+
+ Variable
+ -----------
+ Utext: Text for the particular patch
+
+ """ # noqa: D400, D401
+ # Create a utilities object
+ hydroutil = hydroUtils()
+
+ # Inlet types
+ # For each type, get the text
+ if int(Utype) == 101: # noqa: PLR2004
+ # SW solutions (1)
+ Utext = '\t{\n\t\t' # noqa: N806
+ Utext = Utext + 'type\ttimeVaryingMappedFixedValue;\n\t\t' # noqa: N806, PLR6104
+ Utext = Utext + 'offset\t(0 0 0);\n\t\t' # noqa: N806, PLR6104
+ Utext = Utext + 'setAverage\toff;\n' # noqa: N806, PLR6104
+ Utext = Utext + '\t}\n' # noqa: N806, PLR6104
+
+ elif int(Utype) == 102: # noqa: PLR2004
+ # Inlet: constant velocity
+ # Get the velocity values
+ velo = hydroutil.extract_element_from_json(
+ data, ['Events', 'Velocity_' + patchname]
+ )
+ if velo == [None]:
+ vx = 0.0
+ vy = 0.0
+ vz = 0.0
+ else:
+ velvals = ', '.join(
+ hydroutil.extract_element_from_json(
+ data, ['Events', 'Velocity_' + patchname]
+ )
+ )
+ velvals = velvals.replace(',', ' ')
+ vels = [float(vi) for vi in velvals.split()]
+ vx = vels[0]
+ vy = vels[1]
+ vz = vels[2]
+
+ # Get the text
+ Utext = '\t{\n\t\t' # noqa: N806
+ Utext = Utext + 'type\tfixedValue;\n\t\t' # noqa: N806, PLR6104
+ Utext = ( # noqa: N806
+ Utext
+ + 'value\t('
+ + str(vx)
+ + '\t'
+ + str(vy)
+ + '\t'
+ + str(vz)
+ + ');\n\t}\n'
+ )
+
+ elif int(Utype) == 103: # noqa: PLR2004
+ # Inlet Moving wall (OSU flume)
+ Utext = '\t{\n\t\t' # noqa: N806
+ Utext = Utext + 'type\tmovingWallVelocity;\n\t\t' # noqa: N806, PLR6104
+ Utext = Utext + 'value\tuniform (0 0 0);\n\t}\n' # noqa: N806, PLR6104
+ # Create the files required
+ # Moving wall file
+ # Get the displacement and waterheight file name
+ dispfilename = hydroutil.extract_element_from_json(
+ data, ['Events', 'OSUMovingWallDisp_' + patchname]
+ )
+ if dispfilename != [None]:
+ dispfilename = ', '.join(
+ hydroutil.extract_element_from_json(
+ data, ['Events', 'OSUMovingWallDisp_' + patchname]
+ )
+ )
+ dispfilepath = os.path.join(fipath, dispfilename) # noqa: PTH118
+ if os.path.exists(dispfilepath): # noqa: PTH110
+ heightfilename = hydroutil.extract_element_from_json(
+ data, ['Events', 'OSUMovingWallHeight_' + patchname]
+ )
+ if heightfilename != [None]:
+ heightfilename = ', '.join(
+ hydroutil.extract_element_from_json(
+ data, ['Events', 'OSUMovingWallHeight_' + patchname]
+ )
+ )
+ heightfilepath = os.path.join(fipath, heightfilename) # noqa: PTH118
+ if not os.path.exists(heightfilepath): # noqa: PTH110
+ heightfilepath = 'None'
+ else:
+ heightfilepath = 'None'
+ # Wave maker text file
+ self.OSUwavemakerText(
+ fipath, dispfilepath, heightfilepath, numMovWall
+ )
+ # Wavemakermovement dictionary
+ self.of7wavemakerdict(fipath)
+ # Dynamic mesh dictionary
+ self.of7dynamicMeshdict(fipath)
+
+ elif int(Utype) == 104: # noqa: PLR2004
+ # Inlet Moving wall (Gen flume)
+ Utext = '\t{\n\t\t' # noqa: N806
+ Utext = Utext + 'type\tmovingWallVelocity;\n\t\t' # noqa: N806, PLR6104
+ Utext = Utext + 'value\tuniform (0 0 0);\n\t}\n' # noqa: N806, PLR6104
+ # Create the files required
+ # Moving wall file
+ # Get the displacement and waterheight file name
+ # # Get the displacement and waterheight file name
+ dispfilename = hydroutil.extract_element_from_json(
+ data, ['Events', 'MovingWallDisp_' + patchname]
+ )
+ if dispfilename != [None]:
+ dispfilename = ', '.join(
+ hydroutil.extract_element_from_json(
+ data, ['Events', 'MovingWallDisp_' + patchname]
+ )
+ )
+ dispfilepath = os.path.join(fipath, dispfilename) # noqa: PTH118
+ if os.path.exists(dispfilepath): # noqa: PTH110
+ heightfilename = hydroutil.extract_element_from_json(
+ data, ['Events', 'MovingWallHeight_' + patchname]
+ )
+ if heightfilename != [None]:
+ heightfilename = ', '.join(
+ hydroutil.extract_element_from_json(
+ data, ['Events', 'MovingWallHeight_' + patchname]
+ )
+ )
+ heightfilepath = os.path.join(fipath, heightfilename) # noqa: PTH118
+ if not os.path.exists(heightfilepath): # noqa: PTH110
+ heightfilepath = 'None'
+ else:
+ heightfilepath = 'None'
+ # Wave maker text file
+ self.GenwavemakerText(
+ fipath, dispfilepath, heightfilepath, numMovWall
+ )
+ # Wavemakermovement dictionary
+ self.of7wavemakerdict(fipath)
+ # Dynamic mesh dictionary
+ self.of7dynamicMeshdict(fipath)
+
+ elif int(Utype) == 201: # noqa: PLR2004
+ # Outlet zero gradient
+ Utext = '\t{\n\t\t' # noqa: N806
+ Utext = Utext + 'type\tzeroGradient;\n\t}\n' # noqa: N806, PLR6104
+
+ elif int(Utype) == 202: # noqa: PLR2004
+ # Outlet: inletOutlet
+ # Get the velocity values
+ velo = hydroutil.extract_element_from_json(
+ data, ['Events', 'Velocity_' + patchname]
+ )
+ if velo == [None]:
+ vx = 0.0
+ vy = 0.0
+ vz = 0.0
+ else:
+ velvals = ', '.join(
+ hydroutil.extract_element_from_json(
+ data, ['Events', 'Velocity_' + patchname]
+ )
+ )
+ velvals = velvals.replace(',', ' ')
+ vels = [float(vi) for vi in velvals.split()]
+ vx = vels[0]
+ vy = vels[1]
+ vz = vels[2]
+
+ # Get the text
+ Utext = '\t{\n\t\t' # noqa: N806
+ Utext = Utext + 'type\tinletOutlet;\n\t\t' # noqa: N806, PLR6104
+ Utext = ( # noqa: N806
+ Utext
+ + 'inletValue\tuniform ('
+ + str(vx)
+ + '\t'
+ + str(vy)
+ + '\t'
+ + str(vz)
+ + ');\n\t\t'
+ )
+ Utext = ( # noqa: N806
+ Utext
+ + 'value\tuniform ('
+ + str(vx)
+ + '\t'
+ + str(vy)
+ + '\t'
+ + str(vz)
+ + ');\n'
+ )
+ Utext = Utext + '\t}\n' # noqa: N806, PLR6104
+
+ elif int(Utype) == 301: # noqa: PLR2004
+ # Wall: noSlip
+ Utext = '\t{\n\t\t' # noqa: N806
+ Utext = Utext + 'type\tnoSlip;\n\t}\n' # noqa: N806, PLR6104
+
+ else:
+ # Default: Empty
+ Utext = '\t{\n\t\t' # noqa: N806
+ Utext = Utext + 'type\tempty;\n\t}\n' # noqa: N806, PLR6104
+
+ # Return the boundary condition text for this patch
+ return Utext
+
+ #############################################################
+ def Uchecks(self, data, fipath, patches): # noqa: C901, N802, PLR6301
+ """Creates the data files required for the OSU moving wall
+
+ Arguments:
+ ---------
+ data: All the data from JSON file
+ fipath: Path to the dakota.json file location
+ patches: List of patches
+
+ """ # noqa: D400, D401
+ # Create a utilities object
+ hydroutil = hydroUtils()
+
+ # Number of moving walls
+ numMovWall = 0 # noqa: N806
+
+ # Loop over all patches
+ for patchname in patches:
+ # Get the type of velocity bc
+ patch = hydroutil.extract_element_from_json(
+ data, ['Events', 'VelocityType_' + patchname]
+ )
+ if patch == [None]:
+ Utype = -1 # noqa: N806
+ else:
+ Utype = ', '.join( # noqa: N806
+ hydroutil.extract_element_from_json(
+ data, ['Events', 'VelocityType_' + patchname]
+ )
+ )
+
+ # Checks for different U-types
+ if int(Utype) == 103: # noqa: PLR2004
+ # Checking for multiple moving walls
+ numMovWall += 1 # noqa: N806
+ if numMovWall > 1:
+ return -1
+
+ # Check for existing moving wall files
+ dispfilename = hydroutil.extract_element_from_json(
+ data, ['Events', 'OSUMovingWallDisp_' + patchname]
+ )
+ if dispfilename == [None]:
+ return -1
+ else: # noqa: RET505
+ dispfilename = ', '.join(
+ hydroutil.extract_element_from_json(
+ data, ['Events', 'OSUMovingWallDisp_' + patchname]
+ )
+ )
+ pathF = os.path.join(fipath, dispfilename) # noqa: PTH118, N806
+ if not os.path.exists(pathF): # noqa: PTH110
+ return -1
+
+ elif int(Utype) == 104: # noqa: PLR2004
+ # Checking for multiple moving walls
+ numMovWall += 1 # noqa: N806
+ if numMovWall > 1:
+ return -1
+
+ # Check for existing moving wall files
+ dispfilename = hydroutil.extract_element_from_json(
+ data, ['Events', 'MovingWallDisp_' + patchname]
+ )
+ if dispfilename == [None]:
+ return -1
+ else: # noqa: RET505
+ dispfilename = ', '.join(
+ hydroutil.extract_element_from_json(
+ data, ['Events', 'MovingWallDisp_' + patchname]
+ )
+ )
+ pathF = os.path.join(fipath, dispfilename) # noqa: PTH118, N806
+ if not os.path.exists(pathF): # noqa: PTH110
+ return -1
+
+ # If all checks pass
+ return 0
+
+ #############################################################
+ def of7wavemakerdict(self, fipath): # noqa: PLR6301
+ """Creates the wavemaker dictionary for the moving wall
+
+ Arguments:
+ ---------
+ fipath: Path to the dakota.json file location
+
+ """ # noqa: D400, D401
+ # Create a utilities object
+ hydroutil = hydroUtils()
+
+ # Get the file ID
+ filepath = os.path.join(fipath, 'constant', 'wavemakerMovementDict') # noqa: PTH118
+ fileID = open(filepath, 'w') # noqa: N806, PLW1514, PTH123, SIM115
+ # Header
+ header = hydroutil.of7header(
+ 'dictionary', 'constant', 'wavemakerMovementDict'
+ )
+ fileID.write(header)
+ # Other data
+ fileID.write('\nreread\tfalse;\n\n')
+ fileID.write('#include\t"wavemakerMovement.txt"\n')
+ # Close the file
+ fileID.close()
+
+ #############################################################
+ def of7dynamicMeshdict(self, fipath): # noqa: N802, PLR6301
+ """Creates the dynamic mesh dictionary for the moving wall
+
+ Arguments:
+ ---------
+ fipath: Path to the dakota.json file location
+
+ """ # noqa: D400, D401
+ # Create a utilities object
+ hydroutil = hydroUtils()
+
+ # Get the file ID
+ filepath = os.path.join(fipath, 'constant', 'dynamicMeshDict') # noqa: PTH118
+ fileID = open(filepath, 'w') # noqa: N806, PLW1514, PTH123, SIM115
+ # Header
+ header = hydroutil.of7header('dictionary', 'constant', 'dynamicMeshDict')
+ fileID.write(header)
+ # Other data
+ fileID.write('\ndynamicFvMesh\tdynamicMotionSolverFvMesh;\n\n')
+ fileID.write('motionSolverLibs\t("libfvMotionSolvers.so");\n\n')
+ fileID.write('solver\tdisplacementLaplacian;\n\n')
+ fileID.write('displacementLaplacianCoeffs\n{\n\tdiffusivity uniform;\n}\n')
+ # Close the file
+ fileID.close()
+
+ #############################################################
+ def OSUwavemakerText(self, fipath, dispfilepath, heightfilepath, numMovWall): # noqa: ARG002, C901, N802, N803, PLR6301
+ """Creates the wavemaker text file for the OSU moving wall
+
+ Arguments:
+ ---------
+ fipath: Path to the dakota.json file location
+
+ """ # noqa: D400, D401
+ # Get the file ID
+ filepath = os.path.join(fipath, 'constant', 'wavemakerMovement.txt') # noqa: PTH118
+ fileID = open(filepath, 'w') # noqa: N806, PLW1514, PTH123, SIM115
+
+ # Start writing the file
+ fileID.write('wavemakerType\tPiston;\n')
+ fileID.write('tSmooth\t1.5;\n')
+ fileID.write('genAbs\t0;\n\n')
+
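+ # The rest of wavemakerMovement.txt is written below as three OpenFOAM lists
+ # built from the displacement (and optional surface-height) files: timeSeries,
+ # paddlePosition and, when a height file is available, paddleEta (each height
+ # sample offset by the still-water depth read further down).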
+ # Create the wavemaker movement file
+ # Get the frequency of the wavemaker
+ frequency = 0
+ waterdepth = 0
+ filewm = open(dispfilepath) # noqa: PLW1514, PTH123, SIM115
+ Lines = filewm.readlines() # noqa: N806
+ count = 0
+ for line in Lines:
+ count += 1 # noqa: SIM113
+ if count == 37: # noqa: PLR2004
+ stra = line.replace('% SampleRate: ', '')
+ stra2 = stra.replace(' Hz', '')
+ frequency = 1 / float(stra2)
+ break
+ count = 0
+ for line in Lines:
+ count += 1
+ if count == 61: # noqa: PLR2004
+ stra = line.replace('% StillWaterDepth: ', '')
+ waterdepth = float(stra)
+ break
+
+ # Count the number of lines
+ countlines = 0
+ with open(dispfilepath) as fdisp: # noqa: PLW1514, PTH123
+ for line2 in fdisp:
+ if line2.strip():
+ countlines += 1
+ countlines = countlines - 72 # noqa: PLR6104
+
+ # Create the timeseries
+ time = 0
+ fileID.write('timeSeries\t' + str(countlines) + '(\n')
+ for ii in range(countlines): # noqa: B007
+ fileID.write(str(time) + '\n')
+ time = time + frequency # noqa: PLR6104
+ fileID.write(');\n\n')
+
+ # Create the paddle position
+ fileID.write('paddlePosition 1(\n' + str(countlines) + '(\n')
+ count = 0
+ for line in Lines:
+ count += 1
+ if count > 72: # noqa: PLR2004
+ if line != '\n':
+ data = float(line)
+ fileID.write(str(data) + '\n')
+ fileID.write(')\n);\n\n')
+
+ # Write the paddle Eta
+ if heightfilepath != 'None':
+ # Write the paddle Eta
+ fileID.write('paddleEta 1(\n' + str(countlines) + '(\n')
+ filewmg = open(heightfilepath) # noqa: PLW1514, PTH123, SIM115
+ Lines2 = filewmg.readlines() # noqa: N806
+ count = 0
+ for line in Lines2:
+ count += 1
+ if count > 72: # noqa: PLR2004
+ if line != '\n':
+ data = float(line) + waterdepth
+ fileID.write(str(data) + '\n')
+ fileID.write(')\n);')
+
+ #############################################################
+ def GenwavemakerText(self, fipath, dispfilepath, heightfilepath, numMovWall): # noqa: ARG002, C901, N802, N803, PLR6301
+ """Creates the wavemaker text file for a general moving wall
+
+ Arguments:
+ ---------
+ fipath: Path to the dakota.json file location
+
+ """ # noqa: D400, D401
+ # Get the file ID
+ filepath = os.path.join(fipath, 'constant', 'wavemakerMovement.txt') # noqa: PTH118
+ fileID = open(filepath, 'w') # noqa: N806, PLW1514, PTH123, SIM115
+
+ # Start writing the file
+ fileID.write('wavemakerType\tPiston;\n')
+ fileID.write('tSmooth\t1.5;\n')
+ fileID.write('genAbs\t0;\n\n')
+
+ # Create the wavemaker movement file
+ # Get the frequency of the wavemaker
+ filewm = open(dispfilepath) # noqa: PLW1514, PTH123, SIM115
+ Lines = filewm.readlines() # noqa: N806
+ count = 0
+ for line in Lines:
+ count += 1 # noqa: SIM113
+ if count == 1:
+ frequency = float(line)
+ break
+
+ # Count the number of lines
+ countlines = 0
+ with open(dispfilepath) as fdisp: # noqa: PLW1514, PTH123
+ for line2 in fdisp:
+ if line2.strip():
+ countlines += 1
+ countlines = countlines - 1 # noqa: PLR6104
+
+ # Create the timeseries
+ time = 0
+ fileID.write('timeSeries\t' + str(countlines) + '(\n')
+ for ii in range(countlines): # noqa: B007
+ fileID.write(str(time) + '\n')
+ time = time + frequency # noqa: PLR6104
+ fileID.write(');\n\n')
+
+ # Create the paddle position
+ fileID.write('paddlePosition 1(\n' + str(countlines) + '(\n')
+ count = 0
+ for line in Lines:
+ count += 1
+ if count > 1:
+ if line != '\n':
+ data = float(line)
+ fileID.write(str(data) + '\n')
+ fileID.write(')\n);\n\n')
+
+ # Get the water depth and paddle eta
+ if heightfilepath != 'None':
+ # Get the height
+ filewmg = open(heightfilepath) # noqa: PLW1514, PTH123, SIM115
+ Lines2 = filewmg.readlines() # noqa: N806
+ count = 0
+ for line in Lines2:
+ count += 1
+ if count == 1:
+ waterdepth = float(line)
+ break
+
+ # Get the paddle eta
+ fileID.write('paddleEta 1(\n' + str(countlines) + '(\n')
+ count = 0
+ for line in Lines2:
+ count += 1
+ if count > 1:
+ if line != '\n':
+ data = float(line) + waterdepth
+ fileID.write(str(data) + '\n')
+ fileID.write(')\n);')
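For orientation, a minimal usage sketch of of7Uboundary (not part of the patch); it mirrors the boundary() method of openfoam7.py shown in the next file, and assumes `data` is the parsed dakota.json dictionary and `path` is the case directory:

    import os
    from of7Uboundary import of7Uboundary

    patches = ['Entry', 'Exit', 'Top', 'Bottom', 'Right', 'Left']
    Uboundary = of7Uboundary()
    # Validate the moving-wall inputs first; Uchecks returns -1 on a missing file
    if Uboundary.Uchecks(data, path, patches) == 0:
        # Assemble the header plus one entry per patch and write 0.org/U
        utext = Uboundary.Utext(data, path, patches)
        with open(os.path.join(path, '0.org', 'U'), 'w') as ufile:
            ufile.write(utext)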
diff --git a/modules/createEVENT/GeoClawOpenFOAM/openfoam7.py b/modules/createEVENT/GeoClawOpenFOAM/openfoam7.py
index 9c750d255..8ec5da444 100644
--- a/modules/createEVENT/GeoClawOpenFOAM/openfoam7.py
+++ b/modules/createEVENT/GeoClawOpenFOAM/openfoam7.py
@@ -1,28 +1,27 @@
-####################################################################
+# # noqa: INP001
# LICENSING INFORMATION
####################################################################
-"""
- LICENSE INFORMATION:
-
- Copyright (c) 2020-2030, The Regents of the University of California (Regents).
+"""LICENSE INFORMATION:
+
+Copyright (c) 2020-2030, The Regents of the University of California (Regents).
+
+All rights reserved.
- All rights reserved.
+Redistribution and use in source and binary forms, with or without modification, are permitted provided that the following conditions are met:
- Redistribution and use in source and binary forms, with or without modification, are permitted provided that the following conditions are met:
+1. Redistributions of source code must retain the above copyright notice, this
+ list of conditions and the following disclaimer.
+2. Redistributions in binary form must reproduce the above copyright notice,
+ this list of conditions and the following disclaimer in the documentation
+ and/or other materials provided with the distribution.
- 1. Redistributions of source code must retain the above copyright notice, this
- list of conditions and the following disclaimer.
- 2. Redistributions in binary form must reproduce the above copyright notice,
- this list of conditions and the following disclaimer in the documentation
- and/or other materials provided with the distribution.
+THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
- THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+The views and conclusions contained in the software and documentation are those of the authors and should not be interpreted as representing official policies, either expressed or implied, of the FreeBSD Project.
- The views and conclusions contained in the software and documentation are those of the authors and should not be interpreted as representing official policies, either expressed or implied, of the FreeBSD Project.
+REGENTS SPECIFICALLY DISCLAIMS ANY WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE. THE SOFTWARE AND ACCOMPANYING DOCUMENTATION, IF ANY, PROVIDED HEREUNDER IS PROVIDED "AS IS". REGENTS HAS NO OBLIGATION TO PROVIDE MAINTENANCE, SUPPORT, UPDATES, ENHANCEMENTS, OR MODIFICATIONS.
- REGENTS SPECIFICALLY DISCLAIMS ANY WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE. THE SOFTWARE AND ACCOMPANYING DOCUMENTATION, IF ANY, PROVIDED HEREUNDER IS PROVIDED "AS IS". REGENTS HAS NO OBLIGATION TO PROVIDE MAINTENANCE, SUPPORT, UPDATES, ENHANCEMENTS, OR MODIFICATIONS.
-
-"""
+""" # noqa: D400
####################################################################
# AUTHOR INFORMATION
####################################################################
@@ -37,556 +36,557 @@
# Other custom modules
from hydroUtils import hydroUtils
-from of7Geometry import of7Geometry
+from of7Alpboundary import of7Alpboundary
from of7Building import of7Building
-from of7Meshing import of7Meshing
-from of7Materials import of7Materials
+from of7Dakota import of7Dakota
+from of7Decomp import of7Decomp
+from of7Geometry import of7Geometry
from of7Initial import of7Initial
-from of7Uboundary import of7Uboundary
+from of7Materials import of7Materials
+from of7Meshing import of7Meshing
+from of7Others import of7Others
from of7Prboundary import of7Prboundary
-from of7Alpboundary import of7Alpboundary
+from of7Process import of7Process
from of7PtDboundary import of7PtDboundary
-from of7Turbulence import of7Turbulence
-from of7Decomp import of7Decomp
from of7Solve import of7Solve
-from of7Others import of7Others
-from of7Dakota import of7Dakota
-from of7Process import of7Process
+from of7Turbulence import of7Turbulence
+from of7Uboundary import of7Uboundary
+
####################################################################
# OpenFOAM7 solver class
####################################################################
-class openfoam7():
- """
- This class includes the methods related to openfoam7.
-
- Methods
- --------
- extract:
- """
-
- #############################################################
- def createfolder(self,data,path,args):
- '''
- Creates the necessary folders for openfoam7
-
- Arguments
- -----------
- data: all the JSON data
- path: Path where the new folder needs to be created
- '''
-
- # Create a utilities object
- hydroutil = hydroUtils()
-
- # Create directories for openfoam dictionaries
- # Access: Only owner can read and write
- access_rights = 0o700
-
- # Create 0-directory
- pathF = os.path.join(path,'0.org')
- if(os.path.exists(pathF)):
- shutil.rmtree(pathF)
- os.mkdir(pathF,access_rights)
- else:
- os.mkdir(pathF,access_rights)
-
- #Create constant-directory
- pathF = os.path.join(path,'constant')
- if(os.path.exists(pathF)):
- shutil.rmtree(pathF)
- os.mkdir(pathF,access_rights)
- else:
- os.mkdir(pathF,access_rights)
-
- # Create the triSurface directory
- pathF = os.path.join(path,'constant','triSurface')
- if(os.path.exists(pathF)):
- shutil.rmtree(pathF)
- os.mkdir(pathF,access_rights)
- else:
- os.mkdir(pathF,access_rights)
-
- #Create system-directory
- pathF = os.path.join(path,'system')
- if(os.path.exists(pathF)):
- shutil.rmtree(pathF)
- os.mkdir(pathF,access_rights)
- else:
- os.mkdir(pathF,access_rights)
-
- # Get the information from json file
- hydrobrain = ', '.join(hydroutil.extract_element_from_json(data, ["remoteAppDir"]))
- mesher = ', '.join(hydroutil.extract_element_from_json(data, ["Events","MeshType"]))
- simtype = ', '.join(hydroutil.extract_element_from_json(data, ["Events","SimulationType"]))
-
- # Add all variables
- caseruntext = 'echo Setting up variables\n\n'
- caseruntext = caseruntext + 'export BIM='+args.b+'\n\n'
- caseruntext = caseruntext + 'export HYDROPATH='+path+'\n\n'
- caseruntext = caseruntext + 'export LD_LIBRARY_PATH='+args.L+'\n\n'
- caseruntext = caseruntext + 'export PATH='+args.P+'\n\n'
- caseruntext = caseruntext + 'export inputFile='+args.i+'\n\n'
- caseruntext = caseruntext + 'export driverFile='+args.d+'\n\n'
- caseruntext = caseruntext + 'export inputDirectory='+path+'\n\n'
- caseruntext = caseruntext + 'export HYDROBRAIN='+os.path.join(hydrobrain,'applications','createEVENT','GeoClawOpenFOAM')+'\n\n'
-
- # Load all modules
- caseruntext = caseruntext + 'echo Loading modules on Stampede2\n'
- caseruntext = caseruntext + 'module load intel/18.0.2\n'
- caseruntext = caseruntext + 'module load impi/18.0.2\n'
- caseruntext = caseruntext + 'module load openfoam/7.0\n'
- caseruntext = caseruntext + 'module load dakota/6.8.0\n'
- caseruntext = caseruntext + 'module load python3\n\n'
-
- # Move the case files to the present folder
- zerofldr = os.path.join(path, '0.org')
- zero2fldr = '0'
- cstfldr = os.path.join(path, 'constant')
- systfldr = os.path.join(path, 'system')
- caseruntext = caseruntext + 'cp -r ' + zerofldr + ' .\n'
- caseruntext = caseruntext + 'cp -r 0.org 0\n'
- caseruntext = caseruntext + 'cp -r ' + cstfldr + ' .\n'
- caseruntext = caseruntext + 'cp -r ' + systfldr + ' .\n\n'
-
- # Create the caserun file
- if os.path.exists('caserun.sh'):
- os.remove('caserun.sh')
- scriptfile = open('caserun.sh',"w")
- scriptfile.write(caseruntext)
- scriptfile.close()
-
- # Return completion flag
- return 0
-
- #############################################################
- def creategeometry(self,data,path):
- '''
- Creates the necessary folders for openfoam7
-
- Arguments
- -----------
- data: all the JSON data
- path: Path where the geometry files (STL) needs to be created
- '''
-
- # Create a utilities object
- hydroutil = hydroUtils()
-
- # Get mesher type
- mesher = ', '.join(hydroutil.extract_element_from_json(data, ["Events","MeshType"]))
-
- # Create the geometry related files
- Geometry = of7Geometry()
- if int(mesher[0]) == 1:
- return 0
- elif int(mesher[0]) == 0 or int(mesher[0]) == 2:
- geomcode = Geometry.geomcheck(data,path)
- if geomcode == -1:
- return -1
- else:
- stlcode = Geometry.createOFSTL(data,path)
- if stlcode < 0:
- return -1
-
- # Building related files
- Building = of7Building()
- if int(mesher[0]) == 1:
- return 0
- elif int(mesher[0]) == 0 or int(mesher[0]) == 2:
- buildcode = Building.buildcheck(data,path)
- if buildcode == -1:
- return -1
- else:
- buildcode2 = Building.createbuilds(data,path)
- if buildcode2 < 0:
- return -1
-
- # Solution related files (SW solutions)
- # Always needed irrespective of geometry / mesh
-
- # Scripts
- Geometry.scripts(data)
-
- return 0
-
- #############################################################
- def createmesh(self,data,path):
- '''
- Creates the mesh dictionaries for openfoam7
-
- Arguments
- -----------
- data: all the JSON data
- path: Path where the geometry files (STL) needs to be created
- '''
-
- # Create a utilities object
- hydroutil = hydroUtils()
-
- # Get mesher type
- mesher = ', '.join(hydroutil.extract_element_from_json(data, ["Events","MeshType"]))
-
- # Create the meshing related file
- Meshing = of7Meshing()
- meshcode = Meshing.meshcheck(data,path)
- if meshcode == -1:
- return -1
- else:
- # Hydro mesher
- if int(mesher[0]) == 0:
- # blockMesh
- bmeshtext = Meshing.bmeshtext(data)
- fname = 'blockMeshDict'
- filepath = os.path.join(path, 'system', fname)
- bmeshfile = open(filepath, "w")
- bmeshfile.write(bmeshtext)
- bmeshfile.close()
- # surfaceFeatureExtract
- sfetext = Meshing.sfetext()
- fname = 'surfaceFeatureExtractDict'
- filepath = os.path.join(path, 'system', fname)
- sfefile = open(filepath, "w")
- sfefile.write(sfetext)
- sfefile.close()
- # snappyHexMesh
- shmtext = Meshing.shmtext(data)
- fname = 'snappyHexMeshDict'
- filepath = os.path.join(path, 'system', fname)
- shmfile = open(filepath, "w")
- shmfile.write(shmtext)
- shmfile.close()
-
- # Mesh files from other softwares (1)
- # Do nothing here. Add to caserun.sh
-
- # User mesh dictionaries (2)
- # Do nothing here. Copy files to relevant place
- # in caserun.sh
-
- # Scripts
- Meshing.scripts(data,path)
-
- return 0
-
- #############################################################
- def materials(self,data,path):
- '''
- Creates the material files for openfoam7
-
- Arguments
- -----------
- data: all the JSON data
- path: Path where the geometry files (STL) needs to be created
- '''
-
- # Create the transportProperties file
- Materials = of7Materials()
- matcode = Materials.matcheck(data)
- if matcode == -1:
- return -1
- else:
- mattext = Materials.mattext(data)
- fname = 'transportProperties'
- filepath = os.path.join(path, 'constant', fname)
- matfile = open(filepath, "w")
- matfile.write(mattext)
- matfile.close()
-
- return 0
-
- #############################################################
- def initial(self,data,path):
- '''
- Creates the initial condition files for openfoam7
-
- Arguments
- -----------
- data: all the JSON data
- path: Path where the geometry files dakota.json lies
- '''
-
- # Create the setFields file
- Inicond = of7Initial()
- initcode = Inicond.alphacheck(data,path)
- if initcode == -1:
- return -1
- else:
- alphatext = Inicond.alphatext(data,path)
- fname = "setFieldsDict"
- filepath = os.path.join(path, 'system', fname)
- alphafile = open(filepath, "w")
- alphafile.write(alphatext)
- alphafile.close()
-
- # Scripts
- Inicond.scripts(data,path)
-
- return 0
-
- #############################################################
- def boundary(self,data,path):
- '''
- Creates the bc condition files for openfoam7
-
- Arguments
- -----------
- data: all the JSON data
- path: Path where the geometry files (STL) needs to be created
- '''
-
- # Initialize the patches
- patches = ['Entry', 'Exit', 'Top', 'Bottom', 'Right', 'Left']
-
- # Create object for velocity boundary condition
- # Get the text for the velocity boundary
- # Write the U-file in 0.org
- Uboundary = of7Uboundary()
- utext = Uboundary.Utext(data,path,patches)
- # Check for boundary conditions here
- ecode = Uboundary.Uchecks(data,path,patches)
- if ecode == -1:
- return -1
- else:
- # Write the U-file if no errors
- # Path to the file
- fname = 'U'
- filepath = os.path.join(path, '0.org', fname)
- Ufile = open(filepath, "w")
- Ufile.write(utext)
- Ufile.close()
-
- # Create object for pressure boundary condition
- # Get the text for the pressure boundary
- # Write the p_rgh-file in 0.org
- Prboundary = of7Prboundary()
- prtext = Prboundary.Prtext(data,patches)
- fname = 'p_rgh'
- filepath = os.path.join(path, '0.org', fname)
- prfile = open(filepath, "w")
- prfile.write(prtext)
- prfile.close()
-
- # Create object for alpha boundary condition
- # Get the text for the alpha boundary
- # Write the alpha-file in 0.org
- Alpboundary = of7Alpboundary()
- Alptext = Alpboundary.Alptext(data,patches)
- fname = 'alpha.water'
- filepath = os.path.join(path, '0.org', fname)
- Alpfile = open(filepath, "w")
- Alpfile.write(Alptext)
- Alpfile.close()
-
- # Loop over all the velocity type to see if any
- # has a moving wall. If so initialize the
- # pointDisplacement file
- PtDboundary = of7PtDboundary()
- ptDcode = PtDboundary.PtDcheck(data,patches)
- if ptDcode == 1:
- pdtext = PtDboundary.PtDtext(data,path,patches)
- fname = 'pointDisplacement'
- filepath = os.path.join(path, '0.org', fname)
- ptDfile = open(filepath, "w")
- ptDfile.write(pdtext)
- ptDfile.close()
-
-
-
- return 0
-
- #############################################################
- def turbulence(self,data,path):
- '''
- Creates the turbulenceDict and other files for openfoam7
-
- Arguments
- -----------
- data: all the JSON data
- path: Path where the geometry files (STL) needs to be created
- '''
-
- # Create the domain decomposition file
- Turb = of7Turbulence()
- turbtext = Turb.turbtext(data)
- fname = 'turbulenceProperties'
- filepath = os.path.join(path, 'constant', fname)
- turbfile = open(filepath, "w")
- turbfile.write(turbtext)
- turbfile.close()
-
- return 0
-
- #############################################################
- def parallelize(self,data,path):
- '''
- Creates the domain decomposition files for openfoam7
-
- Arguments
- -----------
- data: all the JSON data
- path: Path where the geometry files (STL) needs to be created
- '''
-
- # Create the domain decomposition file
- Decomp = of7Decomp()
- decomptext = Decomp.decomptext(data)
- fname = 'decomposeParDict'
- filepath = os.path.join(path, 'system', fname)
- decompfile = open(filepath, "w")
- decompfile.write(decomptext)
- decompfile.close()
-
- # Scripts
- Decomp.scripts(data,path)
-
- return 0
-
- #############################################################
- def solve(self,data,path):
- '''
- Creates the solver related files for openfoam7
-
- Arguments
- -----------
- data: all the JSON data
- path: Path where the geometry files (STL) needs to be created
- '''
-
- # Create the solver files
- Solve = of7Solve()
- # fvSchemes
- fvschemetext = Solve.fvSchemetext(data)
- fname = 'fvSchemes'
- filepath = os.path.join(path, 'system', fname)
- fvschemefile = open(filepath,"w")
- fvschemefile.write(fvschemetext)
- fvschemefile.close()
-
- #fvSolutions
- fvsolntext = Solve.fvSolntext(data)
- fname = 'fvSolution'
- filepath = os.path.join(path, 'system', fname)
- fvsolnfile = open(filepath,"w")
- fvsolnfile.write(fvsolntext)
- fvsolnfile.close()
-
- # controlDict
- ecode = Solve.cdictcheck(data)
- if ecode == -1:
- return -1
- else:
- cdicttext = Solve.cdicttext(data)
- fname = 'controlDict'
- filepath = os.path.join(path, 'system', fname)
- cdictfile = open(filepath,"w")
- cdictfile.write(cdicttext)
- cdictfile.close()
-
- # Create CdictForce
- cdictFtext = Solve.cdictFtext(data)
- fname = 'cdictforce'
- cdictFfile = open(fname,"w")
- cdictFfile.write(cdictFtext)
- cdictFfile.close()
-
- return 0
-
- #############################################################
- def others(self,data,path):
- '''
- Creates the other auxillary files for openfoam7
-
- Arguments
- -----------
- data: all the JSON data
- path: Path where the geometry files (STL) needs to be created
- '''
-
- # Create the auxillary files
- Others = of7Others()
- # g-file
- gfiletext = Others.gfiletext(data)
- fname = 'g'
- filepath = os.path.join(path, 'constant', fname)
- gfile = open(filepath,"w")
- gfile.write(gfiletext)
- gfile.close()
-
- return 0
-
- #############################################################
- def dakota(self,args):
- '''
- Creates the dakota scripts for openfoam7
-
- Arguments
- -----------
- args: all arguments
- '''
-
- # Create the solver files
- dakota = of7Dakota()
-
- # Dakota Scripts
- dakota.dakotascripts(args)
-
- return 0
-
- #############################################################
- def postprocessing(self,data,path):
- '''
- Creates the postprocessing related files for openfoam7
-
- Arguments
- -----------
- data: all the JSON data
- path: Path where the geometry files (STL) needs to be created
- '''
-
- # Create the solver files
- pprocess = of7Process()
- # controlDict
- ecode = pprocess.pprocesscheck(data,path)
- if ecode == -1:
- return -1
- elif ecode == 0:
- return 0
- else:
- # sample file
- pprocesstext = pprocess.pprocesstext(data,path)
- fname = 'sample'
- filepath = os.path.join(fname)
- samplefile = open(filepath,"w")
- samplefile.write(pprocesstext)
- samplefile.close()
- # Controldict
- pprocesstext = pprocess.pprocesscdict(data,path)
- fname = 'cdictpp'
- filepath = os.path.join(fname)
- samplefile = open(filepath,"w")
- samplefile.write(pprocesstext)
- samplefile.close()
-
- # Scripts
- pprocess.scripts(data,path)
-
- return 0
-
- #############################################################
- def cleaning(self,args,path):
- '''
- Creates the cleaning scripts for openfoam7
-
- Arguments
- -----------
- args: all arguments
- '''
-
- # Create the solver files
- cleaner = of7Dakota()
-
- # Dakota Scripts
- cleaner.cleaning(args,path)
-
- return 0
+class openfoam7:
+ """This class includes the methods related to openfoam7.
+
+ Methods
+ -------
+ extract:
+
+ """ # noqa: D404
+
+ #############################################################
+ def createfolder(self, data, path, args): # noqa: PLR6301
+ """Creates the necessary folders for openfoam7
+
+ Arguments:
+ ---------
+ data: all the JSON data
+ path: Path where the new folder needs to be created
+
+ """ # noqa: D400, D401
+ # Create a utilities object
+ hydroutil = hydroUtils()
+
+ # Create directories for openfoam dictionaries
+ # Access: Only owner can read and write
+ access_rights = 0o700
+
+ # Create 0-directory
+ pathF = os.path.join(path, '0.org') # noqa: PTH118, N806
+ if os.path.exists(pathF): # noqa: PTH110
+ shutil.rmtree(pathF)
+ os.mkdir(pathF, access_rights) # noqa: PTH102
+ else:
+ os.mkdir(pathF, access_rights) # noqa: PTH102
+
+ # Create constant-directory
+ pathF = os.path.join(path, 'constant') # noqa: PTH118, N806
+ if os.path.exists(pathF): # noqa: PTH110
+ shutil.rmtree(pathF)
+ os.mkdir(pathF, access_rights) # noqa: PTH102
+ else:
+ os.mkdir(pathF, access_rights) # noqa: PTH102
+
+ # Create the triSurface directory
+ pathF = os.path.join(path, 'constant', 'triSurface') # noqa: PTH118, N806
+ if os.path.exists(pathF): # noqa: PTH110
+ shutil.rmtree(pathF)
+ os.mkdir(pathF, access_rights) # noqa: PTH102
+ else:
+ os.mkdir(pathF, access_rights) # noqa: PTH102
+
+ # Create system-directory
+ pathF = os.path.join(path, 'system') # noqa: PTH118, N806
+ if os.path.exists(pathF): # noqa: PTH110
+ shutil.rmtree(pathF)
+ os.mkdir(pathF, access_rights) # noqa: PTH102
+ else:
+ os.mkdir(pathF, access_rights) # noqa: PTH102
+
+ # Get the information from json file
+ hydrobrain = ', '.join(
+ hydroutil.extract_element_from_json(data, ['remoteAppDir'])
+ )
+ mesher = ', '.join( # noqa: F841
+ hydroutil.extract_element_from_json(data, ['Events', 'MeshType'])
+ )
+ simtype = ', '.join( # noqa: F841
+ hydroutil.extract_element_from_json(data, ['Events', 'SimulationType'])
+ )
+
+ # Add all variables
+ caseruntext = 'echo Setting up variables\n\n'
+ caseruntext = caseruntext + 'export BIM=' + args.b + '\n\n'
+ caseruntext = caseruntext + 'export HYDROPATH=' + path + '\n\n'
+ caseruntext = caseruntext + 'export LD_LIBRARY_PATH=' + args.L + '\n\n'
+ caseruntext = caseruntext + 'export PATH=' + args.P + '\n\n'
+ caseruntext = caseruntext + 'export inputFile=' + args.i + '\n\n'
+ caseruntext = caseruntext + 'export driverFile=' + args.d + '\n\n'
+ caseruntext = caseruntext + 'export inputDirectory=' + path + '\n\n'
+ caseruntext = (
+ caseruntext
+ + 'export HYDROBRAIN='
+ + os.path.join( # noqa: PTH118
+ hydrobrain, 'applications', 'createEVENT', 'GeoClawOpenFOAM'
+ )
+ + '\n\n'
+ )
+
+ # Load all modules
+ caseruntext = caseruntext + 'echo Loading modules on Stampede2\n' # noqa: PLR6104
+ caseruntext = caseruntext + 'module load intel/18.0.2\n' # noqa: PLR6104
+ caseruntext = caseruntext + 'module load impi/18.0.2\n' # noqa: PLR6104
+ caseruntext = caseruntext + 'module load openfoam/7.0\n' # noqa: PLR6104
+ caseruntext = caseruntext + 'module load dakota/6.8.0\n' # noqa: PLR6104
+ caseruntext = caseruntext + 'module load python3\n\n' # noqa: PLR6104
+
+ # Move the case files to the present folder
+ zerofldr = os.path.join(path, '0.org') # noqa: PTH118
+ zero2fldr = '0' # noqa: F841
+ cstfldr = os.path.join(path, 'constant') # noqa: PTH118
+ systfldr = os.path.join(path, 'system') # noqa: PTH118
+ caseruntext = caseruntext + 'cp -r ' + zerofldr + ' .\n'
+ caseruntext = caseruntext + 'cp -r 0.org 0\n' # noqa: PLR6104
+ caseruntext = caseruntext + 'cp -r ' + cstfldr + ' .\n'
+ caseruntext = caseruntext + 'cp -r ' + systfldr + ' .\n\n'
+
+ # Create the caserun file
+ if os.path.exists('caserun.sh'): # noqa: PTH110
+ os.remove('caserun.sh') # noqa: PTH107
+ scriptfile = open('caserun.sh', 'w') # noqa: PLW1514, PTH123, SIM115
+ scriptfile.write(caseruntext)
+ scriptfile.close()
+
+ # Return completion flag
+ return 0
+
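+ # Editor's note: illustrative sketch only, not part of this patch. The export
+ # block above is assembled by repeated string concatenation; shown below as a
+ # standalone helper, the same text can be rendered from a mapping so that
+ # names and values stay side by side. 'build_export_block' is a hypothetical name.
+ def build_export_block(exports):
+     """Render a {name: value} mapping as 'export NAME=value' lines."""
+     return ''.join(f'export {name}={value}\n\n' for name, value in exports.items())
+
+ # Example (hypothetical): build_export_block({'BIM': args.b, 'HYDROPATH': path})
+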
+ #############################################################
+ def creategeometry(self, data, path): # noqa: PLR6301
+ """Creates the necessary folders for openfoam7
+
+ Arguments:
+ ---------
+ data: all the JSON data
+ path: Path where the geometry files (STL) need to be created
+
+ """ # noqa: D400, D401
+ # Create a utilities object
+ hydroutil = hydroUtils()
+
+ # Get mesher type
+ mesher = ', '.join(
+ hydroutil.extract_element_from_json(data, ['Events', 'MeshType'])
+ )
+
+ # Create the geometry related files
+ Geometry = of7Geometry() # noqa: N806
+ if int(mesher[0]) == 1:
+ return 0
+ elif int(mesher[0]) == 0 or int(mesher[0]) == 2: # noqa: RET505, PLR2004
+ geomcode = Geometry.geomcheck(data, path)
+ if geomcode == -1:
+ return -1
+ else: # noqa: RET505
+ stlcode = Geometry.createOFSTL(data, path)
+ if stlcode < 0:
+ return -1
+
+ # Building related files
+ Building = of7Building() # noqa: N806
+ if int(mesher[0]) == 1:
+ return 0
+ elif int(mesher[0]) == 0 or int(mesher[0]) == 2: # noqa: RET505, PLR2004
+ buildcode = Building.buildcheck(data, path)
+ if buildcode == -1:
+ return -1
+ else: # noqa: RET505
+ buildcode2 = Building.createbuilds(data, path)
+ if buildcode2 < 0:
+ return -1
+
+ # Solution related files (SW solutions)
+ # Always needed irrespective of geometry / mesh
+
+ # Scripts
+ Geometry.scripts(data)
+
+ return 0
+
+ #############################################################
+ def createmesh(self, data, path): # noqa: PLR6301
+ """Creates the mesh dictionaries for openfoam7
+
+ Arguments:
+ ---------
+ data: all the JSON data
+ path: Path where the mesh dictionaries need to be created
+
+ """ # noqa: D400, D401
+ # Create a utilities object
+ hydroutil = hydroUtils()
+
+ # Get mesher type
+ mesher = ', '.join(
+ hydroutil.extract_element_from_json(data, ['Events', 'MeshType'])
+ )
+
+ # Create the meshing related file
+ Meshing = of7Meshing() # noqa: N806
+ meshcode = Meshing.meshcheck(data, path)
+ if meshcode == -1:
+ return -1
+ elif int(mesher[0]) == 0: # noqa: RET505
+ # blockMesh
+ bmeshtext = Meshing.bmeshtext(data)
+ fname = 'blockMeshDict'
+ filepath = os.path.join(path, 'system', fname) # noqa: PTH118
+ bmeshfile = open(filepath, 'w') # noqa: PLW1514, PTH123, SIM115
+ bmeshfile.write(bmeshtext)
+ bmeshfile.close()
+ # surfaceFeatureExtract
+ sfetext = Meshing.sfetext()
+ fname = 'surfaceFeatureExtractDict'
+ filepath = os.path.join(path, 'system', fname) # noqa: PTH118
+ sfefile = open(filepath, 'w') # noqa: PLW1514, PTH123, SIM115
+ sfefile.write(sfetext)
+ sfefile.close()
+ # snappyHexMesh
+ shmtext = Meshing.shmtext(data)
+ fname = 'snappyHexMeshDict'
+ filepath = os.path.join(path, 'system', fname) # noqa: PTH118
+ shmfile = open(filepath, 'w') # noqa: PLW1514, PTH123, SIM115
+ shmfile.write(shmtext)
+ shmfile.close()
+
+ # Mesh files from other software (1)
+ # Do nothing here. Add to caserun.sh
+
+ # User mesh dictionaries (2)
+ # Do nothing here. Copy files to relevant place
+ # in caserun.sh
+
+ # Scripts
+ Meshing.scripts(data, path)
+
+ return 0
+
+ #############################################################
+ def materials(self, data, path): # noqa: PLR6301
+ """Creates the material files for openfoam7
+
+ Arguments:
+ ---------
+ data: all the JSON data
+ path: Path where the material files need to be created
+
+ """ # noqa: D400, D401
+ # Create the transportProperties file
+ Materials = of7Materials() # noqa: N806
+ matcode = Materials.matcheck(data)
+ if matcode == -1:
+ return -1
+ else: # noqa: RET505
+ mattext = Materials.mattext(data)
+ fname = 'transportProperties'
+ filepath = os.path.join(path, 'constant', fname) # noqa: PTH118
+ matfile = open(filepath, 'w') # noqa: PLW1514, PTH123, SIM115
+ matfile.write(mattext)
+ matfile.close()
+
+ return 0
+
+ #############################################################
+ def initial(self, data, path): # noqa: PLR6301
+ """Creates the initial condition files for openfoam7
+
+ Arguments:
+ ---------
+ data: all the JSON data
+ path: Path to the directory where dakota.json lies
+
+ """ # noqa: D400, D401
+ # Create the setFields file
+ Inicond = of7Initial() # noqa: N806
+ initcode = Inicond.alphacheck(data, path)
+ if initcode == -1:
+ return -1
+ else: # noqa: RET505
+ alphatext = Inicond.alphatext(data, path)
+ fname = 'setFieldsDict'
+ filepath = os.path.join(path, 'system', fname) # noqa: PTH118
+ alphafile = open(filepath, 'w') # noqa: PLW1514, PTH123, SIM115
+ alphafile.write(alphatext)
+ alphafile.close()
+
+ # Scripts
+ Inicond.scripts(data, path)
+
+ return 0
+
+ #############################################################
+ def boundary(self, data, path): # noqa: PLR6301
+ """Creates the bc condition files for openfoam7
+
+ Arguments:
+ ---------
+ data: all the JSON data
+ path: Path where the boundary condition files need to be created
+
+ """ # noqa: D400, D401
+ # Initialize the patches
+ patches = ['Entry', 'Exit', 'Top', 'Bottom', 'Right', 'Left']
+
+ # Create object for velocity boundary condition
+ # Get the text for the velocity boundary
+ # Write the U-file in 0.org
+ Uboundary = of7Uboundary() # noqa: N806
+ utext = Uboundary.Utext(data, path, patches)
+ # Check for boundary conditions here
+ ecode = Uboundary.Uchecks(data, path, patches)
+ if ecode == -1:
+ return -1
+ else: # noqa: RET505
+ # Write the U-file if no errors
+ # Path to the file
+ fname = 'U'
+ filepath = os.path.join(path, '0.org', fname) # noqa: PTH118
+ Ufile = open(filepath, 'w') # noqa: N806, PLW1514, PTH123, SIM115
+ Ufile.write(utext)
+ Ufile.close()
+
+ # Create object for pressure boundary condition
+ # Get the text for the pressure boundary
+ # Write the p_rgh-file in 0.org
+ Prboundary = of7Prboundary() # noqa: N806
+ prtext = Prboundary.Prtext(data, patches)
+ fname = 'p_rgh'
+ filepath = os.path.join(path, '0.org', fname) # noqa: PTH118
+ pr_file = open(filepath, 'w') # noqa: PLW1514, PTH123, SIM115
+ pr_file.write(prtext)
+ pr_file.close()
+
+ # Create object for alpha boundary condition
+ # Get the text for the alpha boundary
+ # Write the alpha-file in 0.org
+ Alpboundary = of7Alpboundary() # noqa: N806
+ Alptext = Alpboundary.Alptext(data, patches) # noqa: N806
+ fname = 'alpha.water'
+ filepath = os.path.join(path, '0.org', fname) # noqa: PTH118
+ Alpfile = open(filepath, 'w') # noqa: N806, PLW1514, PTH123, SIM115
+ Alpfile.write(Alptext)
+ Alpfile.close()
+
+ # Loop over all the velocity type to see if any
+ # has a moving wall. If so initialize the
+ # pointDisplacement file
+ PtDboundary = of7PtDboundary() # noqa: N806
+ ptDcode = PtDboundary.PtDcheck(data, patches) # noqa: N806
+ if ptDcode == 1:
+ pdtext = PtDboundary.PtDtext(data, path, patches)
+ fname = 'pointDisplacement'
+ filepath = os.path.join(path, '0.org', fname) # noqa: PTH118
+ ptDfile = open(filepath, 'w') # noqa: N806, PLW1514, PTH123, SIM115
+ ptDfile.write(pdtext)
+ ptDfile.close()
+
+ return 0
+
+ #############################################################
+ def turbulence(self, data, path): # noqa: PLR6301
+ """Creates the turbulenceDict and other files for openfoam7
+
+ Arguments:
+ ---------
+ data: all the JSON data
+ path: Path where the turbulence files need to be created
+
+ """ # noqa: D400, D401
+ # Create the turbulence properties file
+ Turb = of7Turbulence() # noqa: N806
+ turbtext = Turb.turbtext(data)
+ fname = 'turbulenceProperties'
+ filepath = os.path.join(path, 'constant', fname) # noqa: PTH118
+ turbfile = open(filepath, 'w') # noqa: PLW1514, PTH123, SIM115
+ turbfile.write(turbtext)
+ turbfile.close()
+
+ return 0
+
+ #############################################################
+ def parallelize(self, data, path): # noqa: PLR6301
+ """Creates the domain decomposition files for openfoam7
+
+ Arguments:
+ ---------
+ data: all the JSON data
+ path: Path where the domain decomposition files need to be created
+
+ """ # noqa: D400, D401
+ # Create the domain decomposition file
+ Decomp = of7Decomp() # noqa: N806
+ decomptext = Decomp.decomptext(data)
+ fname = 'decomposeParDict'
+ filepath = os.path.join(path, 'system', fname) # noqa: PTH118
+ decompfile = open(filepath, 'w') # noqa: PLW1514, PTH123, SIM115
+ decompfile.write(decomptext)
+ decompfile.close()
+
+ # Scripts
+ Decomp.scripts(data, path)
+
+ return 0
+
+ #############################################################
+ def solve(self, data, path): # noqa: PLR6301
+ """Creates the solver related files for openfoam7
+
+ Arguments:
+ ---------
+ data: all the JSON data
+ path: Path where the solver files need to be created
+
+ """ # noqa: D400, D401
+ # Create the solver files
+ Solve = of7Solve() # noqa: N806
+ # fvSchemes
+ fvschemetext = Solve.fvSchemetext(data)
+ fname = 'fvSchemes'
+ filepath = os.path.join(path, 'system', fname) # noqa: PTH118
+ fvschemefile = open(filepath, 'w') # noqa: PLW1514, PTH123, SIM115
+ fvschemefile.write(fvschemetext)
+ fvschemefile.close()
+
+ # fvSolutions
+ fvsolntext = Solve.fvSolntext(data)
+ fname = 'fvSolution'
+ filepath = os.path.join(path, 'system', fname) # noqa: PTH118
+ fvsolnfile = open(filepath, 'w') # noqa: PLW1514, PTH123, SIM115
+ fvsolnfile.write(fvsolntext)
+ fvsolnfile.close()
+
+ # controlDict
+ ecode = Solve.cdictcheck(data)
+ if ecode == -1:
+ return -1
+ else: # noqa: RET505
+ cdicttext = Solve.cdicttext(data)
+ fname = 'controlDict'
+ filepath = os.path.join(path, 'system', fname) # noqa: PTH118
+ cdictfile = open(filepath, 'w') # noqa: PLW1514, PTH123, SIM115
+ cdictfile.write(cdicttext)
+ cdictfile.close()
+
+ # Create CdictForce
+ cdictFtext = Solve.cdictFtext(data) # noqa: N806
+ fname = 'cdictforce'
+ cdictFfile = open(fname, 'w') # noqa: N806, PLW1514, PTH123, SIM115
+ cdictFfile.write(cdictFtext)
+ cdictFfile.close()
+
+ return 0
+
+ #############################################################
+ def others(self, data, path): # noqa: PLR6301
+ """Creates the other auxiliary files for openfoam7
+
+ Arguments:
+ ---------
+ data: all the JSON data
+ path: Path where the auxiliary files need to be created
+
+ """ # noqa: D400, D401
+ # Create the auxiliary files
+ Others = of7Others() # noqa: N806
+ # g-file
+ gfiletext = Others.gfiletext(data)
+ fname = 'g'
+ filepath = os.path.join(path, 'constant', fname) # noqa: PTH118
+ gfile = open(filepath, 'w') # noqa: PLW1514, PTH123, SIM115
+ gfile.write(gfiletext)
+ gfile.close()
+
+ return 0
+
+ #############################################################
+ def dakota(self, args): # noqa: PLR6301
+ """Creates the dakota scripts for openfoam7
+
+ Arguments:
+ ---------
+ args: all arguments
+
+ """ # noqa: D400, D401
+ # Create the Dakota object
+ dakota = of7Dakota()
+
+ # Dakota Scripts
+ dakota.dakotascripts(args)
+
+ return 0
+
+ #############################################################
+ def postprocessing(self, data, path): # noqa: PLR6301
+ """Creates the postprocessing related files for openfoam7
+
+ Arguments:
+ ---------
+ data: all the JSON data
+ path: Path where the postprocessing files need to be created
+
+ """ # noqa: D400, D401
+ # Create the postprocessing object
+ pprocess = of7Process()
+ # controlDict
+ ecode = pprocess.pprocesscheck(data, path)
+ if ecode == -1:
+ return -1
+ elif ecode == 0: # noqa: RET505
+ return 0
+ else:
+ # sample file
+ pprocesstext = pprocess.pprocesstext(data, path)
+ fname = 'sample'
+ filepath = os.path.join(fname) # noqa: PTH118
+ samplefile = open(filepath, 'w') # noqa: PLW1514, PTH123, SIM115
+ samplefile.write(pprocesstext)
+ samplefile.close()
+ # Controldict
+ pprocesstext = pprocess.pprocesscdict(data, path)
+ fname = 'cdictpp'
+ filepath = os.path.join(fname) # noqa: PTH118
+ samplefile = open(filepath, 'w') # noqa: PLW1514, PTH123, SIM115
+ samplefile.write(pprocesstext)
+ samplefile.close()
+
+ # Scripts
+ pprocess.scripts(data, path)
+
+ return 0
+
+ #############################################################
+ def cleaning(self, args, path): # noqa: PLR6301
+ """Creates the cleaning scripts for openfoam7
+
+ Arguments:
+ ---------
+ args: all arguments
+ path: Path to the case directory
+
+ """ # noqa: D400, D401
+ # Create the cleaning object
+ cleaner = of7Dakota()
+
+ # Cleaning scripts
+ cleaner.cleaning(args, path)
+
+ return 0
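+
+
+# Editor's note: illustrative sketch only, not part of this patch. Every method
+# above repeats the same remove/open/write/close sequence for one dictionary
+# file. A standalone helper along these lines could factor that pattern out;
+# the name 'write_case_file' is hypothetical.
+def write_case_file(path, subdir, fname, text):
+    """Write text to <path>/<subdir>/<fname>, replacing any stale copy."""
+    import os  # local import keeps the sketch self-contained
+    filepath = os.path.join(path, subdir, fname)
+    if os.path.exists(filepath):
+        os.remove(filepath)
+    with open(filepath, 'w') as f:
+        f.write(text)
+
+
+# Example (hypothetical): write_case_file(path, 'system', 'fvSchemes', fvschemetext)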
diff --git a/modules/createEVENT/GeoClawOpenFOAM/osuFlume.py b/modules/createEVENT/GeoClawOpenFOAM/osuFlume.py
index a88497a8b..a81430f26 100644
--- a/modules/createEVENT/GeoClawOpenFOAM/osuFlume.py
+++ b/modules/createEVENT/GeoClawOpenFOAM/osuFlume.py
@@ -1,28 +1,27 @@
-####################################################################
+# # noqa: INP001
# LICENSING INFORMATION
####################################################################
-"""
- LICENSE INFORMATION:
-
- Copyright (c) 2020-2030, The Regents of the University of California (Regents).
+"""LICENSE INFORMATION:
+
+Copyright (c) 2020-2030, The Regents of the University of California (Regents).
+
+All rights reserved.
- All rights reserved.
+Redistribution and use in source and binary forms, with or without modification, are permitted provided that the following conditions are met:
- Redistribution and use in source and binary forms, with or without modification, are permitted provided that the following conditions are met:
+1. Redistributions of source code must retain the above copyright notice, this
+ list of conditions and the following disclaimer.
+2. Redistributions in binary form must reproduce the above copyright notice,
+ this list of conditions and the following disclaimer in the documentation
+ and/or other materials provided with the distribution.
- 1. Redistributions of source code must retain the above copyright notice, this
- list of conditions and the following disclaimer.
- 2. Redistributions in binary form must reproduce the above copyright notice,
- this list of conditions and the following disclaimer in the documentation
- and/or other materials provided with the distribution.
+THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
- THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+The views and conclusions contained in the software and documentation are those of the authors and should not be interpreted as representing official policies, either expressed or implied, of the FreeBSD Project.
- The views and conclusions contained in the software and documentation are those of the authors and should not be interpreted as representing official policies, either expressed or implied, of the FreeBSD Project.
+REGENTS SPECIFICALLY DISCLAIMS ANY WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE. THE SOFTWARE AND ACCOMPANYING DOCUMENTATION, IF ANY, PROVIDED HEREUNDER IS PROVIDED "AS IS". REGENTS HAS NO OBLIGATION TO PROVIDE MAINTENANCE, SUPPORT, UPDATES, ENHANCEMENTS, OR MODIFICATIONS.
- REGENTS SPECIFICALLY DISCLAIMS ANY WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE. THE SOFTWARE AND ACCOMPANYING DOCUMENTATION, IF ANY, PROVIDED HEREUNDER IS PROVIDED "AS IS". REGENTS HAS NO OBLIGATION TO PROVIDE MAINTENANCE, SUPPORT, UPDATES, ENHANCEMENTS, OR MODIFICATIONS.
-
-"""
+""" # noqa: D400
####################################################################
# AUTHOR INFORMATION
####################################################################
@@ -33,75 +32,76 @@
####################################################################
# Standard python modules
import os
+
import numpy as np
# Other custom modules
from flume import flume
+
####################################################################
# OpenFOAM7 solver class
####################################################################
-class osuFlume():
- """
- This class includes the methods related to
- creating a standard OSU flume
-
- Methods
- --------
- creategeom: Create geometry and STL files
- """
-
- #############################################################
- def creategeom(self,data,path):
- '''
- Creates the geometry for OSU flume
-
- Arguments
- -----------
- data: all the JSON data
- '''
-
- # Number of flume points
- numflumepoints = 9
-
- # Provide the coordinates in a numpy array
- nums = np.zeros(2*(numflumepoints))
- nums[0] = -2.085
- nums[1] = 0.0
- nums[2] = 14.2748
- nums[3] = 0.0
- nums[4] = 14.2748
- nums[5] = 0.1524
- nums[6] = 17.9324
- nums[7] = 0.1524
- nums[8] = 28.9052
- nums[9] = 1.15
- nums[10] = 43.5356
- nums[11] = 1.7526
- nums[12] = 80.116
- nums[13] = 1.7526
- nums[14] = 80.116
- nums[15] = 4.572
- nums[16] = -2.085
- nums[17] = 4.572
-
- # Create temporary file
- filename = 'FlumeData.txt'
- if os.path.exists(filename):
- os.remove(filename)
- f = open(filename, "a")
- for ii in range(int(numflumepoints)):
- f.write(str(nums[2*ii]) + ',' + str(nums[2*ii+1]) + '\n' )
- f.close()
-
- # Add breadth of the flume
- breadth = 3.70
-
- # Create the STL file and get extreme file (needed for blockmesh and building)
- flumeobj = flume()
- extreme = flumeobj.generateflume(breadth,path)
-
- # Write extreme values and building data to temporary file for later usage
- flumeobj.extremedata(extreme,breadth)
-
- return 0
\ No newline at end of file
+class osuFlume:
+ """This class includes the methods related to
+ creating a standard OSU flume
+
+ Methods
+ -------
+ creategeom: Create geometry and STL files
+
+ """ # noqa: D205, D400, D404
+
+ #############################################################
+ def creategeom(self, data, path): # noqa: ARG002, PLR6301
+ """Creates the geometry for OSU flume
+
+ Arguments:
+ ---------
+ data: all the JSON data
+
+ """ # noqa: D400, D401
+ # Number of flume points
+ numflumepoints = 9
+
+ # Provide the coordinates in a numpy array
+ nums = np.zeros(2 * (numflumepoints))
+ nums[0] = -2.085
+ nums[1] = 0.0
+ nums[2] = 14.2748
+ nums[3] = 0.0
+ nums[4] = 14.2748
+ nums[5] = 0.1524
+ nums[6] = 17.9324
+ nums[7] = 0.1524
+ nums[8] = 28.9052
+ nums[9] = 1.15
+ nums[10] = 43.5356
+ nums[11] = 1.7526
+ nums[12] = 80.116
+ nums[13] = 1.7526
+ nums[14] = 80.116
+ nums[15] = 4.572
+ nums[16] = -2.085
+ nums[17] = 4.572
+
+ # Create temporary file
+ filename = 'FlumeData.txt'
+ if os.path.exists(filename): # noqa: PTH110
+ os.remove(filename) # noqa: PTH107
+ f = open(filename, 'a') # noqa: PLW1514, PTH123, SIM115
+ for ii in range(int(numflumepoints)):
+ f.write(str(nums[2 * ii]) + ',' + str(nums[2 * ii + 1]) + '\n')
+ f.close()
+
+ # Add breadth of the flume
+ breadth = 3.70
+
+ # Create the STL file and get extreme file (needed for blockmesh and building)
+ flumeobj = flume()
+ extreme = flumeobj.generateflume(breadth, path)
+
+ # Write extreme values and building data to temporary file for later usage
+ flumeobj.extremedata(extreme, breadth)
+
+ return 0
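+
+
+# Editor's note: illustrative sketch only, not part of this patch. creategeom()
+# writes the flume outline one 'x,y' line at a time; the same file can be
+# produced in a single call once the flat coordinate array is viewed as
+# (npoints, 2) pairs. 'write_flume_outline' is a hypothetical name.
+def write_flume_outline(nums, filename='FlumeData.txt'):
+    """Write interleaved x,y coordinates as comma-separated pairs."""
+    np.savetxt(filename, np.asarray(nums).reshape(-1, 2), delimiter=',', fmt='%.4f')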
diff --git a/modules/createEVENT/GeoClawOpenFOAM/userFlume.py b/modules/createEVENT/GeoClawOpenFOAM/userFlume.py
index 44cd0572d..2ae6bbfbf 100644
--- a/modules/createEVENT/GeoClawOpenFOAM/userFlume.py
+++ b/modules/createEVENT/GeoClawOpenFOAM/userFlume.py
@@ -1,28 +1,27 @@
-####################################################################
+# # noqa: INP001
# LICENSING INFORMATION
####################################################################
-"""
- LICENSE INFORMATION:
-
- Copyright (c) 2020-2030, The Regents of the University of California (Regents).
+"""LICENSE INFORMATION:
+
+Copyright (c) 2020-2030, The Regents of the University of California (Regents).
- All rights reserved.
+All rights reserved.
- Redistribution and use in source and binary forms, with or without modification, are permitted provided that the following conditions are met:
+Redistribution and use in source and binary forms, with or without modification, are permitted provided that the following conditions are met:
- 1. Redistributions of source code must retain the above copyright notice, this
- list of conditions and the following disclaimer.
- 2. Redistributions in binary form must reproduce the above copyright notice,
- this list of conditions and the following disclaimer in the documentation
- and/or other materials provided with the distribution.
+1. Redistributions of source code must retain the above copyright notice, this
+ list of conditions and the following disclaimer.
+2. Redistributions in binary form must reproduce the above copyright notice,
+ this list of conditions and the following disclaimer in the documentation
+ and/or other materials provided with the distribution.
- THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
- The views and conclusions contained in the software and documentation are those of the authors and should not be interpreted as representing official policies, either expressed or implied, of the FreeBSD Project.
+The views and conclusions contained in the software and documentation are those of the authors and should not be interpreted as representing official policies, either expressed or implied, of the FreeBSD Project.
- REGENTS SPECIFICALLY DISCLAIMS ANY WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE. THE SOFTWARE AND ACCOMPANYING DOCUMENTATION, IF ANY, PROVIDED HEREUNDER IS PROVIDED "AS IS". REGENTS HAS NO OBLIGATION TO PROVIDE MAINTENANCE, SUPPORT, UPDATES, ENHANCEMENTS, OR MODIFICATIONS.
-
-"""
+REGENTS SPECIFICALLY DISCLAIMS ANY WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE. THE SOFTWARE AND ACCOMPANYING DOCUMENTATION, IF ANY, PROVIDED HEREUNDER IS PROVIDED "AS IS". REGENTS HAS NO OBLIGATION TO PROVIDE MAINTENANCE, SUPPORT, UPDATES, ENHANCEMENTS, OR MODIFICATIONS.
+
+""" # noqa: D400
####################################################################
# AUTHOR INFORMATION
####################################################################
@@ -34,66 +33,73 @@
# Standard python modules
import os
+from flume import flume
+
# Other custom modules
from hydroUtils import hydroUtils
-from flume import flume
+
####################################################################
# OpenFOAM7 solver class
####################################################################
-class userFlume():
- """
- This class includes the methods related to
- creating a flume as specified by the user
-
- Methods
- --------
- creategeom: Create geometry and STL files
- """
-
- #############################################################
- def creategeom(self,data,path):
- '''
- Creates the geometry for user flume
-
- Arguments
- -----------
- data: all the JSON data
- '''
-
- # Create a utilities object
- hydroutil = hydroUtils()
-
- # Read the flume segments
- flumesegs = ', '.join(hydroutil.extract_element_from_json(data, ["Events","FlumeSegments"]))
- # Get the number of flume segments
- numflumesegs = ', '.join(hydroutil.extract_element_from_json(data, ["Events","NumFlumeSegments"]))
-
- # Replace the comma by spaces in segments list
- flumesegs = flumesegs.replace(',', ' ')
- # Convert the flume segment to list of floats
- nums = [float(n) for n in flumesegs.split()]
- # Remove the first item
- nums.pop(0)
-
- # Create temporary file
- filename = 'FlumeData.txt'
- if os.path.exists(filename):
- os.remove(filename)
- f = open(filename, "a")
- for ii in range(int(numflumesegs)):
- f.write(str(nums[2*ii]) + ',' + str(nums[2*ii+1]) + '\n' )
- f.close()
-
- # Get the breadth
- breadthval = ''.join(hydroutil.extract_element_from_json(data, ["Events","FlumeBreadth"]))
- breadth = float(breadthval)
-
- # Create the STL file and get extreme file (needed for blockmesh and building)
- flumeobj = flume()
- extreme = flumeobj.generateflume(breadth,path)
-
- # Write extreme values and building data to temporary file for later usage
- flumeobj.extremedata(extreme,breadth)
-
- return 0
\ No newline at end of file
+class userFlume:
+ """This class includes the methods related to
+ creating a flume as specified by the user
+
+ Methods
+ -------
+ creategeom: Create geometry and STL files
+
+ """ # noqa: D205, D400, D404
+
+ #############################################################
+ def creategeom(self, data, path): # noqa: PLR6301
+ """Creates the geometry for user flume
+
+ Arguments:
+ ---------
+ data: all the JSON data
+
+ """ # noqa: D400, D401
+ # Create a utilities object
+ hydroutil = hydroUtils()
+
+ # Read the flume segments
+ flumesegs = ', '.join(
+ hydroutil.extract_element_from_json(data, ['Events', 'FlumeSegments'])
+ )
+ # Get the number of flume segments
+ numflumesegs = ', '.join(
+ hydroutil.extract_element_from_json(data, ['Events', 'NumFlumeSegments'])
+ )
+
+ # Replace the comma by spaces in segments list
+ flumesegs = flumesegs.replace(',', ' ')
+ # Convert the flume segment to list of floats
+ nums = [float(n) for n in flumesegs.split()]
+ # Remove the first item
+ nums.pop(0)
+
+ # Create temporary file
+ filename = 'FlumeData.txt'
+ if os.path.exists(filename): # noqa: PTH110
+ os.remove(filename) # noqa: PTH107
+ f = open(filename, 'a') # noqa: PLW1514, PTH123, SIM115
+ for ii in range(int(numflumesegs)):
+ f.write(str(nums[2 * ii]) + ',' + str(nums[2 * ii + 1]) + '\n')
+ f.close()
+
+ # Get the breadth
+ breadthval = ''.join(
+ hydroutil.extract_element_from_json(data, ['Events', 'FlumeBreadth'])
+ )
+ breadth = float(breadthval)
+
+ # Create the STL file and get extreme file (needed for blockmesh and building)
+ flumeobj = flume()
+ extreme = flumeobj.generateflume(breadth, path)
+
+ # Write extreme values and building data to temporary file for later usage
+ flumeobj.extremedata(extreme, breadth)
+
+ return 0
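+
+
+# Editor's note: illustrative sketch only, not part of this patch. creategeom()
+# converts the JSON segment string to floats by joining, replacing commas and
+# splitting; the same step reads more directly as a small helper.
+# 'parse_flume_segments' is a hypothetical name.
+def parse_flume_segments(flumesegs):
+    """Turn a comma-separated segment string into floats, dropping the first item."""
+    values = [float(n) for n in flumesegs.replace(',', ' ').split()]
+    return values[1:]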
diff --git a/modules/createEVENT/HighRiseTPU/HighRiseTPU.cpp b/modules/createEVENT/HighRiseTPU/HighRiseTPU.cpp
index 5e815bc49..61a8b6ef7 100644
--- a/modules/createEVENT/HighRiseTPU/HighRiseTPU.cpp
+++ b/modules/createEVENT/HighRiseTPU/HighRiseTPU.cpp
@@ -93,7 +93,7 @@ main(int argc, char **argv) {
json_t *generalInformation = json_object_get(input, "GeneralInformation");
json_t *inputEventsArray = json_object_get(input, "Events");
if (generalInformation == NULL || inputEventsArray == NULL) {
- std::cerr << "FATAL ERROR - input file conatins no Events key-pair\n";
+ std::cerr << "FATAL ERROR - input file contains no Events key-pair\n";
exit(-1);
}
@@ -127,7 +127,7 @@ main(int argc, char **argv) {
json_object_set(units,"time",json_string("sec"));
json_object_set(outputEvent,"units",units);
- // call function to fill in event details .. depends on getRV flag what is acually done
+ // call function to fill in event details .. depends on getRV flag what is actually done
addEvent(generalInformation, inputEvent, outputEvent, doRV);
json_array_append(outputEventsArray, outputEvent);
@@ -179,7 +179,7 @@ int addEvent(json_t *generalInfo, json_t *currentEvent, json_t *outputEvent, boo
widthJO == NULL ||
depthJO == NULL ||
storiesJO == NULL ) {
- std::cerr << "ERROR missing Information from GeneralInformation (height, width, stories all neeed)\n";
+ std::cerr << "ERROR missing Information from GeneralInformation (height, width, stories all needed)\n";
return -2;
}
@@ -333,7 +333,7 @@ int addEvent(json_t *generalInfo, json_t *currentEvent, json_t *outputEvent, boo
}
//
- // for each tap determine factors fr moments and forces for the buiding asuming a mesh discretization
+ // for each tap determine factors fr moments and forces for the building assuming a mesh discretization
//
int numDivisionX = 10;
@@ -538,7 +538,7 @@ int addEvent(json_t *generalInfo, json_t *currentEvent, json_t *outputEvent, boo
json_t *storiesJO = json_object_get(generalInfo,"stories");
if (storiesJO == NULL ) {
- std::cerr << "ERROR missing Information from GeneralInformation (height, width, stories all neeed)\n";
+ std::cerr << "ERROR missing Information from GeneralInformation (height, width, stories all needed)\n";
return -2;
}
@@ -641,10 +641,10 @@ int addEvent(json_t *generalInfo, json_t *currentEvent, json_t *outputEvent, boo
//
// function to add factors for forces and moment contribution coefficients for taps to building floor
-// determine coeffiecients for each tap for a building face. This is done by going over each story of
+// determine coefficients for each tap for a building face. This is done by going over each story of
// For each story break into numDiv X numDiv segments. For each segment assume point load at center
// segment and equal in mag to area of segment and using simply supported beam formula determine force
-// at floor below and floor above. based on distance from center line of story determine actibg moments
+// at floor below and floor above. based on distance from center line of story determine acting moments
// on floors.
//
// inputs: height: height of building
@@ -731,9 +731,9 @@ int addForcesFace(TAP *theTaps, int numTaps,
// function to fnd nearest tap
// inputs: theTAPS: array of Taps,
// numTaps: number of taps in array
-// xLoc, yLoc: is location of inut point
+// xLoc, yLoc: is location of input point
// face: if of face
-// output: pinter to nearest TAp in the array, NULL if no taps with face
+// output: pointer to nearest TAp in the array, NULL if no taps with face
//
TAP *findNearestTAP(TAP *theTAPS, int numTaps, double locX, double locY, int face) {
diff --git a/modules/createEVENT/HighRiseTPU/HighRiseTPU.py b/modules/createEVENT/HighRiseTPU/HighRiseTPU.py
index 748c5617c..edd13281e 100644
--- a/modules/createEVENT/HighRiseTPU/HighRiseTPU.py
+++ b/modules/createEVENT/HighRiseTPU/HighRiseTPU.py
@@ -1,105 +1,98 @@
-# python code to open the TPU .mat file
+# python code to open the TPU .mat file # noqa: CPY001, D100, INP001
# and put data into a SimCenter JSON file for
# wind tunnel data
-import sys
import os
-import subprocess
-import json
-import stat
-import shutil
-import numpy as np
+import sys
+
import scipy.io as sio
-from pprint import pprint
-inputArgs = sys.argv
+inputArgs = sys.argv # noqa: N816
-print ("Number of arguments: %d" % len(sys.argv))
-print ("The arguments are: %s" %str(sys.argv))
+print('Number of arguments: %d' % len(sys.argv)) # noqa: T201
+print('The arguments are: %s' % str(sys.argv)) # noqa: T201, UP031
# set filenames
-matFileIN = sys.argv[1]
-jsonFileOUT = sys.argv[2]
-
-dataDir = os.getcwd()
-scriptDir = os.path.dirname(os.path.realpath(__file__))
-
-def parseTPU_HighRise_MatFile(matFileIn, windFileOutName):
-
- file = open(windFileOutName,"w", encoding='utf-8');
- file.write("{\n");
-
- mat_contents = sio.loadmat(matFileIn);
- depth = mat_contents['Building_depth'][0][0];
- height = mat_contents['Building_height'][0][0];
- breadth = mat_contents['Building_breadth'][0][0];
- period = mat_contents['Sample_period'][0][0];
- frequency = mat_contents['Sample_frequency'][0][0];
- angle = mat_contents['Wind_direction_angle'][0][0];
- #uH = mat_contents['Uh_AverageWindSpeed'][0][0];
- uH = float(mat_contents['Uh_AverageWindSpeed'][0]);
- print(uH)
- print(depth)
- print(height)
- file.write("\"windSpeed\":%f," % uH);
- file.write("\"depth\":%f," % depth);
- file.write("\"height\":%f," % height);
- file.write("\"breadth\":%f," % breadth);
- file.write("\"period\":%f," % period);
- file.write("\"units\":{\"length\":\"m\",\"time\":\"sec\"},");
- file.write("\"frequency\":%f," % frequency);
- file.write("\"incidenceAngle\":%f," % angle);
- file.write("\"tapLocations\": [");
-
- locations = mat_contents['Location_of_measured_points'];
- numLocations = locations.shape[1];
-
+matFileIN = sys.argv[1] # noqa: N816
+jsonFileOUT = sys.argv[2] # noqa: N816
+
+dataDir = os.getcwd() # noqa: PTH109, N816
+scriptDir = os.path.dirname(os.path.realpath(__file__)) # noqa: PTH120, N816
+
+
+def parseTPU_HighRise_MatFile(matFileIn, windFileOutName): # noqa: N802, N803, D103
+ file = open(windFileOutName, 'w', encoding='utf-8') # noqa: SIM115, PTH123
+ file.write('{\n')
+ mat_contents = sio.loadmat(matFileIn)
+ depth = mat_contents['Building_depth'][0][0]
+ height = mat_contents['Building_height'][0][0]
+ breadth = mat_contents['Building_breadth'][0][0]
+ period = mat_contents['Sample_period'][0][0]
+ frequency = mat_contents['Sample_frequency'][0][0]
+ angle = mat_contents['Wind_direction_angle'][0][0]
+ # uH = mat_contents['Uh_AverageWindSpeed'][0][0];
+ uH = float(mat_contents['Uh_AverageWindSpeed'][0]) # noqa: N806
+ print(uH) # noqa: T201
+ print(depth) # noqa: T201
+ print(height) # noqa: T201
+ file.write('"windSpeed":%f,' % uH) # noqa: UP031
+ file.write('"depth":%f,' % depth) # noqa: UP031
+ file.write('"height":%f,' % height) # noqa: UP031
+ file.write('"breadth":%f,' % breadth) # noqa: UP031
+ file.write('"period":%f,' % period) # noqa: UP031
+ file.write('"units":{"length":"m","time":"sec"},')
+ file.write('"frequency":%f,' % frequency) # noqa: UP031
+ file.write('"incidenceAngle":%f,' % angle) # noqa: UP031
+ file.write('"tapLocations": [')
+ locations = mat_contents['Location_of_measured_points']
+ numLocations = locations.shape[1] # noqa: N806
# get xMax and yMax .. assuming first sensor is 1m from building edge
- # location on faces cannot be obtained from the inputs, at least not with
+ # location on faces cannot be obtained from the inputs, at least not with
# current documentation, awaiting email from TPU
- xMax = max(locations[0])+1
- yMax = max(locations[1])+1
-
- for loc in range(0, numLocations):
+ xMax = max(locations[0]) + 1 # noqa: N806, F841
+ yMax = max(locations[1]) + 1 # noqa: N806, F841
+
+ for loc in range(numLocations):
tag = locations[2][loc]
- xLoc = locations[0][loc]
- yLoc = locations[1][loc]
+ xLoc = locations[0][loc] # noqa: N806
+ yLoc = locations[1][loc] # noqa: N806
face = locations[3][loc]
- X = xLoc
- Y = yLoc
- if (face == 2):
- xLoc = X - breadth
- elif (face == 3):
- xLoc = X - breadth - depth
- elif (face == 4):
- xLoc = X - 2*breadth - depth
-
- if (loc == numLocations-1):
- file.write("{\"id\":%d,\"xLoc\":%f,\"yLoc\":%f,\"face\":%d}]" % (tag, xLoc, yLoc, face))
+ X = xLoc # noqa: N806
+ Y = yLoc # noqa: N806, F841
+ if face == 2: # noqa: PLR2004
+ xLoc = X - breadth # noqa: N806
+ elif face == 3: # noqa: PLR2004
+ xLoc = X - breadth - depth # noqa: N806
+ elif face == 4: # noqa: PLR2004
+ xLoc = X - 2 * breadth - depth # noqa: N806
+
+ if loc == numLocations - 1:
+ file.write(
+ '{"id":%d,"xLoc":%f,"yLoc":%f,"face":%d}]' % (tag, xLoc, yLoc, face)
+ )
else:
- file.write("{\"id\":%d,\"xLoc\":%f,\"yLoc\":%f,\"face\":%d}," % (tag, xLoc, yLoc, face))
-
-
- file.write(",\"pressureCoefficients\": [");
- coefficients = mat_contents['Wind_pressure_coefficients'];
- numLocations = coefficients.shape[1];
- numValues = coefficients.shape[0];
-
- for loc in range(0, numLocations):
- file.write("{\"id\": %d , \"data\":[" % (loc+1))
- for i in range(0, numValues-1):
- file.write("%f," % coefficients[i,loc])
- if (loc != numLocations-1):
- file.write("%f]}," % coefficients[numValues-1,loc])
+ file.write(
+ '{"id":%d,"xLoc":%f,"yLoc":%f,"face":%d},' % (tag, xLoc, yLoc, face)
+ )
+
+ file.write(',"pressureCoefficients": [')
+ coefficients = mat_contents['Wind_pressure_coefficients']
+ numLocations = coefficients.shape[1] # noqa: N806
+ numValues = coefficients.shape[0] # noqa: N806
+ for loc in range(numLocations):
+ file.write('{"id": %d , "data":[' % (loc + 1))
+ for i in range(numValues - 1):
+ file.write('%f,' % coefficients[i, loc]) # noqa: UP031
+ if loc != numLocations - 1:
+ file.write('%f]},' % coefficients[numValues - 1, loc]) # noqa: UP031
else:
- file.write("%f]}]" % coefficients[numValues-1,loc])
+ file.write('%f]}]' % coefficients[numValues - 1, loc]) # noqa: UP031
- file.write("}")
+ file.write('}')
file.close()
-if __name__ == '__main__':
- parseTPU_HighRise_MatFile(matFileIN,jsonFileOUT)
-
+if __name__ == '__main__':
+ parseTPU_HighRise_MatFile(matFileIN, jsonFileOUT)
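+
+
+# Editor's note: illustrative sketch only, not part of this patch. The parser
+# above assembles the output JSON by hand with % formatting; building a dict
+# and calling json.dump yields the same structure without manual bracket and
+# comma bookkeeping. 'write_wind_json' is a hypothetical name and only a few
+# of the fields are shown.
+def write_wind_json(mat_contents, out_name):
+    """Write a subset of the TPU metadata as JSON via a plain dict."""
+    import json  # local import keeps the sketch self-contained
+    data = {
+        'windSpeed': float(mat_contents['Uh_AverageWindSpeed'][0]),
+        'depth': float(mat_contents['Building_depth'][0][0]),
+        'height': float(mat_contents['Building_height'][0][0]),
+        'breadth': float(mat_contents['Building_breadth'][0][0]),
+        'units': {'length': 'm', 'time': 'sec'},
+    }
+    with open(out_name, 'w', encoding='utf-8') as f:
+        json.dump(data, f)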
diff --git a/modules/createEVENT/IsolatedBuildingCFD/IsolatedBuildingCFD.py b/modules/createEVENT/IsolatedBuildingCFD/IsolatedBuildingCFD.py
index 8a3fae0e8..995c5c327 100644
--- a/modules/createEVENT/IsolatedBuildingCFD/IsolatedBuildingCFD.py
+++ b/modules/createEVENT/IsolatedBuildingCFD/IsolatedBuildingCFD.py
@@ -1,103 +1,88 @@
-from __future__ import print_function
-import os, sys
-import re
+import argparse # noqa: CPY001, D100, INP001
import json
-import argparse
-class FloorForces:
+
+class FloorForces: # noqa: D101
def __init__(self):
self.X = [0]
self.Y = [0]
self.Z = [0]
-def directionToDof(direction):
- """
- Converts direction to degree of freedom
- """
- directioMap = {
- "X": 1,
- "Y": 2,
- "Z": 3
- }
+
+def directionToDof(direction): # noqa: N802
+ """Converts direction to degree of freedom""" # noqa: D400, D401
+ directioMap = {'X': 1, 'Y': 2, 'Z': 3} # noqa: N806
return directioMap[direction]
-def addFloorForceToEvent(patternsArray, force, direction, floor):
- """
- Add force (one component) time series and pattern in the event file
- """
- seriesName = "WindForceSeries_" + str(floor) + direction
- patternName = "WindForcePattern_" + str(floor) + direction
+def addFloorForceToEvent(patternsArray, force, direction, floor): # noqa: ARG001, N802, N803
+ """Add force (one component) time series and pattern in the event file""" # noqa: D400
+ seriesName = 'WindForceSeries_' + str(floor) + direction # noqa: N806
+ patternName = 'WindForcePattern_' + str(floor) + direction # noqa: N806
pattern = {
- "name": patternName,
- "timeSeries": seriesName,
- "type": "WindFloorLoad",
- "floor": str(floor),
- "dof": directionToDof(direction)
+ 'name': patternName,
+ 'timeSeries': seriesName,
+ 'type': 'WindFloorLoad',
+ 'floor': str(floor),
+ 'dof': directionToDof(direction),
}
patternsArray.append(pattern)
-def writeEVENT(forces, eventFilePath):
- """
- This method writes the EVENT.json file
- """
- patternsArray = []
- windEventJson = {
- "type" : "Wind",
- "subtype": "IsolatedBuildingCFD",
- "pattern": patternsArray,
- "pressure": [],
- "numSteps": len(forces[0].X),
- "units": {
- "force": "Newton",
- "length": "Meter",
- "time": "Sec"
- }
+def writeEVENT(forces, eventFilePath): # noqa: N802, N803
+ """This method writes the EVENT.json file""" # noqa: D400, D401, D404
+ patternsArray = [] # noqa: N806
+ windEventJson = { # noqa: N806
+ 'type': 'Wind',
+ 'subtype': 'IsolatedBuildingCFD',
+ 'pattern': patternsArray,
+ 'pressure': [],
+ 'numSteps': len(forces[0].X),
+ 'units': {'force': 'Newton', 'length': 'Meter', 'time': 'Sec'},
}
- #Creating the event dictionary that will be used to export the EVENT json file
- eventDict = {"randomVariables":[], "Events": [windEventJson]}
+ # Creating the event dictionary that will be used to export the EVENT json file
+ eventDict = {'randomVariables': [], 'Events': [windEventJson]} # noqa: N806
- #Adding floor forces
- for floorForces in forces:
+ # Adding floor forces
+ for floorForces in forces: # noqa: N806
floor = forces.index(floorForces) + 1
- addFloorForceToEvent(patternsArray, floorForces.X, "X", floor)
- addFloorForceToEvent(patternsArray, floorForces.Y, "Y", floor)
+ addFloorForceToEvent(patternsArray, floorForces.X, 'X', floor)
+ addFloorForceToEvent(patternsArray, floorForces.Y, 'Y', floor)
- with open(eventFilePath, "w") as eventsFile:
+ with open(eventFilePath, 'w') as eventsFile: # noqa: N806, PLW1514, PTH123
json.dump(eventDict, eventsFile)
-def GetFloorsCount(BIMFilePath):
- with open(BIMFilePath,'r', encoding='utf-8') as BIMFile:
- bim = json.load(BIMFile)
+def GetFloorsCount(BIMFilePath): # noqa: N802, N803, D103
+ with open(BIMFilePath, encoding='utf-8') as BIMFile: # noqa: PTH123, N806
+ bim = json.load(BIMFile)
- return int(bim["GeneralInformation"]["stories"])
-
-if __name__ == "__main__":
+ return int(bim['GeneralInformation']['stories'])
+
+
+if __name__ == '__main__':
"""
Entry point to generate event file using CFD
"""
- #CLI parser
- parser = argparse.ArgumentParser(description="Get sample EVENT file produced by CFD")
- parser.add_argument('-b', '--filenameAIM', help="BIM File", required=True)
- parser.add_argument('-e', '--filenameEVENT', help= "Event File", required=True)
- parser.add_argument('--getRV', help= "getRV", required=False, action='store_true')
-
- #parsing arguments
+ # CLI parser
+ parser = argparse.ArgumentParser(
+ description='Get sample EVENT file produced by CFD'
+ )
+ parser.add_argument('-b', '--filenameAIM', help='BIM File', required=True)
+ parser.add_argument('-e', '--filenameEVENT', help='Event File', required=True)
+ parser.add_argument('--getRV', help='getRV', required=False, action='store_true')
+
+ # parsing arguments
arguments, unknowns = parser.parse_known_args()
- if arguments.getRV == True:
- #Read the number of floors
- floorsCount = GetFloorsCount(arguments.filenameAIM)
+ if arguments.getRV == True: # noqa: E712
+ # Read the number of floors
+ floorsCount = GetFloorsCount(arguments.filenameAIM) # noqa: N816
forces = []
- for i in range(floorsCount):
- forces.append(FloorForces())
- #write the event file
+ for i in range(floorsCount): # noqa: B007
+ forces.append(FloorForces()) # noqa: PERF401
+ # write the event file
writeEVENT(forces, arguments.filenameEVENT)
-
-
-
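+
+
+# Editor's note: illustrative usage sketch only, not part of this patch. With
+# getRV set, the script writes one zero-valued FloorForces record per storey;
+# the helper below reproduces that output for a chosen number of floors.
+def _example_empty_event(event_path, floors=3):
+    """Write a placeholder EVENT.json with 'floors' zero-force storeys."""
+    writeEVENT([FloorForces() for _ in range(floors)], event_path)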
diff --git a/modules/createEVENT/IsolatedBuildingCFD/foam_dict_reader.py b/modules/createEVENT/IsolatedBuildingCFD/foam_dict_reader.py
index 2a5718c53..fe504a04f 100644
--- a/modules/createEVENT/IsolatedBuildingCFD/foam_dict_reader.py
+++ b/modules/createEVENT/IsolatedBuildingCFD/foam_dict_reader.py
@@ -1,73 +1,71 @@
-"""
-This script contains functions for reading and writing OpenFoam dictionaries.
+"""This script contains functions for reading and writing OpenFoam dictionaries.""" # noqa: CPY001, D404, INP001
-"""
-import numpy as np
import os
+import numpy as np
+
-def find_keyword_line(dict_lines, keyword):
-
+def find_keyword_line(dict_lines, keyword): # noqa: D103
start_line = -1
-
+
count = 0
for line in dict_lines:
- l = line.lstrip(" ")
-
+ l = line.lstrip(' ') # noqa: E741
+
if l.startswith(keyword):
start_line = count
break
-
- count += 1
+
+ count += 1 # noqa: SIM113
return start_line
-
+
+
def write_foam_field(field, file_name):
- """
- Writes a given numpy two dimensional array to OpenFOAM
- field format. It can handel the following formats:
- pointField,
- vectorField,
- tensorField,
- symmTensorField
- """
- if os.path.exists(file_name):
- os.remove(file_name)
-
- foam_file = open(file_name, "w+")
+ """Writes a given numpy two dimensional array to OpenFOAM
+ field format. It can handle the following formats:
+ pointField,
+ vectorField,
+ tensorField,
+ symmTensorField
+ """ # noqa: D205, D400, D401
+ if os.path.exists(file_name): # noqa: PTH110
+ os.remove(file_name) # noqa: PTH107
+
+ foam_file = open(file_name, 'w+') # noqa: PLW1514, PTH123, SIM115
size = np.shape(field)
- foam_file.write("{}".format(size[0]))
+ foam_file.write(f'{size[0]}')
foam_file.write('\n(')
-
+
for i in range(size[0]):
- line = "\n("
+ line = '\n('
for j in range(size[1]):
- line += " {:.6e}".format(field[i,j])
- line += ")"
+ line += f' {field[i, j]:.6e}'
+ line += ')'
foam_file.write(line)
-
- foam_file.write('\n);')
+
+ foam_file.write('\n);')
foam_file.close()
+
def write_scalar_field(field, file_name):
- """
- Writes a given one dimensional numpy array to OpenFOAM
+ """Writes a given one dimensional numpy array to OpenFOAM
scalar field format.
- """
- if os.path.exists(file_name):
- os.remove(file_name)
+ """ # noqa: D205, D401
+ if os.path.exists(file_name): # noqa: PTH110
+ os.remove(file_name) # noqa: PTH107
- foam_file = open(file_name,"w+")
+ foam_file = open(file_name, 'w+') # noqa: PLW1514, PTH123, SIM115
size = np.shape(field)
- foam_file.write("{}".format(size[0]))
+ foam_file.write(f'{size[0]}')
foam_file.write('\n(')
-
+
for i in range(size[0]):
- foam_file.write("\n {:.6e}".format(field.flatten()[i]))
-
- foam_file.write('\n);')
- foam_file.close()
\ No newline at end of file
+ foam_file.write(f'\n {field.flatten()[i]:.6e}')
+
+ foam_file.write('\n);')
+ foam_file.close()
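+
+
+# Editor's note: illustrative usage sketch only, not part of this patch. The two
+# writers above take plain numpy arrays; for example, a three-point vectorField
+# and a matching scalarField could be written as follows. The file names are
+# arbitrary examples.
+def _example_fields(out_dir='.'):
+    """Write a tiny vectorField and scalarField using the helpers above."""
+    points = np.array([[0.0, 0.0, 0.0], [1.0, 0.0, 0.0], [0.0, 1.0, 0.0]])
+    write_foam_field(points, out_dir + '/examplePoints')
+    write_scalar_field(np.array([101325.0, 101300.0, 101280.0]), out_dir + '/examplePressure')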
diff --git a/modules/createEVENT/IsolatedBuildingCFD/process_output_data.py b/modules/createEVENT/IsolatedBuildingCFD/process_output_data.py
index 5204271dc..29a634408 100644
--- a/modules/createEVENT/IsolatedBuildingCFD/process_output_data.py
+++ b/modules/createEVENT/IsolatedBuildingCFD/process_output_data.py
@@ -1,122 +1,103 @@
-"""
-This script create blockMeshDict for OpenFoam given vertices and boundary type.
+"""This script create blockMeshDict for OpenFoam given vertices and boundary type.
+code creates pressure probes for the main simulation. Three types of
+probes are created.
-code creates pressure probes for the main simulation. Three types of
-probes are created.
+""" # noqa: CPY001, D404, INP001
-"""
-
-import sys
-import os
-import subprocess
import json
-import stat
-import shutil
-import numpy as np
-import CWE as cwe
-import matplotlib.pyplot as plt
-import matplotlib.gridspec as gridspec
+import sys
+
+import CWE as cwe # noqa: N811
import numpy as np
-import CWE as cwe
-from scipy import signal
-from scipy.interpolate import interp1d
-from scipy.interpolate import UnivariateSpline
-from scipy import stats
-def write_wind_profiles(case_path):
- inf_path = case_path + "/constant/boundaryData/windProfile/sampledData/verticalProfile/"
-
-
+def write_wind_profiles(case_path): # noqa: D103
+ inf_path = (
+ case_path + '/constant/boundaryData/windProfile/sampledData/verticalProfile/'
+ )
+
inf = cwe.VelocityData('cfd', inf_path, start_time=None, end_time=None)
-
-
- #Read JSON data for turbulence model
- wc_json_file = open(case_path + "/constant/simCenter/windCharacteristics.json")
-
+
+ # Read JSON data for turbulence model
+ wc_json_file = open(case_path + '/constant/simCenter/windCharacteristics.json') # noqa: PLW1514, PTH123, SIM115
+
# Returns JSON object as a dictionary
wind_data = json.load(wc_json_file, 'r', encoding='utf-8')
wc_json_file.close()
-
+
building_height = wind_data['buildingHeight']
-
- #Wind profile z, Uav, Iu, Lu
+
+ # Wind profile z, Uav, Iu, Lu
prof = np.zeros((len(inf.z), 4))
- prof[:,0] = inf.z
- prof[:,1] = inf.Uav
- prof[:,2] = inf.I[:,0]
- prof[:,3] = inf.L[:,0]
-
-
- #Wind velocity at roof height
- H_loc = np.argmin(np.abs(inf.z - building_height))
-
- # U, v, w in at roof height
- Uh = inf.U[H_loc, :, :].T
-
- Suh = []
-
-
+ prof[:, 0] = inf.z
+ prof[:, 1] = inf.Uav
+ prof[:, 2] = inf.I[:, 0]
+ prof[:, 3] = inf.L[:, 0]
+
+ # Wind velocity at roof height
+ H_loc = np.argmin(np.abs(inf.z - building_height)) # noqa: N806
+
+ # U, v, w in at roof height
+ Uh = inf.U[H_loc, :, :].T # noqa: N806
+
+ s_uh = []
+
for i in range(3):
f, s = cwe.psd(Uh[:, i], 0.0025, 8)
- Suh.append(np.abs(s))
-
- Suh.insert(0, f)
-
- Suhout = np.asarray(Suh, dtype=np.float32).T
-
+ s_uh.append(np.abs(s))
+
+ s_uh.insert(0, f)
+
+ Suhout = np.asarray(s_uh, dtype=np.float32).T # noqa: N806
+
write_precision = 6
- fmt ='%.{}e'.format(write_precision)
-
- prof_path = case_path + "/constant/simCenter/output/windProfiles.txt"
- Suh_path = case_path + "/constant/simCenter/output/Suh.txt"
-
+ fmt = f'%.{write_precision}e'
+
+ prof_path = case_path + '/constant/simCenter/output/windProfiles.txt'
+ s_uh_path = case_path + '/constant/simCenter/output/Suh.txt'
+
np.savetxt(prof_path, prof, fmt=fmt)
- np.savetxt(Suh_path, Suhout, fmt=fmt)
+ np.savetxt(s_uh_path, Suhout, fmt=fmt)
+
+def write_wind_loads(case_path): # noqa: D103
+ # Write base forces
+ base_forces_path = case_path + '/postProcessing/baseForces/0/forces.dat'
+ base_o, base_t, base_f, base_m = cwe.read_forces_OF10(base_forces_path) # noqa: F841
-def write_wind_loads(case_path):
- #Write base forces
- base_forces_path = case_path + "/postProcessing/baseForces/0/forces.dat"
- base_o, base_t, base_f, base_m = cwe.read_forces_OF10(base_forces_path)
-
base_forces = np.zeros((len(base_t), 3))
-
- base_forces[:,0:2] = base_f[:, 0:2]
- base_forces[:,2] = base_m[:, 2]
-
-
- #Write story forces
- story_forces_path = case_path + "/postProcessing/storyForces/0/forces_bins.dat"
- story_coord, story_t, story_f, story_m = cwe.read_bin_forces_OF10(story_forces_path)
-
-
+
+ base_forces[:, 0:2] = base_f[:, 0:2]
+ base_forces[:, 2] = base_m[:, 2]
+
+ # Write story forces
+ story_forces_path = case_path + '/postProcessing/storyForces/0/forces_bins.dat'
+ story_coord, story_t, story_f, story_m = cwe.read_bin_forces_OF10( # noqa: F841
+ story_forces_path
+ )
+
write_precision = 6
- fmt ='%.{}e'.format(write_precision)
-
- out_base_path = case_path + "/constant/simCenter/output/baseForces.txt"
-
- out_story_path_Fx = case_path + "/constant/simCenter/output/storyForcesFx.txt"
- out_story_path_Fy = case_path + "/constant/simCenter/output/storyForcesFy.txt"
- out_story_path_Mz = case_path + "/constant/simCenter/output/storyForcesMz.txt"
-
- np.savetxt(out_base_path, base_forces, fmt=fmt)
-
- np.savetxt(out_story_path_Fx, story_f[:,:,0], fmt=fmt)
- np.savetxt(out_story_path_Fy, story_f[:,:,1], fmt=fmt)
- np.savetxt(out_story_path_Mz, story_m[:,:,2], fmt=fmt)
+ fmt = f'%.{write_precision}e'
+ out_base_path = case_path + '/constant/simCenter/output/baseForces.txt'
+ out_story_path_Fx = case_path + '/constant/simCenter/output/storyForcesFx.txt' # noqa: N806
+ out_story_path_Fy = case_path + '/constant/simCenter/output/storyForcesFy.txt' # noqa: N806
+ out_story_path_Mz = case_path + '/constant/simCenter/output/storyForcesMz.txt' # noqa: N806
+ np.savetxt(out_base_path, base_forces, fmt=fmt)
-if __name__ == '__main__':
-
+ np.savetxt(out_story_path_Fx, story_f[:, :, 0], fmt=fmt)
+ np.savetxt(out_story_path_Fy, story_f[:, :, 1], fmt=fmt)
+ np.savetxt(out_story_path_Mz, story_m[:, :, 2], fmt=fmt)
+
+
+if __name__ == '__main__':
input_args = sys.argv
# Set filenames
case_path = sys.argv[1]
-
+
write_wind_profiles(case_path)
write_wind_loads(case_path)
-
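+
+
+# Editor's note: illustrative sketch only, not part of this patch. The profile
+# array that write_wind_profiles() fills column by column (z, Uav, Iu, Lu) can
+# equivalently be assembled in a single call; '_profile_table' is a
+# hypothetical name.
+def _profile_table(inf):
+    """Stack z, Uav, Iu and Lu into one (n, 4) array, matching 'prof' above."""
+    return np.column_stack((inf.z, inf.Uav, inf.I[:, 0], inf.L[:, 0]))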
diff --git a/modules/createEVENT/IsolatedBuildingCFD/setup_case.py b/modules/createEVENT/IsolatedBuildingCFD/setup_case.py
index 5f600f5e4..3a71af187 100644
--- a/modules/createEVENT/IsolatedBuildingCFD/setup_case.py
+++ b/modules/createEVENT/IsolatedBuildingCFD/setup_case.py
@@ -1,250 +1,268 @@
-"""
-This script writes BC and initial condition, and setups the OpenFoam case
+"""This script writes BC and initial condition, and setups the OpenFoam case
directory.
-"""
-import numpy as np
-import sys
-import os
+""" # noqa: CPY001, D205, D404, INP001
+
import json
+import os
+import sys
+
import foam_dict_reader as foam
+import numpy as np
from stl import mesh
-def write_block_mesh_dict(input_json_path, template_dict_path, case_path):
+def write_block_mesh_dict(input_json_path, template_dict_path, case_path): # noqa: D103
+ # Read JSON data
+ with open( # noqa: PTH123
+ input_json_path + '/IsolatedBuildingCFD.json', encoding='utf-8'
+ ) as json_file:
+ json_data = json.load(json_file)
- #Read JSON data
- with open(input_json_path + "/IsolatedBuildingCFD.json", 'r', encoding='utf-8') as json_file:
- json_data = json.load(json_file)
-
# Returns JSON object as a dictionary
- mesh_data = json_data["blockMeshParameters"]
+ mesh_data = json_data['blockMeshParameters']
geom_data = json_data['GeometricData']
boundary_data = json_data['boundaryConditions']
normalization_type = geom_data['normalizationType']
origin = np.array(geom_data['origin'])
- scale = geom_data['geometricScale']
- H = geom_data['buildingHeight']/scale #convert to model-scale
-
- Lx = geom_data['domainLength']
- Ly = geom_data['domainWidth']
- Lz = geom_data['domainHeight']
- Lf = geom_data['fetchLength']
-
+ scale = geom_data['geometricScale']
+ H = geom_data['buildingHeight'] / scale # convert to model-scale # noqa: N806
+
+ Lx = geom_data['domainLength'] # noqa: N806
+ Ly = geom_data['domainWidth'] # noqa: N806
+ Lz = geom_data['domainHeight'] # noqa: N806
+ Lf = geom_data['fetchLength'] # noqa: N806
+
x_cells = mesh_data['xNumCells']
y_cells = mesh_data['yNumCells']
z_cells = mesh_data['zNumCells']
-
+
x_grading = mesh_data['xGrading']
y_grading = mesh_data['yGrading']
z_grading = mesh_data['zGrading']
- bc_map = {"slip": 'wall', "cyclic": 'cyclic', "noSlip": 'wall',
- "symmetry": 'symmetry', "empty": 'empty', "TInf": 'patch',
- "MeanABL": 'patch', "Uniform": 'patch', "zeroPressureOutlet": 'patch',
- "roughWallFunction": 'wall',"smoothWallFunction": 'wall'}
-
-
+ bc_map = {
+ 'slip': 'wall',
+ 'cyclic': 'cyclic',
+ 'noSlip': 'wall',
+ 'symmetry': 'symmetry',
+ 'empty': 'empty',
+ 'TInf': 'patch',
+ 'MeanABL': 'patch',
+ 'Uniform': 'patch',
+ 'zeroPressureOutlet': 'patch',
+ 'roughWallFunction': 'wall',
+ 'smoothWallFunction': 'wall',
+ }
inlet_type = bc_map[boundary_data['inletBoundaryCondition']]
outlet_type = bc_map[boundary_data['outletBoundaryCondition']]
- ground_type = bc_map[boundary_data['groundBoundaryCondition']]
+ ground_type = bc_map[boundary_data['groundBoundaryCondition']]
top_type = bc_map[boundary_data['topBoundaryCondition']]
front_type = bc_map[boundary_data['sidesBoundaryCondition']]
back_type = bc_map[boundary_data['sidesBoundaryCondition']]
-
+
length_unit = json_data['lengthUnit']
- if normalization_type == "Relative":
- Lx = Lx*H
- Ly = Ly*H
- Lz = Lz*H
- Lf = Lf*H
- origin = origin*H
-
+ if normalization_type == 'Relative':
+ Lx = Lx * H # noqa: N806, PLR6104
+ Ly = Ly * H # noqa: N806, PLR6104
+ Lz = Lz * H # noqa: N806, PLR6104
+ Lf = Lf * H # noqa: N806, PLR6104
+ origin = origin * H # noqa: PLR6104
+
x_min = -Lf - origin[0]
- y_min = -Ly/2.0 - origin[1]
- z_min = 0.0 - origin[2]
+ y_min = -Ly / 2.0 - origin[1]
+ z_min = 0.0 - origin[2]
x_max = x_min + Lx
y_max = y_min + Ly
z_max = z_min + Lz
- #Open the template blockMeshDict (OpenFOAM file) for manipulation
- dict_file = open(template_dict_path + "/blockMeshDictTemplate", "r")
+ # Open the template blockMeshDict (OpenFOAM file) for manipulation
+ dict_file = open(template_dict_path + '/blockMeshDictTemplate') # noqa: PLW1514, PTH123, SIM115
- #Export to OpenFOAM probe format
+ # Export to OpenFOAM probe format
dict_lines = dict_file.readlines()
dict_file.close()
-
- dict_lines[17] = "\txMin\t\t{:.4f};\n".format(x_min)
- dict_lines[18] = "\tyMin\t\t{:.4f};\n".format(y_min)
- dict_lines[19] = "\tzMin\t\t{:.4f};\n".format(z_min)
+ dict_lines[17] = f'\txMin\t\t{x_min:.4f};\n'
+ dict_lines[18] = f'\tyMin\t\t{y_min:.4f};\n'
+ dict_lines[19] = f'\tzMin\t\t{z_min:.4f};\n'
- dict_lines[20] = "\txMax\t\t{:.4f};\n".format(x_max)
- dict_lines[21] = "\tyMax\t\t{:.4f};\n".format(y_max)
- dict_lines[22] = "\tzMax\t\t{:.4f};\n".format(z_max)
+ dict_lines[20] = f'\txMax\t\t{x_max:.4f};\n'
+ dict_lines[21] = f'\tyMax\t\t{y_max:.4f};\n'
+ dict_lines[22] = f'\tzMax\t\t{z_max:.4f};\n'
+ dict_lines[23] = f'\txCells\t\t{x_cells:d};\n'
+ dict_lines[24] = f'\tyCells\t\t{y_cells:d};\n'
+ dict_lines[25] = f'\tzCells\t\t{z_cells:d};\n'
- dict_lines[23] = "\txCells\t\t{:d};\n".format(x_cells)
- dict_lines[24] = "\tyCells\t\t{:d};\n".format(y_cells)
- dict_lines[25] = "\tzCells\t\t{:d};\n".format(z_cells)
-
- dict_lines[26] = "\txGrading\t{:.4f};\n".format(x_grading)
- dict_lines[27] = "\tyGrading\t{:.4f};\n".format(y_grading)
- dict_lines[28] = "\tzGrading\t{:.4f};\n".format(z_grading)
+ dict_lines[26] = f'\txGrading\t{x_grading:.4f};\n'
+ dict_lines[27] = f'\tyGrading\t{y_grading:.4f};\n'
+ dict_lines[28] = f'\tzGrading\t{z_grading:.4f};\n'
convert_to_meters = 1.0
- if length_unit=='m':
+ if length_unit == 'm':
convert_to_meters = 1.0
- elif length_unit=='cm':
+ elif length_unit == 'cm':
convert_to_meters = 0.01
- elif length_unit=='mm':
+ elif length_unit == 'mm':
convert_to_meters = 0.001
- elif length_unit=='ft':
+ elif length_unit == 'ft':
convert_to_meters = 0.3048
- elif length_unit=='in':
+ elif length_unit == 'in':
convert_to_meters = 0.0254
- dict_lines[31] = "convertToMeters {:.4f};\n".format(convert_to_meters)
- dict_lines[61] = " type {};\n".format(inlet_type)
- dict_lines[70] = " type {};\n".format(outlet_type)
- dict_lines[79] = " type {};\n".format(ground_type)
- dict_lines[88] = " type {};\n".format(top_type)
- dict_lines[97] = " type {};\n".format(front_type)
- dict_lines[106] = " type {};\n".format(back_type)
-
-
- write_file_name = case_path + "/system/blockMeshDict"
-
- if os.path.exists(write_file_name):
- os.remove(write_file_name)
-
- output_file = open(write_file_name, "w+")
+ dict_lines[31] = f'convertToMeters {convert_to_meters:.4f};\n'
+ dict_lines[61] = f' type {inlet_type};\n'
+ dict_lines[70] = f' type {outlet_type};\n'
+ dict_lines[79] = f' type {ground_type};\n'
+ dict_lines[88] = f' type {top_type};\n'
+ dict_lines[97] = f' type {front_type};\n'
+ dict_lines[106] = f' type {back_type};\n'
+
+ write_file_name = case_path + '/system/blockMeshDict'
+
+ if os.path.exists(write_file_name): # noqa: PTH110
+ os.remove(write_file_name) # noqa: PTH107
+
+ output_file = open(write_file_name, 'w+') # noqa: PLW1514, PTH123, SIM115
for line in dict_lines:
output_file.write(line)
output_file.close()
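
For orientation, a small worked sketch of the domain-extent arithmetic above (the numbers are hypothetical, not taken from any input file): with a model-scale height H = 0.4 m, relative lengths Lx = 10, Ly = 5, Lz = 5, Lf = 3 and origin (0, 0, 0), the bounding box becomes x in [-1.2, 2.8], y in [-1.0, 1.0] and z in [0.0, 2.0].

    import numpy as np

    H = 0.4                                        # hypothetical model-scale building height [m]
    Lx, Ly, Lz, Lf = 10 * H, 5 * H, 5 * H, 3 * H   # 'Relative' normalization: multiples of H
    origin = np.array([0.0, 0.0, 0.0])

    x_min = -Lf - origin[0]                        # -1.2
    y_min = -Ly / 2.0 - origin[1]                  # -1.0
    z_min = 0.0 - origin[2]                        #  0.0
    x_max, y_max, z_max = x_min + Lx, y_min + Ly, z_min + Lz   # 2.8, 1.0, 2.0
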
+def write_building_stl_file(input_json_path, case_path): # noqa: C901, D103
+ # Read JSON data
+ with open( # noqa: PTH123
+ input_json_path + '/IsolatedBuildingCFD.json', encoding='utf-8'
+ ) as json_file:
+ json_data = json.load(json_file)
-def write_building_stl_file(input_json_path, case_path):
- #Read JSON data
- with open(input_json_path + "/IsolatedBuildingCFD.json", 'r', encoding='utf-8') as json_file:
- json_data = json.load(json_file)
-
geom_data = json_data['GeometricData']
- if geom_data["buildingShape"] == "Complex":
+ if geom_data['buildingShape'] == 'Complex':
import_building_stl_file(input_json_path, case_path)
- return
+ return
- #Else create the STL file
- scale = geom_data['geometricScale']
- length_unit = json_data['lengthUnit']
+ # Else create the STL file
+ scale = geom_data['geometricScale']
+ length_unit = json_data['lengthUnit']
convert_to_meters = 1.0
- if length_unit=='m':
+ if length_unit == 'm':
convert_to_meters = 1.0
- elif length_unit=='cm':
+ elif length_unit == 'cm':
convert_to_meters = 0.01
- elif length_unit=='mm':
+ elif length_unit == 'mm':
convert_to_meters = 0.001
- elif length_unit=='ft':
+ elif length_unit == 'ft':
convert_to_meters = 0.3048
- elif length_unit=='in':
+ elif length_unit == 'in':
convert_to_meters = 0.0254
-
- #Convert from full-scale to model-scale
- B = convert_to_meters*geom_data['buildingWidth']/scale
- D = convert_to_meters*geom_data['buildingDepth']/scale
- H = convert_to_meters*geom_data['buildingHeight']/scale
-
+
+ # Convert from full-scale to model-scale
+ B = convert_to_meters * geom_data['buildingWidth'] / scale # noqa: N806
+ D = convert_to_meters * geom_data['buildingDepth'] / scale # noqa: N806
+ H = convert_to_meters * geom_data['buildingHeight'] / scale # noqa: N806
+
normalization_type = geom_data['normalizationType']
origin = np.array(geom_data['origin'])
wind_dxn = geom_data['windDirection']
- if normalization_type == "Relative":
- origin = origin*H
-
+ if normalization_type == 'Relative':
+ origin = origin * H # noqa: PLR6104
wind_dxn_rad = np.deg2rad(wind_dxn)
- epsilon = 1.0e-5
-
+ epsilon = 1.0e-5
+
# Define the 8 vertices of the building
- vertices = np.array([[-D/2.0, -B/2.0, -epsilon],
- [+D/2.0, -B/2.0, -epsilon],
- [+D/2.0, +B/2.0, -epsilon],
- [-D/2.0, +B/2.0, -epsilon],
- [-D/2.0, -B/2.0, +H],
- [+D/2.0, -B/2.0, +H],
- [+D/2.0, +B/2.0, +H],
- [-D/2.0, +B/2.0, +H]])
+ vertices = np.array(
+ [
+ [-D / 2.0, -B / 2.0, -epsilon],
+ [+D / 2.0, -B / 2.0, -epsilon],
+ [+D / 2.0, +B / 2.0, -epsilon],
+ [-D / 2.0, +B / 2.0, -epsilon],
+ [-D / 2.0, -B / 2.0, +H],
+ [+D / 2.0, -B / 2.0, +H],
+ [+D / 2.0, +B / 2.0, +H],
+ [-D / 2.0, +B / 2.0, +H],
+ ]
+ )
n_vertices = np.shape(vertices)[0]
- #The default coordinate system is building center.
- #Transform the preferred origin
- vertices = vertices - origin
-
- #Transform transform the vertices to account the wind direction.
+ # The default coordinate system is building center.
+    # Translate to the preferred origin
+ vertices = vertices - origin # noqa: PLR6104
+
+    # Rotate the vertices to account for the wind direction.
trans_vertices = np.zeros((n_vertices, 3))
- trans_vertices[:,2] = vertices[:,2]
-
- t_matrix = np.array([[np.cos(wind_dxn_rad), -np.sin(wind_dxn_rad)],
- [np.sin(wind_dxn_rad), np.cos(wind_dxn_rad)]])
-
+ trans_vertices[:, 2] = vertices[:, 2]
+
+ t_matrix = np.array(
+ [
+ [np.cos(wind_dxn_rad), -np.sin(wind_dxn_rad)],
+ [np.sin(wind_dxn_rad), np.cos(wind_dxn_rad)],
+ ]
+ )
+
for i in range(n_vertices):
- trans_vertices[i,0:2] = np.matmul(t_matrix, vertices[i,0:2])
-
-
+ trans_vertices[i, 0:2] = np.matmul(t_matrix, vertices[i, 0:2])
+
# Define the 12 triangles composing the rectangular building
- faces = np.array([\
- [0,3,1],
- [1,3,2],
- [0,4,7],
- [0,7,3],
- [4,5,6],
- [4,6,7],
- [5,1,2],
- [5,2,6],
- [2,3,6],
- [3,7,6],
- [0,1,5],
- [0,5,4]])
-
+ faces = np.array(
+ [
+ [0, 3, 1],
+ [1, 3, 2],
+ [0, 4, 7],
+ [0, 7, 3],
+ [4, 5, 6],
+ [4, 6, 7],
+ [5, 1, 2],
+ [5, 2, 6],
+ [2, 3, 6],
+ [3, 7, 6],
+ [0, 1, 5],
+ [0, 5, 4],
+ ]
+ )
+
# Create the mesh
bldg = mesh.Mesh(np.zeros(faces.shape[0], dtype=mesh.Mesh.dtype))
for i, f in enumerate(faces):
for j in range(3):
- bldg.vectors[i][j] = trans_vertices[f[j],:]
-
+ bldg.vectors[i][j] = trans_vertices[f[j], :]
+
# Write the mesh to file "building.stl"
- fmt = mesh.stl.Mode.ASCII # binary or ASCII format
+ fmt = mesh.stl.Mode.ASCII # binary or ASCII format
bldg.save(case_path + '/constant/geometry/building.stl', mode=fmt)
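
The wind-direction rotation above applies the standard 2-D rotation matrix about the z-axis to the x-y components of each vertex. A minimal, self-contained check of that matrix (the values are hypothetical):

    import numpy as np

    wind_dxn_rad = np.deg2rad(90.0)  # hypothetical wind direction of 90 degrees
    t_matrix = np.array(
        [
            [np.cos(wind_dxn_rad), -np.sin(wind_dxn_rad)],
            [np.sin(wind_dxn_rad), np.cos(wind_dxn_rad)],
        ]
    )
    # Rotating the corner (D/2, B/2) = (1.0, 0.5) by 90 degrees gives (-0.5, 1.0).
    print(np.matmul(t_matrix, np.array([1.0, 0.5])))
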
-def import_building_stl_file(input_json_path, case_path):
- #Read JSON data
- with open(input_json_path + "/IsolatedBuildingCFD.json", 'r', encoding='utf-8') as json_file:
- json_data = json.load(json_file)
+def import_building_stl_file(input_json_path, case_path): # noqa: D103
+ # Read JSON data
+ with open( # noqa: PTH123
+ input_json_path + '/IsolatedBuildingCFD.json', encoding='utf-8'
+ ) as json_file:
+ json_data = json.load(json_file)
- if json_data["GeometricData"]["buildingShape"] == "Simple":
- return
+ if json_data['GeometricData']['buildingShape'] == 'Simple':
+ return
# Returns JSON object as a dictionary
- stl_path = json_data["GeometricData"]["importedSTLPath"]
- scale_factor = json_data["GeometricData"]["stlScaleFactor"]
- recenter = json_data["GeometricData"]["recenterToOrigin"]
- use_stl_dimension = json_data["GeometricData"]["useSTLDimensions"]
- account_wind_direction = json_data["GeometricData"]["accountWindDirection"]
- origin = np.array(json_data["GeometricData"]['origin'])
- wind_dxn = json_data["GeometricData"]['windDirection']
+ stl_path = json_data['GeometricData']['importedSTLPath']
+ scale_factor = json_data['GeometricData']['stlScaleFactor']
+ recenter = json_data['GeometricData']['recenterToOrigin']
+ use_stl_dimension = json_data['GeometricData']['useSTLDimensions'] # noqa: F841
+ account_wind_direction = json_data['GeometricData']['accountWindDirection']
+ origin = np.array(json_data['GeometricData']['origin'])
+ wind_dxn = json_data['GeometricData']['windDirection']
wind_dxn_rad = np.deg2rad(wind_dxn)
# Using an existing closed stl file:
@@ -260,1099 +278,1142 @@ def import_building_stl_file(input_json_path, case_path):
# if use_stl_dimension:
# Data to be written
stl_summary = {
- "xMin": float(min_x),
- "xMax": float(max_x),
- "yMin": float(min_y),
- "yMax": float(max_y),
- "zMin": float(min_z),
- "zMax": float(max_z)
+ 'xMin': float(min_x),
+ 'xMax': float(max_x),
+ 'yMin': float(min_y),
+ 'yMax': float(max_y),
+ 'zMin': float(min_z),
+ 'zMax': float(max_z),
}
# Serializing json
json_object = json.dumps(stl_summary, indent=4)
-
+
# Writing to sample.json
- with open(input_json_path + "/stlGeometrySummary.json", "w", encoding='utf-8') as outfile:
+ with open( # noqa: FURB103, PTH123
+ input_json_path + '/stlGeometrySummary.json', 'w', encoding='utf-8'
+ ) as outfile:
outfile.write(json_object)
-
- #Translate the bottom center to origin
+
+ # Translate the bottom center to origin
if recenter:
- t = np.array([-((max_x - min_x)/2.0 + min_x), -((max_y - min_y)/2.0 + min_y), -min_z]) - origin/scale_factor
+ t = (
+ np.array(
+ [
+ -((max_x - min_x) / 2.0 + min_x),
+ -((max_y - min_y) / 2.0 + min_y),
+ -min_z,
+ ]
+ )
+ - origin / scale_factor
+ )
bldg_mesh.translate(t)
-
- #Account wind direction by rotation
+
+    # Account for the wind direction by rotation
if account_wind_direction:
- #Rotate about z-axis
+ # Rotate about z-axis
bldg_mesh.rotate(np.array([0, 0, 1.0]), wind_dxn_rad)
- # Scale the mesh
+ # Scale the mesh
bldg_mesh.vectors *= scale_factor
# Write the mesh to file "building.stl"
- fmt = mesh.stl.Mode.ASCII # binary or ASCII format
+ fmt = mesh.stl.Mode.ASCII # binary or ASCII format
bldg_mesh.save(case_path + '/constant/geometry/building.stl', mode=fmt)
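
The code that loads the imported STL and computes min_x .. max_z sits in a part of the function not shown here. As a hedged sketch (an assumption, not necessarily the exact code in the repository), those extents could be obtained with numpy-stl like this:

    from stl import mesh

    # Hypothetical path; the script actually uses the 'importedSTLPath' value from the JSON input.
    bldg_mesh = mesh.Mesh.from_file('building_import.stl')

    # vectors has shape (n_faces, 3, 3); reduce over faces and face-vertices.
    min_x, min_y, min_z = bldg_mesh.vectors.min(axis=(0, 1))
    max_x, max_y, max_z = bldg_mesh.vectors.max(axis=(0, 1))
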
-def write_surfaceFeaturesDict_file(input_json_path, template_dict_path, case_path):
-
- #Read JSON data
- with open(input_json_path + "/IsolatedBuildingCFD.json", 'r', encoding='utf-8') as json_file:
- json_data = json.load(json_file)
-
- # Returns JSON object as a dictionary
- domain_data = json_data["snappyHexMeshParameters"]
- building_stl_name = domain_data['buildingSTLName']
-
- #Open the template blockMeshDict (OpenFOAM file) for manipulation
- dict_file = open(template_dict_path + "/surfaceFeaturesDictTemplate", "r")
-
- #Export to OpenFOAM probe format
- dict_lines = dict_file.readlines()
- dict_file.close()
-
- #Write 'addLayers' switch
- start_index = foam.find_keyword_line(dict_lines, "surfaces")
- dict_lines[start_index] = "surfaces (\"{}.stl\");\n".format(building_stl_name)
-
- #Write edited dict to file
- write_file_name = case_path + "/system/surfaceFeaturesDict"
-
- if os.path.exists(write_file_name):
- os.remove(write_file_name)
-
- output_file = open(write_file_name, "w+")
- for line in dict_lines:
- output_file.write(line)
-
- output_file.close()
-
-def write_snappy_hex_mesh_dict(input_json_path, template_dict_path, case_path):
-
- #Read JSON data
- with open(input_json_path + "/IsolatedBuildingCFD.json", 'r', encoding='utf-8') as json_file:
- json_data = json.load(json_file)
-
+
+def write_surfaceFeaturesDict_file(input_json_path, template_dict_path, case_path): # noqa: N802, D103
+ # Read JSON data
+ with open( # noqa: PTH123
+ input_json_path + '/IsolatedBuildingCFD.json', encoding='utf-8'
+ ) as json_file:
+ json_data = json.load(json_file)
+
+ # Returns JSON object as a dictionary
+ domain_data = json_data['snappyHexMeshParameters']
+ building_stl_name = domain_data['buildingSTLName']
+
+    # Open the template surfaceFeaturesDict (OpenFOAM file) for manipulation
+ dict_file = open(template_dict_path + '/surfaceFeaturesDictTemplate') # noqa: PLW1514, PTH123, SIM115
+
+    # Read the template file content
+ dict_lines = dict_file.readlines()
+ dict_file.close()
+
+    # Write the 'surfaces' entry pointing to the building STL
+ start_index = foam.find_keyword_line(dict_lines, 'surfaces')
+ dict_lines[start_index] = f'surfaces ("{building_stl_name}.stl");\n'
+
+ # Write edited dict to file
+ write_file_name = case_path + '/system/surfaceFeaturesDict'
+
+ if os.path.exists(write_file_name): # noqa: PTH110
+ os.remove(write_file_name) # noqa: PTH107
+
+ output_file = open(write_file_name, 'w+') # noqa: PLW1514, PTH123, SIM115
+ for line in dict_lines:
+ output_file.write(line)
+
+ output_file.close()
+
+
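
The helper foam.find_keyword_line used throughout these writers comes from the module's foam utilities, whose implementation is not shown here. Assuming it returns the index of the first template line containing the keyword, a minimal stand-in would look like the sketch below (assumed behaviour, not the actual implementation):

    def find_keyword_line(dict_lines, keyword):
        """Return the index of the first line containing `keyword` (assumed behaviour)."""
        for i, line in enumerate(dict_lines):
            if keyword in line:
                return i
        return -1  # keyword not found
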
+def write_snappy_hex_mesh_dict(input_json_path, template_dict_path, case_path): # noqa: D103
+ # Read JSON data
+ with open( # noqa: PTH123
+ input_json_path + '/IsolatedBuildingCFD.json', encoding='utf-8'
+ ) as json_file:
+ json_data = json.load(json_file)
+
# Returns JSON object as a dictionary
- mesh_data = json_data["snappyHexMeshParameters"]
+ mesh_data = json_data['snappyHexMeshParameters']
geom_data = json_data['GeometricData']
- scale = geom_data['geometricScale']
- H = geom_data['buildingHeight']/scale #convert to model-scale
-
- Lx = geom_data['domainLength']
- Ly = geom_data['domainWidth']
- Lz = geom_data['domainHeight']
- Lf = geom_data['fetchLength']
-
+ scale = geom_data['geometricScale']
+ H = geom_data['buildingHeight'] / scale # convert to model-scale # noqa: N806
+
+ Lx = geom_data['domainLength'] # noqa: N806
+ Ly = geom_data['domainWidth'] # noqa: N806
+ Lz = geom_data['domainHeight'] # noqa: N806
+ Lf = geom_data['fetchLength'] # noqa: N806
+
normalization_type = geom_data['normalizationType']
origin = np.array(geom_data['origin'])
-
+
building_stl_name = mesh_data['buildingSTLName']
num_cells_between_levels = mesh_data['numCellsBetweenLevels']
resolve_feature_angle = mesh_data['resolveFeatureAngle']
- num_processors = mesh_data['numProcessors']
-
+ num_processors = mesh_data['numProcessors'] # noqa: F841
+
refinement_boxes = mesh_data['refinementBoxes']
-
+
add_surface_refinement = mesh_data['addSurfaceRefinement']
surface_refinement_level = mesh_data['surfaceRefinementLevel']
surface_refinement_distance = mesh_data['surfaceRefinementDistance']
refinement_surface_name = mesh_data['refinementSurfaceName']
-
+
add_edge_refinement = mesh_data['addEdgeRefinement']
edge_refinement_level = mesh_data['edgeRefinementLevel']
refinement_edge_name = mesh_data['refinementEdgeName']
-
+
add_prism_layers = mesh_data['addPrismLayers']
number_of_prism_layers = mesh_data['numberOfPrismLayers']
prism_layer_expansion_ratio = mesh_data['prismLayerExpansionRatio']
final_prism_layer_thickness = mesh_data['finalPrismLayerThickness']
prism_layer_surface_name = mesh_data['prismLayerSurfaceName']
- prism_layer_relative_size = "on"
+ prism_layer_relative_size = 'on'
- if normalization_type == "Relative":
- Lx = Lx*H
- Ly = Ly*H
- Lz = Lz*H
- Lf = Lf*H
- origin = origin*H
+ if normalization_type == 'Relative':
+ Lx = Lx * H # noqa: N806, PLR6104
+ Ly = Ly * H # noqa: N806, PLR6104
+ Lz = Lz * H # noqa: N806, PLR6104
+ Lf = Lf * H # noqa: N806, PLR6104
+ origin = origin * H # noqa: PLR6104
for i in range(len(refinement_boxes)):
for j in range(2, 8, 1):
- refinement_boxes[i][j] = refinement_boxes[i][j]*H
-
- surface_refinement_distance = surface_refinement_distance*H
-
+ refinement_boxes[i][j] = refinement_boxes[i][j] * H # noqa: PLR6104
+
+ surface_refinement_distance = surface_refinement_distance * H # noqa: PLR6104
+
x_min = -Lf - origin[0]
- y_min = -Ly/2.0 - origin[1]
- z_min = 0.0 - origin[2]
+ y_min = -Ly / 2.0 - origin[1]
+ z_min = 0.0 - origin[2]
- x_max = x_min + Lx
+ x_max = x_min + Lx # noqa: F841
y_max = y_min + Ly
- z_max = z_min + Lz
-
- inside_point = [x_min + Lf/2.0, (y_min + y_max)/2.0, H]
+ z_max = z_min + Lz # noqa: F841
- #Open the template blockMeshDict (OpenFOAM file) for manipulation
- dict_file = open(template_dict_path + "/snappyHexMeshDictTemplate", "r")
+ inside_point = [x_min + Lf / 2.0, (y_min + y_max) / 2.0, H]
- #Export to OpenFOAM probe format
+    # Open the template snappyHexMeshDict (OpenFOAM file) for manipulation
+ dict_file = open(template_dict_path + '/snappyHexMeshDictTemplate') # noqa: PLW1514, PTH123, SIM115
+
+    # Read the template file content
dict_lines = dict_file.readlines()
- dict_file.close()
-
- #Write 'addLayers' switch
- start_index = foam.find_keyword_line(dict_lines, "addLayers")
- dict_lines[start_index] = "addLayers\t{};\n".format("on" if add_prism_layers else "off")
-
- ###################### Edit Geometry Section ##############################
-
- #Add refinement box geometry
- start_index = foam.find_keyword_line(dict_lines, "geometry") + 2
- added_part = ""
- n_boxes = len(refinement_boxes)
+ dict_file.close()
+
+ # Write 'addLayers' switch
+ start_index = foam.find_keyword_line(dict_lines, 'addLayers')
+ dict_lines[start_index] = 'addLayers\t{};\n'.format(
+ 'on' if add_prism_layers else 'off'
+ )
+
+ # Edit Geometry Section ##############################
+
+ # Add refinement box geometry
+ start_index = foam.find_keyword_line(dict_lines, 'geometry') + 2
+ added_part = ''
+ n_boxes = len(refinement_boxes)
for i in range(n_boxes):
- added_part += " {}\n".format(refinement_boxes[i][0])
- added_part += " {\n"
- added_part += " type searchableBox;\n"
- added_part += " min ({:.4f} {:.4f} {:.4f});\n".format(refinement_boxes[i][2], refinement_boxes[i][3], refinement_boxes[i][4])
- added_part += " max ({:.4f} {:.4f} {:.4f});\n".format(refinement_boxes[i][5], refinement_boxes[i][6], refinement_boxes[i][7])
- added_part += " }\n"
-
+ added_part += f' {refinement_boxes[i][0]}\n'
+ added_part += ' {\n'
+ added_part += ' type searchableBox;\n'
+ added_part += f' min ({refinement_boxes[i][2]:.4f} {refinement_boxes[i][3]:.4f} {refinement_boxes[i][4]:.4f});\n'
+ added_part += f' max ({refinement_boxes[i][5]:.4f} {refinement_boxes[i][6]:.4f} {refinement_boxes[i][7]:.4f});\n'
+ added_part += ' }\n'
+
dict_lines.insert(start_index, added_part)
-
- #Add building stl geometry
- start_index = foam.find_keyword_line(dict_lines, "geometry") + 2
- added_part = ""
- added_part += " {}\n".format(building_stl_name)
- added_part += " {\n"
- added_part += " type triSurfaceMesh;\n"
- added_part += " file \"{}.stl\";\n".format(building_stl_name)
- added_part += " }\n"
-
+
+ # Add building stl geometry
+ start_index = foam.find_keyword_line(dict_lines, 'geometry') + 2
+ added_part = ''
+ added_part += f' {building_stl_name}\n'
+ added_part += ' {\n'
+ added_part += ' type triSurfaceMesh;\n'
+ added_part += f' file "{building_stl_name}.stl";\n'
+ added_part += ' }\n'
+
dict_lines.insert(start_index, added_part)
-
-
- ################# Edit castellatedMeshControls Section ####################
-
- #Write 'nCellsBetweenLevels'
- start_index = foam.find_keyword_line(dict_lines, "nCellsBetweenLevels")
- dict_lines[start_index] = " nCellsBetweenLevels {:d};\n".format(num_cells_between_levels)
-
- #Write 'resolveFeatureAngle'
- start_index = foam.find_keyword_line(dict_lines, "resolveFeatureAngle")
- dict_lines[start_index] = " resolveFeatureAngle {:d};\n".format(resolve_feature_angle)
-
- #Write 'insidePoint'
- start_index = foam.find_keyword_line(dict_lines, "insidePoint")
- dict_lines[start_index] = " insidePoint ({:.4f} {:.4f} {:.4f});\n".format(inside_point[0], inside_point[1], inside_point[2])
-
-
- #For compatibility with OpenFOAM-9 and older
- start_index = foam.find_keyword_line(dict_lines, "locationInMesh")
- dict_lines[start_index] = " locationInMesh ({:.4f} {:.4f} {:.4f});\n".format(inside_point[0], inside_point[1], inside_point[2])
-
- #Add refinement edge
- if add_edge_refinement:
- start_index = foam.find_keyword_line(dict_lines, "features") + 2
- added_part = ""
- added_part += " {\n"
- added_part += " file \"{}.eMesh\";\n".format(refinement_edge_name)
- added_part += " level {};\n".format(edge_refinement_level)
- added_part += " }\n"
-
+
+ # Edit castellatedMeshControls Section ####################
+
+ # Write 'nCellsBetweenLevels'
+ start_index = foam.find_keyword_line(dict_lines, 'nCellsBetweenLevels')
+ dict_lines[start_index] = (
+ f' nCellsBetweenLevels {num_cells_between_levels:d};\n'
+ )
+
+ # Write 'resolveFeatureAngle'
+ start_index = foam.find_keyword_line(dict_lines, 'resolveFeatureAngle')
+ dict_lines[start_index] = f' resolveFeatureAngle {resolve_feature_angle:d};\n'
+
+ # Write 'insidePoint'
+ start_index = foam.find_keyword_line(dict_lines, 'insidePoint')
+ dict_lines[start_index] = (
+ f' insidePoint ({inside_point[0]:.4f} {inside_point[1]:.4f} {inside_point[2]:.4f});\n'
+ )
+
+ # For compatibility with OpenFOAM-9 and older
+ start_index = foam.find_keyword_line(dict_lines, 'locationInMesh')
+ dict_lines[start_index] = (
+ f' locationInMesh ({inside_point[0]:.4f} {inside_point[1]:.4f} {inside_point[2]:.4f});\n'
+ )
+
+ # Add refinement edge
+ if add_edge_refinement:
+ start_index = foam.find_keyword_line(dict_lines, 'features') + 2
+ added_part = ''
+ added_part += ' {\n'
+ added_part += f' file "{refinement_edge_name}.eMesh";\n'
+ added_part += f' level {edge_refinement_level};\n'
+ added_part += ' }\n'
+
dict_lines.insert(start_index, added_part)
-
- #Add refinement surface
- if add_surface_refinement:
- start_index = foam.find_keyword_line(dict_lines, "refinementSurfaces") + 2
- added_part = ""
- added_part += " {}\n".format(refinement_surface_name)
- added_part += " {\n"
- added_part += " level ({} {});\n".format(surface_refinement_level, surface_refinement_level)
- added_part += " patchInfo\n"
- added_part += " {\n"
- added_part += " type wall;\n"
- added_part += " }\n"
- added_part += " }\n"
-
+
+ # Add refinement surface
+ if add_surface_refinement:
+ start_index = foam.find_keyword_line(dict_lines, 'refinementSurfaces') + 2
+ added_part = ''
+ added_part += f' {refinement_surface_name}\n'
+ added_part += ' {\n'
+ added_part += f' level ({surface_refinement_level} {surface_refinement_level});\n'
+ added_part += ' patchInfo\n'
+ added_part += ' {\n'
+ added_part += ' type wall;\n'
+ added_part += ' }\n'
+ added_part += ' }\n'
+
dict_lines.insert(start_index, added_part)
-
- #Add surface refinement around the building as a refinement region
+
+ # Add surface refinement around the building as a refinement region
# if surface_refinement_level > refinement_boxes[-1][1]:
- added_part = ""
- added_part += " {}\n".format(refinement_surface_name)
- added_part += " {\n"
- added_part += " mode distance;\n"
- added_part += " levels (({:.4f} {}));\n".format(surface_refinement_distance, refinement_boxes[-1][1] + 1)
- added_part += " }\n"
-
- start_index = foam.find_keyword_line(dict_lines, "refinementRegions") + 2
+ added_part = ''
+ added_part += f' {refinement_surface_name}\n'
+ added_part += ' {\n'
+ added_part += ' mode distance;\n'
+ added_part += f' levels (({surface_refinement_distance:.4f} {refinement_boxes[-1][1] + 1}));\n'
+ added_part += ' }\n'
+
+ start_index = foam.find_keyword_line(dict_lines, 'refinementRegions') + 2
dict_lines.insert(start_index, added_part)
-
- #Add box refinements
- added_part = ""
+
+ # Add box refinements
+ added_part = ''
for i in range(n_boxes):
- added_part += " {}\n".format(refinement_boxes[i][0])
- added_part += " {\n"
- added_part += " mode inside;\n"
- added_part += " level {};\n".format(refinement_boxes[i][1])
- added_part += " }\n"
-
- start_index = foam.find_keyword_line(dict_lines, "refinementRegions") + 2
+ added_part += f' {refinement_boxes[i][0]}\n'
+ added_part += ' {\n'
+ added_part += ' mode inside;\n'
+ added_part += f' level {refinement_boxes[i][1]};\n'
+ added_part += ' }\n'
+
+ start_index = foam.find_keyword_line(dict_lines, 'refinementRegions') + 2
dict_lines.insert(start_index, added_part)
-
-
- ####################### Edit PrismLayer Section ##########################
- #Add surface layers (prism layers)
- added_part = ""
- added_part += " \"{}\"\n".format(prism_layer_surface_name)
- added_part += " {\n"
- added_part += " nSurfaceLayers {};\n".format(number_of_prism_layers)
- added_part += " }\n"
-
- start_index = foam.find_keyword_line(dict_lines, "layers") + 2
+
+ # Edit PrismLayer Section ##########################
+ # Add surface layers (prism layers)
+ added_part = ''
+ added_part += f' "{prism_layer_surface_name}"\n'
+ added_part += ' {\n'
+ added_part += f' nSurfaceLayers {number_of_prism_layers};\n'
+ added_part += ' }\n'
+
+ start_index = foam.find_keyword_line(dict_lines, 'layers') + 2
dict_lines.insert(start_index, added_part)
- #Write 'relativeSizes'
- start_index = foam.find_keyword_line(dict_lines, "relativeSizes")
- dict_lines[start_index] = " relativeSizes {};\n".format(prism_layer_relative_size)
-
- #Write 'expansionRatio'
- start_index = foam.find_keyword_line(dict_lines, "expansionRatio")
- dict_lines[start_index] = " expansionRatio {:.4f};\n".format(prism_layer_expansion_ratio)
-
- #Write 'finalLayerThickness'
- start_index = foam.find_keyword_line(dict_lines, "finalLayerThickness")
- dict_lines[start_index] = " finalLayerThickness {:.4f};\n".format(final_prism_layer_thickness)
-
- #Write edited dict to file
- write_file_name = case_path + "/system/snappyHexMeshDict"
-
- if os.path.exists(write_file_name):
- os.remove(write_file_name)
-
- output_file = open(write_file_name, "w+")
+ # Write 'relativeSizes'
+ start_index = foam.find_keyword_line(dict_lines, 'relativeSizes')
+ dict_lines[start_index] = f' relativeSizes {prism_layer_relative_size};\n'
+
+ # Write 'expansionRatio'
+ start_index = foam.find_keyword_line(dict_lines, 'expansionRatio')
+ dict_lines[start_index] = (
+ f' expansionRatio {prism_layer_expansion_ratio:.4f};\n'
+ )
+
+ # Write 'finalLayerThickness'
+ start_index = foam.find_keyword_line(dict_lines, 'finalLayerThickness')
+ dict_lines[start_index] = (
+ f' finalLayerThickness {final_prism_layer_thickness:.4f};\n'
+ )
+
+ # Write edited dict to file
+ write_file_name = case_path + '/system/snappyHexMeshDict'
+
+ if os.path.exists(write_file_name): # noqa: PTH110
+ os.remove(write_file_name) # noqa: PTH107
+
+ output_file = open(write_file_name, 'w+') # noqa: PLW1514, PTH123, SIM115
for line in dict_lines:
output_file.write(line)
-
+
output_file.close()
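
To make the string assembly in the geometry loop of this function concrete, this is what it would emit for one hypothetical refinement-box entry (name, level, min xyz, max xyz):

    box = ['Box1', 3, -1.0, -1.0, 0.0, 3.0, 1.0, 1.5]  # hypothetical entry

    added_part = ''
    added_part += f'    {box[0]}\n'
    added_part += '    {\n'
    added_part += '         type searchableBox;\n'
    added_part += f'         min ({box[2]:.4f} {box[3]:.4f} {box[4]:.4f});\n'
    added_part += f'         max ({box[5]:.4f} {box[6]:.4f} {box[7]:.4f});\n'
    added_part += '    }\n'
    # Produces the snappyHexMeshDict geometry entry:
    #     Box1
    #     {
    #          type searchableBox;
    #          min (-1.0000 -1.0000 0.0000);
    #          max (3.0000 1.0000 1.5000);
    #     }
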
-def write_U_file(input_json_path, template_dict_path, case_path):
- #Read JSON data
- with open(input_json_path + "/IsolatedBuildingCFD.json", 'r', encoding='utf-8') as json_file:
- json_data = json.load(json_file)
+def write_U_file(input_json_path, template_dict_path, case_path): # noqa: N802, D103
+ # Read JSON data
+ with open( # noqa: PTH123
+ input_json_path + '/IsolatedBuildingCFD.json', encoding='utf-8'
+ ) as json_file:
+ json_data = json.load(json_file)
# Returns JSON object as a dictionary
- boundary_data = json_data["boundaryConditions"]
- wind_data = json_data["windCharacteristics"]
-
- inlet_BC_type = boundary_data['inletBoundaryCondition']
- top_BC_type = boundary_data['topBoundaryCondition']
- sides_BC_type = boundary_data['sidesBoundaryCondition']
- building_BC_type = boundary_data['buildingBoundaryCondition']
-
+ boundary_data = json_data['boundaryConditions']
+ wind_data = json_data['windCharacteristics']
+
+ inlet_BC_type = boundary_data['inletBoundaryCondition'] # noqa: N806
+ top_BC_type = boundary_data['topBoundaryCondition'] # noqa: N806
+ sides_BC_type = boundary_data['sidesBoundaryCondition'] # noqa: N806
+ building_BC_type = boundary_data['buildingBoundaryCondition'] # noqa: N806, F841
+
wind_speed = wind_data['referenceWindSpeed']
building_height = wind_data['referenceHeight']
roughness_length = wind_data['aerodynamicRoughnessLength']
- #Open the template file (OpenFOAM file) for manipulation
- dict_file = open(template_dict_path + "/UFileTemplate", "r")
+ # Open the template file (OpenFOAM file) for manipulation
+ dict_file = open(template_dict_path + '/UFileTemplate') # noqa: PLW1514, PTH123, SIM115
dict_lines = dict_file.readlines()
dict_file.close()
-
- ##################### Internal Field #########################
- #Initialize the internal fields frow a lower velocity to avoid Courant number
- #instability when the solver starts. Now %10 of roof-height wind speed is set
- start_index = foam.find_keyword_line(dict_lines, "internalField")
- dict_lines[start_index] = "internalField uniform (0 0 0);\n"
-
-
- ###################### Inlet BC ##############################
- #Write uniform
- start_index = foam.find_keyword_line(dict_lines, "inlet") + 2
-
- if inlet_BC_type == "Uniform":
- added_part = ""
- added_part += "\t type \t fixedValue;\n"
- added_part += "\t value \t uniform ({:.4f} 0 0);\n".format(wind_speed)
-
- if inlet_BC_type == "MeanABL":
- added_part = ""
- added_part += "\t type \t atmBoundaryLayerInletVelocity;\n"
- added_part += "\t Uref \t {:.4f};\n".format(wind_speed)
- added_part += "\t Zref \t {:.4f};\n".format(building_height)
- added_part += "\t zDir \t (0.0 0.0 1.0);\n"
- added_part += "\t flowDir \t (1.0 0.0 0.0);\n"
- added_part += "\t z0 uniform \t {:.4e};\n".format(roughness_length)
- added_part += "\t zGround \t uniform 0.0;\n"
-
- if inlet_BC_type == "TInf":
- added_part = ""
- added_part += "\t type \t turbulentDFMInlet;\n"
- added_part += "\t filterType \t exponential;\n"
- added_part += "\t filterFactor \t {};\n".format(4)
- added_part += "\t value \t uniform ({:.4f} 0 0);\n".format(wind_speed)
- added_part += "\t periodicInY \t {};\n".format("true")
- added_part += "\t periodicInZ \t {};\n".format("false")
- added_part += "\t constMeanU \t {};\n".format("true")
- added_part += "\t Uref \t {:.4f};\n".format(wind_speed)
-
+
+ # Internal Field #########################
+    # Initialize the internal field with a low velocity to avoid Courant number
+    # instability when the solver starts. Currently it is set to zero (0 0 0).
+ start_index = foam.find_keyword_line(dict_lines, 'internalField')
+ dict_lines[start_index] = 'internalField uniform (0 0 0);\n'
+
+ # Inlet BC ##############################
+    # Write the inlet boundary condition
+ start_index = foam.find_keyword_line(dict_lines, 'inlet') + 2
+
+ if inlet_BC_type == 'Uniform':
+ added_part = ''
+ added_part += '\t type \t fixedValue;\n'
+ added_part += f'\t value \t uniform ({wind_speed:.4f} 0 0);\n'
+
+ if inlet_BC_type == 'MeanABL':
+ added_part = ''
+ added_part += '\t type \t atmBoundaryLayerInletVelocity;\n'
+ added_part += f'\t Uref \t {wind_speed:.4f};\n'
+ added_part += f'\t Zref \t {building_height:.4f};\n'
+ added_part += '\t zDir \t (0.0 0.0 1.0);\n'
+ added_part += '\t flowDir \t (1.0 0.0 0.0);\n'
+ added_part += f'\t z0 uniform \t {roughness_length:.4e};\n'
+ added_part += '\t zGround \t uniform 0.0;\n'
+
+ if inlet_BC_type == 'TInf':
+ added_part = ''
+ added_part += '\t type \t turbulentDFMInlet;\n'
+ added_part += '\t filterType \t exponential;\n'
+ added_part += f'\t filterFactor \t {4};\n'
+ added_part += f'\t value \t uniform ({wind_speed:.4f} 0 0);\n'
+ added_part += '\t periodicInY \t {};\n'.format('true')
+ added_part += '\t periodicInZ \t {};\n'.format('false')
+ added_part += '\t constMeanU \t {};\n'.format('true')
+ added_part += f'\t Uref \t {wind_speed:.4f};\n'
+
dict_lines.insert(start_index, added_part)
- ###################### Outlet BC ##############################
-
- start_index = foam.find_keyword_line(dict_lines, "outlet") + 2
- added_part = ""
- added_part += "\t type \t inletOutlet;\n"
- added_part += "\t inletValue \t uniform (0 0 0);\n"
- added_part += "\t value \t uniform (0 0 0);\n"
-
+ # Outlet BC ##############################
+
+ start_index = foam.find_keyword_line(dict_lines, 'outlet') + 2
+ added_part = ''
+ added_part += '\t type \t inletOutlet;\n'
+ added_part += '\t inletValue \t uniform (0 0 0);\n'
+ added_part += '\t value \t uniform (0 0 0);\n'
+
dict_lines.insert(start_index, added_part)
-
- ###################### Ground BC ##############################
-
- start_index = foam.find_keyword_line(dict_lines, "ground") + 2
- added_part = ""
- added_part += "\t type \t uniformFixedValue;\n"
- added_part += "\t value \t uniform (0 0 0);\n"
- added_part += "\t uniformValue \t constant (0 0 0);\n"
-
+
+ # Ground BC ##############################
+
+ start_index = foam.find_keyword_line(dict_lines, 'ground') + 2
+ added_part = ''
+ added_part += '\t type \t uniformFixedValue;\n'
+ added_part += '\t value \t uniform (0 0 0);\n'
+ added_part += '\t uniformValue \t constant (0 0 0);\n'
+
dict_lines.insert(start_index, added_part)
-
-
- ###################### Top BC ##############################
-
- start_index = foam.find_keyword_line(dict_lines, "top") + 2
- added_part = ""
- added_part += "\t type {};\n".format(top_BC_type)
-
+
+ # Top BC ##############################
+
+ start_index = foam.find_keyword_line(dict_lines, 'top') + 2
+ added_part = ''
+ added_part += f'\t type {top_BC_type};\n'
+
dict_lines.insert(start_index, added_part)
-
- ###################### Front BC ##############################
-
- start_index = foam.find_keyword_line(dict_lines, "front") + 2
- added_part = ""
- added_part += "\t type \t {};\n".format(sides_BC_type)
-
+
+ # Front BC ##############################
+
+ start_index = foam.find_keyword_line(dict_lines, 'front') + 2
+ added_part = ''
+ added_part += f'\t type \t {sides_BC_type};\n'
+
dict_lines.insert(start_index, added_part)
-
- ###################### Back BC ##############################
-
- start_index = foam.find_keyword_line(dict_lines, "back") + 2
- added_part = ""
- added_part += "\t type {};\n".format(sides_BC_type)
-
+
+ # Back BC ##############################
+
+ start_index = foam.find_keyword_line(dict_lines, 'back') + 2
+ added_part = ''
+ added_part += f'\t type {sides_BC_type};\n'
+
dict_lines.insert(start_index, added_part)
-
- ###################### Building BC ##############################
-
- start_index = foam.find_keyword_line(dict_lines, "building") + 2
- added_part = ""
- added_part += "\t type \t {};\n".format("noSlip")
-
+
+ # Building BC ##############################
+
+ start_index = foam.find_keyword_line(dict_lines, 'building') + 2
+ added_part = ''
+ added_part += '\t type \t {};\n'.format('noSlip')
+
dict_lines.insert(start_index, added_part)
-
- #Write edited dict to file
- write_file_name = case_path + "/0/U"
-
- if os.path.exists(write_file_name):
- os.remove(write_file_name)
-
- output_file = open(write_file_name, "w+", encoding='utf-8')
+
+ # Write edited dict to file
+ write_file_name = case_path + '/0/U'
+
+ if os.path.exists(write_file_name): # noqa: PTH110
+ os.remove(write_file_name) # noqa: PTH107
+
+ output_file = open(write_file_name, 'w+', encoding='utf-8') # noqa: SIM115, PTH123
for line in dict_lines:
output_file.write(line)
-
+
output_file.close()
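
write_U_file above and the p, nut, epsilon and k writers that follow all repeat the same pattern: locate a patch name in the template, step two lines down into its braces, and insert the boundary-condition body. Purely as an illustration of that pattern (the script keeps the inline form), it could be factored into a helper like this:

    def insert_patch_bc(dict_lines, patch_name, bc_body):
        """Insert bc_body two lines below the template line naming patch_name."""
        start_index = foam.find_keyword_line(dict_lines, patch_name) + 2
        dict_lines.insert(start_index, bc_body)

    # Example: the inlet block that write_U_file builds for the 'Uniform' case,
    # with a hypothetical wind speed of 10 m/s.
    # insert_patch_bc(dict_lines, 'inlet',
    #                 '\t type \t fixedValue;\n'
    #                 '\t value \t uniform (10.0000 0 0);\n')
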
-def write_p_file(input_json_path, template_dict_path, case_path):
- #Read JSON data
- with open(input_json_path + "/IsolatedBuildingCFD.json", 'r', encoding='utf-8') as json_file:
- json_data = json.load(json_file)
+def write_p_file(input_json_path, template_dict_path, case_path): # noqa: D103
+ # Read JSON data
+ with open( # noqa: PTH123
+ input_json_path + '/IsolatedBuildingCFD.json', encoding='utf-8'
+ ) as json_file:
+ json_data = json.load(json_file)
# Returns JSON object as a dictionary
- boundary_data = json_data["boundaryConditions"]
-
- sides_BC_type = boundary_data['sidesBoundaryCondition']
- top_BC_type = boundary_data['topBoundaryCondition']
+ boundary_data = json_data['boundaryConditions']
+ sides_BC_type = boundary_data['sidesBoundaryCondition'] # noqa: N806
+ top_BC_type = boundary_data['topBoundaryCondition'] # noqa: N806
- #Open the template file (OpenFOAM file) for manipulation
- dict_file = open(template_dict_path + "/pFileTemplate", "r")
+ # Open the template file (OpenFOAM file) for manipulation
+ dict_file = open(template_dict_path + '/pFileTemplate') # noqa: PLW1514, PTH123, SIM115
dict_lines = dict_file.readlines()
dict_file.close()
-
- #BC and initial condition
- p0 = 0.0;
-
- ##################### Internal Field #########################
-
- start_index = foam.find_keyword_line(dict_lines, "internalField")
- dict_lines[start_index] = "internalField uniform {:.4f};\n".format(p0)
-
- ###################### Inlet BC ##############################
- #Write uniform
- start_index = foam.find_keyword_line(dict_lines, "inlet") + 2
- added_part = ""
- added_part += "\t type \t zeroGradient;\n"
-
+
+ # BC and initial condition
+ p0 = 0.0
+ # Internal Field #########################
+
+ start_index = foam.find_keyword_line(dict_lines, 'internalField')
+ dict_lines[start_index] = f'internalField uniform {p0:.4f};\n'
+
+ # Inlet BC ##############################
+    # Write the inlet boundary condition
+ start_index = foam.find_keyword_line(dict_lines, 'inlet') + 2
+ added_part = ''
+ added_part += '\t type \t zeroGradient;\n'
+
dict_lines.insert(start_index, added_part)
- ###################### Outlet BC ##############################
-
- start_index = foam.find_keyword_line(dict_lines, "outlet") + 2
- added_part = ""
- added_part += "\t type \t uniformFixedValue;\n"
- added_part += "\t uniformValue \t constant {:.4f};\n".format(p0)
-
+ # Outlet BC ##############################
+
+ start_index = foam.find_keyword_line(dict_lines, 'outlet') + 2
+ added_part = ''
+ added_part += '\t type \t uniformFixedValue;\n'
+ added_part += f'\t uniformValue \t constant {p0:.4f};\n'
+
dict_lines.insert(start_index, added_part)
-
- ###################### Ground BC ##############################
-
- start_index = foam.find_keyword_line(dict_lines, "ground") + 2
- added_part = ""
- added_part += "\t type \t zeroGradient;\n"
-
+
+ # Ground BC ##############################
+
+ start_index = foam.find_keyword_line(dict_lines, 'ground') + 2
+ added_part = ''
+ added_part += '\t type \t zeroGradient;\n'
+
dict_lines.insert(start_index, added_part)
-
- ###################### Top BC ##############################
-
- start_index = foam.find_keyword_line(dict_lines, "top") + 2
- added_part = ""
- added_part += "\t type \t {};\n".format(top_BC_type)
-
+
+ # Top BC ##############################
+
+ start_index = foam.find_keyword_line(dict_lines, 'top') + 2
+ added_part = ''
+ added_part += f'\t type \t {top_BC_type};\n'
+
dict_lines.insert(start_index, added_part)
-
- ###################### Front BC ##############################
-
- start_index = foam.find_keyword_line(dict_lines, "front") + 2
- added_part = ""
- added_part += "\t type \t {};\n".format(sides_BC_type)
-
+
+ # Front BC ##############################
+
+ start_index = foam.find_keyword_line(dict_lines, 'front') + 2
+ added_part = ''
+ added_part += f'\t type \t {sides_BC_type};\n'
+
dict_lines.insert(start_index, added_part)
-
- ###################### Back BC ##############################
-
- start_index = foam.find_keyword_line(dict_lines, "back") + 2
- added_part = ""
- added_part += "\t type \t {};\n".format(sides_BC_type)
-
+
+ # Back BC ##############################
+
+ start_index = foam.find_keyword_line(dict_lines, 'back') + 2
+ added_part = ''
+ added_part += f'\t type \t {sides_BC_type};\n'
+
dict_lines.insert(start_index, added_part)
-
- ###################### Building BC ##############################
-
- start_index = foam.find_keyword_line(dict_lines, "building") + 2
- added_part = ""
- added_part += "\t type \t zeroGradient;\n"
-
+
+ # Building BC ##############################
+
+ start_index = foam.find_keyword_line(dict_lines, 'building') + 2
+ added_part = ''
+ added_part += '\t type \t zeroGradient;\n'
+
dict_lines.insert(start_index, added_part)
-
-
- #Write edited dict to file
- write_file_name = case_path + "/0/p"
-
- if os.path.exists(write_file_name):
- os.remove(write_file_name)
-
- output_file = open(write_file_name, "w+")
+
+ # Write edited dict to file
+ write_file_name = case_path + '/0/p'
+
+ if os.path.exists(write_file_name): # noqa: PTH110
+ os.remove(write_file_name) # noqa: PTH107
+
+ output_file = open(write_file_name, 'w+') # noqa: PLW1514, PTH123, SIM115
for line in dict_lines:
output_file.write(line)
-
+
output_file.close()
-
-def write_nut_file(input_json_path, template_dict_path, case_path):
- #Read JSON data
- with open(input_json_path + "/IsolatedBuildingCFD.json", 'r', encoding='utf-8') as json_file:
- json_data = json.load(json_file)
+
+def write_nut_file(input_json_path, template_dict_path, case_path): # noqa: D103
+ # Read JSON data
+ with open( # noqa: PTH123
+ input_json_path + '/IsolatedBuildingCFD.json', encoding='utf-8'
+ ) as json_file:
+ json_data = json.load(json_file)
# Returns JSON object as a dictionary
- boundary_data = json_data["boundaryConditions"]
- wind_data = json_data["windCharacteristics"]
-
- sides_BC_type = boundary_data['sidesBoundaryCondition']
- top_BC_type = boundary_data['topBoundaryCondition']
- ground_BC_type = boundary_data['groundBoundaryCondition']
- building_BC_type = boundary_data['buildingBoundaryCondition']
+ boundary_data = json_data['boundaryConditions']
+ wind_data = json_data['windCharacteristics']
+
+ sides_BC_type = boundary_data['sidesBoundaryCondition'] # noqa: N806
+ top_BC_type = boundary_data['topBoundaryCondition'] # noqa: N806
+ ground_BC_type = boundary_data['groundBoundaryCondition'] # noqa: N806
+ building_BC_type = boundary_data['buildingBoundaryCondition'] # noqa: N806
# wind_speed = wind_data['roofHeightWindSpeed']
# building_height = wind_data['buildingHeight']
roughness_length = wind_data['aerodynamicRoughnessLength']
-
- #Open the template file (OpenFOAM file) for manipulation
- dict_file = open(template_dict_path + "/nutFileTemplate", "r")
+
+ # Open the template file (OpenFOAM file) for manipulation
+ dict_file = open(template_dict_path + '/nutFileTemplate') # noqa: PLW1514, PTH123, SIM115
dict_lines = dict_file.readlines()
dict_file.close()
-
- #BC and initial condition
- nut0 = 0.0
-
- ##################### Internal Field #########################
-
- start_index = foam.find_keyword_line(dict_lines, "internalField")
- dict_lines[start_index] = "internalField uniform {:.4f};\n".format(nut0)
-
- ###################### Inlet BC ##############################
- #Write uniform
- start_index = foam.find_keyword_line(dict_lines, "inlet") + 2
- added_part = ""
- added_part += "\t type \t zeroGradient;\n"
-
+
+ # BC and initial condition
+ nut0 = 0.0
+
+ # Internal Field #########################
+
+ start_index = foam.find_keyword_line(dict_lines, 'internalField')
+ dict_lines[start_index] = f'internalField uniform {nut0:.4f};\n'
+
+ # Inlet BC ##############################
+    # Write the inlet boundary condition
+ start_index = foam.find_keyword_line(dict_lines, 'inlet') + 2
+ added_part = ''
+ added_part += '\t type \t zeroGradient;\n'
+
dict_lines.insert(start_index, added_part)
- ###################### Outlet BC ##############################
-
- start_index = foam.find_keyword_line(dict_lines, "outlet") + 2
- added_part = ""
- added_part += "\t type \t uniformFixedValue;\n"
- added_part += "\t uniformValue \t constant {:.4f};\n".format(nut0)
-
+ # Outlet BC ##############################
+
+ start_index = foam.find_keyword_line(dict_lines, 'outlet') + 2
+ added_part = ''
+ added_part += '\t type \t uniformFixedValue;\n'
+ added_part += f'\t uniformValue \t constant {nut0:.4f};\n'
+
dict_lines.insert(start_index, added_part)
-
- ###################### Ground BC ##############################
-
- start_index = foam.find_keyword_line(dict_lines, "ground") + 2
-
- if ground_BC_type == "noSlip":
- added_part = ""
- added_part += "\t type \t zeroGradient;\n"
-
- if ground_BC_type == "roughWallFunction":
- added_part = ""
- added_part += "\t type \t nutkAtmRoughWallFunction;\n"
- added_part += "\t z0 \t uniform {:.4e};\n".format(roughness_length)
- added_part += "\t value \t uniform 0.0;\n"
-
- if ground_BC_type == "smoothWallFunction":
- added_part = ""
- added_part += "\t type \t nutUSpaldingWallFunction;\n"
- added_part += "\t value \t uniform 0;\n"
-
-
- dict_lines.insert(start_index, added_part)
-
- ###################### Top BC ##############################
-
- start_index = foam.find_keyword_line(dict_lines, "top") + 2
- added_part = ""
- added_part += "\t type \t {};\n".format(top_BC_type)
-
+
+ # Ground BC ##############################
+
+ start_index = foam.find_keyword_line(dict_lines, 'ground') + 2
+
+ if ground_BC_type == 'noSlip':
+ added_part = ''
+ added_part += '\t type \t zeroGradient;\n'
+
+ if ground_BC_type == 'roughWallFunction':
+ added_part = ''
+ added_part += '\t type \t nutkAtmRoughWallFunction;\n'
+ added_part += f'\t z0 \t uniform {roughness_length:.4e};\n'
+ added_part += '\t value \t uniform 0.0;\n'
+
+ if ground_BC_type == 'smoothWallFunction':
+ added_part = ''
+ added_part += '\t type \t nutUSpaldingWallFunction;\n'
+ added_part += '\t value \t uniform 0;\n'
+
+ dict_lines.insert(start_index, added_part)
+
+ # Top BC ##############################
+
+ start_index = foam.find_keyword_line(dict_lines, 'top') + 2
+ added_part = ''
+ added_part += f'\t type \t {top_BC_type};\n'
+
dict_lines.insert(start_index, added_part)
-
- ###################### Front BC ##############################
-
- start_index = foam.find_keyword_line(dict_lines, "front") + 2
- added_part = ""
- added_part += "\t type \t {};\n".format(sides_BC_type)
-
+
+ # Front BC ##############################
+
+ start_index = foam.find_keyword_line(dict_lines, 'front') + 2
+ added_part = ''
+ added_part += f'\t type \t {sides_BC_type};\n'
+
dict_lines.insert(start_index, added_part)
-
- ###################### Back BC ################################
-
- start_index = foam.find_keyword_line(dict_lines, "back") + 2
- added_part = ""
- added_part += "\t type \t {};\n".format(sides_BC_type)
-
+
+ # Back BC ################################
+
+ start_index = foam.find_keyword_line(dict_lines, 'back') + 2
+ added_part = ''
+ added_part += f'\t type \t {sides_BC_type};\n'
+
dict_lines.insert(start_index, added_part)
-
- ###################### Building BC ##############################
-
- start_index = foam.find_keyword_line(dict_lines, "building") + 2
-
- if building_BC_type == "noSlip":
- added_part = ""
- added_part += "\t type \t fixedValue;\n"
- added_part += "\t value \t uniform 0;\n"
-
- if building_BC_type == "smoothWallFunction":
- added_part = ""
- added_part += "\t type \t nutUSpaldingWallFunction;\n"
- added_part += "\t value \t uniform 0;\n"
-
- if building_BC_type == "roughWallFunction":
- added_part = ""
- added_part += "\t type \t nutkRoughWallFunction;\n"
- added_part += "\t Ks \t uniform 1e-5;\n"
- added_part += "\t Cs \t uniform 0.5;\n"
- added_part += "\t value \t uniform 0;\n"
-
+
+ # Building BC ##############################
+
+ start_index = foam.find_keyword_line(dict_lines, 'building') + 2
+
+ if building_BC_type == 'noSlip':
+ added_part = ''
+ added_part += '\t type \t fixedValue;\n'
+ added_part += '\t value \t uniform 0;\n'
+
+ if building_BC_type == 'smoothWallFunction':
+ added_part = ''
+ added_part += '\t type \t nutUSpaldingWallFunction;\n'
+ added_part += '\t value \t uniform 0;\n'
+
+ if building_BC_type == 'roughWallFunction':
+ added_part = ''
+ added_part += '\t type \t nutkRoughWallFunction;\n'
+ added_part += '\t Ks \t uniform 1e-5;\n'
+ added_part += '\t Cs \t uniform 0.5;\n'
+ added_part += '\t value \t uniform 0;\n'
+
dict_lines.insert(start_index, added_part)
-
- #Write edited dict to file
- write_file_name = case_path + "/0/nut"
-
- if os.path.exists(write_file_name):
- os.remove(write_file_name)
-
- output_file = open(write_file_name, "w+")
+
+ # Write edited dict to file
+ write_file_name = case_path + '/0/nut'
+
+ if os.path.exists(write_file_name): # noqa: PTH110
+ os.remove(write_file_name) # noqa: PTH107
+
+ output_file = open(write_file_name, 'w+') # noqa: PLW1514, PTH123, SIM115
for line in dict_lines:
output_file.write(line)
output_file.close()
-def write_epsilon_file(input_json_path, template_dict_path, case_path):
- #Read JSON data
- with open(input_json_path + "/IsolatedBuildingCFD.json", 'r', encoding='utf-8') as json_file:
- json_data = json.load(json_file)
+def write_epsilon_file(input_json_path, template_dict_path, case_path): # noqa: D103
+ # Read JSON data
+ with open( # noqa: PTH123
+ input_json_path + '/IsolatedBuildingCFD.json', encoding='utf-8'
+ ) as json_file:
+ json_data = json.load(json_file)
# Returns JSON object as a dictionary
- boundary_data = json_data["boundaryConditions"]
- wind_data = json_data["windCharacteristics"]
-
- sides_BC_type = boundary_data['sidesBoundaryCondition']
- top_BC_type = boundary_data['topBoundaryCondition']
- ground_BC_type = boundary_data['groundBoundaryCondition']
- building_BC_type = boundary_data['buildingBoundaryCondition']
+ boundary_data = json_data['boundaryConditions']
+ wind_data = json_data['windCharacteristics']
+
+ sides_BC_type = boundary_data['sidesBoundaryCondition'] # noqa: N806
+ top_BC_type = boundary_data['topBoundaryCondition'] # noqa: N806
+ ground_BC_type = boundary_data['groundBoundaryCondition'] # noqa: N806
+ building_BC_type = boundary_data['buildingBoundaryCondition'] # noqa: N806
wind_speed = wind_data['referenceWindSpeed']
building_height = wind_data['referenceHeight']
roughness_length = wind_data['aerodynamicRoughnessLength']
-
- #Open the template file (OpenFOAM file) for manipulation
- dict_file = open(template_dict_path + "/epsilonFileTemplate", "r")
+
+ # Open the template file (OpenFOAM file) for manipulation
+ dict_file = open(template_dict_path + '/epsilonFileTemplate') # noqa: PLW1514, PTH123, SIM115
dict_lines = dict_file.readlines()
dict_file.close()
-
- #BC and initial condition
- epsilon0 = 0.01
-
- ##################### Internal Field #########################
-
- start_index = foam.find_keyword_line(dict_lines, "internalField")
- dict_lines[start_index] = "internalField uniform {:.4f};\n".format(epsilon0)
-
- ###################### Inlet BC ##############################
- #Write uniform
- start_index = foam.find_keyword_line(dict_lines, "inlet") + 2
- added_part = ""
- added_part += "\t type \t atmBoundaryLayerInletEpsilon;\n"
- added_part += "\t Uref \t {:.4f};\n".format(wind_speed)
- added_part += "\t Zref \t {:.4f};\n".format(building_height)
- added_part += "\t zDir \t (0.0 0.0 1.0);\n"
- added_part += "\t flowDir \t (1.0 0.0 0.0);\n"
- added_part += "\t z0 \t uniform {:.4e};\n".format(roughness_length)
- added_part += "\t zGround \t uniform 0.0;\n"
-
+
+ # BC and initial condition
+ epsilon0 = 0.01
+
+ # Internal Field #########################
+
+ start_index = foam.find_keyword_line(dict_lines, 'internalField')
+ dict_lines[start_index] = f'internalField uniform {epsilon0:.4f};\n'
+
+ # Inlet BC ##############################
+    # Write the inlet boundary condition
+ start_index = foam.find_keyword_line(dict_lines, 'inlet') + 2
+ added_part = ''
+ added_part += '\t type \t atmBoundaryLayerInletEpsilon;\n'
+ added_part += f'\t Uref \t {wind_speed:.4f};\n'
+ added_part += f'\t Zref \t {building_height:.4f};\n'
+ added_part += '\t zDir \t (0.0 0.0 1.0);\n'
+ added_part += '\t flowDir \t (1.0 0.0 0.0);\n'
+ added_part += f'\t z0 \t uniform {roughness_length:.4e};\n'
+ added_part += '\t zGround \t uniform 0.0;\n'
+
dict_lines.insert(start_index, added_part)
- ###################### Outlet BC ##############################
-
- start_index = foam.find_keyword_line(dict_lines, "outlet") + 2
- added_part = ""
- added_part += "\t type \t inletOutlet;\n"
- added_part += "\t inletValue \t uniform {:.4f};\n".format(epsilon0)
- added_part += "\t value \t uniform {:.4f};\n".format(epsilon0)
-
+ # Outlet BC ##############################
+
+ start_index = foam.find_keyword_line(dict_lines, 'outlet') + 2
+ added_part = ''
+ added_part += '\t type \t inletOutlet;\n'
+ added_part += f'\t inletValue \t uniform {epsilon0:.4f};\n'
+ added_part += f'\t value \t uniform {epsilon0:.4f};\n'
+
dict_lines.insert(start_index, added_part)
-
- ###################### Ground BC ##############################
-
- start_index = foam.find_keyword_line(dict_lines, "ground") + 2
-
- if ground_BC_type == "noSlip":
- added_part = ""
- added_part += "\t type \t zeroGradient;\n"
-
- if ground_BC_type == "roughWallFunction":
- added_part = ""
- added_part += "\t type \t epsilonWallFunction;\n"
- added_part += "\t Cmu \t {:.4f};\n".format(0.09)
- added_part += "\t kappa \t {:.4f};\n".format(0.41)
- added_part += "\t E \t {:.4f};\n".format(9.8)
- added_part += "\t value \t uniform {:.4f};\n".format(epsilon0)
-
- #Note: Should be replaced with smooth wall function for epsilon,
+
+ # Ground BC ##############################
+
+ start_index = foam.find_keyword_line(dict_lines, 'ground') + 2
+
+ if ground_BC_type == 'noSlip':
+ added_part = ''
+ added_part += '\t type \t zeroGradient;\n'
+
+ if ground_BC_type == 'roughWallFunction':
+ added_part = ''
+ added_part += '\t type \t epsilonWallFunction;\n'
+ added_part += f'\t Cmu \t {0.09:.4f};\n'
+ added_part += f'\t kappa \t {0.41:.4f};\n'
+ added_part += f'\t E \t {9.8:.4f};\n'
+ added_part += f'\t value \t uniform {epsilon0:.4f};\n'
+
+ # Note: Should be replaced with smooth wall function for epsilon,
# now the same with rough wall function.
- if ground_BC_type == "smoothWallFunction":
- added_part = ""
- added_part += "\t type \t epsilonWallFunction;\n"
- added_part += "\t Cmu \t {:.4f};\n".format(0.09)
- added_part += "\t kappa \t {:.4f};\n".format(0.41)
- added_part += "\t E \t {:.4f};\n".format(9.8)
- added_part += "\t value \t uniform {:.4f};\n".format(epsilon0)
- dict_lines.insert(start_index, added_part)
-
- ###################### Top BC ##############################
-
- start_index = foam.find_keyword_line(dict_lines, "top") + 2
- added_part = ""
- added_part += "\t type \t {};\n".format(top_BC_type)
-
+ if ground_BC_type == 'smoothWallFunction':
+ added_part = ''
+ added_part += '\t type \t epsilonWallFunction;\n'
+ added_part += f'\t Cmu \t {0.09:.4f};\n'
+ added_part += f'\t kappa \t {0.41:.4f};\n'
+ added_part += f'\t E \t {9.8:.4f};\n'
+ added_part += f'\t value \t uniform {epsilon0:.4f};\n'
+ dict_lines.insert(start_index, added_part)
+
+ # Top BC ##############################
+
+ start_index = foam.find_keyword_line(dict_lines, 'top') + 2
+ added_part = ''
+ added_part += f'\t type \t {top_BC_type};\n'
+
dict_lines.insert(start_index, added_part)
-
- ###################### Front BC ##############################
-
- start_index = foam.find_keyword_line(dict_lines, "front") + 2
- added_part = ""
- added_part += "\t type \t {};\n".format(sides_BC_type)
-
+
+ # Front BC ##############################
+
+ start_index = foam.find_keyword_line(dict_lines, 'front') + 2
+ added_part = ''
+ added_part += f'\t type \t {sides_BC_type};\n'
+
dict_lines.insert(start_index, added_part)
-
- ###################### Back BC ################################
-
- start_index = foam.find_keyword_line(dict_lines, "back") + 2
- added_part = ""
- added_part += "\t type \t {};\n".format(sides_BC_type)
-
+
+ # Back BC ################################
+
+ start_index = foam.find_keyword_line(dict_lines, 'back') + 2
+ added_part = ''
+ added_part += f'\t type \t {sides_BC_type};\n'
+
dict_lines.insert(start_index, added_part)
-
- ###################### Building BC ##############################
-
- start_index = foam.find_keyword_line(dict_lines, "building") + 2
-
- if building_BC_type == "noSlip":
- added_part = ""
- added_part += "\t type \t zeroGradient;\n"
-
- if building_BC_type == "roughWallFunction":
- added_part = ""
- added_part += "\t type \t epsilonWallFunction;\n"
- added_part += "\t Cmu \t {:.4f};\n".format(0.09)
- added_part += "\t kappa \t {:.4f};\n".format(0.4)
- added_part += "\t E \t {:.4f};\n".format(9.8)
- added_part += "\t value \t uniform {:.4f};\n".format(epsilon0)
-
- if building_BC_type == "smoothWallFunction":
- added_part = ""
- added_part += "\t type \t epsilonWallFunction;\n"
- added_part += "\t Cmu \t {:.4f};\n".format(0.09)
- added_part += "\t kappa \t {:.4f};\n".format(0.4)
- added_part += "\t E \t {:.4f};\n".format(9.8)
- added_part += "\t value \t uniform {:.4f};\n".format(epsilon0)
+ # Building BC ##############################
+
+ start_index = foam.find_keyword_line(dict_lines, 'building') + 2
+
+ if building_BC_type == 'noSlip':
+ added_part = ''
+ added_part += '\t type \t zeroGradient;\n'
+
+ if building_BC_type == 'roughWallFunction':
+ added_part = ''
+ added_part += '\t type \t epsilonWallFunction;\n'
+ added_part += f'\t Cmu \t {0.09:.4f};\n'
+ added_part += f'\t kappa \t {0.4:.4f};\n'
+ added_part += f'\t E \t {9.8:.4f};\n'
+ added_part += f'\t value \t uniform {epsilon0:.4f};\n'
+
+ if building_BC_type == 'smoothWallFunction':
+ added_part = ''
+ added_part += '\t type \t epsilonWallFunction;\n'
+ added_part += f'\t Cmu \t {0.09:.4f};\n'
+ added_part += f'\t kappa \t {0.4:.4f};\n'
+ added_part += f'\t E \t {9.8:.4f};\n'
+ added_part += f'\t value \t uniform {epsilon0:.4f};\n'
dict_lines.insert(start_index, added_part)
-
- #Write edited dict to file
- write_file_name = case_path + "/0/epsilon"
-
- if os.path.exists(write_file_name):
- os.remove(write_file_name)
-
- output_file = open(write_file_name, "w+")
+
+ # Write edited dict to file
+ write_file_name = case_path + '/0/epsilon'
+
+ if os.path.exists(write_file_name): # noqa: PTH110
+ os.remove(write_file_name) # noqa: PTH107
+
+ output_file = open(write_file_name, 'w+') # noqa: PLW1514, PTH123, SIM115
for line in dict_lines:
output_file.write(line)
output_file.close()
-def write_k_file(input_json_path, template_dict_path, case_path):
- #Read JSON data
- with open(input_json_path + "/IsolatedBuildingCFD.json", 'r', encoding='utf-8') as json_file:
- json_data = json.load(json_file)
+def write_k_file(input_json_path, template_dict_path, case_path): # noqa: D103
+ # Read JSON data
+ with open( # noqa: PTH123
+ input_json_path + '/IsolatedBuildingCFD.json', encoding='utf-8'
+ ) as json_file:
+ json_data = json.load(json_file)
# Returns JSON object as a dictionary
- boundary_data = json_data["boundaryConditions"]
- wind_data = json_data["windCharacteristics"]
-
- sides_BC_type = boundary_data['sidesBoundaryCondition']
- top_BC_type = boundary_data['topBoundaryCondition']
- ground_BC_type = boundary_data['groundBoundaryCondition']
- building_BC_type = boundary_data['buildingBoundaryCondition']
+ boundary_data = json_data['boundaryConditions']
+ wind_data = json_data['windCharacteristics']
+
+ sides_BC_type = boundary_data['sidesBoundaryCondition'] # noqa: N806
+ top_BC_type = boundary_data['topBoundaryCondition'] # noqa: N806
+ ground_BC_type = boundary_data['groundBoundaryCondition'] # noqa: N806
+ building_BC_type = boundary_data['buildingBoundaryCondition'] # noqa: N806
wind_speed = wind_data['referenceWindSpeed']
building_height = wind_data['referenceHeight']
roughness_length = wind_data['aerodynamicRoughnessLength']
-
- #Open the template file (OpenFOAM file) for manipulation
- dict_file = open(template_dict_path + "/kFileTemplate", "r")
+
+ # Open the template file (OpenFOAM file) for manipulation
+ dict_file = open(template_dict_path + '/kFileTemplate') # noqa: PLW1514, PTH123, SIM115
dict_lines = dict_file.readlines()
dict_file.close()
-
- #BC and initial condition (you may need to scale to model scale)
+
+ # BC and initial condition (you may need to scale to model scale)
# k0 = 1.3 #not in model scale
-
- I = 0.1
- k0 = 1.5*(I*wind_speed)**2
-
- ##################### Internal Field #########################
-
- start_index = foam.find_keyword_line(dict_lines, "internalField")
- dict_lines[start_index] = "internalField \t uniform {:.4f};\n".format(k0)
-
- ###################### Inlet BC ##############################
- #Write uniform
- start_index = foam.find_keyword_line(dict_lines, "inlet") + 2
- added_part = ""
- added_part += "\t type \t atmBoundaryLayerInletK;\n"
- added_part += "\t Uref \t {:.4f};\n".format(wind_speed)
- added_part += "\t Zref \t {:.4f};\n".format(building_height)
- added_part += "\t zDir \t (0.0 0.0 1.0);\n"
- added_part += "\t flowDir \t (1.0 0.0 0.0);\n"
- added_part += "\t z0 \t uniform {:.4e};\n".format(roughness_length)
- added_part += "\t zGround \t uniform 0.0;\n"
-
+
+ I = 0.1 # noqa: N806, E741
+ k0 = 1.5 * (I * wind_speed) ** 2
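+ # Worked example (assumed value): for a reference wind speed of 10 m/s and the
+ # turbulence intensity I = 0.1 above, k0 = 1.5 * (0.1 * 10)**2 = 1.5 m^2/s^2.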
+
+ # Internal Field #########################
+
+ start_index = foam.find_keyword_line(dict_lines, 'internalField')
+ dict_lines[start_index] = f'internalField \t uniform {k0:.4f};\n'
+
+ # Inlet BC ##############################
+ # Write uniform
+ start_index = foam.find_keyword_line(dict_lines, 'inlet') + 2
+ added_part = ''
+ added_part += '\t type \t atmBoundaryLayerInletK;\n'
+ added_part += f'\t Uref \t {wind_speed:.4f};\n'
+ added_part += f'\t Zref \t {building_height:.4f};\n'
+ added_part += '\t zDir \t (0.0 0.0 1.0);\n'
+ added_part += '\t flowDir \t (1.0 0.0 0.0);\n'
+ added_part += f'\t z0 \t uniform {roughness_length:.4e};\n'
+ added_part += '\t zGround \t uniform 0.0;\n'
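+ # Illustrative result (assumed values): Uref = 10 m/s, Zref = 10 m and
+ # z0 = 0.03 m render roughly as "Uref 10.0000; Zref 10.0000;
+ # z0 uniform 3.0000e-02;" in the atmBoundaryLayerInletK entry of 0/k.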
+
dict_lines.insert(start_index, added_part)
- ###################### Outlet BC ##############################
-
- start_index = foam.find_keyword_line(dict_lines, "outlet") + 2
- added_part = ""
- added_part += "\t type \t inletOutlet;\n"
- added_part += "\t inletValue \t uniform {:.4f};\n".format(k0)
- added_part += "\t value \t uniform {:.4f};\n".format(k0)
-
+ # Outlet BC ##############################
+
+ start_index = foam.find_keyword_line(dict_lines, 'outlet') + 2
+ added_part = ''
+ added_part += '\t type \t inletOutlet;\n'
+ added_part += f'\t inletValue \t uniform {k0:.4f};\n'
+ added_part += f'\t value \t uniform {k0:.4f};\n'
+
dict_lines.insert(start_index, added_part)
-
- ###################### Ground BC ##############################
-
- start_index = foam.find_keyword_line(dict_lines, "ground") + 2
-
- if ground_BC_type == "noSlip":
- added_part = ""
- added_part += "\t type \t zeroGradient;\n"
-
- if ground_BC_type == "smoothWallFunction":
- added_part = ""
- added_part += "\t type \t kqRWallFunction;\n"
- added_part += "\t value \t uniform {:.4f};\n".format(0.0)
-
- if ground_BC_type == "roughWallFunction":
- added_part = ""
- added_part += "\t type \t kqRWallFunction;\n"
- added_part += "\t value \t uniform {:.4f};\n".format(0.0)
+
+ # Ground BC ##############################
+
+ start_index = foam.find_keyword_line(dict_lines, 'ground') + 2
+
+ if ground_BC_type == 'noSlip':
+ added_part = ''
+ added_part += '\t type \t zeroGradient;\n'
+
+ if ground_BC_type == 'smoothWallFunction':
+ added_part = ''
+ added_part += '\t type \t kqRWallFunction;\n'
+ added_part += f'\t value \t uniform {0.0:.4f};\n'
+
+ if ground_BC_type == 'roughWallFunction':
+ added_part = ''
+ added_part += '\t type \t kqRWallFunction;\n'
+ added_part += f'\t value \t uniform {0.0:.4f};\n'
dict_lines.insert(start_index, added_part)
-
- ###################### Top BC ##############################
-
- start_index = foam.find_keyword_line(dict_lines, "top") + 2
- added_part = ""
- added_part += "\t type \t {};\n".format(top_BC_type)
-
+
+ # Top BC ##############################
+
+ start_index = foam.find_keyword_line(dict_lines, 'top') + 2
+ added_part = ''
+ added_part += f'\t type \t {top_BC_type};\n'
+
dict_lines.insert(start_index, added_part)
-
- ###################### Front BC ##############################
-
- start_index = foam.find_keyword_line(dict_lines, "front") + 2
- added_part = ""
- added_part += "\t type \t {};\n".format(sides_BC_type)
-
+
+ # Front BC ##############################
+
+ start_index = foam.find_keyword_line(dict_lines, 'front') + 2
+ added_part = ''
+ added_part += f'\t type \t {sides_BC_type};\n'
+
dict_lines.insert(start_index, added_part)
-
- ###################### Back BC ################################
-
- start_index = foam.find_keyword_line(dict_lines, "back") + 2
- added_part = ""
- added_part += "\t type \t {};\n".format(sides_BC_type)
-
+
+ # Back BC ################################
+
+ start_index = foam.find_keyword_line(dict_lines, 'back') + 2
+ added_part = ''
+ added_part += f'\t type \t {sides_BC_type};\n'
+
dict_lines.insert(start_index, added_part)
-
- ###################### Building BC ##############################
-
- start_index = foam.find_keyword_line(dict_lines, "building") + 2
-
- if building_BC_type == "noSlip":
- added_part = ""
- added_part += "\t type \t zeroGradient;\n"
-
- if building_BC_type == "smoothWallFunction":
- added_part = ""
- added_part += "\t type \t kqRWallFunction;\n"
- added_part += "\t value \t uniform {:.6f};\n".format(k0)
-
- #Note: should be replaced with k wall function for rough walls
+
+ # Building BC ##############################
+
+ start_index = foam.find_keyword_line(dict_lines, 'building') + 2
+
+ if building_BC_type == 'noSlip':
+ added_part = ''
+ added_part += '\t type \t zeroGradient;\n'
+
+ if building_BC_type == 'smoothWallFunction':
+ added_part = ''
+ added_part += '\t type \t kqRWallFunction;\n'
+ added_part += f'\t value \t uniform {k0:.6f};\n'
+
+ # Note: should be replaced with a k wall function for rough walls;
 # for now it is the same as the smooth-wall function.
- if building_BC_type == "roughWallFunction":
- added_part = ""
- added_part += "\t type \t kqRWallFunction;\n"
- added_part += "\t value \t uniform {:.6f};\n".format(k0)
+ if building_BC_type == 'roughWallFunction':
+ added_part = ''
+ added_part += '\t type \t kqRWallFunction;\n'
+ added_part += f'\t value \t uniform {k0:.6f};\n'
dict_lines.insert(start_index, added_part)
-
- #Write edited dict to file
- write_file_name = case_path + "/0/k"
-
- if os.path.exists(write_file_name):
- os.remove(write_file_name)
-
- output_file = open(write_file_name, "w+")
+
+ # Write edited dict to file
+ write_file_name = case_path + '/0/k'
+
+ if os.path.exists(write_file_name): # noqa: PTH110
+ os.remove(write_file_name) # noqa: PTH107
+
+ output_file = open(write_file_name, 'w+') # noqa: PLW1514, PTH123, SIM115
for line in dict_lines:
output_file.write(line)
output_file.close()
-
-
-def write_controlDict_file(input_json_path, template_dict_path, case_path):
- #Read JSON data
- with open(input_json_path + "/IsolatedBuildingCFD.json", 'r', encoding='utf-8') as json_file:
- json_data = json.load(json_file)
+
+def write_controlDict_file(input_json_path, template_dict_path, case_path): # noqa: N802, D103
+ # Read JSON data
+ with open( # noqa: PTH123
+ input_json_path + '/IsolatedBuildingCFD.json', encoding='utf-8'
+ ) as json_file:
+ json_data = json.load(json_file)
# Returns JSON object as a dictionary
- ns_data = json_data["numericalSetup"]
- rm_data = json_data["resultMonitoring"]
-
+ ns_data = json_data['numericalSetup']
+ rm_data = json_data['resultMonitoring']
+
solver_type = ns_data['solverType']
duration = ns_data['duration']
time_step = ns_data['timeStep']
max_courant_number = ns_data['maxCourantNumber']
adjust_time_step = ns_data['adjustTimeStep']
-
+
monitor_base_load = rm_data['monitorBaseLoad']
monitor_surface_pressure = rm_data['monitorSurfacePressure']
-
- # Need to change this for
- max_delta_t = 10*time_step
-
- #Write 10 times
+
+ # Need to change this for
+ max_delta_t = 10 * time_step
+
+ # Write 10 times
write_frequency = 10.0
- write_interval_time = duration/write_frequency
- write_interval_count = int(write_interval_time/time_step)
- purge_write = 3
-
- #Open the template file (OpenFOAM file) for manipulation
- dict_file = open(template_dict_path + "/controlDictTemplate", "r")
+ write_interval_time = duration / write_frequency
+ write_interval_count = int(write_interval_time / time_step)
+ purge_write = 3
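+ # Worked example (assumed values): duration = 100 s and time_step = 0.1 s give
+ # max_delta_t = 1 s, write_interval_time = 10 s and write_interval_count = 100.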
+
+ # Open the template file (OpenFOAM file) for manipulation
+ dict_file = open(template_dict_path + '/controlDictTemplate') # noqa: PLW1514, PTH123, SIM115
dict_lines = dict_file.readlines()
dict_file.close()
-
- #Write application type
- start_index = foam.find_keyword_line(dict_lines, "application")
- dict_lines[start_index] = "application \t{};\n".format(solver_type)
-
- #Write end time
- start_index = foam.find_keyword_line(dict_lines, "endTime")
- dict_lines[start_index] = "endTime \t{:.6f};\n".format(duration)
-
- #Write time step time
- start_index = foam.find_keyword_line(dict_lines, "deltaT")
- dict_lines[start_index] = "deltaT \t{:.6f};\n".format(time_step)
-
- #Write writeControl
- start_index = foam.find_keyword_line(dict_lines, "writeControl")
- if solver_type=="pimpleFoam" and adjust_time_step:
- dict_lines[start_index] = "writeControl \t{};\n".format("adjustableRunTime")
+
+ # Write application type
+ start_index = foam.find_keyword_line(dict_lines, 'application')
+ dict_lines[start_index] = f'application \t{solver_type};\n'
+
+ # Write end time
+ start_index = foam.find_keyword_line(dict_lines, 'endTime')
+ dict_lines[start_index] = f'endTime \t{duration:.6f};\n'
+
+ # Write time step time
+ start_index = foam.find_keyword_line(dict_lines, 'deltaT')
+ dict_lines[start_index] = f'deltaT \t{time_step:.6f};\n'
+
+ # Write writeControl
+ start_index = foam.find_keyword_line(dict_lines, 'writeControl')
+ if solver_type == 'pimpleFoam' and adjust_time_step:
+ dict_lines[start_index] = 'writeControl \t{};\n'.format('adjustableRunTime')
else:
- dict_lines[start_index] = "writeControl \t\t{};\n".format("timeStep")
-
- #Write adjustable time step or not
- start_index = foam.find_keyword_line(dict_lines, "adjustTimeStep")
- dict_lines[start_index] = "adjustTimeStep \t\t{};\n".format("yes" if adjust_time_step else "no")
-
- #Write writeInterval
- start_index = foam.find_keyword_line(dict_lines, "writeInterval")
- if solver_type=="pimpleFoam" and adjust_time_step:
- dict_lines[start_index] = "writeInterval \t{:.6f};\n".format(write_interval_time)
+ dict_lines[start_index] = 'writeControl \t\t{};\n'.format('timeStep')
+
+ # Write adjustable time step or not
+ start_index = foam.find_keyword_line(dict_lines, 'adjustTimeStep')
+ dict_lines[start_index] = 'adjustTimeStep \t\t{};\n'.format(
+ 'yes' if adjust_time_step else 'no'
+ )
+
+ # Write writeInterval
+ start_index = foam.find_keyword_line(dict_lines, 'writeInterval')
+ if solver_type == 'pimpleFoam' and adjust_time_step:
+ dict_lines[start_index] = f'writeInterval \t{write_interval_time:.6f};\n'
else:
- dict_lines[start_index] = "writeInterval \t{};\n".format(write_interval_count)
-
- #Write maxCo
- start_index = foam.find_keyword_line(dict_lines, "maxCo")
- dict_lines[start_index] = "maxCo \t{:.2f};\n".format(max_courant_number)
-
- #Write maximum time step
- start_index = foam.find_keyword_line(dict_lines, "maxDeltaT")
- dict_lines[start_index] = "maxDeltaT \t{:.6f};\n".format(max_delta_t)
-
- #Write purge write interval
- start_index = foam.find_keyword_line(dict_lines, "purgeWrite")
- dict_lines[start_index] = "purgeWrite \t{};\n".format(purge_write)
-
- ########################### Function Objects ##############################
-
- #Find function object location
- start_index = foam.find_keyword_line(dict_lines, "functions") + 2
-
- #Write story loads functionObjects
- added_part = " #includeFunc storyForces\n"
+ dict_lines[start_index] = f'writeInterval \t{write_interval_count};\n'
+
+ # Write maxCo
+ start_index = foam.find_keyword_line(dict_lines, 'maxCo')
+ dict_lines[start_index] = f'maxCo \t{max_courant_number:.2f};\n'
+
+ # Write maximum time step
+ start_index = foam.find_keyword_line(dict_lines, 'maxDeltaT')
+ dict_lines[start_index] = f'maxDeltaT \t{max_delta_t:.6f};\n'
+
+ # Write purge write interval
+ start_index = foam.find_keyword_line(dict_lines, 'purgeWrite')
+ dict_lines[start_index] = f'purgeWrite \t{purge_write};\n'
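+ # Illustrative result (same assumed values, maxCourantNumber = 1.0): for
+ # pimpleFoam with adjustable time stepping the edited controlDict contains
+ # "writeControl adjustableRunTime;", "writeInterval 10.000000;", "maxCo 1.00;",
+ # "maxDeltaT 1.000000;" and "purgeWrite 3;".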
+
+ # Function Objects ##############################
+
+ # Find function object location
+ start_index = foam.find_keyword_line(dict_lines, 'functions') + 2
+
+ # Write story loads functionObjects
+ added_part = ' #includeFunc storyForces\n'
dict_lines.insert(start_index, added_part)
- #Write base loads functionObjects
+ # Write base loads functionObjects
if monitor_base_load:
- added_part = " #includeFunc baseForces\n"
+ added_part = ' #includeFunc baseForces\n'
dict_lines.insert(start_index, added_part)
-
- #Write pressure sampling points
+
+ # Write pressure sampling points
if monitor_surface_pressure:
- added_part = " #includeFunc generatedPressureSamplingPoints\n"
- added_part += " #includeFunc importedPressureSamplingPoints\n"
+ added_part = ' #includeFunc generatedPressureSamplingPoints\n'
+ added_part += ' #includeFunc importedPressureSamplingPoints\n'
dict_lines.insert(start_index, added_part)
- #Write edited dict to file
- write_file_name = case_path + "/system/controlDict"
-
- if os.path.exists(write_file_name):
- os.remove(write_file_name)
-
- output_file = open(write_file_name, "w+")
+ # Write edited dict to file
+ write_file_name = case_path + '/system/controlDict'
+
+ if os.path.exists(write_file_name): # noqa: PTH110
+ os.remove(write_file_name) # noqa: PTH107
+
+ output_file = open(write_file_name, 'w+') # noqa: PLW1514, PTH123, SIM115
for line in dict_lines:
output_file.write(line)
output_file.close()
-
-def write_fvSolution_file(input_json_path, template_dict_path, case_path):
- #Read JSON data
- with open(input_json_path + "/IsolatedBuildingCFD.json", 'r', encoding='utf-8') as json_file:
- json_data = json.load(json_file)
+
+def write_fvSolution_file(input_json_path, template_dict_path, case_path): # noqa: N802, D103
+ # Read JSON data
+ with open( # noqa: PTH123
+ input_json_path + '/IsolatedBuildingCFD.json', encoding='utf-8'
+ ) as json_file:
+ json_data = json.load(json_file)
# Returns JSON object as a dictionary
- ns_data = json_data["numericalSetup"]
-
+ ns_data = json_data['numericalSetup']
+
json_file.close()
-
+
num_non_orthogonal_correctors = ns_data['numNonOrthogonalCorrectors']
num_correctors = ns_data['numCorrectors']
num_outer_correctors = ns_data['numOuterCorrectors']
-
- #Open the template file (OpenFOAM file) for manipulation
- dict_file = open(template_dict_path + "/fvSolutionTemplate", "r")
+
+ # Open the template file (OpenFOAM file) for manipulation
+ dict_file = open(template_dict_path + '/fvSolutionTemplate') # noqa: PLW1514, PTH123, SIM115
dict_lines = dict_file.readlines()
dict_file.close()
- #Write simpleFoam options
- start_index = foam.find_keyword_line(dict_lines, "SIMPLE") + 2
- added_part = ""
- added_part += " nNonOrthogonalCorrectors \t{};\n".format(num_non_orthogonal_correctors)
+ # Write simpleFoam options
+ start_index = foam.find_keyword_line(dict_lines, 'SIMPLE') + 2
+ added_part = ''
+ added_part += (
+ f' nNonOrthogonalCorrectors \t{num_non_orthogonal_correctors};\n'
+ )
dict_lines.insert(start_index, added_part)
-
- #Write pimpleFoam options
- start_index = foam.find_keyword_line(dict_lines, "PIMPLE") + 2
- added_part = ""
- added_part += " nOuterCorrectors \t{};\n".format(num_outer_correctors)
- added_part += " nCorrectors \t{};\n".format(num_correctors)
- added_part += " nNonOrthogonalCorrectors \t{};\n".format(num_non_orthogonal_correctors)
+
+ # Write pimpleFoam options
+ start_index = foam.find_keyword_line(dict_lines, 'PIMPLE') + 2
+ added_part = ''
+ added_part += f' nOuterCorrectors \t{num_outer_correctors};\n'
+ added_part += f' nCorrectors \t{num_correctors};\n'
+ added_part += (
+ f' nNonOrthogonalCorrectors \t{num_non_orthogonal_correctors};\n'
+ )
+ dict_lines.insert(start_index, added_part)
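+ # Illustrative result (assumed values): numOuterCorrectors = 1, numCorrectors = 2
+ # and numNonOrthogonalCorrectors = 1 render roughly as " nOuterCorrectors 1;",
+ # " nCorrectors 2;" and " nNonOrthogonalCorrectors 1;" in the PIMPLE block.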
+
+ # Write pisoFoam options
+ start_index = foam.find_keyword_line(dict_lines, 'PISO') + 2
+ added_part = ''
+ added_part += f' nCorrectors \t{num_correctors};\n'
+ added_part += (
+ f' nNonOrthogonalCorrectors \t{num_non_orthogonal_correctors};\n'
+ )
dict_lines.insert(start_index, added_part)
- #Write pisoFoam options
- start_index = foam.find_keyword_line(dict_lines, "PISO") + 2
- added_part = ""
- added_part += " nCorrectors \t{};\n".format(num_correctors)
- added_part += " nNonOrthogonalCorrectors \t{};\n".format(num_non_orthogonal_correctors)
- dict_lines.insert(start_index, added_part)
-
- #Write edited dict to file
- write_file_name = case_path + "/system/fvSolution"
-
- if os.path.exists(write_file_name):
- os.remove(write_file_name)
-
- output_file = open(write_file_name, "w+")
+ # Write edited dict to file
+ write_file_name = case_path + '/system/fvSolution'
+
+ if os.path.exists(write_file_name): # noqa: PTH110
+ os.remove(write_file_name) # noqa: PTH107
+
+ output_file = open(write_file_name, 'w+') # noqa: PLW1514, PTH123, SIM115
for line in dict_lines:
output_file.write(line)
- output_file.close()
-
+ output_file.close()
-def write_generated_pressure_probes_file(input_json_path, template_dict_path, case_path):
- #Read JSON data
- with open(input_json_path + "/IsolatedBuildingCFD.json", 'r', encoding='utf-8') as json_file:
+def write_generated_pressure_probes_file( # noqa: D103
+ input_json_path,
+ template_dict_path,
+ case_path,
+):
+ # Read JSON data
+ with open( # noqa: PTH123
+ input_json_path + '/IsolatedBuildingCFD.json', encoding='utf-8'
+ ) as json_file:
json_data = json.load(json_file)
# Returns JSON object as a dictionary
- rm_data = json_data["resultMonitoring"]
- ns_data = json_data["numericalSetup"]
+ rm_data = json_data['resultMonitoring']
+ ns_data = json_data['numericalSetup']
solver_type = ns_data['solverType']
time_step = ns_data['timeStep']
adjust_time_step = ns_data['adjustTimeStep']
@@ -1362,61 +1423,68 @@ def write_generated_pressure_probes_file(input_json_path, template_dict_path, ca
generated_sampling_points = rm_data['generatedPressureSamplingPoints']
pressure_write_interval = rm_data['pressureWriteInterval']
- #Open the template file (OpenFOAM file) for manipulation
- dict_file = open(template_dict_path + "/probeTemplate", "r")
+ # Open the template file (OpenFOAM file) for manipulation
+ dict_file = open(template_dict_path + '/probeTemplate') # noqa: PLW1514, PTH123, SIM115
dict_lines = dict_file.readlines()
dict_file.close()
- #Write writeControl
- start_index = foam.find_keyword_line(dict_lines, "writeControl")
- if solver_type=="pimpleFoam" and adjust_time_step:
- dict_lines[start_index] = "writeControl \t{};\n".format("adjustableRunTime")
+ # Write writeControl
+ start_index = foam.find_keyword_line(dict_lines, 'writeControl')
+ if solver_type == 'pimpleFoam' and adjust_time_step:
+ dict_lines[start_index] = 'writeControl \t{};\n'.format(
+ 'adjustableRunTime'
+ )
else:
- dict_lines[start_index] = "writeControl \t{};\n".format("timeStep")
-
- #Write writeInterval
- start_index = foam.find_keyword_line(dict_lines, "writeInterval")
- if solver_type=="pimpleFoam" and adjust_time_step:
- dict_lines[start_index] = "writeInterval \t{:.6f};\n".format(pressure_write_interval*time_step)
+ dict_lines[start_index] = 'writeControl \t{};\n'.format('timeStep')
+
+ # Write writeInterval
+ start_index = foam.find_keyword_line(dict_lines, 'writeInterval')
+ if solver_type == 'pimpleFoam' and adjust_time_step:
+ dict_lines[start_index] = (
+ f'writeInterval \t{pressure_write_interval * time_step:.6f};\n'
+ )
else:
- dict_lines[start_index] = "writeInterval \t{};\n".format(pressure_write_interval)
-
- #Write fields to be motored
- start_index = foam.find_keyword_line(dict_lines, "fields")
- dict_lines[start_index] = "fields \t\t(p);\n"
-
- start_index = foam.find_keyword_line(dict_lines, "probeLocations") + 2
-
- added_part = ""
-
+ dict_lines[start_index] = f'writeInterval \t{pressure_write_interval};\n'
+
+ # Write fields to be monitored
+ start_index = foam.find_keyword_line(dict_lines, 'fields')
+ dict_lines[start_index] = 'fields \t\t(p);\n'
+
+ start_index = foam.find_keyword_line(dict_lines, 'probeLocations') + 2
+
+ added_part = ''
+
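+ # Each generated sampling point becomes one probe entry; e.g. an assumed point
+ # [1.0, 0.0, 2.5] is written as " (1.000000 0.000000 2.500000)".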
for i in range(len(generated_sampling_points)):
- added_part += " ({:.6f} {:.6f} {:.6f})\n".format(generated_sampling_points[i][0], generated_sampling_points[i][1], generated_sampling_points[i][2])
-
+ added_part += f' ({generated_sampling_points[i][0]:.6f} {generated_sampling_points[i][1]:.6f} {generated_sampling_points[i][2]:.6f})\n'
+
dict_lines.insert(start_index, added_part)
- #Write edited dict to file
- write_file_name = case_path + "/system/generatedPressureSamplingPoints"
-
- if os.path.exists(write_file_name):
- os.remove(write_file_name)
-
- output_file = open(write_file_name, "w+")
+ # Write edited dict to file
+ write_file_name = case_path + '/system/generatedPressureSamplingPoints'
+
+ if os.path.exists(write_file_name): # noqa: PTH110
+ os.remove(write_file_name) # noqa: PTH107
+
+ output_file = open(write_file_name, 'w+') # noqa: PLW1514, PTH123, SIM115
for line in dict_lines:
output_file.write(line)
output_file.close()
-
-def write_imported_pressure_probes_file(input_json_path, template_dict_path, case_path):
- #Read JSON data
- with open(input_json_path + "/IsolatedBuildingCFD.json") as json_file:
+def write_imported_pressure_probes_file( # noqa: D103
+ input_json_path,
+ template_dict_path,
+ case_path,
+):
+ # Read JSON data
+ with open(input_json_path + '/IsolatedBuildingCFD.json') as json_file: # noqa: PLW1514, PTH123
json_data = json.load(json_file)
# Returns JSON object as a dictionary
- rm_data = json_data["resultMonitoring"]
- ns_data = json_data["numericalSetup"]
+ rm_data = json_data['resultMonitoring']
+ ns_data = json_data['numericalSetup']
solver_type = ns_data['solverType']
time_step = ns_data['timeStep']
adjust_time_step = ns_data['adjustTimeStep']
@@ -1426,546 +1494,566 @@ def write_imported_pressure_probes_file(input_json_path, template_dict_path, cas
imported_sampling_points = rm_data['importedPressureSamplingPoints']
pressure_write_interval = rm_data['pressureWriteInterval']
- #Open the template file (OpenFOAM file) for manipulation
- dict_file = open(template_dict_path + "/probeTemplate", "r")
+ # Open the template file (OpenFOAM file) for manipulation
+ dict_file = open(template_dict_path + '/probeTemplate') # noqa: PLW1514, PTH123, SIM115
dict_lines = dict_file.readlines()
dict_file.close()
-
- #Write writeInterval
- start_index = foam.find_keyword_line(dict_lines, "writeInterval")
- dict_lines[start_index] = "writeInterval \t{};\n".format(pressure_write_interval)
-
- #Write writeControl
- start_index = foam.find_keyword_line(dict_lines, "writeControl")
- if solver_type=="pimpleFoam" and adjust_time_step:
- dict_lines[start_index] = "writeControl \t{};\n".format("adjustableRunTime")
- else:
- dict_lines[start_index] = "writeControl \t{};\n".format("timeStep")
- #Write writeInterval
- start_index = foam.find_keyword_line(dict_lines, "writeInterval")
- if solver_type=="pimpleFoam" and adjust_time_step:
- dict_lines[start_index] = "writeInterval \t{:.6f};\n".format(pressure_write_interval*time_step)
+ # Write writeInterval
+ start_index = foam.find_keyword_line(dict_lines, 'writeInterval')
+ dict_lines[start_index] = f'writeInterval \t{pressure_write_interval};\n'
+
+ # Write writeControl
+ start_index = foam.find_keyword_line(dict_lines, 'writeControl')
+ if solver_type == 'pimpleFoam' and adjust_time_step:
+ dict_lines[start_index] = 'writeControl \t{};\n'.format(
+ 'adjustableRunTime'
+ )
+ else:
+ dict_lines[start_index] = 'writeControl \t{};\n'.format('timeStep')
+
+ # Write writeInterval
+ start_index = foam.find_keyword_line(dict_lines, 'writeInterval')
+ if solver_type == 'pimpleFoam' and adjust_time_step:
+ dict_lines[start_index] = (
+ f'writeInterval \t{pressure_write_interval * time_step:.6f};\n'
+ )
else:
- dict_lines[start_index] = "writeInterval \t{};\n".format(pressure_write_interval)
+ dict_lines[start_index] = f'writeInterval \t{pressure_write_interval};\n'
+
+ # Write fields to be monitored
+ start_index = foam.find_keyword_line(dict_lines, 'fields')
+ dict_lines[start_index] = 'fields \t\t(p);\n'
- #Write fields to be motored
- start_index = foam.find_keyword_line(dict_lines, "fields")
- dict_lines[start_index] = "fields \t\t(p);\n"
-
- start_index = foam.find_keyword_line(dict_lines, "probeLocations") + 2
+ start_index = foam.find_keyword_line(dict_lines, 'probeLocations') + 2
+
+ added_part = ''
- added_part = ""
-
for i in range(len(imported_sampling_points)):
- added_part += " ({:.6f} {:.6f} {:.6f})\n".format(imported_sampling_points[i][0], imported_sampling_points[i][1], imported_sampling_points[i][2])
-
+ added_part += f' ({imported_sampling_points[i][0]:.6f} {imported_sampling_points[i][1]:.6f} {imported_sampling_points[i][2]:.6f})\n'
+
dict_lines.insert(start_index, added_part)
- #Write edited dict to file
- write_file_name = case_path + "/system/importedPressureSamplingPoints"
-
- if os.path.exists(write_file_name):
- os.remove(write_file_name)
-
- output_file = open(write_file_name, "w+")
+ # Write edited dict to file
+ write_file_name = case_path + '/system/importedPressureSamplingPoints'
+
+ if os.path.exists(write_file_name): # noqa: PTH110
+ os.remove(write_file_name) # noqa: PTH107
+
+ output_file = open(write_file_name, 'w+') # noqa: PLW1514, PTH123, SIM115
for line in dict_lines:
output_file.write(line)
-
+
output_file.close()
-
-def write_base_forces_file(input_json_path, template_dict_path, case_path):
- #Read JSON data
- with open(input_json_path + "/IsolatedBuildingCFD.json", 'r', encoding='utf-8') as json_file:
- json_data = json.load(json_file)
+def write_base_forces_file(input_json_path, template_dict_path, case_path): # noqa: D103
+ # Read JSON data
+ with open( # noqa: PTH123
+ input_json_path + '/IsolatedBuildingCFD.json', encoding='utf-8'
+ ) as json_file:
+ json_data = json.load(json_file)
air_density = 1.0
# Returns JSON object as a dictionary
- rm_data = json_data["resultMonitoring"]
+ rm_data = json_data['resultMonitoring']
- num_stories = rm_data['numStories']
- floor_height = rm_data['floorHeight']
+ num_stories = rm_data['numStories'] # noqa: F841
+ floor_height = rm_data['floorHeight'] # noqa: F841
center_of_rotation = rm_data['centerOfRotation']
base_load_write_interval = rm_data['baseLoadWriteInterval']
- monitor_base_load = rm_data['monitorBaseLoad']
-
- #Open the template file (OpenFOAM file) for manipulation
- dict_file = open(template_dict_path + "/baseForcesTemplate", "r")
+ monitor_base_load = rm_data['monitorBaseLoad'] # noqa: F841
+
+ # Open the template file (OpenFOAM file) for manipulation
+ dict_file = open(template_dict_path + '/baseForcesTemplate') # noqa: PLW1514, PTH123, SIM115
dict_lines = dict_file.readlines()
dict_file.close()
-
- #Write writeInterval
- start_index = foam.find_keyword_line(dict_lines, "writeInterval")
- dict_lines[start_index] = "writeInterval \t{};\n".format(base_load_write_interval)
-
- #Write patch name to integrate forces on
- start_index = foam.find_keyword_line(dict_lines, "patches")
- dict_lines[start_index] = "patches \t({});\n".format("building")
-
- #Write air density to rhoInf
- start_index = foam.find_keyword_line(dict_lines, "rhoInf")
- dict_lines[start_index] = "rhoInf \t\t{:.4f};\n".format(air_density)
-
- #Write center of rotation
- start_index = foam.find_keyword_line(dict_lines, "CofR")
- dict_lines[start_index] = "CofR \t\t({:.4f} {:.4f} {:.4f});\n".format(center_of_rotation[0], center_of_rotation[1], center_of_rotation[2])
-
- #Write edited dict to file
- write_file_name = case_path + "/system/baseForces"
-
- if os.path.exists(write_file_name):
- os.remove(write_file_name)
-
- output_file = open(write_file_name, "w+")
+
+ # Write writeInterval
+ start_index = foam.find_keyword_line(dict_lines, 'writeInterval')
+ dict_lines[start_index] = f'writeInterval \t{base_load_write_interval};\n'
+
+ # Write patch name to integrate forces on
+ start_index = foam.find_keyword_line(dict_lines, 'patches')
+ dict_lines[start_index] = 'patches \t({});\n'.format('building')
+
+ # Write air density to rhoInf
+ start_index = foam.find_keyword_line(dict_lines, 'rhoInf')
+ dict_lines[start_index] = f'rhoInf \t\t{air_density:.4f};\n'
+
+ # Write center of rotation
+ start_index = foam.find_keyword_line(dict_lines, 'CofR')
+ dict_lines[start_index] = (
+ f'CofR \t\t({center_of_rotation[0]:.4f} {center_of_rotation[1]:.4f} {center_of_rotation[2]:.4f});\n'
+ )
+
+ # Write edited dict to file
+ write_file_name = case_path + '/system/baseForces'
+
+ if os.path.exists(write_file_name): # noqa: PTH110
+ os.remove(write_file_name) # noqa: PTH107
+
+ output_file = open(write_file_name, 'w+') # noqa: PLW1514, PTH123, SIM115
for line in dict_lines:
output_file.write(line)
-
+
output_file.close()
-
-def write_story_forces_file(input_json_path, template_dict_path, case_path):
- #Read JSON data
- with open(input_json_path + "/IsolatedBuildingCFD.json", 'r', encoding='utf-8') as json_file:
- json_data = json.load(json_file)
+
+def write_story_forces_file(input_json_path, template_dict_path, case_path): # noqa: D103
+ # Read JSON data
+ with open( # noqa: PTH123
+ input_json_path + '/IsolatedBuildingCFD.json', encoding='utf-8'
+ ) as json_file:
+ json_data = json.load(json_file)
air_density = 1.0
- #Returns JSON object as a dictionary
- rm_data = json_data["resultMonitoring"]
+ # Returns JSON object as a dictionary
+ rm_data = json_data['resultMonitoring']
num_stories = rm_data['numStories']
- floor_height = rm_data['floorHeight']
+ floor_height = rm_data['floorHeight'] # noqa: F841
center_of_rotation = rm_data['centerOfRotation']
story_load_write_interval = rm_data['storyLoadWriteInterval']
- monitor_base_load = rm_data['monitorBaseLoad']
+ monitor_base_load = rm_data['monitorBaseLoad'] # noqa: F841
- #Open the template file (OpenFOAM file) for manipulation
- dict_file = open(template_dict_path + "/storyForcesTemplate", "r")
+ # Open the template file (OpenFOAM file) for manipulation
+ dict_file = open(template_dict_path + '/storyForcesTemplate') # noqa: PLW1514, PTH123, SIM115
dict_lines = dict_file.readlines()
dict_file.close()
-
- #Write writeInterval
- start_index = foam.find_keyword_line(dict_lines, "writeInterval")
- dict_lines[start_index] = "writeInterval \t{};\n".format(story_load_write_interval)
-
- #Write patch name to integrate forces on
- start_index = foam.find_keyword_line(dict_lines, "patches")
- dict_lines[start_index] = "patches \t({});\n".format("building")
-
- #Write air density to rhoInf
- start_index = foam.find_keyword_line(dict_lines, "rhoInf")
- dict_lines[start_index] = "rhoInf \t\t{:.4f};\n".format(air_density)
-
- #Write center of rotation
- start_index = foam.find_keyword_line(dict_lines, "CofR")
- dict_lines[start_index] = "CofR \t\t({:.4f} {:.4f} {:.4f});\n".format(center_of_rotation[0], center_of_rotation[1], center_of_rotation[2])
-
- #Number of stories as nBins
- start_index = foam.find_keyword_line(dict_lines, "nBin")
- dict_lines[start_index] = " nBin \t{};\n".format(num_stories)
-
- #Write story direction
- start_index = foam.find_keyword_line(dict_lines, "direction")
- dict_lines[start_index] = " direction \t({:.4f} {:.4f} {:.4f});\n".format(0, 0, 1.0)
-
- #Write edited dict to file
- write_file_name = case_path + "/system/storyForces"
-
- if os.path.exists(write_file_name):
- os.remove(write_file_name)
-
- output_file = open(write_file_name, "w+")
+
+ # Write writeInterval
+ start_index = foam.find_keyword_line(dict_lines, 'writeInterval')
+ dict_lines[start_index] = f'writeInterval \t{story_load_write_interval};\n'
+
+ # Write patch name to integrate forces on
+ start_index = foam.find_keyword_line(dict_lines, 'patches')
+ dict_lines[start_index] = 'patches \t({});\n'.format('building')
+
+ # Write air density to rhoInf
+ start_index = foam.find_keyword_line(dict_lines, 'rhoInf')
+ dict_lines[start_index] = f'rhoInf \t\t{air_density:.4f};\n'
+
+ # Write center of rotation
+ start_index = foam.find_keyword_line(dict_lines, 'CofR')
+ dict_lines[start_index] = (
+ f'CofR \t\t({center_of_rotation[0]:.4f} {center_of_rotation[1]:.4f} {center_of_rotation[2]:.4f});\n'
+ )
+
+ # Number of stories as nBins
+ start_index = foam.find_keyword_line(dict_lines, 'nBin')
+ dict_lines[start_index] = f' nBin \t{num_stories};\n'
+
+ # Write story direction
+ start_index = foam.find_keyword_line(dict_lines, 'direction')
+ dict_lines[start_index] = f' direction \t({0:.4f} {0:.4f} {1.0:.4f});\n'
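+ # Illustrative result (assumed value): numStories = 10 gives " nBin 10;" with
+ # " direction (0.0000 0.0000 1.0000);", i.e. the story forces are binned vertically.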
+
+ # Write edited dict to file
+ write_file_name = case_path + '/system/storyForces'
+
+ if os.path.exists(write_file_name): # noqa: PTH110
+ os.remove(write_file_name) # noqa: PTH107
+
+ output_file = open(write_file_name, 'w+') # noqa: PLW1514, PTH123, SIM115
for line in dict_lines:
output_file.write(line)
output_file.close()
-
-
-def write_momentumTransport_file(input_json_path, template_dict_path, case_path):
- #Read JSON data
- with open(input_json_path + "/IsolatedBuildingCFD.json", 'r', encoding='utf-8') as json_file:
- json_data = json.load(json_file)
+
+def write_momentumTransport_file(input_json_path, template_dict_path, case_path): # noqa: N802, D103
+ # Read JSON data
+ with open( # noqa: PTH123
+ input_json_path + '/IsolatedBuildingCFD.json', encoding='utf-8'
+ ) as json_file:
+ json_data = json.load(json_file)
# Returns JSON object as a dictionary
- turb_data = json_data["turbulenceModeling"]
-
+ turb_data = json_data['turbulenceModeling']
+
simulation_type = turb_data['simulationType']
- RANS_type = turb_data['RANSModelType']
- LES_type = turb_data['LESModelType']
- DES_type = turb_data['DESModelType']
+ RANS_type = turb_data['RANSModelType'] # noqa: N806
+ LES_type = turb_data['LESModelType'] # noqa: N806
+ DES_type = turb_data['DESModelType'] # noqa: N806
- #Open the template file (OpenFOAM file) for manipulation
- dict_file = open(template_dict_path + "/momentumTransportTemplate", "r")
+ # Open the template file (OpenFOAM file) for manipulation
+ dict_file = open(template_dict_path + '/momentumTransportTemplate') # noqa: PLW1514, PTH123, SIM115
dict_lines = dict_file.readlines()
- dict_file.close()
-
- #Write type of the simulation
- start_index = foam.find_keyword_line(dict_lines, "simulationType")
- dict_lines[start_index] = "simulationType \t{};\n".format("RAS" if simulation_type=="RANS" else simulation_type)
-
- if simulation_type=="RANS":
- #Write RANS model type
- start_index = foam.find_keyword_line(dict_lines, "RAS") + 2
- added_part = " model \t{};\n".format(RANS_type)
+ dict_file.close()
+
+ # Write type of the simulation
+ start_index = foam.find_keyword_line(dict_lines, 'simulationType')
+ dict_lines[start_index] = 'simulationType \t{};\n'.format(
+ 'RAS' if simulation_type == 'RANS' else simulation_type
+ )
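+ # Illustrative result: a RANS setup with the kEpsilon model (assumed choice)
+ # yields "simulationType RAS;" plus " model kEpsilon;" inside the RAS block of
+ # the template.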
+
+ if simulation_type == 'RANS':
+ # Write RANS model type
+ start_index = foam.find_keyword_line(dict_lines, 'RAS') + 2
+ added_part = f' model \t{RANS_type};\n'
dict_lines.insert(start_index, added_part)
-
- elif simulation_type=="LES":
- #Write LES SGS model type
- start_index = foam.find_keyword_line(dict_lines, "LES") + 2
- added_part = " model \t{};\n".format(LES_type)
+
+ elif simulation_type == 'LES':
+ # Write LES SGS model type
+ start_index = foam.find_keyword_line(dict_lines, 'LES') + 2
+ added_part = f' model \t{LES_type};\n'
dict_lines.insert(start_index, added_part)
-
- elif simulation_type=="DES":
- #Write DES model type
- start_index = foam.find_keyword_line(dict_lines, "LES") + 2
- added_part = " model \t{};\n".format(DES_type)
+
+ elif simulation_type == 'DES':
+ # Write DES model type
+ start_index = foam.find_keyword_line(dict_lines, 'LES') + 2
+ added_part = f' model \t{DES_type};\n'
dict_lines.insert(start_index, added_part)
- #Write edited dict to file
- write_file_name = case_path + "/constant/momentumTransport"
-
- if os.path.exists(write_file_name):
- os.remove(write_file_name)
-
- output_file = open(write_file_name, "w+")
+ # Write edited dict to file
+ write_file_name = case_path + '/constant/momentumTransport'
+
+ if os.path.exists(write_file_name): # noqa: PTH110
+ os.remove(write_file_name) # noqa: PTH107
+
+ output_file = open(write_file_name, 'w+') # noqa: PLW1514, PTH123, SIM115
for line in dict_lines:
output_file.write(line)
-
+
output_file.close()
-
-def write_physicalProperties_file(input_json_path, template_dict_path, case_path):
- #Read JSON data
- with open(input_json_path + "/IsolatedBuildingCFD.json", 'r', encoding='utf-8') as json_file:
- json_data = json.load(json_file)
+
+def write_physicalProperties_file(input_json_path, template_dict_path, case_path): # noqa: N802, D103
+ # Read JSON data
+ with open( # noqa: PTH123
+ input_json_path + '/IsolatedBuildingCFD.json', encoding='utf-8'
+ ) as json_file:
+ json_data = json.load(json_file)
# Returns JSON object as a dictionary
- wc_data = json_data["windCharacteristics"]
-
+ wc_data = json_data['windCharacteristics']
+
kinematic_viscosity = wc_data['kinematicViscosity']
-
- #Open the template file (OpenFOAM file) for manipulation
- dict_file = open(template_dict_path + "/physicalPropertiesTemplate", "r")
+
+ # Open the template file (OpenFOAM file) for manipulation
+ dict_file = open(template_dict_path + '/physicalPropertiesTemplate') # noqa: PLW1514, PTH123, SIM115
dict_lines = dict_file.readlines()
dict_file.close()
-
- #Write type of the simulation
- start_index = foam.find_keyword_line(dict_lines, "nu")
- dict_lines[start_index] = "nu\t\t[0 2 -1 0 0 0 0] {:.4e};\n".format(kinematic_viscosity)
-
- #Write edited dict to file
- write_file_name = case_path + "/constant/physicalProperties"
-
- if os.path.exists(write_file_name):
- os.remove(write_file_name)
-
- output_file = open(write_file_name, "w+")
+
+ # Write type of the simulation
+ start_index = foam.find_keyword_line(dict_lines, 'nu')
+ dict_lines[start_index] = f'nu\t\t[0 2 -1 0 0 0 0] {kinematic_viscosity:.4e};\n'
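+ # Illustrative result (assumed value): a kinematic viscosity of 1.5e-5 m^2/s
+ # (air) is written roughly as "nu [0 2 -1 0 0 0 0] 1.5000e-05;".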
+
+ # Write edited dict to file
+ write_file_name = case_path + '/constant/physicalProperties'
+
+ if os.path.exists(write_file_name): # noqa: PTH110
+ os.remove(write_file_name) # noqa: PTH107
+
+ output_file = open(write_file_name, 'w+') # noqa: PLW1514, PTH123, SIM115
for line in dict_lines:
output_file.write(line)
output_file.close()
-
-def write_transportProperties_file(input_json_path, template_dict_path, case_path):
- #Read JSON data
- with open(input_json_path + "/IsolatedBuildingCFD.json", 'r', encoding='utf-8') as json_file:
- json_data = json.load(json_file)
+def write_transportProperties_file(input_json_path, template_dict_path, case_path): # noqa: N802, D103
+ # Read JSON data
+ with open( # noqa: PTH123
+ input_json_path + '/IsolatedBuildingCFD.json', encoding='utf-8'
+ ) as json_file:
+ json_data = json.load(json_file)
# Returns JSON object as a dictionary
- wc_data = json_data["windCharacteristics"]
-
+ wc_data = json_data['windCharacteristics']
+
kinematic_viscosity = wc_data['kinematicViscosity']
-
- #Open the template file (OpenFOAM file) for manipulation
- dict_file = open(template_dict_path + "/transportPropertiesTemplate", "r")
+
+ # Open the template file (OpenFOAM file) for manipulation
+ dict_file = open(template_dict_path + '/transportPropertiesTemplate') # noqa: PLW1514, PTH123, SIM115
dict_lines = dict_file.readlines()
dict_file.close()
-
- #Write type of the simulation
- start_index = foam.find_keyword_line(dict_lines, "nu")
- dict_lines[start_index] = "nu\t\t[0 2 -1 0 0 0 0] {:.3e};\n".format(kinematic_viscosity)
-
- #Write edited dict to file
- write_file_name = case_path + "/constant/transportProperties"
-
- if os.path.exists(write_file_name):
- os.remove(write_file_name)
-
- output_file = open(write_file_name, "w+")
+
+ # Write type of the simulation
+ start_index = foam.find_keyword_line(dict_lines, 'nu')
+ dict_lines[start_index] = f'nu\t\t[0 2 -1 0 0 0 0] {kinematic_viscosity:.3e};\n'
+
+ # Write edited dict to file
+ write_file_name = case_path + '/constant/transportProperties'
+
+ if os.path.exists(write_file_name): # noqa: PTH110
+ os.remove(write_file_name) # noqa: PTH107
+
+ output_file = open(write_file_name, 'w+') # noqa: PLW1514, PTH123, SIM115
for line in dict_lines:
output_file.write(line)
-
+
output_file.close()
-def write_fvSchemes_file(input_json_path, template_dict_path, case_path):
- #Read JSON data
- with open(input_json_path + "/IsolatedBuildingCFD.json", 'r', encoding='utf-8') as json_file:
- json_data = json.load(json_file)
+def write_fvSchemes_file(input_json_path, template_dict_path, case_path): # noqa: N802, D103
+ # Read JSON data
+ with open( # noqa: PTH123
+ input_json_path + '/IsolatedBuildingCFD.json', encoding='utf-8'
+ ) as json_file:
+ json_data = json.load(json_file)
# Returns JSON object as a dictionary
- turb_data = json_data["turbulenceModeling"]
-
+ turb_data = json_data['turbulenceModeling']
+
simulation_type = turb_data['simulationType']
-
- #Open the template file (OpenFOAM file) for manipulation
- dict_file = open(template_dict_path + "/fvSchemesTemplate{}".format(simulation_type), "r")
+
+ # Open the template file (OpenFOAM file) for manipulation
+ dict_file = open(template_dict_path + f'/fvSchemesTemplate{simulation_type}') # noqa: PLW1514, PTH123, SIM115
dict_lines = dict_file.readlines()
dict_file.close()
-
- #Write edited dict to file
- write_file_name = case_path + "/system/fvSchemes"
-
- if os.path.exists(write_file_name):
- os.remove(write_file_name)
-
- output_file = open(write_file_name, "w+")
+
+ # Write edited dict to file
+ write_file_name = case_path + '/system/fvSchemes'
+
+ if os.path.exists(write_file_name): # noqa: PTH110
+ os.remove(write_file_name) # noqa: PTH107
+
+ output_file = open(write_file_name, 'w+') # noqa: PLW1514, PTH123, SIM115
for line in dict_lines:
output_file.write(line)
-
- output_file.close()
-
-def write_decomposeParDict_file(input_json_path, template_dict_path, case_path):
- #Read JSON data
- with open(input_json_path + "/IsolatedBuildingCFD.json") as json_file:
- json_data = json.load(json_file)
+ output_file.close()
+
+
+def write_decomposeParDict_file(input_json_path, template_dict_path, case_path): # noqa: N802, D103
+ # Read JSON data
+ with open(input_json_path + '/IsolatedBuildingCFD.json') as json_file: # noqa: PLW1514, PTH123
+ json_data = json.load(json_file)
# Returns JSON object as a dictionary
- ns_data = json_data["numericalSetup"]
-
+ ns_data = json_data['numericalSetup']
+
num_processors = ns_data['numProcessors']
- #Open the template file (OpenFOAM file) for manipulation
- dict_file = open(template_dict_path + "/decomposeParDictTemplate", "r")
+ # Open the template file (OpenFOAM file) for manipulation
+ dict_file = open(template_dict_path + '/decomposeParDictTemplate') # noqa: PLW1514, PTH123, SIM115
dict_lines = dict_file.readlines()
dict_file.close()
-
- #Write number of sub-domains
- start_index = foam.find_keyword_line(dict_lines, "numberOfSubdomains")
- dict_lines[start_index] = "numberOfSubdomains\t{};\n".format(num_processors)
-
- #Write method of decomposition
- start_index = foam.find_keyword_line(dict_lines, "decomposer")
- dict_lines[start_index] = "decomposer\t\t{};\n".format("scotch")
-
- #Write method of decomposition for OF-V9 and lower compatibility
- start_index = foam.find_keyword_line(dict_lines, "method")
- dict_lines[start_index] = "method\t\t{};\n".format("scotch")
-
- #Write edited dict to file
- write_file_name = case_path + "/system/decomposeParDict"
-
- if os.path.exists(write_file_name):
- os.remove(write_file_name)
-
- output_file = open(write_file_name, "w+")
+
+ # Write number of sub-domains
+ start_index = foam.find_keyword_line(dict_lines, 'numberOfSubdomains')
+ dict_lines[start_index] = f'numberOfSubdomains\t{num_processors};\n'
+
+ # Write method of decomposition
+ start_index = foam.find_keyword_line(dict_lines, 'decomposer')
+ dict_lines[start_index] = 'decomposer\t\t{};\n'.format('scotch')
+
+ # Write method of decomposition for OF-V9 and lower compatibility
+ start_index = foam.find_keyword_line(dict_lines, 'method')
+ dict_lines[start_index] = 'method\t\t{};\n'.format('scotch')
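+ # Illustrative result (assumed value): numProcessors = 8 produces
+ # "numberOfSubdomains 8;", "decomposer scotch;" and, for OF-v9 and older,
+ # "method scotch;".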
+
+ # Write edited dict to file
+ write_file_name = case_path + '/system/decomposeParDict'
+
+ if os.path.exists(write_file_name): # noqa: PTH110
+ os.remove(write_file_name) # noqa: PTH107
+
+ output_file = open(write_file_name, 'w+') # noqa: PLW1514, PTH123, SIM115
for line in dict_lines:
output_file.write(line)
-
- output_file.close()
-
-def write_DFSRTurbDict_file(input_json_path, template_dict_path, case_path):
- #Read JSON data
- with open(input_json_path + "/IsolatedBuildingCFD.json") as json_file:
- json_data = json.load(json_file)
-
- fMax = 200.0
+ output_file.close()
+
+
+def write_DFSRTurbDict_file(input_json_path, template_dict_path, case_path): # noqa: N802, D103
+ # Read JSON data
+ with open(input_json_path + '/IsolatedBuildingCFD.json') as json_file: # noqa: PLW1514, PTH123
+ json_data = json.load(json_file)
+
+ fMax = 200.0 # noqa: N806
# Returns JSON object as a dictionary
- wc_data = json_data["windCharacteristics"]
- ns_data = json_data["numericalSetup"]
-
+ wc_data = json_data['windCharacteristics']
+ ns_data = json_data['numericalSetup']
+
wind_speed = wc_data['referenceWindSpeed']
duration = ns_data['duration']
-
- #Generate a little longer duration to be safe
- duration = duration*1.010
- #Open the template file (OpenFOAM file) for manipulation
- dict_file = open(template_dict_path + "/DFSRTurbDictTemplate", "r")
+ # Generate a little longer duration to be safe
+ duration = duration * 1.010 # noqa: PLR6104
+
+ # Open the template file (OpenFOAM file) for manipulation
+ dict_file = open(template_dict_path + '/DFSRTurbDictTemplate') # noqa: PLW1514, PTH123, SIM115
dict_lines = dict_file.readlines()
dict_file.close()
-
- #Write the end time
- start_index = foam.find_keyword_line(dict_lines, "endTime")
- dict_lines[start_index] = "endTime\t\t\t{:.4f};\n".format(duration)
-
- #Write patch name
- start_index = foam.find_keyword_line(dict_lines, "patchName")
- dict_lines[start_index] = "patchName\t\t\"{}\";\n".format("inlet")
-
- #Write cohUav
- start_index = foam.find_keyword_line(dict_lines, "cohUav")
- dict_lines[start_index] = "cohUav\t\t\t{:.4f};\n".format(wind_speed)
-
- #Write fMax
- start_index = foam.find_keyword_line(dict_lines, "fMax")
- dict_lines[start_index] = "fMax\t\t\t{:.4f};\n".format(fMax)
-
- #Write time step
- start_index = foam.find_keyword_line(dict_lines, "timeStep")
- dict_lines[start_index] = "timeStep\t\t{:.4f};\n".format(1.0/fMax)
-
- #Write edited dict to file
- write_file_name = case_path + "/constant/DFSRTurbDict"
-
- if os.path.exists(write_file_name):
- os.remove(write_file_name)
-
- output_file = open(write_file_name, "w+")
+
+ # Write the end time
+ start_index = foam.find_keyword_line(dict_lines, 'endTime')
+ dict_lines[start_index] = f'endTime\t\t\t{duration:.4f};\n'
+
+ # Write patch name
+ start_index = foam.find_keyword_line(dict_lines, 'patchName')
+ dict_lines[start_index] = 'patchName\t\t"{}";\n'.format('inlet')
+
+ # Write cohUav
+ start_index = foam.find_keyword_line(dict_lines, 'cohUav')
+ dict_lines[start_index] = f'cohUav\t\t\t{wind_speed:.4f};\n'
+
+ # Write fMax
+ start_index = foam.find_keyword_line(dict_lines, 'fMax')
+ dict_lines[start_index] = f'fMax\t\t\t{fMax:.4f};\n'
+
+ # Write time step
+ start_index = foam.find_keyword_line(dict_lines, 'timeStep')
+ dict_lines[start_index] = f'timeStep\t\t{1.0 / fMax:.4f};\n'
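+ # Worked example: with fMax = 200 Hz the time step written to DFSRTurbDict is
+ # 1.0 / 200 = 0.0050 s, i.e. "timeStep 0.0050;".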
+
+ # Write edited dict to file
+ write_file_name = case_path + '/constant/DFSRTurbDict'
+
+ if os.path.exists(write_file_name): # noqa: PTH110
+ os.remove(write_file_name) # noqa: PTH107
+
+ output_file = open(write_file_name, 'w+') # noqa: PLW1514, PTH123, SIM115
for line in dict_lines:
output_file.write(line)
-
- output_file.close()
-
+
+ output_file.close()
+
+
def write_boundary_data_files(input_json_path, case_path):
- """
- This functions writes wind profile files in "constant/boundaryData/inlet"
- if TInf options are used for the simulation.
- """
- #Read JSON data
- with open(input_json_path + "/IsolatedBuildingCFD.json") as json_file:
- json_data = json.load(json_file)
+ """This functions writes wind profile files in "constant/boundaryData/inlet"
+ if TInf options are used for the simulation.
+ """ # noqa: D205, D401, D404
+ # Read JSON data
+ with open(input_json_path + '/IsolatedBuildingCFD.json') as json_file: # noqa: PLW1514, PTH123
+ json_data = json.load(json_file)
# Returns JSON object as a dictionary
- boundary_data = json_data["boundaryConditions"]
+ boundary_data = json_data['boundaryConditions']
geom_data = json_data['GeometricData']
- scale = 1.0/float(geom_data['geometricScale'])
+ scale = 1.0 / float(geom_data['geometricScale'])
norm_type = geom_data['normalizationType']
- building_height = scale*geom_data['buildingHeight']
-
+ building_height = scale * geom_data['buildingHeight']
- if boundary_data['inletBoundaryCondition']=="TInf":
- wind_profiles = np.array(boundary_data["inflowProperties"]['windProfiles'])
+ if boundary_data['inletBoundaryCondition'] == 'TInf':
+ wind_profiles = np.array(boundary_data['inflowProperties']['windProfiles'])
- bd_path = case_path + "/constant/boundaryData/inlet/"
+ bd_path = case_path + '/constant/boundaryData/inlet/'
- #Write points file
+ # Write points file
n_pts = np.shape(wind_profiles)[0]
- points = np.zeros((n_pts, 3))
-
+ points = np.zeros((n_pts, 3))
origin = np.array(geom_data['origin'])
-
- Ly = geom_data['domainWidth']
- Lf = geom_data['fetchLength']
-
- if norm_type=="Relative":
- Ly *= building_height
- Lf *= building_height
-
+
+ Ly = geom_data['domainWidth'] # noqa: N806
+ Lf = geom_data['fetchLength'] # noqa: N806
+
+ if norm_type == 'Relative':
+ Ly *= building_height # noqa: N806
+ Lf *= building_height # noqa: N806
+
x_min = -Lf - origin[0]
- y_min = -Ly/2.0 - origin[1]
+ y_min = -Ly / 2.0 - origin[1]
y_max = y_min + Ly
- points[:,0] = x_min
- points[:,1] = (y_min + y_max)/2.0
- points[:,2] = wind_profiles[:, 0]
+ points[:, 0] = x_min
+ points[:, 1] = (y_min + y_max) / 2.0
+ points[:, 2] = wind_profiles[:, 0]
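+ # Example (assumed values): with absolute dimensions, fetchLength = 200 m,
+ # domainWidth = 100 m and origin (0, 0, 0), every inlet point sits at
+ # x = -200 m, y = 0 m, with z taken from the first column of windProfiles.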
- #Shift the last element of the y coordinate
- #a bit to make planer interpolation easier
+ # Shift the last element of the y coordinate
+ # a bit to make planar interpolation easier
points[-1:, 1] = y_max
- foam.write_foam_field(points, bd_path + "points")
+ foam.write_foam_field(points, bd_path + 'points')
- #Write wind speed file as a scalar field
- foam.write_scalar_field(wind_profiles[:, 1], bd_path + "U")
+ # Write wind speed file as a scalar field
+ foam.write_scalar_field(wind_profiles[:, 1], bd_path + 'U')
- #Write Reynolds stress profile (6 columns -> it's a symmetric tensor field)
- foam.write_foam_field(wind_profiles[:, 2:8], bd_path + "R")
+ # Write Reynolds stress profile (6 columns -> it's a symmetric tensor field)
+ foam.write_foam_field(wind_profiles[:, 2:8], bd_path + 'R')
- #Write length scale file (8 columns -> it's a tensor field)
- foam.write_foam_field(wind_profiles[:, 8:17], bd_path + "L")
+ # Write length scale file (8 columns -> it's a tensor field)
+ foam.write_foam_field(wind_profiles[:, 8:17], bd_path + 'L')
-if __name__ == '__main__':
-
+
+if __name__ == '__main__':
input_args = sys.argv
# Set filenames
input_json_path = sys.argv[1]
template_dict_path = sys.argv[2]
case_path = sys.argv[3]
-
- #Read JSON data
- with open(input_json_path + "/IsolatedBuildingCFD.json") as json_file:
- json_data = json.load(json_file)
+
+ # Read JSON data
+ with open(input_json_path + '/IsolatedBuildingCFD.json') as json_file: # noqa: PLW1514, PTH123
+ json_data = json.load(json_file)
# Returns JSON object as a dictionary
- turb_data = json_data["turbulenceModeling"]
-
+ turb_data = json_data['turbulenceModeling']
+
simulation_type = turb_data['simulationType']
RANS_type = turb_data['RANSModelType']
LES_type = turb_data['LESModelType']
-
- #Write blockMesh
+
+ # Write blockMesh
write_block_mesh_dict(input_json_path, template_dict_path, case_path)
- #Create and write the building .stl file
- #Also, import STL file if the shape is complex, the check is done inside the function
+ # Create and write the building .stl file
+ # Also, import the STL file if the shape is complex; the check is done inside the function
write_building_stl_file(input_json_path, case_path)
-
- #Create and write the SnappyHexMeshDict file
+
+ # Create and write the SnappyHexMeshDict file
write_snappy_hex_mesh_dict(input_json_path, template_dict_path, case_path)
-
- #Create and write the surfaceFeaturesDict file
+
+ # Create and write the surfaceFeaturesDict file
write_surfaceFeaturesDict_file(input_json_path, template_dict_path, case_path)
-
- #Write files in "0" directory
+
+ # Write files in "0" directory
write_U_file(input_json_path, template_dict_path, case_path)
write_p_file(input_json_path, template_dict_path, case_path)
write_nut_file(input_json_path, template_dict_path, case_path)
write_k_file(input_json_path, template_dict_path, case_path)
-
- if simulation_type == "RANS" and RANS_type=="kEpsilon":
+
+ if simulation_type == 'RANS' and RANS_type == 'kEpsilon':
write_epsilon_file(input_json_path, template_dict_path, case_path)
- #Write control dict
+ # Write control dict
write_controlDict_file(input_json_path, template_dict_path, case_path)
-
- #Write results to be monitored
+
+ # Write results to be monitored
write_base_forces_file(input_json_path, template_dict_path, case_path)
write_story_forces_file(input_json_path, template_dict_path, case_path)
- write_generated_pressure_probes_file(input_json_path, template_dict_path, case_path)
- write_imported_pressure_probes_file(input_json_path, template_dict_path, case_path)
-
- #Write fvSolution dict
+ write_generated_pressure_probes_file(
+ input_json_path, template_dict_path, case_path
+ )
+ write_imported_pressure_probes_file(
+ input_json_path, template_dict_path, case_path
+ )
+
+ # Write fvSolution dict
write_fvSolution_file(input_json_path, template_dict_path, case_path)
- #Write fvSchemes dict
+ # Write fvSchemes dict
write_fvSchemes_file(input_json_path, template_dict_path, case_path)
- #Write momentumTransport dict
+ # Write momentumTransport dict
write_momentumTransport_file(input_json_path, template_dict_path, case_path)
-
- #Write physicalProperties dict
+
+ # Write physicalProperties dict
write_physicalProperties_file(input_json_path, template_dict_path, case_path)
-
- #Write transportProperties (physicalProperties in OF-10) dict for OpenFOAM-9 and below
+
+ # Write transportProperties (physicalProperties in OF-10) dict for OpenFOAM-9 and below
write_transportProperties_file(input_json_path, template_dict_path, case_path)
-
- #Write decomposeParDict
+
+ # Write decomposeParDict
write_decomposeParDict_file(input_json_path, template_dict_path, case_path)
-
+
# #Write DFSRTurb dict
# write_DFSRTurbDict_file(input_json_path, template_dict_path, case_path)
- #Write TInf files
+ # Write TInf files
write_boundary_data_files(input_json_path, case_path)
-
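For context, a minimal sketch of the slice of IsolatedBuildingCFD.json that the main block above consumes; the field values are illustrative placeholders and every other section of the real input file is omitted.
# Hypothetical, trimmed-down input for the case-setup script above.
# Only the 'turbulenceModeling' block read by the __main__ section is shown;
# the values are placeholders, not defaults from the repository.
import json

json_data = {
    'turbulenceModeling': {
        'simulationType': 'RANS',       # 'RANS' plus 'kEpsilon' also triggers write_epsilon_file
        'RANSModelType': 'kEpsilon',
        'LESModelType': 'Smagorinsky',  # read unconditionally, used only for LES runs
    }
}
with open('IsolatedBuildingCFD.json', 'w') as f:
    json.dump(json_data, f, indent=2)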
diff --git a/modules/createEVENT/IsolatedBuildingCFD/templateOF10Dicts/CMakeLists.txt b/modules/createEVENT/IsolatedBuildingCFD/templateOF10Dicts/CMakeLists.txt
index c5e58b9ac..4ec43b536 100644
--- a/modules/createEVENT/IsolatedBuildingCFD/templateOF10Dicts/CMakeLists.txt
+++ b/modules/createEVENT/IsolatedBuildingCFD/templateOF10Dicts/CMakeLists.txt
@@ -5,7 +5,7 @@ simcenter_add_file(NAME nutFileTemplate)
simcenter_add_file(NAME pFileTemplate)
simcenter_add_file(NAME epsilonFileTemplate)
-#Files in "costant" directory
+#Files in "constant" directory
simcenter_add_file(NAME physicalPropertiesTemplate)
simcenter_add_file(NAME transportPropertiesTemplate)
simcenter_add_file(NAME momentumTransportTemplate)
diff --git a/modules/createEVENT/IsolatedBuildingCFD/templateOF10Dicts/DFSRTurbDictTemplate b/modules/createEVENT/IsolatedBuildingCFD/templateOF10Dicts/DFSRTurbDictTemplate
index c8b14b2c4..00f3fc64e 100755
--- a/modules/createEVENT/IsolatedBuildingCFD/templateOF10Dicts/DFSRTurbDictTemplate
+++ b/modules/createEVENT/IsolatedBuildingCFD/templateOF10Dicts/DFSRTurbDictTemplate
@@ -57,7 +57,7 @@ C (//x y z
windProfile
{
//read scaling factors for I, L
- //that varies with hieght
+ //that vary with height
adjustProfile off;
//Factors to scale turbulence intensities and length scale profiles
diff --git a/modules/createEVENT/IsolatedBuildingCFD/templateOF10Dicts/decomposeParDictTemplate b/modules/createEVENT/IsolatedBuildingCFD/templateOF10Dicts/decomposeParDictTemplate
index c93a2398e..62ed6e269 100644
--- a/modules/createEVENT/IsolatedBuildingCFD/templateOF10Dicts/decomposeParDictTemplate
+++ b/modules/createEVENT/IsolatedBuildingCFD/templateOF10Dicts/decomposeParDictTemplate
@@ -18,7 +18,7 @@ numberOfSubdomains 8;
decomposer hierarchical;
-//Needed for compatability
+//Needed for compatibility
method hierarchical;
distributor ptscotch;
diff --git a/modules/createEVENT/Istanbul/IstanbulApp.py b/modules/createEVENT/Istanbul/IstanbulApp.py
index 78db9e782..e8c47a923 100644
Binary files a/modules/createEVENT/Istanbul/IstanbulApp.py and b/modules/createEVENT/Istanbul/IstanbulApp.py differ
diff --git a/modules/createEVENT/Istanbul/IstanbulApp2.py b/modules/createEVENT/Istanbul/IstanbulApp2.py
index 2130ee6cc..ece3d9c2b 100644
--- a/modules/createEVENT/Istanbul/IstanbulApp2.py
+++ b/modules/createEVENT/Istanbul/IstanbulApp2.py
@@ -1,38 +1,58 @@
-#%%
-import os
+# %% # noqa: CPY001, D100, INP001
import json
-from datetime import datetime
+import os
import time
+from datetime import datetime
+
from agavepy.agave import Agave
# change the directory to the current directory
-os.chdir(os.path.dirname(os.path.realpath(__file__)))
+os.chdir(os.path.dirname(os.path.realpath(__file__))) # noqa: PTH120
+
-def Submit_tapis_job():
+def Submit_tapis_job(): # noqa: N802, D103
ag = Agave.restore()
- with open("TapisFiles/information.json", "r") as file:
- information = json.load(file)
+ with open('TapisFiles/information.json') as file: # noqa: PLW1514, PTH123
+ information = json.load(file)
file.close()
-
# %%
- profile = ag.profiles.get()
- username = profile['username']
- savingDirectory = information["directory"]
- if not os.path.exists(savingDirectory):
- os.makedirs(savingDirectory)
-
+ profile = ag.profiles.get()
+ username = profile['username']
+ savingDirectory = information['directory'] # noqa: N806
+ if not os.path.exists(savingDirectory): # noqa: PTH110
+ os.makedirs(savingDirectory) # noqa: PTH103
- print("Uploading files to designsafe storage")
- ag.files.manage(systemId="designsafe.storage.default", filePath=f"{username}/", body={'action': 'mkdir','path': "physics_based"})
- ag.files.manage(systemId="designsafe.storage.default", filePath=f"{username}/physics_based", body={'action': 'mkdir','path': "Istanbul"})
+ print('Uploading files to designsafe storage') # noqa: T201
+ ag.files.manage(
+ systemId='designsafe.storage.default',
+ filePath=f'{username}/',
+ body={'action': 'mkdir', 'path': 'physics_based'},
+ )
+ ag.files.manage(
+ systemId='designsafe.storage.default',
+ filePath=f'{username}/physics_based',
+ body={'action': 'mkdir', 'path': 'Istanbul'},
+ )
# ag.files_mkdir(systemId="designsafe.storage.default", filePath=f"{username}/physics_based/Istanbul2")
- with open("TapisFiles/Istanbul.py", 'rb') as file:
- result = ag.files.importData(filePath= f"{username}/physics_based/Istanbul/",fileToUpload=file,systemId='designsafe.storage.default')
- with open("TapisFiles/information.json", 'rb') as file:
- result = ag.files.importData(filePath= f"{username}/physics_based/Istanbul/",fileToUpload=file,systemId='designsafe.storage.default')
- with open("TapisFiles/selectedSites.csv", 'rb') as file:
- result = ag.files.importData(filePath= f"{username}/physics_based/Istanbul/",fileToUpload=file,systemId='designsafe.storage.default')
+ with open('TapisFiles/Istanbul.py', 'rb') as file: # noqa: PTH123
+ result = ag.files.importData(
+ filePath=f'{username}/physics_based/Istanbul/',
+ fileToUpload=file,
+ systemId='designsafe.storage.default',
+ )
+ with open('TapisFiles/information.json', 'rb') as file: # noqa: PTH123
+ result = ag.files.importData(
+ filePath=f'{username}/physics_based/Istanbul/',
+ fileToUpload=file,
+ systemId='designsafe.storage.default',
+ )
+ with open('TapisFiles/selectedSites.csv', 'rb') as file: # noqa: PTH123
+ result = ag.files.importData( # noqa: F841
+ filePath=f'{username}/physics_based/Istanbul/',
+ fileToUpload=file,
+ systemId='designsafe.storage.default',
+ )
# %%
jobdict = {
@@ -41,61 +61,66 @@ def Submit_tapis_job():
'nodeCount': 1,
'processorsPerNode': 1,
'archive': True,
- 'archiveOnAppError':True,
+ 'archiveOnAppError': True,
'inputs': {'inputDirectory': ''},
- 'parameters' : {'inputScript':'Istanbul.py'},
+ 'parameters': {'inputScript': 'Istanbul.py'},
'maxRunTime': '00:01:00',
'memoryPerNode': '1GB',
- 'archiveSystem':'designsafe.storage.default',
+ 'archiveSystem': 'designsafe.storage.default',
}
# Generate a timestamp to append to the job name
- timestamp = datetime.now().strftime("%Y%m%d%H%M%S")
- jobname = f"PhysicsBasedMotion_Istanbul_{username}_{timestamp}"
+ timestamp = datetime.now().strftime('%Y%m%d%H%M%S') # noqa: DTZ005
+ jobname = f'PhysicsBasedMotion_Istanbul_{username}_{timestamp}'
- print("Submitting job")
- # submit the job
+ print('Submitting job') # noqa: T201
+ # submit the job
jobdict['name'] = jobname
- jobdict['inputs']['inputDirectory'] = f"agave://designsafe.storage.default/{username}/physics_based/Istanbul/"
+ jobdict['inputs']['inputDirectory'] = (
+ f'agave://designsafe.storage.default/{username}/physics_based/Istanbul/'
+ )
# %%
res = ag.jobs.submit(body=jobdict)
jobid = res['id']
- status = ""
- last_status = ""
+ status = ''
+ last_status = ''
count = 0
- while status != "FINISHED":
+ while status != 'FINISHED':
status = ag.jobs.getStatus(jobId=jobid)['status']
if count == 0:
last_status = status
- print("Job status: ", status)
+ print('Job status: ', status) # noqa: T201
count += 1
if last_status != status:
- print("Job status: ", status)
+ print('Job status: ', status) # noqa: T201
last_status = status
- if status == "FAILED":
- print("Job failed")
+ if status == 'FAILED':
+ print('Job failed') # noqa: T201
break
time.sleep(10)
-
# %%
- print("Downloading extracted motions")
- archivePath = ag.jobs.get(jobId=jobid)["archivePath"]
- archivePath = f"{archivePath}/Istanbul/Events/"
+ print('Downloading extracted motions') # noqa: T201
+ archivePath = ag.jobs.get(jobId=jobid)['archivePath'] # noqa: N806
+ archivePath = f'{archivePath}/Istanbul/Events/' # noqa: N806
- files = ag.files.list(filePath=archivePath, systemId="designsafe.storage.default")
+ files = ag.files.list(
+ filePath=archivePath, systemId='designsafe.storage.default'
+ )
# %%
if len(files) <= 1:
- print("No files in the archive")
- else :
+ print('No files in the archive') # noqa: T201
+ else:
for file in files:
filename = file['name']
- if filename == ".":
+ if filename == '.':
continue
- path = f"{archivePath}/{filename}"
- res = ag.files.download(filePath=path, systemId="designsafe.storage.default")
- with open(f"{savingDirectory}/{filename}", "wb") as f:
+ path = f'{archivePath}/{filename}'
+ res = ag.files.download(
+ filePath=path, systemId='designsafe.storage.default'
+ )
+ with open(f'{savingDirectory}/{filename}', 'wb') as f: # noqa: FURB103, PTH123
f.write(res.content)
# %%
diff --git a/modules/createEVENT/Istanbul/IstanbulRun.py b/modules/createEVENT/Istanbul/IstanbulRun.py
index a2cd44c6f..07d5ca007 100644
--- a/modules/createEVENT/Istanbul/IstanbulRun.py
+++ b/modules/createEVENT/Istanbul/IstanbulRun.py
@@ -1,47 +1,49 @@
+import argparse # noqa: CPY001, D100, INP001
import os
-import IstanbulStations
-import IstanbulApp2
-import argparse
-if __name__ == "__main__":
+import IstanbulApp2
+import IstanbulStations
+
+if __name__ == '__main__':
information = {
- "RegionFlag" : False,
- "LocationFlag" : True,
- "TopoFlag" : True,
- "BedrockFlag" : True,
- "RegionShape" : "Rectangle",
- "min_lat" : 40.9940,
- "max_lat" : 40.9945,
- "min_lon" : 28.8985,
- "max_lon" : 28.8995,
- "lat" : 40.9940,
- "lon" : 28.8990,
- "directory" : "Events",
- "number_of_realizations": 2,
+ 'RegionFlag': False,
+ 'LocationFlag': True,
+ 'TopoFlag': True,
+ 'BedrockFlag': True,
+ 'RegionShape': 'Rectangle',
+ 'min_lat': 40.9940,
+ 'max_lat': 40.9945,
+ 'min_lon': 28.8985,
+ 'max_lon': 28.8995,
+ 'lat': 40.9940,
+ 'lon': 28.8990,
+ 'directory': 'Events',
+ 'number_of_realizations': 2,
}
-
parser = argparse.ArgumentParser()
- parser.add_argument('--lat', help="Latitude", required=False)
- parser.add_argument('--lng', help="Longitude", required=False)
- parser.add_argument('-g', '--gridType', help="grid Type", required=False)
- parser.add_argument('-n', '--number', help="number of realizations", required=False)
- parser.add_argument('-o', '--output', help="number of realizations", required=False)
+ parser.add_argument('--lat', help='Latitude', required=False)
+ parser.add_argument('--lng', help='Longitude', required=False)
+ parser.add_argument('-g', '--gridType', help='grid Type', required=False)
+ parser.add_argument(
+ '-n', '--number', help='number of realizations', required=False
+ )
+ parser.add_argument(
+ '-o', '--output', help='number of realizations', required=False
+ )
args = parser.parse_args()
-
+
if args.lat:
- information['lat']=float(args.lat)
+ information['lat'] = float(args.lat)
if args.lng:
- information['lon']=float(args.lng)
+ information['lon'] = float(args.lng)
if args.output:
- information['directory']=args.output
+ information['directory'] = args.output
if args.number:
- information['number_of_realizations']=int(args.number)
-
+ information['number_of_realizations'] = int(args.number)
# change the directory to the file location
- os.chdir(os.path.dirname(os.path.realpath(__file__)))
- IstanbulStations.getStations(information,plot=False,show=False)
+ os.chdir(os.path.dirname(os.path.realpath(__file__))) # noqa: PTH120
+ IstanbulStations.getStations(information, plot=False, show=False)
IstanbulApp2.Submit_tapis_job()
- exit()
-
+ exit() # noqa: PLR1722
diff --git a/modules/createEVENT/Istanbul/IstanbulStations.py b/modules/createEVENT/Istanbul/IstanbulStations.py
index f9ce00aa4..a8d2cb2c6 100644
--- a/modules/createEVENT/Istanbul/IstanbulStations.py
+++ b/modules/createEVENT/Istanbul/IstanbulStations.py
@@ -1,140 +1,159 @@
-import os
+import json # noqa: CPY001, D100, INP001
import math
+import os
+
import geopandas as gpd
import pandas as pd
-from shapely.geometry import Polygon,Point
-import json
+from shapely.geometry import Point, Polygon
-def getStations(information,plot=False,show=False):
- '''
- This function is used to retreive the information of the Istanbul physics-based simulations
-
- '''
-
- RegionFlag = information['RegionFlag']
- LocationFlag = information['LocationFlag']
+def getStations(information, plot=False, show=False): # noqa: FBT002, C901, N802
+ """This function is used to retrieve the information of the Istanbul physics-based simulations""" # noqa: D400, D401, D404
+ RegionFlag = information['RegionFlag'] # noqa: N806
+ LocationFlag = information['LocationFlag'] # noqa: N806
if LocationFlag:
# get the location of the site
- lat = information['lat']
- lon = information['lon']
-
+ lat = information['lat']
+ lon = information['lon']
if RegionFlag:
- if information["RegionShape"] == "Rectangle":
+ if information['RegionShape'] == 'Rectangle':
# get the region of the desirable sites
min_lat = information['min_lat']
max_lat = information['max_lat']
min_lon = information['min_lon']
max_lon = information['max_lon']
- if information["RegionShape"] == "Circle":
+ if information['RegionShape'] == 'Circle':
# get the region of the desirable sites
lat = information['lat']
lon = information['lon']
radius = information['radius']
# Read the data from the csv file, ignoring the index column
- df_allSites = pd.read_csv('All_Stations_Lat_Lon_Vs30_BedrockDepth.csv',index_col=False)
- df_allSites = df_allSites[["Longitude", "Latitude", "Depth (m)"]]
+ df_allSites = pd.read_csv( # noqa: N806
+ 'All_Stations_Lat_Lon_Vs30_BedrockDepth.csv', index_col=False
+ )
+ df_allSites = df_allSites[['Longitude', 'Latitude', 'Depth (m)']] # noqa: N806
# add geometry using Longitude and Latitude
- gdf = gpd.GeoDataFrame(df_allSites,
- geometry=gpd.points_from_xy(df_allSites.Longitude, df_allSites.Latitude))
-
+ gdf = gpd.GeoDataFrame(
+ df_allSites,
+ geometry=gpd.points_from_xy(df_allSites.Longitude, df_allSites.Latitude),
+ )
# filter all the sites on the surface
if information['BedrockFlag']:
- gdf = gdf[gdf["Depth (m)"] < 0+1e-5]
- else :
- gdf = gdf[gdf["Depth (m)"] > 0+1e-5]
-
+ gdf = gdf[gdf['Depth (m)'] < 0 + 1e-5]
+ else:
+ gdf = gdf[gdf['Depth (m)'] > 0 + 1e-5]
- # delte the df_allSites to save memory
+ # delete the df_allSites to save memory
del df_allSites
- directory = information['directory'] ;# directory to save the data
+ directory = information['directory'] # directory to save the data
# create the directory if it does not exist
- if not os.path.exists(directory):
- os.makedirs(directory)
+ if not os.path.exists(directory): # noqa: PTH110
+ os.makedirs(directory) # noqa: PTH103
# empty the directory
files = os.listdir(directory)
for file in files:
- os.remove(directory + "/" + file)
-
+ os.remove(directory + '/' + file) # noqa: PTH107
if LocationFlag:
# find the nearest site to the location
gdf['distance'] = gdf.distance(Point(lon, lat))
gdf = gdf.sort_values('distance')
- # create a coulmn of the distance color and make the first 4 nearest sites red
+ # create a column of the distance color and make the first 4 nearest sites red
gdf['Color'] = 'blue'
gdf.loc[gdf.index[:4], 'Color'] = 'red'
-
-
-
if RegionFlag:
- if information["RegionShape"] == "Rectangle":
+ if information['RegionShape'] == 'Rectangle':
# Create a polygon using min_lat, max_lat, min_lon, max_lon
- RegionofInterset = Polygon([(min_lon, min_lat), (min_lon, max_lat), (max_lon, max_lat), (max_lon, min_lat)])
+ RegionofInterset = Polygon( # noqa: N806
+ [
+ (min_lon, min_lat),
+ (min_lon, max_lat),
+ (max_lon, max_lat),
+ (max_lon, min_lat),
+ ]
+ )
# filter the sites that are within the polygon
withinindicies = gdf.within(RegionofInterset)
- gdf["Color"] = 'blue'
+ gdf['Color'] = 'blue'
gdf.loc[withinindicies, 'Color'] = 'red'
# gdf = gdf[gdf.within(RegionofInterset)]
# check if the gdf is empty
if withinindicies.sum() == 0:
- print('No sites are found in the selected region please change the region of interest')
+ print( # noqa: T201
+ 'No sites are found in the selected region; please change the region of interest'
+ )
return
-
-
- if information["RegionShape"] == "Circle":
- # chage the gdf to calculte the distance from the center of the circle in km
- gdf['distance'] = gdf.apply(lambda row: haversine(lat, lon, row['Latitude'], row['Longitude']), axis=1)
- gdf["Color"] = ["red" if row['distance'] < radius else "blue" for _, row in gdf.iterrows()]
- gdf = gdf[gdf['distance'] < radius]
-
+ if information['RegionShape'] == 'Circle':
+ # change the gdf to calculate the distance from the center of the circle in km
+ gdf['distance'] = gdf.apply(
+ lambda row: haversine(lat, lon, row['Latitude'], row['Longitude']),
+ axis=1,
+ )
+ gdf['Color'] = [
+ 'red' if row['distance'] < radius else 'blue'
+ for _, row in gdf.iterrows()
+ ]
+ gdf = gdf[gdf['distance'] < radius]
if RegionFlag:
- gdf["Selected Site"] = gdf["Color"].apply(lambda x: "Yes" if x == "red" else "No")
+ gdf['Selected Site'] = gdf['Color'].apply(
+ lambda x: 'Yes' if x == 'red' else 'No'
+ )
if LocationFlag:
- gdf["Selected Site"] = "No"
- gdf.iloc[0, gdf.columns.get_loc('Selected Site')] = "The closest site to the location"
- gdf.iloc[1, gdf.columns.get_loc('Selected Site')] = "The second closest site to the location"
- gdf.iloc[2, gdf.columns.get_loc('Selected Site')] = "The third closest site to the location"
- gdf.iloc[3, gdf.columns.get_loc('Selected Site')] = "The fourth closest site to the location"
-
+ gdf['Selected Site'] = 'No'
+ gdf.iloc[0, gdf.columns.get_loc('Selected Site')] = (
+ 'The closest site to the location'
+ )
+ gdf.iloc[1, gdf.columns.get_loc('Selected Site')] = (
+ 'The second closest site to the location'
+ )
+ gdf.iloc[2, gdf.columns.get_loc('Selected Site')] = (
+ 'The third closest site to the location'
+ )
+ gdf.iloc[3, gdf.columns.get_loc('Selected Site')] = (
+ 'The fourth closest site to the location'
+ )
if plot:
- import plotly.express as px
+ import plotly.express as px # noqa: PLC0415
+
# plot the sites
if LocationFlag:
centerlat = lat
centerlon = lon
if RegionFlag:
- if information["RegionShape"] == "Circle":
+ if information['RegionShape'] == 'Circle':
centerlat = lat
centerlon = lon
- if information["RegionShape"] == "Rectangle":
- centerlat = (min_lat + max_lat)/2
- centerlon = (min_lon + max_lon)/2
-
- gdf["Color"] = gdf["Color"].replace({"blue": "All sites", "red": "Selected sites"})
- fig = px.scatter_mapbox(gdf,
- lat="Latitude",
- lon="Longitude",
- color="Color",
- hover_name = gdf.index,
- hover_data = {"Selected Site": True},
- color_discrete_map={"All sites": '#1f77b4', "Selected sites": "#ff7f0e"},
- center={"lat": centerlat, "lon": centerlon},
- zoom=15,
- mapbox_style="open-street-map"
- )
+ if information['RegionShape'] == 'Rectangle':
+ centerlat = (min_lat + max_lat) / 2
+ centerlon = (min_lon + max_lon) / 2
+
+ gdf['Color'] = gdf['Color'].replace(
+ {'blue': 'All sites', 'red': 'Selected sites'}
+ )
+ fig = px.scatter_mapbox(
+ gdf,
+ lat='Latitude',
+ lon='Longitude',
+ color='Color',
+ hover_name=gdf.index,
+ hover_data={'Selected Site': True},
+ color_discrete_map={'All sites': '#1f77b4', 'Selected sites': '#ff7f0e'},
+ center={'lat': centerlat, 'lon': centerlon},
+ zoom=15,
+ mapbox_style='open-street-map',
+ )
# save the html file
# fig.write_html("Istanbul.html")
@@ -142,61 +161,53 @@ def getStations(information,plot=False,show=False):
fig.show()
if RegionFlag:
- gdf = gdf[gdf["Selected Site"] == "Yes"]
-
- if LocationFlag:
- gdf = gdf[gdf["Selected Site"] != "No"]
-
- gdf.drop(columns=["geometry", "Color", "Selected Site"]).to_csv(f'TapisFiles/selectedSites.csv', index=True)
- json.dump(information, open("TapisFiles/information.json", "w"), indent=2)
-
-
-
-
+ gdf = gdf[gdf['Selected Site'] == 'Yes']
+ if LocationFlag:
+ gdf = gdf[gdf['Selected Site'] != 'No']
+ gdf.drop(columns=['geometry', 'Color', 'Selected Site']).to_csv(
+ 'TapisFiles/selectedSites.csv', index=True
+ )
+ json.dump(information, open('TapisFiles/information.json', 'w'), indent=2) # noqa: PLW1514, PTH123, SIM115
def haversine(lat1, lon1, lat2, lon2):
- """
- Calculate the great circle distance between two points
+ """Calculate the great circle distance between two points
on the earth specified in decimal degrees.
- """
+ """ # noqa: D205
# Convert decimal degrees to radians
lat1, lon1, lat2, lon2 = map(math.radians, [lat1, lon1, lat2, lon2])
# Haversine formula
dlon = lon2 - lon1
dlat = lat2 - lat1
- a = math.sin(dlat/2)**2 + math.cos(lat1) * math.cos(lat2) * math.sin(dlon/2)**2
- c = 2 * math.atan2(math.sqrt(a), math.sqrt(1-a))
+ a = (
+ math.sin(dlat / 2) ** 2
+ + math.cos(lat1) * math.cos(lat2) * math.sin(dlon / 2) ** 2
+ )
+ c = 2 * math.atan2(math.sqrt(a), math.sqrt(1 - a))
r = 6371 # Radius of the Earth in kilometers
distance = r * c
- return distance
-
-
+ return distance # noqa: RET504
-if __name__ == "__main__":
+if __name__ == '__main__':
information = {
- "RegionFlag": True,
- "LocationFlag": False,
- "RegionShape": "Rectangle",
- "min_lat": 40.9938,
- "max_lat": 40.9945,
- "min_lon": 28.8978,
- "max_lon": 28.8995,
- "BedrockFlag": True,
- "directory": "Events",
- "number_of_realizations": 1,
- "TopoFlag" : True,
- }
+ 'RegionFlag': True,
+ 'LocationFlag': False,
+ 'RegionShape': 'Rectangle',
+ 'min_lat': 40.9938,
+ 'max_lat': 40.9945,
+ 'min_lon': 28.8978,
+ 'max_lon': 28.8995,
+ 'BedrockFlag': True,
+ 'directory': 'Events',
+ 'number_of_realizations': 1,
+ 'TopoFlag': True,
+ }
# change the directory to the file location
- os.chdir(os.path.dirname(os.path.realpath(__file__)))
- getStations(information,plot=False,show=False)
-
-
-
-
+ os.chdir(os.path.dirname(os.path.realpath(__file__))) # noqa: PTH120
+ getStations(information, plot=False, show=False)
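As a quick sanity check on the haversine() helper reformatted above, here is a standalone sketch that recomputes the great-circle distance for two nearby points; the coordinates are illustrative and are not taken from the station catalog.
# Self-contained copy of the haversine formula used by IstanbulStations.py (illustrative inputs).
import math

def haversine(lat1, lon1, lat2, lon2):
    # convert to radians, apply the haversine formula, scale by the Earth radius in km
    lat1, lon1, lat2, lon2 = map(math.radians, [lat1, lon1, lat2, lon2])
    a = math.sin((lat2 - lat1) / 2) ** 2 + math.cos(lat1) * math.cos(lat2) * math.sin((lon2 - lon1) / 2) ** 2
    return 6371 * 2 * math.atan2(math.sqrt(a), math.sqrt(1 - a))

# A 0.01-degree move in latitude is roughly 1.1 km.
print(round(haversine(40.9940, 28.8990, 41.0040, 28.8990), 1))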
diff --git a/modules/createEVENT/Istanbul/TapisFiles/Istanbul.py b/modules/createEVENT/Istanbul/TapisFiles/Istanbul.py
index 0d828b969..d6cff38a3 100644
--- a/modules/createEVENT/Istanbul/TapisFiles/Istanbul.py
+++ b/modules/createEVENT/Istanbul/TapisFiles/Istanbul.py
@@ -1,93 +1,91 @@
-# %%
+# %% # noqa: CPY001, D100, INP001
import os
-directory = "./Events"
-# check if the directory exists
-if not os.path.exists(directory):
- os.makedirs(directory)
-import numpy as np
-import pandas as pd
-import json
-
+directory = './Events'
+# check if the directory exists
+if not os.path.exists(directory): # noqa: PTH110
+ os.makedirs(directory) # noqa: PTH103
+import json # noqa: E402
-def Istanbul(information):
+import numpy as np # noqa: E402
+import pandas as pd # noqa: E402
- TopoFlag = information['TopoFlag']
- LocationFlag = information['LocationFlag']
- numSiteGM = information['number_of_realizations']
+def Istanbul(information): # noqa: N802, D103
+ TopoFlag = information['TopoFlag'] # noqa: N806
+ LocationFlag = information['LocationFlag'] # noqa: N806
+ numSiteGM = information['number_of_realizations'] # noqa: N806
- randomFLag = True ;# if True, the realizations are selected randomly, otherwise, the first numSiteGM sites are selected
- maxnumSiteGM = 57;
- numSiteGM = min(numSiteGM, maxnumSiteGM) ;# number of realizations
+ randomFLag = True # if True, the realizations are selected randomly, otherwise, the first numSiteGM sites are selected # noqa: N806
+ maxnumSiteGM = 57 # noqa: N806
+ numSiteGM = min(numSiteGM, maxnumSiteGM) # number of realizations # noqa: N806
- directory = "./Events"
+ directory = './Events'
# check if the directory exists
- if not os.path.exists(directory):
- os.makedirs(directory)
+ if not os.path.exists(directory): # noqa: PTH110
+ os.makedirs(directory) # noqa: PTH103
# changing realizations order
- indicies = list(range(1,maxnumSiteGM+1));
+ indices = list(range(1, maxnumSiteGM + 1))
if randomFLag:
- np.random.shuffle(indicies)
- indicies = indicies[:numSiteGM]
-
+ np.random.shuffle(indices)
+ indices = indices[:numSiteGM]
gdf = pd.read_csv('selectedSites.csv', index_col=0)
-
-
- if "TopoFlag":
+ if TopoFlag:
# IstanbulDirectory = '/corral-repl/projects/NHERI/published/PRJ-3712/GM_data/GM_topo/'
- IstanbulDirectory = '/home/jovyan/work/projects/PRJ-3712/GM_data/GM_topo/'
- else :
+ IstanbulDirectory = '/home/jovyan/work/projects/PRJ-3712/GM_data/GM_topo/' # noqa: N806
+ else:
# IstanbulDirectory = '/corral-repl/projects/NHERI/published/PRJ-3712/GM_data/GM_flat/'
- IstanbulDirectory = '/home/jovyan/work/projects/PRJ-3712/GM_data/GM_flat/'
-
+ IstanbulDirectory = '/home/jovyan/work/projects/PRJ-3712/GM_data/GM_flat/' # noqa: N806
-
- # print number of cites
- print(f'Number of sites: {len(gdf)}')
- for realization in indicies:
+ # print number of sites
+ print(f'Number of sites: {len(gdf)}') # noqa: T201
+ for realization in indices:
# load the data frame from the hdf file
if TopoFlag:
- df = pd.HDFStore(f'{IstanbulDirectory}/Istanbul_sim{realization}.hdf5', 'r')
- else :
- df = pd.HDFStore(f'{IstanbulDirectory}/Istanbul_sim{realization}_flat.hdf5', 'r')
-
+ df = pd.HDFStore( # noqa: PD901
+ f'{IstanbulDirectory}/Istanbul_sim{realization}.hdf5', 'r'
+ )
+ else:
+ df = pd.HDFStore( # noqa: PD901
+ f'{IstanbulDirectory}/Istanbul_sim{realization}_flat.hdf5', 'r'
+ )
+
# return df
for site in gdf.index:
- time = df["/Ax_data"][0]
+ time = df['/Ax_data'][0]
motiondict = {
- "Data": "Time history generated using Istanbul simulations",
- "dT" : time[1] - time[0],
- "name": f"site{site}_{realization}",
- "numSteps" : len(time),
- "accel_x" : df["Ax_data"][site+1].tolist(),
- "accel_y" : df["Ay_data"][site+1].tolist(),
- "accel_z" : df["Az_data"][site+1].tolist(),
- }
- write_motion(site, "./Events", realization, motiondict)
- gdf['filename'] = f"site_{site}_{realization}"
+ 'Data': 'Time history generated using Istanbul simulations',
+ 'dT': time[1] - time[0],
+ 'name': f'site{site}_{realization}',
+ 'numSteps': len(time),
+ 'accel_x': df['Ax_data'][site + 1].tolist(),
+ 'accel_y': df['Ay_data'][site + 1].tolist(),
+ 'accel_z': df['Az_data'][site + 1].tolist(),
+ }
+ write_motion(site, './Events', realization, motiondict)
+ gdf['filename'] = f'site_{site}_{realization}'
if LocationFlag:
- break;
-
+ break
+
if LocationFlag:
gdf = gdf.loc[[0]]
# save the gdf to a csv file in the directory just "Station Name", "Latitude", "Longitude"
- gdf["Bedrock_Vs"] = 750
- gdf[['filename', 'Latitude', 'Longitude', 'Bedrock_Vs']].to_csv(f'{directory}/sites.csv', index=False)
-
-
-def write_motion(site_name, directory, i, motiondict):
- filename = f"{directory}/site_{site_name}_{i}.json"
- with open(filename, 'w') as f:
- json.dump(motiondict, f, indent=2)
-
+ gdf['Bedrock_Vs'] = 750
+ gdf[['filename', 'Latitude', 'Longitude', 'Bedrock_Vs']].to_csv(
+ f'{directory}/sites.csv', index=False
+ )
+
+def write_motion(site_name, directory, i, motiondict): # noqa: D103
+ filename = f'{directory}/site_{site_name}_{i}.json'
+ with open(filename, 'w') as f: # noqa: PLW1514, PTH123
+ json.dump(motiondict, f, indent=2)
-# get the location flag
-with open("information.json", "r") as file:
+# get the location flag
+with open('information.json') as file: # noqa: PLW1514, PTH123
information = json.load(file)
Istanbul(information)
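For reference, a hedged sketch of the per-site record that write_motion() above serializes; the numeric values are placeholders rather than data from the published simulation archive.
# Illustrative shape of one site_<name>_<realization>.json record (placeholder values).
import json

motion = {
    'Data': 'Time history generated using Istanbul simulations',
    'dT': 0.01,                      # time step taken from the '/Ax_data' time vector
    'name': 'site12_3',              # follows f'site{site}_{realization}'
    'numSteps': 4,
    'accel_x': [0.0, 0.02, -0.01, 0.0],
    'accel_y': [0.0, 0.01, 0.0, 0.0],
    'accel_z': [0.0, 0.0, 0.01, 0.0],
}
with open('site_12_3.json', 'w') as f:
    json.dump(motion, f, indent=2)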
diff --git a/modules/createEVENT/LLNL_SW4/LLNL_SW4.py b/modules/createEVENT/LLNL_SW4/LLNL_SW4.py
index 001e90be5..7eb290415 100644
--- a/modules/createEVENT/LLNL_SW4/LLNL_SW4.py
+++ b/modules/createEVENT/LLNL_SW4/LLNL_SW4.py
@@ -1,104 +1,117 @@
-import argparse, posixpath, json, sys
+import argparse # noqa: CPY001, D100, INP001
+import json
+import posixpath
+import sys
+
import numpy as np
-def write_RV(BIM_file, EVENT_file, data_dir):
-
- with open(BIM_file, 'r') as f:
- bim_data = json.load(f)
-
- event_file = {
- 'randomVariables': [],
- 'Events': []
- }
-
- events = bim_data['Events']['Events']
-
- if len(events) > 1:
- event_file['randomVariables'].append({
- 'distribution': 'discrete_design_set_string',
- 'name': 'eventID',
- 'value': 'RV.eventID',
- 'elements': []
- })
- event_file['Events'].append({
- 'type': 'Seismic',
- 'subtype': 'SW4_Event',
- 'event_id': 'RV.eventID'
- })
- else:
- event_file['Events'].append({
- 'type': 'Seismic',
- 'subtype': 'SW4_Event',
- 'event_id': 0,
- })
-
- RV_elements = []
- for event in events:
- if event['EventClassification'] == 'Earthquake':
- RV_elements.append(event['fileName'])
-
- event_file['randomVariables'][0]['elements'] = RV_elements
-
- # load the first event
- event_file['Events'][0].update(load_record(events[0]['fileName'], data_dir, empty=True))
-
- with open(EVENT_file, 'w') as f:
- json.dump(event_file, f, indent=2)
-
-def load_record(fileName, data_dir, scale_factor=1.0, empty=False):
-
- fileName = fileName.split('x')[0]
-
- with open(posixpath.join(data_dir,'{}.json'.format(fileName)), 'r') as f:
- event_data = json.load(f)
-
- event_dic = {
- 'name': fileName,
- 'dT' : event_data['dT'],
- 'numSteps': len(event_data['data_x']),
- 'timeSeries': [],
- 'pattern': []
- }
-
- if not empty:
- for i, (src_label, tar_label) in enumerate(zip(['data_x', 'data_y'],
- ['accel_X', 'accel_Y'])):
- if src_label in event_data.keys():
-
- event_dic['timeSeries'].append({
- 'name': tar_label,
- 'type': 'Value',
- 'dT': event_data['dT'],
- 'data': list(np.array(event_data[src_label])*scale_factor)
- })
- event_dic['pattern'].append({
- 'type': 'UniformAcceleration',
- 'timeSeries': tar_label,
- 'dof': i+1
- })
-
- return event_dic
-
-def get_records(BIM_file, EVENT_file, data_dir):
-
- with open(BIM_file, 'r') as f:
- bim_file = json.load(f)
-
- with open(EVENT_file, 'r') as f:
- event_file = json.load(f)
-
- event_id = event_file['Events'][0]['event_id']
-
- scale_factor = dict([(evt['fileName'], evt.get('factor',1.0)) for evt in bim_file["Events"]["Events"]])[event_id]
-
- event_file['Events'][0].update(
- load_record(event_id, data_dir, scale_factor))
-
- with open(EVENT_file, 'w') as f:
- json.dump(event_file, f, indent=2)
-if __name__ == '__main__':
+def write_RV(BIM_file, EVENT_file, data_dir): # noqa: N802, N803, D103
+ with open(BIM_file) as f: # noqa: PLW1514, PTH123
+ bim_data = json.load(f)
+
+ event_file = {'randomVariables': [], 'Events': []}
+
+ events = bim_data['Events']['Events']
+ if len(events) > 1:
+ event_file['randomVariables'].append(
+ {
+ 'distribution': 'discrete_design_set_string',
+ 'name': 'eventID',
+ 'value': 'RV.eventID',
+ 'elements': [],
+ }
+ )
+ event_file['Events'].append(
+ {'type': 'Seismic', 'subtype': 'SW4_Event', 'event_id': 'RV.eventID'}
+ )
+ else:
+ event_file['Events'].append(
+ {
+ 'type': 'Seismic',
+ 'subtype': 'SW4_Event',
+ 'event_id': 0,
+ }
+ )
+
+ RV_elements = [] # noqa: N806
+ for event in events:
+ if event['EventClassification'] == 'Earthquake':
+ RV_elements.append(event['fileName']) # noqa: PERF401
+
+ event_file['randomVariables'][0]['elements'] = RV_elements
+
+ # load the first event
+ event_file['Events'][0].update(
+ load_record(events[0]['fileName'], data_dir, empty=True)
+ )
+
+ with open(EVENT_file, 'w') as f: # noqa: PLW1514, PTH123
+ json.dump(event_file, f, indent=2)
+
+
+def load_record(fileName, data_dir, scale_factor=1.0, empty=False): # noqa: FBT002, N803, D103
+ fileName = fileName.split('x')[0] # noqa: N806
+
+ with open(posixpath.join(data_dir, f'{fileName}.json')) as f: # noqa: PLW1514, PTH123
+ event_data = json.load(f)
+
+ event_dic = {
+ 'name': fileName,
+ 'dT': event_data['dT'],
+ 'numSteps': len(event_data['data_x']),
+ 'timeSeries': [],
+ 'pattern': [],
+ }
+
+ if not empty:
+ for i, (src_label, tar_label) in enumerate(
+ zip(['data_x', 'data_y'], ['accel_X', 'accel_Y'])
+ ):
+ if src_label in event_data.keys(): # noqa: SIM118
+ event_dic['timeSeries'].append(
+ {
+ 'name': tar_label,
+ 'type': 'Value',
+ 'dT': event_data['dT'],
+ 'data': list(np.array(event_data[src_label]) * scale_factor),
+ }
+ )
+ event_dic['pattern'].append(
+ {
+ 'type': 'UniformAcceleration',
+ 'timeSeries': tar_label,
+ 'dof': i + 1,
+ }
+ )
+
+ return event_dic
+
+
+def get_records(BIM_file, EVENT_file, data_dir): # noqa: N803, D103
+ with open(BIM_file) as f: # noqa: PLW1514, PTH123
+ bim_file = json.load(f)
+
+ with open(EVENT_file) as f: # noqa: PLW1514, PTH123
+ event_file = json.load(f)
+
+ event_id = event_file['Events'][0]['event_id']
+
+ scale_factor = dict( # noqa: C404
+ [
+ (evt['fileName'], evt.get('factor', 1.0))
+ for evt in bim_file['Events']['Events']
+ ]
+ )[event_id]
+
+ event_file['Events'][0].update(load_record(event_id, data_dir, scale_factor))
+
+ with open(EVENT_file, 'w') as f: # noqa: PLW1514, PTH123
+ json.dump(event_file, f, indent=2)
+
+
+if __name__ == '__main__':
parser = argparse.ArgumentParser()
parser.add_argument('--filenameAIM')
parser.add_argument('--filenameEVENT')
@@ -107,6 +120,8 @@ def get_records(BIM_file, EVENT_file, data_dir):
args = parser.parse_args()
if args.getRV:
- sys.exit(write_RV(args.filenameAIM, args.filenameEVENT, args.pathSW4results))
+ sys.exit(write_RV(args.filenameAIM, args.filenameEVENT, args.pathSW4results))
else:
- sys.exit(get_records(args.filenameAIM, args.filenameEVENT, args.pathSW4results))
+ sys.exit(
+ get_records(args.filenameAIM, args.filenameEVENT, args.pathSW4results)
+ )
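For orientation, a hedged sketch of the EVENT file skeleton that write_RV() above emits when the AIM file lists more than one earthquake record; the record names are placeholders, not actual SW4 output files.
# Skeleton of the EVENT JSON produced by write_RV() (hypothetical record names).
event_file = {
    'randomVariables': [
        {
            'distribution': 'discrete_design_set_string',
            'name': 'eventID',
            'value': 'RV.eventID',
            'elements': ['recordA', 'recordB'],  # one entry per Earthquake event
        }
    ],
    'Events': [
        {
            'type': 'Seismic',
            'subtype': 'SW4_Event',
            'event_id': 'RV.eventID',
            # load_record(..., empty=True) then merges in 'name', 'dT',
            # 'numSteps', and empty 'timeSeries'/'pattern' lists
        }
    ],
}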
diff --git a/modules/createEVENT/LowRiseTPU/LowRiseTPU.cpp b/modules/createEVENT/LowRiseTPU/LowRiseTPU.cpp
index fbd20188d..ef35e3b70 100644
--- a/modules/createEVENT/LowRiseTPU/LowRiseTPU.cpp
+++ b/modules/createEVENT/LowRiseTPU/LowRiseTPU.cpp
@@ -94,7 +94,7 @@ main(int argc, char **argv) {
json_t *generalInformation = json_object_get(input, "GeneralInformation");
json_t *inputEventsArray = json_object_get(input, "Events");
if (generalInformation == NULL || inputEventsArray == NULL) {
- std::cerr << "FATAL ERROR - input file conatins no Events key-pair\n";
+ std::cerr << "FATAL ERROR - input file contains no Events key-pair\n";
exit(-1);
}
@@ -128,7 +128,7 @@ main(int argc, char **argv) {
json_object_set(units,"time",json_string("sec"));
json_object_set(outputEvent,"units",units);
- // call function to fill in event details .. depends on getRV flag what is acually done
+ // call function to fill in event details .. depends on getRV flag what is actually done
addEvent(generalInformation, inputEvent, outputEvent, doRV);
json_array_append(outputEventsArray, outputEvent);
@@ -180,7 +180,7 @@ int addEvent(json_t *generalInfo, json_t *currentEvent, json_t *outputEvent, boo
widthJO == NULL ||
depthJO == NULL ||
storiesJO == NULL ) {
- std::cerr << "ERROR missing Information from GeneralInformation (height, width, stories all neeed)\n";
+ std::cerr << "ERROR missing Information from GeneralInformation (height, width, stories all needed)\n";
return -2;
}
@@ -260,7 +260,7 @@ int addEvent(json_t *generalInfo, json_t *currentEvent, json_t *outputEvent, boo
// std::cerr << "lU, lT: " << lambdaU << " " << lambdaT << "\n";;
// std::cerr << "dT: " << dT << "numSteps: " << numSteps << " " << modelFrequency << " " << lambdaT << "\n";
- // fmk,sy - Note we are outputing in "kN" unit, opensees will follow this unit instead of GI units
+ // fmk,sy - Note we are outputting in "kN" unit, opensees will follow this unit instead of GI units
double loadFactor = airDensity*0.5*windSpeed*windSpeed / 1000.; //double loadFactor = airDensity*0.5*windSpeed*windSpeed;
// std::cerr << "\n LOAD FACTOR: " << loadFactor << "\n";
@@ -338,7 +338,7 @@ int addEvent(json_t *generalInfo, json_t *currentEvent, json_t *outputEvent, boo
}
//
- // for each tap determine factors fr moments and forces for the buiding asuming a mesh discretization
+ // for each tap determine factors for moments and forces for the building assuming a mesh discretization
//
int numDivisionX = 10;
@@ -544,7 +544,7 @@ int addEvent(json_t *generalInfo, json_t *currentEvent, json_t *outputEvent, boo
json_t *storiesJO = json_object_get(generalInfo,"stories");
if (storiesJO == NULL ) {
- std::cerr << "ERROR missing Information from GeneralInformation (height, width, stories all neeed)\n";
+ std::cerr << "ERROR missing Information from GeneralInformation (height, width, stories all needed)\n";
return -2;
}
@@ -647,7 +647,7 @@ int addEvent(json_t *generalInfo, json_t *currentEvent, json_t *outputEvent, boo
//
// function to add factors for forces and moment contribution coefficients for taps to building floor
-// determine coeffiecients for each tap for a building face. This is done by going over each story of
+// determine coefficients for each tap for a building face. This is done by going over each story of
// For each story break into numDiv X numDiv segments. For each segment assume point load at center
// segment and equal in mag to area of segment and using simply supported beam formula determine force
// at floor below and floor above. Based on distance from center line of story determine acting moments
@@ -737,9 +737,9 @@ int addForcesFace(TAP *theTaps, int numTaps,
// function to find nearest tap
// inputs: theTAPS: array of Taps,
// numTaps: number of taps in array
-// xLoc, yLoc: is location of inut point
+// xLoc, yLoc: is location of input point
// face: id of face
-// output: pinter to nearest TAp in the array, NULL if no taps with face
+// output: pointer to nearest TAP in the array, NULL if no taps with face
//
TAP *findNearestTAP(TAP *theTAPS, int numTaps, double locX, double locY, int face) {
diff --git a/modules/createEVENT/M9/M9API.py b/modules/createEVENT/M9/M9API.py
index 9e240afb1..3e1abb4e5 100644
--- a/modules/createEVENT/M9/M9API.py
+++ b/modules/createEVENT/M9/M9API.py
@@ -1,138 +1,147 @@
-# %%
+# %% # noqa: CPY001, D100, INP001
+import subprocess # noqa: S404
import sys
-import subprocess
from importlib import metadata as importlib_metadata
#
# need to make sure we have some python modules .. identify missing and install with python -m pip
#
-modules_reqd = {'numpy','pandas','geopandas', 'shapely', 'requests', 'argparse'}
+modules_reqd = {'numpy', 'pandas', 'geopandas', 'shapely', 'requests', 'argparse'}
modules_installed = set()
for x in importlib_metadata.distributions():
- try:
+ try: # noqa: SIM105
modules_installed.add(x.name)
- except:
+ except: # noqa: S110, PERF203, E722
pass
# If installed packages could not be detected, use importlib_metadata backport:
if not modules_installed:
import importlib_metadata
+
for x in importlib_metadata.distributions():
- try:
+ try: # noqa: SIM105
modules_installed.add(x.name)
- except:
- pass
+ except: # noqa: S110, PERF203, E722
+ pass
missing = modules_reqd - modules_installed
if missing:
- python = sys.executable
- print('\nInstalling packages required for running this widget...')
- subprocess.check_call([python, '-m', 'pip', 'install', '--user', *missing],
- stdout=subprocess.DEVNULL)
- print('Successfully installed the required packages')
+ python = sys.executable
+ print('\nInstalling packages required for running this widget...') # noqa: T201
+ subprocess.check_call( # noqa: S603
+ [python, '-m', 'pip', 'install', '--user', *missing],
+ stdout=subprocess.DEVNULL,
+ )
+ print('Successfully installed the required packages') # noqa: T201
#
# now import our packages
#
-import os
-import numpy as np
-import pandas as pd
-import geopandas as gpd
-from shapely.geometry import Polygon, Point
-import requests
-import json
-import math
+import json # noqa: E402
+import math # noqa: E402
+import os # noqa: E402
+import geopandas as gpd # noqa: E402
+import numpy as np # noqa: E402
+import pandas as pd # noqa: E402
+import requests # noqa: E402
+from shapely.geometry import Point, Polygon # noqa: E402
-#%%
-def M9(information):
- """
- the default is to select sites from all M9 sites, but
+
+# %%
+def M9(information): # noqa: C901, N802
+ """The default is to select sites from all M9 sites, but
grid type (options: A, B, C, D, E, Y, and Z, can be empty)
(ref: https://sites.uw.edu/pnet/m9-simulations/about-m9-simulations/extent-of-model/)
- """
-
+ """ # noqa: D205, D400, D401
site_location = information['LocationFlag']
-
+
if site_location:
+ lat = information['lat']
+ lon = information['lon']
- lat = information['lat']
- lon = information['lon']
-
else:
-
# its a regional location specified
-
- if information["RegionShape"] == "Rectangle":
+
+ if information['RegionShape'] == 'Rectangle':
# get the region of the desirable sites
min_lat = information['min_lat']
max_lat = information['max_lat']
min_lon = information['min_lon']
max_lon = information['max_lon']
- if information["RegionShape"] == "Circle":
+ if information['RegionShape'] == 'Circle':
# get the region of the desirable sites
lat = information['lat']
lon = information['lon']
radius = information['radius']
- grid_type = information['grid_type'] ;# grid type (options: A, B, C, D, E, Y, and Z, can be "all")
-
- randomFLag = True ;# if True, the realizations are selected randomly, otherwise, the first numSiteGM sites are selected
- numSiteGM = information['number_of_realizations'] ;# number of realizations
- maxnumSiteGM = 30;
- numSiteGM = min(numSiteGM, maxnumSiteGM) ;# number of realizations
+ grid_type = information[
+ 'grid_type'
+ ] # grid type (options: A, B, C, D, E, Y, and Z, can be "all")
+
+ randomFLag = True # if True, the realizations are selected randomly, otherwise, the first numSiteGM sites are selected # noqa: N806
+ numSiteGM = information[ # noqa: N806
+ 'number_of_realizations'
+ ] # number of realizations
+ maxnumSiteGM = 30 # noqa: N806
+ numSiteGM = min(numSiteGM, maxnumSiteGM) # number of realizations # noqa: N806
# changing realizations order
- indicies = list(range(maxnumSiteGM));
+ indices = list(range(maxnumSiteGM))
if randomFLag:
- np.random.shuffle(indicies)
- indicies = indicies[:numSiteGM]
+ np.random.shuffle(indices)
+ indices = indices[:numSiteGM]
- directory = information['directory'] ;# directory to save the data
+ directory = information['directory'] # directory to save the data
# create the directory if it does not exist
- if not os.path.exists(directory):
- os.makedirs(directory)
-
- ## remove the files in the directory
- #os.system(f'rm -r {directory}/*')
+ if not os.path.exists(directory): # noqa: PTH110
+ os.makedirs(directory) # noqa: PTH103
+
+ # remove the files in the directory
+ # os.system(f'rm -r {directory}/*')
# load the sites information
- path_script = os.path.dirname(os.path.abspath(__file__))
+ path_script = os.path.dirname(os.path.abspath(__file__)) # noqa: PTH100, PTH120
path_site_file = path_script + '/M9_sites.csv'
- print(path_site_file)
- df_allSites = pd.read_csv(path_site_file,index_col=False)
-
+ print(path_site_file) # noqa: T201
+ df_allSites = pd.read_csv(path_site_file, index_col=False) # noqa: N806
+
# create a geopandas dataframe
- gdf = gpd.GeoDataFrame(df_allSites,
- geometry=gpd.points_from_xy(df_allSites.Longitude, df_allSites.Latitude))
-
- # deelte the df_allSites to save memory
+ gdf = gpd.GeoDataFrame(
+ df_allSites,
+ geometry=gpd.points_from_xy(df_allSites.Longitude, df_allSites.Latitude),
+ )
+
+ # delete the df_allSites to save memory
del df_allSites
# limitation of each grid type (minx, miny, maxx, maxy)
- Gridboxes = {
- "A":(-123.2147269, 46.90566609, -121.1246222, 48.31489086),
- "B":(-128.4741831, 40.26059707, -121.0785236, 49.1785082),
- "C":(-123.2568915, 45.19862425, -122.2252305, 45.92126901),
- "D":(-123.3293999, 48.9970249, -122.3929914, 49.35841212),
- "E":(-123.8686827, 48.31165993, -123.1877513, 48.70158023),
- "Y":(-127.7497215, 40.41719958, -120.6351016, 50.13127206),
- "Z":(-127.7578767, 40.41524519, -121.2331997, 49.27983578),
- "All":(-128.4741831, 40.26059707, -121.0785236, 49.35841212)
+ Gridboxes = { # noqa: N806
+ 'A': (-123.2147269, 46.90566609, -121.1246222, 48.31489086),
+ 'B': (-128.4741831, 40.26059707, -121.0785236, 49.1785082),
+ 'C': (-123.2568915, 45.19862425, -122.2252305, 45.92126901),
+ 'D': (-123.3293999, 48.9970249, -122.3929914, 49.35841212),
+ 'E': (-123.8686827, 48.31165993, -123.1877513, 48.70158023),
+ 'Y': (-127.7497215, 40.41719958, -120.6351016, 50.13127206),
+ 'Z': (-127.7578767, 40.41524519, -121.2331997, 49.27983578),
+ 'All': (-128.4741831, 40.26059707, -121.0785236, 49.35841212),
}
# create a polygon for the allowable region
- region = Polygon([ (Gridboxes[grid_type][0], Gridboxes[grid_type][1]),
- (Gridboxes[grid_type][0], Gridboxes[grid_type][3]),
- (Gridboxes[grid_type][2], Gridboxes[grid_type][3]),
- (Gridboxes[grid_type][2], Gridboxes[grid_type][1])])
-
+ region = Polygon(
+ [
+ (Gridboxes[grid_type][0], Gridboxes[grid_type][1]),
+ (Gridboxes[grid_type][0], Gridboxes[grid_type][3]),
+ (Gridboxes[grid_type][2], Gridboxes[grid_type][3]),
+ (Gridboxes[grid_type][2], Gridboxes[grid_type][1]),
+ ]
+ )
if grid_type != 'All':
# filter the sites whose Station Name starts with the grid type
@@ -140,177 +149,187 @@ def M9(information):
else:
gdf = gdf[gdf['Station Name'].str.startswith(('A', 'B', 'C', 'D', 'E'))]
-
if site_location:
-
# first check if the location is within the region
if not region.contains(Point(lon, lat)):
- print('The location is not in the selected grid region')
- print('Please select a location in the region or change the grid type to "All"')
- return None
- else:
+ print('The location is not in the selected grid region') # noqa: T201
+ print( # noqa: T201
+ 'Please select a location in the region or change the grid type to "All"'
+ )
+ return
+ else: # noqa: RET505
# find the nearest site to the location
gdf['distance'] = gdf.distance(Point(lon, lat))
gdf = gdf.sort_values('distance')
gdf = gdf.iloc[0:4]
else:
-
# its regional
-
- if information["RegionShape"] == "Rectangle":
-
+
+ if information['RegionShape'] == 'Rectangle':
# Create a polygton using min_lat, max_lat, min_lon, max_lon
- RegionofInterset = Polygon([(min_lon, min_lat), (min_lon, max_lat), (max_lon, max_lat), (max_lon, min_lat)])
+ RegionofInterset = Polygon( # noqa: N806
+ [
+ (min_lon, min_lat),
+ (min_lon, max_lat),
+ (max_lon, max_lat),
+ (max_lon, min_lat),
+ ]
+ )
-
# Check whether the RegionofInterset and the region intersect
if not region.intersects(RegionofInterset):
- print('The selected region is not in the selected grid region')
- print('Please select a region in in the or change the grid type to "All"')
- return None
- else:
+ print('The selected region is not in the selected grid region') # noqa: T201
+ print( # noqa: T201
+ 'Please select a region in the grid region or change the grid type to "All"'
+ )
+ return
+ else: # noqa: RET505
# Check if the RegionofInterset is in the region
if not region.contains(RegionofInterset):
- print('The selected region is not entirely in the selected grid region')
- print("The selected region will be changed to the intersection of the selected region and the grid region")
- RegionofInterset = region.intersection(RegionofInterset)
+ print( # noqa: T201
+ 'The selected region is not entirely in the selected grid region'
+ )
+ print( # noqa: T201
+ 'The selected region will be changed to the intersection of the selected region and the grid region'
+ )
+ RegionofInterset = region.intersection(RegionofInterset) # noqa: N806
else:
- print('The selected region is entirely in the selected grid region')
+ print( # noqa: T201
+ 'The selected region is entirely in the selected grid region'
+ )
# now filter the sites that are in the regionofInterset
- gdf["Color"] = ["red" if RegionofInterset.contains(gdf.geometry[i]) else "blue" for i in range(len(gdf))]
+ gdf['Color'] = [
+ 'red' if RegionofInterset.contains(gdf.geometry[i]) else 'blue'
+ for i in range(len(gdf))
+ ]
gdf = gdf[gdf.within(RegionofInterset)]
-
-
- if information["RegionShape"] == "Circle":
- # chage the gdf to calculte the distance from the center of the circle in km
- gdf['distance'] = gdf.apply(lambda row: haversine(lat, lon, row['Latitude'], row['Longitude']), axis=1)
- gdf["Color"] = ["red" if row['distance'] < radius else "blue" for _, row in gdf.iterrows()]
- gdf = gdf[gdf['distance'] < radius]
-
- APIFLAG = information['APIFLAG'] # if the APIFLAG is True, we use M9 API to get the motion data
+ if information['RegionShape'] == 'Circle':
+ # change the gdf to calculate the distance from the center of the circle in km
+ gdf['distance'] = gdf.apply(
+ lambda row: haversine(lat, lon, row['Latitude'], row['Longitude']),
+ axis=1,
+ )
+ gdf['Color'] = [
+ 'red' if row['distance'] < radius else 'blue'
+ for _, row in gdf.iterrows()
+ ]
+ gdf = gdf[gdf['distance'] < radius]
+ APIFLAG = information[ # noqa: N806
+ 'APIFLAG'
+ ] # if the APIFLAG is True, we use M9 API to get the motion data
if APIFLAG:
- #query flags
- ResponseSpectra = True
+ # query flags
+ ResponseSpectra = True # noqa: N806
# get the motion data from the API
for _, site in gdf.iterrows():
# get the motion data from the API
- site_name = site['Station Name']
- jobURL = f'https://m9-broadband-download-rwqks6gbba-uc.a.run.app/getMotionFromStationName?StationName={site_name}&ResponseSpectra={ResponseSpectra}'
+ site_name = site['Station Name']
+ jobURL = f'https://m9-broadband-download-rwqks6gbba-uc.a.run.app/getMotionFromStationName?StationName={site_name}&ResponseSpectra={ResponseSpectra}' # noqa: N806
res_success = False
- iter_num = 0
- max_iter = 5
- print(f'Getting the motion data for {site_name}')
+ iter_num = 0
+ max_iter = 5
+ print(f'Getting the motion data for {site_name}') # noqa: T201
- while not(res_success) and (iter_num < max_iter):
- res = requests.get(jobURL)
- res_success = res.status_code == 200
- iter_num = iter_num + 1
+ while not (res_success) and (iter_num < max_iter):
+ res = requests.get(jobURL) # noqa: S113
+ res_success = res.status_code == 200 # noqa: PLR2004
+ iter_num = iter_num + 1 # noqa: PLR6104
if res_success:
- gmData = res.json()
- for i in indicies:
- write_motion(site_name, directory, i,gmData[i],APIFLAG)
- gdf["filename"] = f"{site_name}_{i}"
+ gmData = res.json() # noqa: N806
+ for i in indices:
+ write_motion(site_name, directory, i, gmData[i], APIFLAG)
+ gdf['filename'] = f'{site_name}_{i}'
if site_location:
break
else:
- print(f'URL not replied for {site_name}, skipping for now')
+ print(f'URL not replied for {site_name}, skipping for now') # noqa: T201
if site_location:
- print("trying the next nearest site")
-
- if site_location and not(res_success):
- print("None of the nearest sites have motion data")
- print("Please check your internet connection or try again later")
+ print('trying the next nearest site') # noqa: T201
+ if site_location and not (res_success):
+ print('None of the nearest sites have motion data') # noqa: T201
+ print('Please check your internet connection or try again later') # noqa: T201
- if not(APIFLAG):
- indicies = ["030"]
- for i in indicies:
- for _,site in gdf.iterrows():
+ if not (APIFLAG):
+ indices = ['030']
+ for i in indices:
+ for _, site in gdf.iterrows():
# find the first Letter of the site name
site_name = site['Station Name']
lat = site['Latitude']
lon = site['Longitude']
- firstLetter = site_name[0]
- filename = f'./csz{indicies[0]}/{firstLetter}/Xarray.nc'
+ firstLetter = site_name[0] # noqa: N806
+ filename = f'./csz{indices[0]}/{firstLetter}/Xarray.nc'
# reading the nc file
- data = xr.open_dataset(filename)
+ data = xr.open_dataset(filename) # noqa: F821
subset = data.sel(lat=lat, lon=lon, method='nearest')
- dt = data.coords['time'].values
+ dt = data.coords['time'].values # noqa: PD011
dt = dt[1] - dt[0]
sitedata = {
- "dT" : dt,
- "accel_x" : subset['acc_x'].values.tolist(),
- "accel_y" : subset['acc_y'].values.tolist(),
- "accel_z" : subset['acc_z'].values.tolist(),
+ 'dT': dt,
+ 'accel_x': subset['acc_x'].values.tolist(), # noqa: PD011
+ 'accel_y': subset['acc_y'].values.tolist(), # noqa: PD011
+ 'accel_z': subset['acc_z'].values.tolist(), # noqa: PD011
}
write_motion(site_name, directory, i, sitedata, APIFLAG)
- gdf["filename"] = f"{site_name}_{i}"
-
-
+ gdf['filename'] = f'{site_name}_{i}'
# save the gdf to a csv file in the directory just "Station Name", "Latitude", "Longitude"
- gdf[['filename', 'Latitude', 'Longitude']].to_csv(f'{directory}/sites.csv', index=False)
+ gdf[['filename', 'Latitude', 'Longitude']].to_csv(
+ f'{directory}/sites.csv', index=False
+ )
-def write_motion(site_name, directory, i, motiondict, APIFLAG):
- filename = f"{directory}/{site_name}_{i}.json"
+def write_motion(site_name, directory, i, motiondict, APIFLAG): # noqa: N803, D103
+ filename = f'{directory}/{site_name}_{i}.json'
if APIFLAG:
accel_x = 'AccelerationHistory-EW'
accel_y = 'AccelerationHistory-NS'
accel_z = 'AccelerationHistory-Vert'
- dt = 'TimeStep'
+ dt = 'TimeStep'
datatowrite = {
- "name":f"{site_name}_{i}",
- "dT" : motiondict[dt],
- "numSteps": len(motiondict[accel_x]),
- "accel_x":motiondict[accel_x],
- "accel_y":motiondict[accel_y],
- "accel_z":motiondict[accel_z]
- }
+ 'name': f'{site_name}_{i}',
+ 'dT': motiondict[dt],
+ 'numSteps': len(motiondict[accel_x]),
+ 'accel_x': motiondict[accel_x],
+ 'accel_y': motiondict[accel_y],
+ 'accel_z': motiondict[accel_z],
+ }
else:
datatowrite = motiondict
- datatowrite['Data'] = "Time history generated using M9 simulations"
- datatowrite['name'] = f"{site_name}_{i}"
-
+ datatowrite['Data'] = 'Time history generated using M9 simulations'
+ datatowrite['name'] = f'{site_name}_{i}'
- with open(filename, 'w') as f:
+ with open(filename, 'w') as f: # noqa: PLW1514, PTH123
json.dump(datatowrite, f, indent=2)
-
-
-
def haversine(lat1, lon1, lat2, lon2):
-
- """
- Calculate the great circle distance between two points
+ """Calculate the great circle distance between two points
on the earth specified in decimal degrees.
- """
+ """ # noqa: D205
# Convert decimal degrees to radians
lat1, lon1, lat2, lon2 = map(math.radians, [lat1, lon1, lat2, lon2])
# Haversine formula
dlon = lon2 - lon1
dlat = lat2 - lat1
- a = math.sin(dlat/2)**2 + math.cos(lat1) * math.cos(lat2) * math.sin(dlon/2)**2
- c = 2 * math.atan2(math.sqrt(a), math.sqrt(1-a))
+ a = (
+ math.sin(dlat / 2) ** 2
+ + math.cos(lat1) * math.cos(lat2) * math.sin(dlon / 2) ** 2
+ )
+ c = 2 * math.atan2(math.sqrt(a), math.sqrt(1 - a))
r = 6371 # Radius of the Earth in kilometers
distance = r * c
- return distance
-
-
-
-
-
-
+ return distance # noqa: RET504
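As a small illustration of the grid-region containment test used in M9() above, the sketch below rebuilds the 'C' grid bounding box from the Gridboxes table and checks two made-up site locations against it.
# Hedged sketch of the shapely Point-in-Polygon check from M9(); test points are hypothetical.
from shapely.geometry import Point, Polygon

minx, miny, maxx, maxy = (-123.2568915, 45.19862425, -122.2252305, 45.92126901)  # grid 'C'
region = Polygon([(minx, miny), (minx, maxy), (maxx, maxy), (maxx, miny)])

print(region.contains(Point(-122.7, 45.5)))  # True: inside grid C
print(region.contains(Point(-121.0, 45.5)))  # False: east of grid C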
diff --git a/modules/createEVENT/M9/M9App.py b/modules/createEVENT/M9/M9App.py
index 86d6812c9..69d6cd13c 100644
--- a/modules/createEVENT/M9/M9App.py
+++ b/modules/createEVENT/M9/M9App.py
@@ -1,117 +1,127 @@
-#%%
-import os
-from subprocess import PIPE, run
+# %% # noqa: CPY001, D100, INP001
import json
-from datetime import datetime
+import os
import time
+from datetime import datetime
+from subprocess import PIPE, run # noqa: S404
# change the directory to the current directory
-os.chdir(os.path.dirname(os.path.realpath(__file__)))
+os.chdir(os.path.dirname(os.path.realpath(__file__))) # noqa: PTH120
+
# %%
# helper function to call the tapis command
-def call(command):
+def call(command): # noqa: D103
command = command.split()
- command.append("-f")
- command.append("json")
- result = run(command, stdout=PIPE, stderr=PIPE, universal_newlines=True)
+ command.append('-f')
+ command.append('json')
+ result = run(command, stdout=PIPE, stderr=PIPE, text=True, check=False) # noqa: S603, UP022
result = json.loads(result.stdout)
- return result
-
-# %%
-def Submit_tapis_job():
- with open("TapisFiles/information.json", "r") as file:
- information = json.load(file)
- file.close()
-
- profile = call("tapis profiles show self")
- username = profile['username']
- email = profile['email']
- savingDirectory = information["directory"]
-
- if not os.path.exists(savingDirectory):
- os.makedirs(savingDirectory)
-
- print("Uploading files to designsafe storage")
- call(f"tapis files mkdir agave://designsafe.storage.default/{username}/ physics_based")
- call(f"tapis files mkdir agave://designsafe.storage.default/{username}/physics_based M9")
-
- call(f"tapis files upload agave://designsafe.storage.default/{username}/physics_based/M9/ TapisFiles/M9.py ")
- call(f"tapis files upload agave://designsafe.storage.default/{username}/physics_based/M9/ TapisFiles/information.json ")
- call(f"tapis files upload agave://designsafe.storage.default/{username}/physics_based/M9/ TapisFiles/selectedSites.csv ")
-
-
- jobdict = {
- 'name': '',
- 'appId': 'physicsBasedMotionApp-0.0.1',
- 'nodeCount': 1,
- 'processorsPerNode': 1,
- 'archive': True,
- 'archiveOnAppError':True,
- 'inputs': {'inputDirectory': ''},
- 'parameters' : {'inputScript':'M9.py'},
- 'maxRunTime': '00:01:00',
- 'memoryPerNode': '1GB',
- 'archiveSystem':'designsafe.storage.default',
- 'notifications': [{'url' : '','event': '*'}]
- }
-
- # Generate a timestamp to append to the job name an
- timestamp = datetime.now().strftime("%Y%m%d%H%M%S")
- jobname = f"PhysicsBasedMotion_M9_{username}_{timestamp}"
-
- print("Submitting job")
- jobdict['name'] = jobname
- jobdict['inputs']['inputDirectory'] = f"agave://designsafe.storage.default/{username}/physics_based/M9/"
- jobdict['notifications'][0]['url'] = f"{email}"
-
- # submit the job
- jobfile = "./TapisFiles/job.json"
- json.dump(jobdict, open(jobfile, "w"), indent=2)
- res = call(f"tapis jobs submit -F {jobfile}")
-
- # delete the job file
- # os.remove(jobfile)
-
- res = call(f"tapis jobs search --name eq {jobname}")
- jobid = res[0]["id"]
- status = ""
- last_status = ""
- count = 0
- while status != "FINISHED":
- status = call (f"tapis jobs status {jobid} ")["status"]
- if count == 0:
- last_status = status
- print("Job status: ", status)
- count += 1
- if last_status != status:
- print("Job status: ", status)
- last_status = status
- if status == "FAILED":
- print("Job failed")
- break
-
- time.sleep(10)
- # # %%
-
-
- # # %%
- print("Downloading extracted motions")
- archivePath = call(f"tapis jobs show {jobid}")["archivePath"]
- archivePath = f"agave://designsafe.storage.default/{archivePath}/M9"
-
- files = call(f"tapis files list {archivePath}/Events/")
- if len(files) == 0:
- print("No files in the archive")
- else:
- command = f"tapis files download {archivePath}/Events/ -W {savingDirectory}/"
- command = command.split()
- run(command, stdout=PIPE, stderr=PIPE, universal_newlines=True)
-
- return res
-
-if __name__ == "__main__":
- Submit_tapis_job()
-
+ return result # noqa: RET504
+# %%
+def Submit_tapis_job(): # noqa: N802, D103
+ with open('TapisFiles/information.json') as file: # noqa: PLW1514, PTH123
+ information = json.load(file)
+ file.close()
+
+ profile = call('tapis profiles show self')
+ username = profile['username']
+ email = profile['email']
+ savingDirectory = information['directory'] # noqa: N806
+
+ if not os.path.exists(savingDirectory): # noqa: PTH110
+ os.makedirs(savingDirectory) # noqa: PTH103
+
+ print('Uploading files to designsafe storage') # noqa: T201
+ call(
+ f'tapis files mkdir agave://designsafe.storage.default/{username}/ physics_based'
+ )
+ call(
+ f'tapis files mkdir agave://designsafe.storage.default/{username}/physics_based M9'
+ )
+
+ call(
+ f'tapis files upload agave://designsafe.storage.default/{username}/physics_based/M9/ TapisFiles/M9.py '
+ )
+ call(
+ f'tapis files upload agave://designsafe.storage.default/{username}/physics_based/M9/ TapisFiles/information.json '
+ )
+ call(
+ f'tapis files upload agave://designsafe.storage.default/{username}/physics_based/M9/ TapisFiles/selectedSites.csv '
+ )
+
+ jobdict = {
+ 'name': '',
+ 'appId': 'physicsBasedMotionApp-0.0.1',
+ 'nodeCount': 1,
+ 'processorsPerNode': 1,
+ 'archive': True,
+ 'archiveOnAppError': True,
+ 'inputs': {'inputDirectory': ''},
+ 'parameters': {'inputScript': 'M9.py'},
+ 'maxRunTime': '00:01:00',
+ 'memoryPerNode': '1GB',
+ 'archiveSystem': 'designsafe.storage.default',
+ 'notifications': [{'url': '', 'event': '*'}],
+ }
+
+    # Generate a timestamp to append to the job name
+ timestamp = datetime.now().strftime('%Y%m%d%H%M%S') # noqa: DTZ005
+ jobname = f'PhysicsBasedMotion_M9_{username}_{timestamp}'
+
+ print('Submitting job') # noqa: T201
+ jobdict['name'] = jobname
+ jobdict['inputs']['inputDirectory'] = (
+ f'agave://designsafe.storage.default/{username}/physics_based/M9/'
+ )
+ jobdict['notifications'][0]['url'] = f'{email}'
+
+ # submit the job
+ jobfile = './TapisFiles/job.json'
+ json.dump(jobdict, open(jobfile, 'w'), indent=2) # noqa: PLW1514, PTH123, SIM115
+ res = call(f'tapis jobs submit -F {jobfile}')
+
+ # delete the job file
+ # os.remove(jobfile)
+
+ res = call(f'tapis jobs search --name eq {jobname}')
+ jobid = res[0]['id']
+ status = ''
+ last_status = ''
+ count = 0
+ while status != 'FINISHED':
+ status = call(f'tapis jobs status {jobid} ')['status']
+ if count == 0:
+ last_status = status
+ print('Job status: ', status) # noqa: T201
+ count += 1
+ if last_status != status:
+ print('Job status: ', status) # noqa: T201
+ last_status = status
+ if status == 'FAILED':
+ print('Job failed') # noqa: T201
+ break
+
+ time.sleep(10)
+ # # %%
+
+ # # %%
+ print('Downloading extracted motions') # noqa: T201
+ archivePath = call(f'tapis jobs show {jobid}')['archivePath'] # noqa: N806
+ archivePath = f'agave://designsafe.storage.default/{archivePath}/M9' # noqa: N806
+
+ files = call(f'tapis files list {archivePath}/Events/')
+ if len(files) == 0:
+ print('No files in the archive') # noqa: T201
+ else:
+ command = f'tapis files download {archivePath}/Events/ -W {savingDirectory}/'
+ command = command.split()
+ run(command, stdout=PIPE, stderr=PIPE, text=True, check=False) # noqa: S603, UP022
+
+ return res
+
+
+if __name__ == '__main__':
+ Submit_tapis_job()
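For reference, the call helper above appends '-f json' to the given Tapis CLI command and returns the parsed JSON, so callers can index the result directly. A minimal sketch, assuming the tapis CLI is installed and authenticated:

    profile = call('tapis profiles show self')  # runs: tapis profiles show self -f json
    print(profile['username'], profile['email'])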
diff --git a/modules/createEVENT/M9/M9App2.py b/modules/createEVENT/M9/M9App2.py
index 9f139f689..87f3c6c75 100644
--- a/modules/createEVENT/M9/M9App2.py
+++ b/modules/createEVENT/M9/M9App2.py
@@ -1,38 +1,58 @@
-#%%
-import os
+# %% # noqa: CPY001, D100, INP001
import json
-from datetime import datetime
+import os
import time
+from datetime import datetime
+
from agavepy.agave import Agave
# change the directory to the current directory
-os.chdir(os.path.dirname(os.path.realpath(__file__)))
+os.chdir(os.path.dirname(os.path.realpath(__file__))) # noqa: PTH120
+
-def Submit_tapis_job():
+def Submit_tapis_job(): # noqa: N802, D103
ag = Agave.restore()
- with open("TapisFiles/information.json", "r") as file:
- information = json.load(file)
+ with open('TapisFiles/information.json') as file: # noqa: PLW1514, PTH123
+ information = json.load(file)
file.close()
-
# %%
- profile = ag.profiles.get()
- username = profile['username']
- savingDirectory = information["directory"]
- if not os.path.exists(savingDirectory):
- os.makedirs(savingDirectory)
-
+ profile = ag.profiles.get()
+ username = profile['username']
+ savingDirectory = information['directory'] # noqa: N806
+ if not os.path.exists(savingDirectory): # noqa: PTH110
+ os.makedirs(savingDirectory) # noqa: PTH103
- print("Uploading files to designsafe storage")
- ag.files.manage(systemId="designsafe.storage.default", filePath=f"{username}/", body={'action': 'mkdir','path': "physics_based"})
- ag.files.manage(systemId="designsafe.storage.default", filePath=f"{username}/physics_based", body={'action': 'mkdir','path': "M9"})
+ print('Uploading files to designsafe storage') # noqa: T201
+ ag.files.manage(
+ systemId='designsafe.storage.default',
+ filePath=f'{username}/',
+ body={'action': 'mkdir', 'path': 'physics_based'},
+ )
+ ag.files.manage(
+ systemId='designsafe.storage.default',
+ filePath=f'{username}/physics_based',
+ body={'action': 'mkdir', 'path': 'M9'},
+ )
# ag.files_mkdir(systemId="designsafe.storage.default", filePath=f"{username}/physics_based/Istanbul2")
- with open("TapisFiles/M9.py", 'rb') as file:
- result = ag.files.importData(filePath= f"{username}/physics_based/M9/",fileToUpload=file,systemId='designsafe.storage.default')
- with open("TapisFiles/information.json", 'rb') as file:
- result = ag.files.importData(filePath= f"{username}/physics_based/M9/",fileToUpload=file,systemId='designsafe.storage.default')
- with open("TapisFiles/selectedSites.csv", 'rb') as file:
- result = ag.files.importData(filePath= f"{username}/physics_based/M9/",fileToUpload=file,systemId='designsafe.storage.default')
+ with open('TapisFiles/M9.py', 'rb') as file: # noqa: PTH123
+ result = ag.files.importData(
+ filePath=f'{username}/physics_based/M9/',
+ fileToUpload=file,
+ systemId='designsafe.storage.default',
+ )
+ with open('TapisFiles/information.json', 'rb') as file: # noqa: PTH123
+ result = ag.files.importData(
+ filePath=f'{username}/physics_based/M9/',
+ fileToUpload=file,
+ systemId='designsafe.storage.default',
+ )
+ with open('TapisFiles/selectedSites.csv', 'rb') as file: # noqa: PTH123
+ result = ag.files.importData( # noqa: F841
+ filePath=f'{username}/physics_based/M9/',
+ fileToUpload=file,
+ systemId='designsafe.storage.default',
+ )
# %%
jobdict = {
@@ -41,61 +61,66 @@ def Submit_tapis_job():
'nodeCount': 1,
'processorsPerNode': 1,
'archive': True,
- 'archiveOnAppError':True,
+ 'archiveOnAppError': True,
'inputs': {'inputDirectory': ''},
- 'parameters' : {'inputScript':'M9.py'},
+ 'parameters': {'inputScript': 'M9.py'},
'maxRunTime': '00:01:00',
'memoryPerNode': '1GB',
- 'archiveSystem':'designsafe.storage.default',
+ 'archiveSystem': 'designsafe.storage.default',
}
# Generate a timestamp to append to the job name an
- timestamp = datetime.now().strftime("%Y%m%d%H%M%S")
- jobname = f"PhysicsBasedMotion_M9_{username}_{timestamp}"
+ timestamp = datetime.now().strftime('%Y%m%d%H%M%S') # noqa: DTZ005
+ jobname = f'PhysicsBasedMotion_M9_{username}_{timestamp}'
- print("Submitting job")
- # submit the job
+ print('Submitting job') # noqa: T201
+ # submit the job
jobdict['name'] = jobname
- jobdict['inputs']['inputDirectory'] = f"agave://designsafe.storage.default/{username}/physics_based/M9/"
+ jobdict['inputs']['inputDirectory'] = (
+ f'agave://designsafe.storage.default/{username}/physics_based/M9/'
+ )
# %%
res = ag.jobs.submit(body=jobdict)
jobid = res['id']
- status = ""
- last_status = ""
+ status = ''
+ last_status = ''
count = 0
- while status != "FINISHED":
+ while status != 'FINISHED':
status = ag.jobs.getStatus(jobId=jobid)['status']
if count == 0:
last_status = status
- print("Job status: ", status)
+ print('Job status: ', status) # noqa: T201
count += 1
if last_status != status:
- print("Job status: ", status)
+ print('Job status: ', status) # noqa: T201
last_status = status
- if status == "FAILED":
- print("Job failed")
+ if status == 'FAILED':
+ print('Job failed') # noqa: T201
break
time.sleep(10)
-
# %%
- print("Downloading extracted motions")
- archivePath = ag.jobs.get(jobId=jobid)["archivePath"]
- archivePath = f"{archivePath}/M9/Events/"
+ print('Downloading extracted motions') # noqa: T201
+ archivePath = ag.jobs.get(jobId=jobid)['archivePath'] # noqa: N806
+ archivePath = f'{archivePath}/M9/Events/' # noqa: N806
- files = ag.files.list(filePath=archivePath, systemId="designsafe.storage.default")
+ files = ag.files.list(
+ filePath=archivePath, systemId='designsafe.storage.default'
+ )
# %%
if len(files) <= 1:
- print("No files in the archive")
- else :
+ print('No files in the archive') # noqa: T201
+ else:
for file in files:
filename = file['name']
- if filename == ".":
+ if filename == '.':
continue
- path = f"{archivePath}/{filename}"
- res = ag.files.download(filePath=path, systemId="designsafe.storage.default")
- with open(f"{savingDirectory}/{filename}", "wb") as f:
+ path = f'{archivePath}/{filename}'
+ res = ag.files.download(
+ filePath=path, systemId='designsafe.storage.default'
+ )
+ with open(f'{savingDirectory}/{filename}', 'wb') as f: # noqa: FURB103, PTH123
f.write(res.content)
# %%
diff --git a/modules/createEVENT/M9/M9Run.py b/modules/createEVENT/M9/M9Run.py
index bada19c66..5df3a439e 100644
--- a/modules/createEVENT/M9/M9Run.py
+++ b/modules/createEVENT/M9/M9Run.py
@@ -1,64 +1,68 @@
-import os
-import M9Stations
-import M9App2
-import M9API
-import argparse
+import argparse # noqa: CPY001, D100, INP001
+import os
-if __name__ == "__main__":
+import M9API
+import M9App2
+import M9Stations
- information = {
- 'LocationFlag': True,
- 'RegionFlag' : False,
- 'APIFLAG' : False,
- 'lat' : 47.65290010591034,
- 'lon' : -122.30531923052669,
- 'RegionShape' : "Circle",
- 'min_lat' : 47.58,
- 'max_lat' : 47.62,
- 'min_lon' : -122.38,
- 'max_lon' : -122.34,
- 'radius' : 10,
- 'grid_type' : 'A',
- 'directory' : 'Events',
+if __name__ == '__main__':
+ information = {
+ 'LocationFlag': True,
+ 'RegionFlag': False,
+ 'APIFLAG': False,
+ 'lat': 47.65290010591034,
+ 'lon': -122.30531923052669,
+ 'RegionShape': 'Circle',
+ 'min_lat': 47.58,
+ 'max_lat': 47.62,
+ 'min_lon': -122.38,
+ 'max_lon': -122.34,
+ 'radius': 10,
+ 'grid_type': 'A',
+ 'directory': 'Events',
'number_of_realizations': 1,
}
-
#
# create a parser to parse input args & update default information struct
#
-
+
parser = argparse.ArgumentParser()
- parser.add_argument('--lat', help="Latitude", required=False)
- parser.add_argument('--lng', help="Longitude", required=False)
- parser.add_argument('-g', '--gridType', help="grid Type", required=False)
- parser.add_argument('-n', '--number', help="number of realizations", required=False)
- parser.add_argument('-o', '--output', help="number of realizations", required=False)
- parser.add_argument('-a', "--API",help="API FLAG", required=False)
+ parser.add_argument('--lat', help='Latitude', required=False)
+ parser.add_argument('--lng', help='Longitude', required=False)
+ parser.add_argument('-g', '--gridType', help='grid Type', required=False)
+ parser.add_argument(
+ '-n', '--number', help='number of realizations', required=False
+ )
+ parser.add_argument(
+        '-o', '--output', help='output directory', required=False
+ )
+ parser.add_argument('-a', '--API', help='API FLAG', required=False)
args = parser.parse_args()
-
+
if args.lat:
- information['lat']=float(args.lat)
+ information['lat'] = float(args.lat)
if args.lng:
- information['lon']=float(args.lng)
+ information['lon'] = float(args.lng)
if args.output:
- information['directory']=args.output
+ information['directory'] = args.output
if args.number:
- information['number_of_realizations']=int(args.number)
+ information['number_of_realizations'] = int(args.number)
if args.gridType:
- information['grid_type']=args.gridType
+ information['grid_type'] = args.gridType
if args.API == 'true':
information['APIFLAG'] = True
-
#
# go get the motions
#
- os.chdir(os.path.dirname(os.path.realpath(__file__)))
+ os.chdir(os.path.dirname(os.path.realpath(__file__))) # noqa: PTH120
if information['APIFLAG']:
- print("Using API for extracting motions:\n This may take a while. Please be patient.")
+ print( # noqa: T201
+ 'Using API for extracting motions:\n This may take a while. Please be patient.'
+ )
M9API.M9(information)
else:
- M9Stations.getStations(information,plot=False,show=False)
+ M9Stations.getStations(information, plot=False, show=False)
M9App2.Submit_tapis_job()
- exit()
\ No newline at end of file
+ exit() # noqa: PLR1722
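For reference, the parser above only overrides the defaults already set in the information dict; a hypothetical invocation (values illustrative) would look like:

    python M9Run.py --lat 47.6529 --lng -122.3053 -g A -n 1 -o Events -a false

Passing '-a true' switches to the M9 API path; anything else leaves APIFLAG False and a Tapis job is submitted instead.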
diff --git a/modules/createEVENT/M9/M9Stations.py b/modules/createEVENT/M9/M9Stations.py
index 75b877da9..aa393b17f 100644
--- a/modules/createEVENT/M9/M9Stations.py
+++ b/modules/createEVENT/M9/M9Stations.py
@@ -1,70 +1,73 @@
-# %%
+# %% # noqa: CPY001, D100, INP001
# required libraries numpy, geoandas,pandas,plotly
+import json
import math
+
import geopandas as gpd
import pandas as pd
-from shapely.geometry import Polygon, Point
-import json
-
-def getStations(information, plot=False, show = False):
+from shapely.geometry import Point, Polygon
- RegionFlag = information['RegionFlag']
- LocationFlag = information['LocationFlag']
+def getStations(information, plot=False, show=False): # noqa: FBT002, C901, N802, D103
+ RegionFlag = information['RegionFlag'] # noqa: N806
+ LocationFlag = information['LocationFlag'] # noqa: N806
if LocationFlag:
# get the location of the site
- lat = information['lat']
- lon = information['lon']
-
+ lat = information['lat']
+ lon = information['lon']
+
if RegionFlag:
- if information["RegionShape"] == "Rectangle":
+ if information['RegionShape'] == 'Rectangle':
# get the region of the desirable sites
min_lat = information['min_lat']
max_lat = information['max_lat']
min_lon = information['min_lon']
max_lon = information['max_lon']
- if information["RegionShape"] == "Circle":
+ if information['RegionShape'] == 'Circle':
# get the region of the desirable sites
lat = information['lat']
lon = information['lon']
radius = information['radius']
-
-
- grid_type = information['grid_type'] ;# grid type (options: A, B, C, D, E, Y, and Z, can be "all")
-
-
+ grid_type = information[
+ 'grid_type'
+ ] # grid type (options: A, B, C, D, E, Y, and Z, can be "all")
# load the sites information
- df_allSites = pd.read_csv('M9_sites.csv',index_col=False)
+ df_allSites = pd.read_csv('M9_sites.csv', index_col=False) # noqa: N806
# create a geopandas dataframe
- gdf = gpd.GeoDataFrame(df_allSites,
- geometry=gpd.points_from_xy(df_allSites.Longitude, df_allSites.Latitude))
-
- # deelte the df_allSites to save memory
+ gdf = gpd.GeoDataFrame(
+ df_allSites,
+ geometry=gpd.points_from_xy(df_allSites.Longitude, df_allSites.Latitude),
+ )
+
+ # delete the df_allSites to save memory
del df_allSites
# limitation of each grid type (minx, miny, maxx, maxy)
- Gridboxes = {
- "A":(-123.2147269, 46.90566609, -121.1246222, 48.31489086),
- "B":(-128.4741831, 40.26059707, -121.0785236, 49.1785082),
- "C":(-123.2568915, 45.19862425, -122.2252305, 45.92126901),
- "D":(-123.3293999, 48.9970249, -122.3929914, 49.35841212),
- "E":(-123.8686827, 48.31165993, -123.1877513, 48.70158023),
- "Y":(-127.7497215, 40.41719958, -120.6351016, 50.13127206),
- "Z":(-127.7578767, 40.41524519, -121.2331997, 49.27983578),
- "All":(-128.4741831, 40.26059707, -121.0785236, 49.35841212)
+ Gridboxes = { # noqa: N806
+ 'A': (-123.2147269, 46.90566609, -121.1246222, 48.31489086),
+ 'B': (-128.4741831, 40.26059707, -121.0785236, 49.1785082),
+ 'C': (-123.2568915, 45.19862425, -122.2252305, 45.92126901),
+ 'D': (-123.3293999, 48.9970249, -122.3929914, 49.35841212),
+ 'E': (-123.8686827, 48.31165993, -123.1877513, 48.70158023),
+ 'Y': (-127.7497215, 40.41719958, -120.6351016, 50.13127206),
+ 'Z': (-127.7578767, 40.41524519, -121.2331997, 49.27983578),
+ 'All': (-128.4741831, 40.26059707, -121.0785236, 49.35841212),
}
# create a polygon for the allowable region
- region = Polygon([ (Gridboxes[grid_type][0], Gridboxes[grid_type][1]),
- (Gridboxes[grid_type][0], Gridboxes[grid_type][3]),
- (Gridboxes[grid_type][2], Gridboxes[grid_type][3]),
- (Gridboxes[grid_type][2], Gridboxes[grid_type][1])])
-
+ region = Polygon(
+ [
+ (Gridboxes[grid_type][0], Gridboxes[grid_type][1]),
+ (Gridboxes[grid_type][0], Gridboxes[grid_type][3]),
+ (Gridboxes[grid_type][2], Gridboxes[grid_type][3]),
+ (Gridboxes[grid_type][2], Gridboxes[grid_type][1]),
+ ]
+ )
if grid_type != 'All':
# filter the site that the Station Name is start with the grid type
@@ -72,92 +75,122 @@ def getStations(information, plot=False, show = False):
else:
gdf = gdf[gdf['Station Name'].str.startswith(('A', 'B', 'C', 'D', 'E'))]
-
if LocationFlag:
# first check if the location is inner the regoin
if not region.contains(Point(lon, lat)):
- print('The location is not in the selected grid region')
- print('Please select a location in the region or change the grid type to "All"')
- return None
- else:
+ print('The location is not in the selected grid region') # noqa: T201
+ print( # noqa: T201
+ 'Please select a location in the region or change the grid type to "All"'
+ )
+ return
+ else: # noqa: RET505
# find the nearest site to the location
gdf['distance'] = gdf.distance(Point(lon, lat))
gdf = gdf.sort_values('distance')
- gdf["Color"] = "blue"
+ gdf['Color'] = 'blue'
for i in range(4):
- gdf.iloc[i, gdf.columns.get_loc('Color')] = "red"
-
-
-
-
+ gdf.iloc[i, gdf.columns.get_loc('Color')] = 'red'
if RegionFlag:
- if information["RegionShape"] == "Rectangle":
+ if information['RegionShape'] == 'Rectangle':
# Create a polygton using min_lat, max_lat, min_lon, max_lon
- RegionofInterset = Polygon([(min_lon, min_lat), (min_lon, max_lat), (max_lon, max_lat), (max_lon, min_lat)])
+ RegionofInterset = Polygon( # noqa: N806
+ [
+ (min_lon, min_lat),
+ (min_lon, max_lat),
+ (max_lon, max_lat),
+ (max_lon, min_lat),
+ ]
+ )
-
# Check that if the RegionofInterset and the region has intersection
if not region.intersects(RegionofInterset):
- print('The selected region is not in the selected grid region')
- print('Please select a region in in the or change the grid type to "All"')
- return None
- else:
+ print('The selected region is not in the selected grid region') # noqa: T201
+ print( # noqa: T201
+                'Please select a region within the grid region or change the grid type to "All"'
+ )
+ return
+ else: # noqa: RET505
# Check if the RegionofInterset is in the region
if not region.contains(RegionofInterset):
- print('The selected region is not entirely in the selected grid region')
- print("The selected region will be changed to the intersection of the selected region and the grid region")
- RegionofInterset = region.intersection(RegionofInterset)
+ print( # noqa: T201
+ 'The selected region is not entirely in the selected grid region'
+ )
+ print( # noqa: T201
+ 'The selected region will be changed to the intersection of the selected region and the grid region'
+ )
+ RegionofInterset = region.intersection(RegionofInterset) # noqa: N806
else:
- print('The selected region is entirely in the selected grid region')
+ print( # noqa: T201
+ 'The selected region is entirely in the selected grid region'
+ )
# now filter the sites that are in the regionofInterset
- gdf["Color"] = ["red" if RegionofInterset.contains(gdf.geometry[i]) else "blue" for i in range(len(gdf))]
-
-
-
- if information["RegionShape"] == "Circle":
- # chage the gdf to calculte the distance from the center of the circle in km
- gdf['distance'] = gdf.apply(lambda row: haversine(lat, lon, row['Latitude'], row['Longitude']), axis=1)
- gdf["Color"] = ["red" if row['distance'] < radius else "blue" for _, row in gdf.iterrows()]
-
-
+ gdf['Color'] = [
+ 'red' if RegionofInterset.contains(gdf.geometry[i]) else 'blue'
+ for i in range(len(gdf))
+ ]
+
+ if information['RegionShape'] == 'Circle':
+ # change the gdf to calculate the distance from the center of the circle in km
+ gdf['distance'] = gdf.apply(
+ lambda row: haversine(lat, lon, row['Latitude'], row['Longitude']),
+ axis=1,
+ )
+ gdf['Color'] = [
+ 'red' if row['distance'] < radius else 'blue'
+ for _, row in gdf.iterrows()
+ ]
if RegionFlag:
- gdf["Selected Site"] = ["Yes" if gdf["Color"][i] == "red" else "No" for i in range(len(gdf))]
+ gdf['Selected Site'] = [
+ 'Yes' if gdf['Color'][i] == 'red' else 'No' for i in range(len(gdf))
+ ]
if LocationFlag:
- gdf["Selected Site"] = "No"
- gdf.iloc[0, gdf.columns.get_loc('Selected Site')] = "The closest site to the location"
- gdf.iloc[1, gdf.columns.get_loc('Selected Site')] = "The second closest site to the location"
- gdf.iloc[2, gdf.columns.get_loc('Selected Site')] = "The third closest site to the location"
- gdf.iloc[3, gdf.columns.get_loc('Selected Site')] = "The fourth closest site to the location"
+ gdf['Selected Site'] = 'No'
+ gdf.iloc[0, gdf.columns.get_loc('Selected Site')] = (
+ 'The closest site to the location'
+ )
+ gdf.iloc[1, gdf.columns.get_loc('Selected Site')] = (
+ 'The second closest site to the location'
+ )
+ gdf.iloc[2, gdf.columns.get_loc('Selected Site')] = (
+ 'The third closest site to the location'
+ )
+ gdf.iloc[3, gdf.columns.get_loc('Selected Site')] = (
+ 'The fourth closest site to the location'
+ )
# plot the sites
if LocationFlag:
centerlat = lat
centerlon = lon
if RegionFlag:
- if information["RegionShape"] == "Circle":
+ if information['RegionShape'] == 'Circle':
centerlat = lat
centerlon = lon
- if information["RegionShape"] == "Rectangle":
- centerlat = (min_lat + max_lat)/2
- centerlon = (min_lon + max_lon)/2
-
+ if information['RegionShape'] == 'Rectangle':
+ centerlat = (min_lat + max_lat) / 2
+ centerlon = (min_lon + max_lon) / 2
+
if plot:
- import plotly.express as px
- gdf["Color"] = gdf["Color"].replace({"blue": "All sites", "red": "Selected sites"})
- fig = px.scatter_mapbox(gdf,
- lat="Latitude",
- lon="Longitude",
- color="Color",
- hover_name = gdf.index,
- hover_data = {"Station Name": True, "Selected Site": True},
- color_discrete_map={"All sites": '#1f77b4', "Selected sites": "#ff7f0e"},
- # dont show the selected site in the legend
- center={"lat": centerlat, "lon": centerlon},
- zoom=10,
- mapbox_style="open-street-map"
- )
+ import plotly.express as px # noqa: PLC0415
+
+ gdf['Color'] = gdf['Color'].replace(
+ {'blue': 'All sites', 'red': 'Selected sites'}
+ )
+ fig = px.scatter_mapbox(
+ gdf,
+ lat='Latitude',
+ lon='Longitude',
+ color='Color',
+ hover_name=gdf.index,
+ hover_data={'Station Name': True, 'Selected Site': True},
+ color_discrete_map={'All sites': '#1f77b4', 'Selected sites': '#ff7f0e'},
+            # don't show the selected site in the legend
+ center={'lat': centerlat, 'lon': centerlon},
+ zoom=10,
+ mapbox_style='open-street-map',
+ )
# fig.show()
# save the html file
# fig.write_html("M9_sites.html")
@@ -165,42 +198,35 @@ def getStations(information, plot=False, show = False):
# fig.
if show:
fig.show()
-
+
if RegionFlag:
- gdf = gdf[gdf["Selected Site"] == "Yes"]
-
+ gdf = gdf[gdf['Selected Site'] == 'Yes']
+
if LocationFlag:
- gdf = gdf[gdf["Selected Site"] != "No"]
- gdf.drop(columns=["geometry", "Color", "Selected Site"]).to_csv(f'TapisFiles/selectedSites.csv', index=True)
- json.dump(information, open("TapisFiles/information.json", "w"), indent=2)
+ gdf = gdf[gdf['Selected Site'] != 'No']
+ gdf.drop(columns=['geometry', 'Color', 'Selected Site']).to_csv(
+ 'TapisFiles/selectedSites.csv', index=True
+ )
+ json.dump(information, open('TapisFiles/information.json', 'w'), indent=2) # noqa: PLW1514, PTH123, SIM115
# fig.show()
-
-
-
-
def haversine(lat1, lon1, lat2, lon2):
-
- """
- Calculate the great circle distance between two points
+ """Calculate the great circle distance between two points
on the earth specified in decimal degrees.
- """
+ """ # noqa: D205
# Convert decimal degrees to radians
lat1, lon1, lat2, lon2 = map(math.radians, [lat1, lon1, lat2, lon2])
# Haversine formula
dlon = lon2 - lon1
dlat = lat2 - lat1
- a = math.sin(dlat/2)**2 + math.cos(lat1) * math.cos(lat2) * math.sin(dlon/2)**2
- c = 2 * math.atan2(math.sqrt(a), math.sqrt(1-a))
+ a = (
+ math.sin(dlat / 2) ** 2
+ + math.cos(lat1) * math.cos(lat2) * math.sin(dlon / 2) ** 2
+ )
+ c = 2 * math.atan2(math.sqrt(a), math.sqrt(1 - a))
r = 6371 # Radius of the Earth in kilometers
distance = r * c
- return distance
-
-
-
-
-
-
+ return distance # noqa: RET504
diff --git a/modules/createEVENT/M9/TapisFiles/M9.py b/modules/createEVENT/M9/TapisFiles/M9.py
index a4f765801..9a0e440e4 100644
--- a/modules/createEVENT/M9/TapisFiles/M9.py
+++ b/modules/createEVENT/M9/TapisFiles/M9.py
@@ -1,135 +1,114 @@
-# %%
+# %% # noqa: CPY001, D100, INP001
+import json
import os
+
import numpy as np
import pandas as pd
-import json
import xarray as xr
+
+
# 'netcdf4', 'h5netcdf', 'scipy'
-#%%
-def M9(information):
- """
- the default is to select sites from all M9 sites, but
+# %%
+def M9(information): # noqa: N802
+ """The default is to select sites from all M9 sites, but
grid type (options: A, B, C, D, E, Y, and Z, can be empty)
(ref: https://sites.uw.edu/pnet/m9-simulations/about-m9-simulations/extent-of-model/)
- """
-
- LocationFlag = information['LocationFlag']
- numSiteGM = information['number_of_realizations']
- grid_type = information['grid_type'] ;# grid type (options: A, B, C, D, E, Y, and Z, can be "all")
-
-
- randomFLag = True ;# if True, the realizations are selected randomly, otherwise, the first numSiteGM sites are selected
- maxnumSiteGM = 30;
- numSiteGM = min(numSiteGM, maxnumSiteGM) ;# number of realizations
-
+ """ # noqa: D205, D400, D401
+ LocationFlag = information['LocationFlag'] # noqa: N806
+ numSiteGM = information['number_of_realizations'] # noqa: N806
+ grid_type = information[ # noqa: F841
+ 'grid_type'
+ ] # grid type (options: A, B, C, D, E, Y, and Z, can be "all")
+ randomFLag = True # if True, the realizations are selected randomly, otherwise, the first numSiteGM sites are selected # noqa: N806
+ maxnumSiteGM = 30 # noqa: N806
+ numSiteGM = min(numSiteGM, maxnumSiteGM) # number of realizations # noqa: N806
# changing realizations order
- # indicies = list(range(maxnumSiteGM));
- Realizations = [f"{i:03}" for i in range(1, 33)]
- indicies = np.arange(32)
+ # indices = list(range(maxnumSiteGM));
+ Realizations = [f'{i:03}' for i in range(1, 33)] # noqa: N806
+ indices = np.arange(32)
if randomFLag:
- np.random.shuffle(indicies)
- indicies = indicies[:numSiteGM]
-
- M9Path = "/home/jovyan/work/projects/PRJ-4603"
+ np.random.shuffle(indices)
+ indices = indices[:numSiteGM]
- directory = "./Events" ;# directory to save the data
- # create the directory if it does not exist
- if not os.path.exists(directory):
- os.makedirs(directory)
+ M9Path = '/home/jovyan/work/projects/PRJ-4603' # noqa: N806
+ directory = './Events' # directory to save the data
+ # create the directory if it does not exist
+ if not os.path.exists(directory): # noqa: PTH110
+ os.makedirs(directory) # noqa: PTH103
gdf = pd.read_csv('selectedSites.csv', index_col=0)
- APIFLAG = information['APIFLAG'] # if the APIFLAG is True, we use M9 API to get the motion data
-
- if not(APIFLAG):
- for i in indicies:
- for _,site in gdf.iterrows():
+ APIFLAG = information[ # noqa: N806
+ 'APIFLAG'
+ ] # if the APIFLAG is True, we use M9 API to get the motion data
+
+ if not (APIFLAG):
+ for i in indices:
+ for _, site in gdf.iterrows():
# find the first Letter of the site name
site_name = site['Station Name']
lat = site['Latitude']
lon = site['Longitude']
- firstLetter = site_name[0]
+ firstLetter = site_name[0] # noqa: N806
filename = f'{M9Path}/csz{Realizations[i]}/{firstLetter}/Xarray.nc'
# reading the nc file
data = xr.open_dataset(filename)
subset = data.sel(lat=lat, lon=lon, method='nearest')
- dt = data.coords['time'].values
+ dt = data.coords['time'].values # noqa: PD011
dt = dt[1] - dt[0]
sitedata = {
- "dT" : dt,
- "accel_x" : subset['acc_x'].values.tolist(),
- "accel_y" : subset['acc_y'].values.tolist(),
- "accel_z" : subset['acc_z'].values.tolist(),
+ 'dT': dt,
+ 'accel_x': subset['acc_x'].values.tolist(), # noqa: PD011
+ 'accel_y': subset['acc_y'].values.tolist(), # noqa: PD011
+ 'accel_z': subset['acc_z'].values.tolist(), # noqa: PD011
}
write_motion(site_name, directory, i, sitedata, APIFLAG)
- gdf["filename"] = f"{site_name}_{i}"
+ gdf['filename'] = f'{site_name}_{i}'
if LocationFlag:
- break;
+ break
if LocationFlag:
gdf = gdf.loc[[0]]
# save the gdf to a csv file in the directory just "Station Name", "Latitude", "Longitude"
- gdf[['filename', 'Latitude', 'Longitude']].to_csv(f'{directory}/sites.csv', index=False)
+ gdf[['filename', 'Latitude', 'Longitude']].to_csv(
+ f'{directory}/sites.csv', index=False
+ )
-
-def write_motion(site_name, directory, i, motiondict, APIFLAG):
- filename = f"{directory}/{site_name}_{i}.json"
+def write_motion(site_name, directory, i, motiondict, APIFLAG): # noqa: N803, D103
+ filename = f'{directory}/{site_name}_{i}.json'
if APIFLAG:
accel_x = 'AccelerationHistory-EW'
accel_y = 'AccelerationHistory-NS'
accel_z = 'AccelerationHistory-Vert'
- dt = 'TimeStep'
+ dt = 'TimeStep'
datatowrite = {
- "Data": "Time history generated using M9 simulations",
- "dT" : motiondict[dt],
- "name": f"{site_name}_{i}",
- "numSteps": len(motiondict[accel_x]),
- "accel_x": motiondict[accel_x],
- "accel_y": motiondict[accel_y],
- "accel_z": motiondict[accel_z],
+ 'Data': 'Time history generated using M9 simulations',
+ 'dT': motiondict[dt],
+ 'name': f'{site_name}_{i}',
+ 'numSteps': len(motiondict[accel_x]),
+ 'accel_x': motiondict[accel_x],
+ 'accel_y': motiondict[accel_y],
+ 'accel_z': motiondict[accel_z],
}
else:
datatowrite = motiondict
- datatowrite['Data'] = "Time history generated using M9 simulations"
- datatowrite['name'] = f"{site_name}_{i}"
-
+ datatowrite['Data'] = 'Time history generated using M9 simulations'
+ datatowrite['name'] = f'{site_name}_{i}'
- with open(filename, 'w') as f:
+ with open(filename, 'w') as f: # noqa: PLW1514, PTH123
json.dump(datatowrite, f, indent=2)
-
-
-
-if __name__ == "__main__":
+if __name__ == '__main__':
# change the directory to the directory of the current file
- os.chdir(os.path.dirname(os.path.abspath(__file__)))
+ os.chdir(os.path.dirname(os.path.abspath(__file__))) # noqa: PTH100, PTH120
- with open("information.json", "r") as file:
+ with open('information.json') as file: # noqa: PLW1514, PTH123
information = json.load(file)
M9(information)
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
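For reference, a sketch of the per-site JSON that write_motion above produces in the non-API path (values illustrative; the acceleration lists come from the Xarray subset):

    {
        'dT': 0.02,
        'accel_x': [...],
        'accel_y': [...],
        'accel_z': [...],
        'Data': 'Time history generated using M9 simulations',
        'name': 'A001_0'
    }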
diff --git a/modules/createEVENT/MPM/MPM.py b/modules/createEVENT/MPM/MPM.py
index 5184e98a5..47f0b8832 100644
--- a/modules/createEVENT/MPM/MPM.py
+++ b/modules/createEVENT/MPM/MPM.py
@@ -1,139 +1,127 @@
-from __future__ import print_function
-import os, sys
-import re
+import argparse # noqa: CPY001, D100, INP001
import json
-import argparse
-class FloorForces:
+
+class FloorForces: # noqa: D101
def __init__(self):
self.X = [0]
self.Y = [0]
self.Z = [0]
-def directionToDof(direction):
- """
- Converts direction to degree of freedom
- """
- directioMap = {
- "X": 1,
- "Y": 2,
- "Z": 3
- }
+
+def directionToDof(direction): # noqa: N802
+ """Converts direction to degree of freedom""" # noqa: D400, D401
+ directioMap = {'X': 1, 'Y': 2, 'Z': 3} # noqa: N806
return directioMap[direction]
-def addFloorForceToEvent(timeSeriesArray, patternsArray, force, direction, floor, dT):
- """
- Add force (one component) time series and pattern in the event file
- """
- seriesName = "HydroForceSeries_" + str(floor) + direction
- timeSeries = {
- "name": seriesName,
- "dT": dT,
- "type": "Value",
- "data": force
- }
-
+
+def addFloorForceToEvent( # noqa: N802
+ timeSeriesArray, # noqa: N803
+ patternsArray, # noqa: N803
+ force,
+ direction,
+ floor,
+ dT, # noqa: N803
+):
+ """Add force (one component) time series and pattern in the event file""" # noqa: D400
+ seriesName = 'HydroForceSeries_' + str(floor) + direction # noqa: N806
+ timeSeries = {'name': seriesName, 'dT': dT, 'type': 'Value', 'data': force} # noqa: N806
+
timeSeriesArray.append(timeSeries)
- patternName = "HydroForcePattern_" + str(floor) + direction
+ patternName = 'HydroForcePattern_' + str(floor) + direction # noqa: N806
pattern = {
- "name": patternName,
- "timeSeries": seriesName,
- "type": "HydroFloorLoad",
- "floor": str(floor),
- "dof": directionToDof(direction)
+ 'name': patternName,
+ 'timeSeries': seriesName,
+ 'type': 'HydroFloorLoad',
+ 'floor': str(floor),
+ 'dof': directionToDof(direction),
}
patternsArray.append(pattern)
-def addFloorForceToEvent(patternsArray, force, direction, floor):
- """
- Add force (one component) time series and pattern in the event file
- """
- seriesName = "HydroForceSeries_" + str(floor) + direction
- patternName = "HydroForcePattern_" + str(floor) + direction
+
+def addFloorForceToEvent(patternsArray, force, direction, floor): # noqa: ARG001, N802, N803, F811
+ """Add force (one component) time series and pattern in the event file""" # noqa: D400
+ seriesName = 'HydroForceSeries_' + str(floor) + direction # noqa: N806
+ patternName = 'HydroForcePattern_' + str(floor) + direction # noqa: N806
pattern = {
- "name": patternName,
- "timeSeries": seriesName,
- "type": "HydroFloorLoad",
- "floor": str(floor),
- "dof": directionToDof(direction)
+ 'name': patternName,
+ 'timeSeries': seriesName,
+ 'type': 'HydroFloorLoad',
+ 'floor': str(floor),
+ 'dof': directionToDof(direction),
}
patternsArray.append(pattern)
-def addFloorPressure(pressureArray, floor):
- """
- Add floor pressure in the event file
- """
- floorPressure = {
- "story":str(floor),
- "pressure":[0.0, 0.0]
- }
+
+def addFloorPressure(pressureArray, floor): # noqa: N802, N803
+ """Add floor pressure in the event file""" # noqa: D400
+ floorPressure = {'story': str(floor), 'pressure': [0.0, 0.0]} # noqa: N806
pressureArray.append(floorPressure)
-def writeEVENT(forces, eventFilePath):
- """
- This method writes the EVENT.json file
- """
- timeSeriesArray = []
- patternsArray = []
- pressureArray = []
- hydroEventJson = {
- "type" : "Hydro", # Using HydroUQ
- "subtype": "MPM", # Using ClaymoreUW Material Point Method
+def writeEVENT(forces, eventFilePath): # noqa: N802, N803
+ """This method writes the EVENT.json file""" # noqa: D400, D401, D404
+ timeSeriesArray = [] # noqa: N806, F841
+ patternsArray = [] # noqa: N806
+ pressureArray = [] # noqa: N806
+ hydroEventJson = { # noqa: N806
+ 'type': 'Hydro', # Using HydroUQ
+ 'subtype': 'MPM', # Using ClaymoreUW Material Point Method
# "timeSeries": [], # From GeoClawOpenFOAM
- "pattern": patternsArray,
- "pressure": pressureArray,
+ 'pattern': patternsArray,
+ 'pressure': pressureArray,
# "dT": deltaT, # From GeoClawOpenFOAM
- "numSteps": len(forces[0].X),
- "units": {
- "force": "Newton",
- "length": "Meter",
- "time": "Sec"
- }
+ 'numSteps': len(forces[0].X),
+ 'units': {'force': 'Newton', 'length': 'Meter', 'time': 'Sec'},
}
# Creating the event dictionary that will be used to export the EVENT json file
- eventDict = {"randomVariables":[], "Events": [hydroEventJson]}
+ eventDict = {'randomVariables': [], 'Events': [hydroEventJson]} # noqa: N806
# Adding floor forces
- for floorForces in forces:
+ for floorForces in forces: # noqa: N806
floor = forces.index(floorForces) + 1
- addFloorForceToEvent(patternsArray, floorForces.X, "X", floor)
- addFloorForceToEvent(patternsArray, floorForces.Y, "Y", floor)
+ addFloorForceToEvent(patternsArray, floorForces.X, 'X', floor)
+ addFloorForceToEvent(patternsArray, floorForces.Y, 'Y', floor)
# addFloorPressure(pressureArray, floor) # From GeoClawOpenFOAM
-
- with open(eventFilePath, "w", encoding='utf-8') as eventsFile:
+
+ with open(eventFilePath, 'w', encoding='utf-8') as eventsFile: # noqa: PTH123, N806
json.dump(eventDict, eventsFile)
-
-def GetFloorsCount(BIMFilePath):
- with open(BIMFilePath,'r', encoding='utf-8') as BIMFile:
+
+def GetFloorsCount(BIMFilePath): # noqa: N802, N803, D103
+ with open(BIMFilePath, encoding='utf-8') as BIMFile: # noqa: PTH123, N806
bim = json.load(BIMFile)
- return int(bim["GeneralInformation"]["stories"])
-
-if __name__ == "__main__":
+ return int(bim['GeneralInformation']['stories'])
+
+
+if __name__ == '__main__':
"""
Entry point to generate event file using HydroUQ MPM (ClaymoreUW Material Point Method)
"""
# CLI parser
- parser = argparse.ArgumentParser(description="Get sample EVENT file produced by HydroUQ MPM")
- parser.add_argument('-b', '--filenameAIM', help="BIM File", required=True)
- parser.add_argument('-e', '--filenameEVENT', help= "Event File", required=True)
- parser.add_argument('--getRV', help= "getRV", required=False, action='store_true', default=False)
+ parser = argparse.ArgumentParser(
+ description='Get sample EVENT file produced by HydroUQ MPM'
+ )
+ parser.add_argument('-b', '--filenameAIM', help='BIM File', required=True)
+ parser.add_argument('-e', '--filenameEVENT', help='Event File', required=True)
+ parser.add_argument(
+ '--getRV', help='getRV', required=False, action='store_true', default=False
+ )
# Parsing arguments
arguments, unknowns = parser.parse_known_args()
- if arguments.getRV == True:
+ if arguments.getRV == True: # noqa: E712
# Read the number of floors
- floorsCount = GetFloorsCount(arguments.filenameAIM) # Reads BIM file
+ # Reads BIM file
+ floorsCount = GetFloorsCount(arguments.filenameAIM) # noqa: N816
forces = []
- for i in range(floorsCount):
- forces.append(FloorForces())
+ for i in range(floorsCount): # noqa: B007
+ forces.append(FloorForces()) # noqa: PERF401
# Write the event file
writeEVENT(forces, arguments.filenameEVENT)
-
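For reference, a sketch of the EVENT.json skeleton that writeEVENT above emits for a single floor (only the X and Y patterns are added per floor; values illustrative):

    {
        'randomVariables': [],
        'Events': [{
            'type': 'Hydro',
            'subtype': 'MPM',
            'pattern': [
                {'name': 'HydroForcePattern_1X', 'timeSeries': 'HydroForceSeries_1X',
                 'type': 'HydroFloorLoad', 'floor': '1', 'dof': 1},
                {'name': 'HydroForcePattern_1Y', 'timeSeries': 'HydroForceSeries_1Y',
                 'type': 'HydroFloorLoad', 'floor': '1', 'dof': 2}
            ],
            'pressure': [],
            'numSteps': 1,
            'units': {'force': 'Newton', 'length': 'Meter', 'time': 'Sec'}
        }]
    }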
diff --git a/modules/createEVENT/MPM/foam_file_processor.py b/modules/createEVENT/MPM/foam_file_processor.py
index 5d6e7a758..202998636 100644
--- a/modules/createEVENT/MPM/foam_file_processor.py
+++ b/modules/createEVENT/MPM/foam_file_processor.py
@@ -1,71 +1,72 @@
-# This script contains functions for reading and writing
-# OpenFoam dictionaries and filses.
+# This script contains functions for reading and writing # noqa: CPY001, D100, INP001
+# OpenFOAM dictionaries and files.
#
-import numpy as np
import os
-def find_keyword_line(dict_lines, keyword):
-
+import numpy as np
+
+
+def find_keyword_line(dict_lines, keyword): # noqa: D103
start_line = -1
-
+
count = 0
for line in dict_lines:
- l = line.lstrip(" ")
-
+ l = line.lstrip(' ') # noqa: E741
+
if l.startswith(keyword):
start_line = count
break
-
- count += 1
+
+ count += 1 # noqa: SIM113
return start_line
-
+
+
def write_foam_field(field, file_name):
- """
- Writes a given numpy two dimensional array to OpenFOAM
- field format. It can handel the following formats:
+ """Writes a given numpy two dimensional array to OpenFOAM
+ field format. It can handle the following formats:
pointField,
vectorField,
tensorField,
symmTensorField
- """
- if os.path.exists(file_name):
- os.remove(file_name)
+ """ # noqa: D205, D400, D401
+ if os.path.exists(file_name): # noqa: PTH110
+ os.remove(file_name) # noqa: PTH107
- foam_file = open(file_name, "w+")
+ foam_file = open(file_name, 'w+') # noqa: PLW1514, PTH123, SIM115
size = np.shape(field)
- foam_file.write("{}".format(size[0]))
+ foam_file.write(f'{size[0]}')
foam_file.write('\n(')
-
+
for i in range(size[0]):
- line = "\n("
+ line = '\n('
for j in range(size[1]):
- line += " {:.6e}".format(field[i,j])
- line += ")"
+ line += f' {field[i, j]:.6e}'
+ line += ')'
foam_file.write(line)
-
- foam_file.write('\n);')
+
+ foam_file.write('\n);')
foam_file.close()
+
def write_scalar_field(field, file_name):
- """
- Writes a given one dimensional numpy array to OpenFOAM
+ """Writes a given one dimensional numpy array to OpenFOAM
scalar field format.
- """
- if os.path.exists(file_name):
- os.remove(file_name)
+ """ # noqa: D205, D401
+ if os.path.exists(file_name): # noqa: PTH110
+ os.remove(file_name) # noqa: PTH107
- foam_file = open(file_name,"w+")
+ foam_file = open(file_name, 'w+') # noqa: PLW1514, PTH123, SIM115
size = np.shape(field)
- foam_file.write("{}".format(size[0]))
+ foam_file.write(f'{size[0]}')
foam_file.write('\n(')
-
+
for i in range(size[0]):
- foam_file.write("\n {:.6e}".format(field.flatten()[i]))
-
- foam_file.write('\n);')
- foam_file.close()
\ No newline at end of file
+ foam_file.write(f'\n {field.flatten()[i]:.6e}')
+
+ foam_file.write('\n);')
+ foam_file.close()
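For reference, a minimal sketch of the plain-text output of write_foam_field above (file name and array values illustrative):

    import numpy as np
    write_foam_field(np.array([[1.0, 2.0, 3.0], [4.0, 5.0, 6.0]]), 'sample.vectorField')
    # resulting file contents:
    # 2
    # (
    # ( 1.000000e+00 2.000000e+00 3.000000e+00)
    # ( 4.000000e+00 5.000000e+00 6.000000e+00)
    # );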
diff --git a/modules/createEVENT/MPM/post_process_output.py b/modules/createEVENT/MPM/post_process_output.py
index 49b04f4d8..f11de48cf 100644
--- a/modules/createEVENT/MPM/post_process_output.py
+++ b/modules/createEVENT/MPM/post_process_output.py
@@ -1,5 +1,4 @@
-# -*- coding: utf-8 -*-
-# Copyright (c) 2016-2017, The Regents of the University of California (Regents).
+# Copyright (c) 2016-2017, The Regents of the University of California (Regents). # noqa: D100, INP001
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
@@ -38,126 +37,124 @@
#
-# This script reads OpenFOAM output and plot the characteristics of the
-# approaching wind. For now, it read and plots only velocity field data and
-# pressure on predicted set of probes.
+# This script reads OpenFOAM output and plot the characteristics of the
+# approaching wind. For now, it read and plots only velocity field data and
+# pressure on predicted set of probes.
#
-import sys
-import os
-import subprocess
+import argparse
import json
-import stat
+import os
import shutil
from pathlib import Path
+
import numpy as np
-import matplotlib.pyplot as plt
-import matplotlib.gridspec as gridspec
-from scipy import signal
-from scipy.interpolate import interp1d
-from scipy.interpolate import UnivariateSpline
-from scipy import stats
-import pandas as pd
import plotly.graph_objects as go
from plotly.subplots import make_subplots
-import argparse
+from scipy import signal
+def readPressureProbes(fileName): # noqa: N802, N803
+ """Created on Wed May 16 14:31:42 2018
-def readPressureProbes(fileName):
- """
- Created on Wed May 16 14:31:42 2018
-
Reads pressure probe data from OpenFOAM and return the probe location, time, and the pressure
for each time step.
-
+
@author: Abiy
- """
+ """ # noqa: D400, D401
probes = []
p = []
- time = []
-
- with open(fileName, "r") as f:
+ time = []
+
+ with open(fileName) as f: # noqa: PLW1514, PTH123
for line in f:
if line.startswith('#'):
if line.startswith('# Probe'):
- line = line.replace('(','')
- line = line.replace(')','')
- line = line.split()
- probes.append([float(line[3]),float(line[4]),float(line[5])])
+ line = line.replace('(', '') # noqa: PLW2901
+ line = line.replace(')', '') # noqa: PLW2901
+ line = line.split() # noqa: PLW2901
+ probes.append([float(line[3]), float(line[4]), float(line[5])])
else:
continue
- else:
- line = line.split()
+ else:
+ line = line.split() # noqa: PLW2901
time.append(float(line[0]))
p_probe_i = np.zeros([len(probes)])
- for i in range(len(probes)):
+ for i in range(len(probes)):
p_probe_i[i] = float(line[i + 1])
p.append(p_probe_i)
-
+
probes = np.asarray(probes, dtype=np.float32)
time = np.asarray(time, dtype=np.float32)
p = np.asarray(p, dtype=np.float32)
-
+
return probes, time, p
+
def read_pressure_data(file_names):
- """
- This functions takes names of different OpenFOAM pressure measurements and connect
- them into one file removing overlaps if any. All the probes must be in the same
- location, otherwise an error might show up.
+ """This functions takes names of different OpenFOAM pressure measurements and connect
+ them into one file removing overlaps if any. All the probes must be in the same
+ location, otherwise an error might show up.
Parameters
----------
- *args
- List of file pashes of pressure data to be connected together.
+ *args
+        List of file paths of pressure data to be connected together.
Returns
-------
time, pressure
Returns the pressure time and pressure data of the connected file.
- """
- no_files = len(file_names)
- connected_time = [] # Connected array of time
+
+ """ # noqa: D205, D401, D404
+ no_files = len(file_names)
+ connected_time = [] # Connected array of time
connected_p = [] # connected array of pressure.
time1 = []
- p1 = []
+ p1 = []
time2 = []
- p2 = []
- probes= []
-
- for i in range(no_files):
- probes, time2, p2 = readPressureProbes(file_names[i])
-
- if i==0:
+ p2 = []
+ probes = []
+
+ for i in range(no_files):
+ probes, time2, p2 = readPressureProbes(file_names[i])
+
+ if i == 0:
connected_time = time2
- connected_p = p2
+ connected_p = p2
else:
try:
index = np.where(time2 > time1[-1])[0][0]
- # index += 1
+ # index += 1
- except:
- # sys.exit('Fatal Error!: the pressure filese have time gap')
- index = 0 # Joint them even if they have a time gap
+ except: # noqa: E722
+ # sys.exit('Fatal Error!: the pressure files have time gap')
+ index = 0 # Joint them even if they have a time gap
connected_time = np.concatenate((connected_time, time2[index:]))
connected_p = np.concatenate((connected_p, p2[index:]))
time1 = time2
- p1 = p2
+ p1 = p2 # noqa: F841
return probes, connected_time, connected_p
class PressureData:
- """
- A class that holds a pressure data and performs the following operations:
- - mean and rms pressure coefficients
- - peak pressure coefficients
- """
- def __init__(self, path, u_ref=0.0, rho=1.25, p_ref=0.0,
- start_time=None, end_time=None):
+ """A class that holds a pressure data and performs the following operations:
+ - mean and rms pressure coefficients
+ - peak pressure coefficients
+ """ # noqa: D205, D400
+
+ def __init__(
+ self,
+ path,
+ u_ref=0.0,
+ rho=1.25,
+ p_ref=0.0,
+ start_time=None,
+ end_time=None,
+ ):
self.path = path
self.u_ref = u_ref
self.p_ref = p_ref
@@ -168,375 +165,369 @@ def __init__(self, path, u_ref=0.0, rho=1.25, p_ref=0.0,
self.__set_time()
self.Nt = len(self.time)
self.T = self.time[-1]
- self.z = self.probes[:,2]
- self.y = self.probes[:,1]
- self.x = self.probes[:,0]
- self.dt = np.mean(np.diff(self.time))
+ self.z = self.probes[:, 2]
+ self.y = self.probes[:, 1]
+ self.x = self.probes[:, 0]
+ self.dt = np.mean(np.diff(self.time))
self.probe_count = np.shape(self.probes)[0]
- def __read_cfd_data (self):
- if os.path.isdir(self.path):
- print("Reading from path : %s" % (self.path))
+ def __read_cfd_data(self):
+ if os.path.isdir(self.path): # noqa: PTH112
+ print('Reading from path : %s' % (self.path)) # noqa: T201, UP031
time_names = os.listdir(self.path)
- sorted_index = np.argsort(np.float_(time_names)).tolist()
+ sorted_index = np.argsort(np.float64(time_names)).tolist()
# print(sorted_index)
# print("\tTime directories: %s" %(time_names))
- file_names = []
-
+ file_names = []
+
for i in range(len(sorted_index)):
- file_name = os.path.join(self.path, time_names[sorted_index[i]],'p')
+ file_name = os.path.join(self.path, time_names[sorted_index[i]], 'p') # noqa: PTH118
file_names.append(file_name)
-
+
# print(file_names)
self.probes, self.time, self.p = read_pressure_data(file_names)
- self.p = self.rho*np.transpose(self.p) # OpenFOAM gives p/rho
+ self.p = self.rho * np.transpose(self.p) # OpenFOAM gives p/rho
# self.p = np.transpose(self.p) # OpenFOAM gives p/rho
else:
- print("Cannot find the file path: %s" % (self.path))
-
-
- def __set_time (self):
- if(self.start_time != None):
+ print('Cannot find the file path: %s' % (self.path)) # noqa: T201, UP031
+
+ def __set_time(self):
+ if self.start_time != None: # noqa: E711
start_index = int(np.argmax(self.time > self.start_time))
self.time = self.time[start_index:]
# self.cp = self.cp[:,start_index:]
- try:
- self.p = self.p[:,start_index:]
- except:
+ try: # noqa: SIM105
+ self.p = self.p[:, start_index:]
+ except: # noqa: S110, E722
pass
-
- if(self.end_time != None):
+ if self.end_time != None: # noqa: E711
end_index = int(np.argmax(self.time > self.end_time))
self.time = self.time[:end_index]
# self.cp = self.cp[:,:end_index]
- try:
- self.p = self.p[:,:end_index]
- except:
+ try: # noqa: SIM105
+ self.p = self.p[:, :end_index]
+ except: # noqa: S110, E722
pass
-
-
+def von_karman_spectrum(f, Uav, I, L, comp=0): # noqa: N803, E741, D103
+ psd = np.zeros(len(f)) # noqa: F841
-def von_karman_spectrum(f, Uav, I, L, comp=0):
-
- psd = np.zeros(len(f))
+ if comp == 0:
+ return (
+ 4.0
+ * np.power(I * Uav, 2.0)
+ * (L / Uav)
+ / np.power(1.0 + 70.8 * np.power(f * L / Uav, 2.0), 5.0 / 6.0)
+ )
- if comp==0:
- return 4.0*np.power(I*Uav, 2.0)*(L/Uav)/np.power(1.0 + 70.8*np.power(f*L/ Uav, 2.0), 5.0 / 6.0)
+ if comp == 1 or comp == 2: # noqa: RET503, PLR1714, PLR2004
+ return (
+ 4.0
+ * np.power(I * Uav, 2.0)
+ * (L / Uav)
+ * (1.0 + 188.4 * np.power(2.0 * f * L / Uav, 2.0))
+ / np.power(1.0 + 70.8 * np.power(2.0 * f * L / Uav, 2.0), 11.0 / 6.0)
+ )
- if comp==1 or comp==2:
- return 4.0*np.power(I*Uav, 2.0)*(L/Uav)*(1.0 + 188.4*np.power(2.0*f*L/Uav, 2.0)) /np.power(1.0 + 70.8*np.power(2.0*f*L/Uav, 2.0), 11.0/6.0)
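For reference, the returns above implement the standard von Karman spectra; in the function's notation (Uav mean speed, I turbulence intensity, L length scale):

    S_u(f)     = 4 (I Uav)^2 (L/Uav) / (1 + 70.8 (f L / Uav)^2)^(5/6)                                        for comp = 0
    S_{v,w}(f) = 4 (I Uav)^2 (L/Uav) (1 + 188.4 (2 f L / Uav)^2) / (1 + 70.8 (2 f L / Uav)^2)^(11/6)          for comp = 1, 2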
def psd(x, dt, nseg):
- """
- Calculates the power spectral density of a given signal using the welch
- method.
+ """Calculates the power spectral density of a given signal using the welch
+ method.
Parameters
----------
- x
- The time history of the signal.
+ x
+ The time history of the signal.
dt
- The time step .
+        The time step.
nseg
- The the number of segments to average the time series.
+        The number of segments to average the time series.
Returns
-------
freq, spectra
Returns the frequency and spectra of the signal
-
- """
+
+ """ # noqa: D205, D401
x_no_mean = x - np.mean(x)
- freq, spectra = signal.welch(x_no_mean, fs=1.0/dt, nperseg=len(x_no_mean)/nseg)
-
+ freq, spectra = signal.welch(
+ x_no_mean, fs=1.0 / dt, nperseg=len(x_no_mean) / nseg
+ )
+
return freq[1:], spectra[1:]
def write_open_foam_vector_field(p, file_name):
-
- """
- Writes a given vector-field (n x 3) array to OpenFOAM 'vectorField'
- format.
-
- """
- f = open(file_name,"w+")
- f.write('%d' % len(p[:,2]))
+ """Writes a given vector-field (n x 3) array to OpenFOAM 'vectorField'
+ format.
+
+ """ # noqa: D205, D401
+ f = open(file_name, 'w+') # noqa: PLW1514, PTH123, SIM115
+ f.write('%d' % len(p[:, 2]))
f.write('\n(')
- for i in range(len(p[:,2])):
- f.write('\n ({:.7e} {:.7e} {:.7e})'.format(p[i,0], p[i,1], p[i,2]))
-
- f.write('\n);')
- f.close()
+ for i in range(len(p[:, 2])):
+ f.write(f'\n ({p[i, 0]:.7e} {p[i, 1]:.7e} {p[i, 2]:.7e})')
+ f.write('\n);')
+ f.close()
-def read_openFoam_scalar_field(file_name):
-
- """
- Reads a given vectorField OpenFOAM into numpy (n x 3) array format.
- """
+def read_openFoam_scalar_field(file_name): # noqa: N802
+ """Reads a given vectorField OpenFOAM into numpy (n x 3) array format.""" # noqa: D401
+ sField = [] # noqa: N806
- sField = []
-
- with open(file_name, "r") as f:
+ with open(file_name) as f: # noqa: PLW1514, PTH123
itrf = iter(f)
next(itrf)
for line in itrf:
- if line.startswith('(') or line.startswith(')'):
- continue
- else:
- line = line.split()
+ if line.startswith('(') or line.startswith(')'): # noqa: PIE810
+ continue
+ else: # noqa: RET507
+ line = line.split() # noqa: PLW2901
sField.append(float(line[0]))
-
- sField = np.asarray(sField, dtype=np.float32)
-
- return sField
+ sField = np.asarray(sField, dtype=np.float32) # noqa: N806
+
+ return sField # noqa: RET504
-def read_openFoam_vector_field(file_name):
-
- """
- Reads a given vectorField OpenFOAM into numpy (n x 3) array format.
- """
+def read_openFoam_vector_field(file_name): # noqa: N802
+ """Reads a given vectorField OpenFOAM into numpy (n x 3) array format.""" # noqa: D401
+ vField = [] # noqa: N806
- vField = []
-
- with open(file_name, "r") as f:
+ with open(file_name) as f: # noqa: PLW1514, PTH123
for line in f:
if line.startswith('('):
- line = line.replace('(','')
- line = line.replace(')','')
- line = line.split()
-
- if len(line) < 3:
+ line = line.replace('(', '') # noqa: PLW2901
+ line = line.replace(')', '') # noqa: PLW2901
+ line = line.split() # noqa: PLW2901
+
+ if len(line) < 3: # noqa: PLR2004
continue
-
- vField.append([float(line[0]),float(line[1]),float(line[2])])
-
- vField = np.asarray(vField, dtype=np.float32)
-
- return vField
+ vField.append([float(line[0]), float(line[1]), float(line[2])])
+ vField = np.asarray(vField, dtype=np.float32) # noqa: N806
-def read_openFoam_tensor_field(file_name):
-
- """
- Reads a given vectorField OpenFOAM into numpy (n x 3) array format.
- """
+ return vField # noqa: RET504
+
+
+def read_openFoam_tensor_field(file_name): # noqa: N802
+ """Reads a given vectorField OpenFOAM into numpy (n x 3) array format.""" # noqa: D401
+ vField = [] # noqa: N806
- vField = []
-
row_count = 9
- with open(file_name, "r") as f:
+ with open(file_name) as f: # noqa: PLW1514, PTH123
for line in f:
if line.startswith('('):
- line = line.replace('(','')
- line = line.replace(')','')
- line = line.split()
-
+ line = line.replace('(', '') # noqa: PLW2901
+ line = line.replace(')', '') # noqa: PLW2901
+ line = line.split() # noqa: PLW2901
+
if len(line) < row_count:
continue
-
+
row = np.zeros(row_count)
for i in range(row_count):
row[i] = float(line[i])
-
+
vField.append(row)
-
- vField = np.asarray(vField, dtype=np.float32)
-
- return vField
+ vField = np.asarray(vField, dtype=np.float32) # noqa: N806
-def read_openFoam_symmetric_tensor_field(file_name):
-
- """
- Reads a given vectorField OpenFOAM into numpy (n x 3) array format.
- """
+ return vField # noqa: RET504
+
+
+def read_openFoam_symmetric_tensor_field(file_name): # noqa: N802
+ """Reads a given vectorField OpenFOAM into numpy (n x 3) array format.""" # noqa: D401
+ vField = [] # noqa: N806
- vField = []
-
row_count = 6
- with open(file_name, "r") as f:
+ with open(file_name) as f: # noqa: PLW1514, PTH123
for line in f:
if line.startswith('('):
- line = line.replace('(','')
- line = line.replace(')','')
- line = line.split()
-
+ line = line.replace('(', '') # noqa: PLW2901
+ line = line.replace(')', '') # noqa: PLW2901
+ line = line.split() # noqa: PLW2901
+
if len(line) < row_count:
continue
-
+
row = np.zeros(row_count)
for i in range(row_count):
row[i] = float(line[i])
-
+
vField.append(row)
-
- vField = np.asarray(vField, dtype=np.float32)
-
- return vField
+ vField = np.asarray(vField, dtype=np.float32) # noqa: N806
+ return vField # noqa: RET504
def read_velocity_data(path):
- """
- This functions takes names of different OpenFOAM velocity measurements and connect
- them into one file removing overlaps if any. All the probes must be in the same
- location, otherwise an error might showup.
+ """This functions takes names of different OpenFOAM velocity measurements and connect
+ them into one file removing overlaps if any. All the probes must be in the same
+ location, otherwise an error might showup.
Parameters
----------
- *args
- List of file paths of velocity data to be connected together.
+ *args
+ List of file paths of velocity data to be connected together.
Returns
-------
time, pressure
Returns the velocity time and velocity data of the connected file.
- """
- num_files = len(path)
- connected_time = [] # Connected array of time
- connected_U = [] # connected array of pressure.
+ """ # noqa: D205, D401, D404
+ num_files = len(path)
+ connected_time = [] # Connected array of time
+ connected_U = [] # connected array of pressure. # noqa: N806
- time1 = []
- U1 = []
+ time1 = []
+ U1 = [] # noqa: N806
time2 = []
- U2 = []
+ U2 = [] # noqa: N806
probes = []
-
- for i in range(num_files):
- probes, time2, U2 = read_velocity_probes(path[i])
- if i != 0:
+
+ for i in range(num_files):
+ probes, time2, U2 = read_velocity_probes(path[i]) # noqa: N806
+ if i != 0:
try:
index = np.where(time2 > time1[-1])[0][0]
- except:
+ except: # noqa: E722
# sys.exit('Fatal Error!: the pressure files have time gap')
- index = 0 # Join them even if they have a time gap
+ index = 0 # Join them even if they have a time gap
connected_time = np.concatenate((connected_time, time2[index:]))
- connected_U = np.concatenate((connected_U, U2[index:]))
+ connected_U = np.concatenate((connected_U, U2[index:])) # noqa: N806
else:
connected_time = time2
- connected_U = U2
+ connected_U = U2 # noqa: N806
time1 = time2
- U1 = U2
+ U1 = U2 # noqa: N806, F841
shape = np.shape(connected_U)
- U = np.zeros((shape[1], shape[2], shape[0]))
-
+ U = np.zeros((shape[1], shape[2], shape[0])) # noqa: N806
+
for i in range(shape[1]):
for j in range(shape[2]):
- U[i,j,:] = connected_U[:,i,j]
+ U[i, j, :] = connected_U[:, i, j]
return probes, connected_time, U
-def read_velocity_probes(fileName):
- """
- Created on Wed May 16 14:31:42 2018
-
- Reads velocity probe data from OpenFOAM and return the probe location, time,
+
+def read_velocity_probes(fileName): # noqa: N803
+ """Created on Wed May 16 14:31:42 2018
+
+ Reads velocity probe data from OpenFOAM and returns the probe location, time,
and the velocity vector for each time step.
- """
+ """ # noqa: D400, D401
probes = []
- U = []
- time = []
-
- with open(fileName, "r") as f:
+ U = [] # noqa: N806
+ time = []
+
+ with open(fileName) as f: # noqa: PLW1514, PTH123
for line in f:
if line.startswith('#'):
if line.startswith('# Probe'):
- line = line.replace('(','')
- line = line.replace(')','')
- line = line.split()
+ line = line.replace('(', '') # noqa: PLW2901
+ line = line.replace(')', '') # noqa: PLW2901
+ line = line.split() # noqa: PLW2901
probes.append([float(line[3]), float(line[4]), float(line[5])])
else:
continue
- else:
- line = line.replace('(','')
- line = line.replace(')','')
- line = line.split()
+ else:
+ line = line.replace('(', '') # noqa: PLW2901
+ line = line.replace(')', '') # noqa: PLW2901
+ line = line.split() # noqa: PLW2901
try:
time.append(float(line[0]))
- except:
+ except: # noqa: S112, E722
continue
- u_probe_i = np.zeros([len(probes),3])
- for i in range(len(probes)):
- u_probe_i[i,:] = [float(line[3*i + 1]), float(line[3*i + 2]), float(line[3*i + 3])]
+ u_probe_i = np.zeros([len(probes), 3])
+ for i in range(len(probes)):
+ u_probe_i[i, :] = [
+ float(line[3 * i + 1]),
+ float(line[3 * i + 2]),
+ float(line[3 * i + 3]),
+ ]
U.append(u_probe_i)
-
+
probes = np.asarray(probes, dtype=np.float32)
time = np.asarray(time, dtype=np.float32)
- U = np.asarray(U, dtype=np.float32)
+ U = np.asarray(U, dtype=np.float32) # noqa: N806
return probes, time, U
+
def calculate_length_scale(u, uav, dt, min_corr=0.0):
-
- """
- Calculates the length scale of a velocity time history given.
-
- """
-
- u = u - np.mean(u)
-
- corr = signal.correlate(u, u, mode='full')
-
- u_std = np.std(u)
-
- corr = corr[int(len(corr)/2):]/(u_std**2*len(u))
-
- loc = np.argmax(corr < min_corr)
-
- corr = corr[:loc]
-
- L = uav*np.trapz(corr, dx=dt)
-
- return L
+ """Calculates the length scale of a velocity time history given.""" # noqa: D401
+ u = u - np.mean(u) # noqa: PLR6104
+
+ corr = signal.correlate(u, u, mode='full')
+
+ u_std = np.std(u)
+
+ corr = corr[int(len(corr) / 2) :] / (u_std**2 * len(u))
+
+ loc = np.argmax(corr < min_corr)
+
+ corr = corr[:loc]
+
+ L = uav * np.trapz(corr, dx=dt) # noqa: NPY201, N806
+
+ return L # noqa: RET504
-def psd(x, dt, nseg):
- """
- Calculates the power spectral density of a given signal using the welch
- method.
+
+def psd(x, dt, nseg): # noqa: F811
+ """Calculates the power spectral density of a given signal using the welch
+ method.
Parameters
----------
- x
- The time history of the signal.
+ x
+ The time history of the signal.
dt
- The time step .
+ The time step.
nseg
- The the number of segments to average the time series.
+ The number of segments used to average the time series.
Returns
-------
freq, spectra
Returns the frequency and spectra of the signal
-
- """
+
+ """ # noqa: D205, D401
x_no_mean = x - np.mean(x)
- freq, spectra = signal.welch(x_no_mean, fs=1.0/dt, nperseg=len(x_no_mean)/nseg)
-
+ freq, spectra = signal.welch(
+ x_no_mean, fs=1.0 / dt, nperseg=len(x_no_mean) / nseg
+ )
+
return freq[1:], spectra[1:]
+
class VelocityData:
- """
- A class that holds a velocity data and performs the following operations:
- - mean velocity profile
- - turbulence intensity profiles
- - integral scale of turbulence profiles
- """
- def __init__(self, path,sampling_rate=400, filter_data=False, filter_freq=400,
- start_time=None, end_time=None, resample_dt = None):
+ """A class that holds a velocity data and performs the following operations:
+ - mean velocity profile
+ - turbulence intensity profiles
+ - integral scale of turbulence profiles
+ """ # noqa: D205, D400
+
+ def __init__(
+ self,
+ path,
+ sampling_rate=400,
+ filter_data=False, # noqa: FBT002
+ filter_freq=400,
+ start_time=None,
+ end_time=None,
+ resample_dt=None,
+ ):
self.path = path
self.sampling_rate = sampling_rate
self.filter_data = filter_data
@@ -545,502 +536,850 @@ def __init__(self, path,sampling_rate=400, filter_data=False, filter_freq=400,
self.end_time = end_time
self.component_count = 3
self.resample_dt = resample_dt
- self.__read_cfd_data()
+ self.__read_cfd_data()
self.__set_time()
self.Nt = len(self.time)
- self.T = self.time[-1]
- self.dt = np.mean(np.diff(self.time))
- self.f_max = 1.0/(2.0*self.dt)
+ self.T = self.time[-1]
+ self.dt = np.mean(np.diff(self.time))
+ self.f_max = 1.0 / (2.0 * self.dt)
self.probe_count = np.shape(self.probes)[0]
self.Np = self.probe_count
- self.z = self.probes[:,2]
- self.y = self.probes[:,1]
- self.x = self.probes[:,0]
+ self.z = self.probes[:, 2]
+ self.y = self.probes[:, 1]
+ self.x = self.probes[:, 0]
self.__filter_signal()
self.__calculate_all()
- def __read_cfd_data (self):
- if os.path.isdir(self.path):
- print("Reading from path : %s" % (self.path))
+ def __read_cfd_data(self):
+ if os.path.isdir(self.path): # noqa: PTH112
+ print('Reading from path : %s' % (self.path)) # noqa: T201, UP031
time_names = os.listdir(self.path)
- sorted_index = np.argsort(np.float_(time_names)).tolist()
- file_names = []
-
+ sorted_index = np.argsort(np.float64(time_names)).tolist()
+ file_names = []
+
for i in range(len(sorted_index)):
- file_name = os.path.join(self.path, time_names[sorted_index[i]], "U")
- file_names.append( file_name)
-
-
- self.probes, self.time, self.U = read_velocity_data(file_names)
-
- #Distance along the path of the profile
+ file_name = os.path.join(self.path, time_names[sorted_index[i]], 'U') # noqa: PTH118
+ file_names.append(file_name)
+
+ self.probes, self.time, self.U = read_velocity_data(file_names)
+
+ # Distance along the path of the profile
n_points = np.shape(self.probes)[0]
self.dist = np.zeros(n_points)
- for i in range(n_points-1):
- self.dist[i + 1] = self.dist[i] + np.linalg.norm(self.probes[i + 1, :] - self.probes[i, :])
-
+ for i in range(n_points - 1):
+ self.dist[i + 1] = self.dist[i] + np.linalg.norm(
+ self.probes[i + 1, :] - self.probes[i, :]
+ )
# Coefficient of variation
- cv = np.std(np.diff(self.time))/np.mean(np.diff(self.time))
-
- if cv > 1.0e-4:
+ cv = np.std(np.diff(self.time)) / np.mean(np.diff(self.time))
+
+ if cv > 1.0e-4: # noqa: PLR2004
self.__adjust_time_step()
else:
- print("Cannot find the file path: %s" % (self.path))
-
-
-
- def __adjust_time_step (self):
-
- if self.resample_dt == None:
- dt = np.mean(np.diff(self.time))
- else:
- dt = self.resample_dt
-
+ print('Cannot find the file path: %s' % (self.path)) # noqa: T201, UP031
+
+ def __adjust_time_step(self):
+ if self.resample_dt == None: # noqa: E711
+ dt = np.mean(np.diff(self.time))
+ else:
+ dt = self.resample_dt
+
time = np.arange(start=self.time[0], stop=self.time[-1], step=dt)
-
+
shape = np.shape(self.U)
-
- U = np.zeros((shape[0],shape[1],len(time)))
+
+ U = np.zeros((shape[0], shape[1], len(time))) # noqa: N806
for i in range(shape[0]):
for j in range(shape[1]):
U[i, j, :] = np.interp(time, self.time, self.U[i, j, :])
-
- self.time = time
- self.U = U
-
+ self.time = time
+ self.U = U
+
def __filter_signal(self):
if self.filter_data:
- low_pass = signal.butter(10, self.filter_freq,'lowpass', fs=self.sampling_rate, output='sos')
+ low_pass = signal.butter(
+ 10, self.filter_freq, 'lowpass', fs=self.sampling_rate, output='sos'
+ )
for i in range(self.probe_count):
for j in range(self.component_count):
- self.U[i,j,:] = signal.sosfilt(low_pass, self.U[i,j,:])
+ self.U[i, j, :] = signal.sosfilt(low_pass, self.U[i, j, :])
- def __set_time (self):
- if(self.start_time != None):
+ def __set_time(self):
+ if self.start_time != None: # noqa: E711
start_index = int(np.argmax(self.time > self.start_time))
self.time = self.time[start_index:]
- self.U = self.U[:,:,start_index:]
-
- if(self.end_time != None):
+ self.U = self.U[:, :, start_index:]
+
+ if self.end_time != None: # noqa: E711
end_index = int(np.argmax(self.time > self.end_time))
self.time = self.time[:end_index]
- self.U = self.U[:,:,:end_index]
+ self.U = self.U[:, :, :end_index]
def __calculate_all(self):
-
self.u = np.zeros((self.probe_count, self.component_count, self.Nt))
- #Calculate the mean velocity profile.
+ # Calculate the mean velocity profile.
- self.Uav = np.mean(self.U[:,0,:], axis=1)
+ self.Uav = np.mean(self.U[:, 0, :], axis=1)
+
+ # Calculate the turbulence intensity.
+ self.I = np.std(self.U, axis=2) # gets the standard deviation
+ self.Ru = np.var(self.U[:, 0, :], axis=1) # gets reynolds stress
+ self.Rv = np.var(self.U[:, 1, :], axis=1) # gets reynolds stress
+ self.Rw = np.var(self.U[:, 2, :], axis=1) # gets reynolds stress
-
- #Calculate the turbulence intensity.
- self.I = np.std(self.U, axis=2) # gets the standard deviation
- self.Ru = np.var(self.U[:, 0, :], axis=1) # gets reynolds stress
- self.Rv = np.var(self.U[:, 1, :], axis=1) # gets reynolds stress
- self.Rw = np.var(self.U[:, 2, :], axis=1) # gets reynolds stress
-
for i in range(self.component_count):
- self.I[:,i] = self.I[:,i]/self.Uav
-
-
- #Calculate the length scale profiles.
+ self.I[:, i] = self.I[:, i] / self.Uav # noqa: PLR6104
+
+ # Calculate the length scale profiles.
self.L = np.zeros((self.probe_count, self.component_count))
for i in range(self.probe_count):
for j in range(self.component_count):
- self.u[i,j,:] = self.U[i,j,:] - np.mean(self.U[i,j,:])
- self.L[i,j] = calculate_length_scale(self.u[i,j,:], self.Uav[i], self.dt, 0.05)
+ self.u[i, j, :] = self.U[i, j, :] - np.mean(self.U[i, j, :])
+ self.L[i, j] = calculate_length_scale(
+ self.u[i, j, :], self.Uav[i], self.dt, 0.05
+ )
-
- #Calculate the shear stress profiles.
+ # Calculate the shear stress profiles.
self.uv_bar = np.zeros(self.Np)
self.uw_bar = np.zeros(self.Np)
-
+
for i in range(self.Np):
- self.uv_bar[i] = np.cov(self.U[i,0,:], self.U[i,1,:])[0,1]
- self.uw_bar[i] = np.cov(self.U[i,0,:], self.U[i,2,:])[0,1]
+ self.uv_bar[i] = np.cov(self.U[i, 0, :], self.U[i, 1, :])[0, 1]
+ self.uw_bar[i] = np.cov(self.U[i, 0, :], self.U[i, 2, :])[0, 1]
+
+ def get_Uav(self, z): # noqa: N802, D102
+ from scipy import interpolate # noqa: PLC0415
- def get_Uav(self, z):
- from scipy import interpolate
-
f = interpolate.interp1d(self.z, self.Uav)
-
- return f(z)
+ return f(z)
def copy_vtk_planes_and_order(input_path, output_path, field):
- """
- This code reads VTK sample plane data from OpenFOAM case directory and
- copies them into other directory with all vtks files ordered in their
- respective time sequence in one directory.
-
+ """This code reads VTK sample plane data from OpenFOAM case directory and
+ copies them into other directory with all vtks files ordered in their
+ respective time sequence in one directory.
+
input_path: path of the vtk files in the postProcessing directory
ouput_path: path to write the vtk files in order
- """
+ """ # noqa: D205, D401, D404
+ if not os.path.isdir(input_path): # noqa: PTH112
+ print(f'Cannot find the path for: {input_path}') # noqa: T201
+ return
- if not os.path.isdir(input_path):
- print("Cannot find the path for: {}".format(input_path))
- return
-
- if not os.path.isdir(output_path):
- print("Cannot find the path for: {}".format(output_path))
- return
-
-
- print("Reading from path: {}".format(input_path))
+ if not os.path.isdir(output_path): # noqa: PTH112
+ print(f'Cannot find the path for: {output_path}') # noqa: T201
+ return
+
+ print(f'Reading from path: {input_path}') # noqa: T201
time_names = os.listdir(input_path)
- times = np.float_(time_names)
+ times = np.float64(time_names)
sorted_index = np.argsort(times).tolist()
-
- n_times = len(times)
-
- print("\tNumber of time direcories: {} ".format(n_times))
- print("\tTime step: {:.4f} s".format(np.mean(np.diff(times))))
- print("\tTotal duration: {:.4f} s".format(times[sorted_index[-1]] - times[sorted_index[0]]))
-
-
+
+ n_times = len(times)
+
+ print(f'\tNumber of time directories: {n_times} ') # noqa: T201
+ print(f'\tTime step: {np.mean(np.diff(times)):.4f} s') # noqa: T201
+ print( # noqa: T201
+ f'\tTotal duration: {times[sorted_index[-1]] - times[sorted_index[0]]:.4f} s'
+ )
+
for i in range(n_times):
index = sorted_index[i]
- pathi = os.path.join(input_path, time_names[index])
+ pathi = os.path.join(input_path, time_names[index]) # noqa: PTH118
os.listdir(pathi)
-
- new_name = "{}_T{:04d}.vtk".format(field, i + 1)
+
+ new_name = f'{field}_T{i + 1:04d}.vtk'
for f in os.listdir(pathi):
- if f.endswith(".vtk"):
- new_path = os.path.join(output_path, new_name)
- old_path = os.path.join(pathi, f)
+ if f.endswith('.vtk'):
+ new_path = os.path.join(output_path, new_name) # noqa: PTH118
+ old_path = os.path.join(pathi, f) # noqa: PTH118
shutil.copyfile(old_path, new_path)
- print("Copied path: {}".format(old_path))
+ print(f'Copied path: {old_path}') # noqa: T201
+
-def plot_wind_profiles_and_spectra(case_path, output_path, prof_name):
-
- #Read JSON data
- json_path = os.path.join(case_path, "constant", "simCenter", "input", "EmptyDomainCFD.json")
- with open(json_path) as json_file:
+def plot_wind_profiles_and_spectra(case_path, output_path, prof_name): # noqa: D103
+ # Read JSON data
+ json_path = os.path.join( # noqa: PTH118
+ case_path, 'constant', 'simCenter', 'input', 'EmptyDomainCFD.json'
+ )
+ with open(json_path) as json_file: # noqa: PLW1514, PTH123
json_data = json.load(json_file)
-
+
# Returns JSON object as a dictionary
- wc_data = json_data["windCharacteristics"]
-
- ref_h = wc_data["referenceHeight"]
-
+ wc_data = json_data['windCharacteristics']
+
+ ref_h = wc_data['referenceHeight']
+
+ prof_path = os.path.join(case_path, 'postProcessing', prof_name) # noqa: PTH118
- prof_path = os.path.join(case_path, "postProcessing", prof_name)
-
prof = VelocityData(prof_path, start_time=None, end_time=None)
-
- #Create wind profile data profile z, Uav, Iu ..., Lu ...,
+ # Create wind profile data profile z, Uav, Iu ..., Lu ...,
prof_np = np.zeros((len(prof.z), 9))
- prof_np[:,0] = prof.z
- prof_np[:,1] = prof.Uav
- prof_np[:,2] = prof.I[:,0]
- prof_np[:,3] = prof.I[:,1]
- prof_np[:,4] = prof.I[:,2]
- prof_np[:,5] = prof.uw_bar
- prof_np[:,6] = prof.L[:,0]
- prof_np[:,7] = prof.L[:,1]
- prof_np[:,8] = prof.L[:,2]
-
-
-
- #Read the target wind profile data
- tar_path = os.path.join(case_path, "constant", "boundaryData", "inlet")
-
- tar_p = read_openFoam_vector_field(os.path.join(tar_path, "points"))
- tar_U = read_openFoam_scalar_field(os.path.join(tar_path, "U"))
- tar_R = read_openFoam_symmetric_tensor_field(os.path.join(tar_path, "R"))
- tar_L = read_openFoam_tensor_field(os.path.join(tar_path, "L"))
-
- tar_U_ref = np.interp(ref_h, tar_p[:,2], tar_U)
-
-
- tar_Iu = np.sqrt(tar_R[:, 0])/tar_U
- tar_Iv = np.sqrt(tar_R[:, 3])/tar_U
- tar_Iw = np.sqrt(tar_R[:, 5])/tar_U
+ prof_np[:, 0] = prof.z
+ prof_np[:, 1] = prof.Uav
+ prof_np[:, 2] = prof.I[:, 0]
+ prof_np[:, 3] = prof.I[:, 1]
+ prof_np[:, 4] = prof.I[:, 2]
+ prof_np[:, 5] = prof.uw_bar
+ prof_np[:, 6] = prof.L[:, 0]
+ prof_np[:, 7] = prof.L[:, 1]
+ prof_np[:, 8] = prof.L[:, 2]
+
+ # Read the target wind profile data
+ tar_path = os.path.join(case_path, 'constant', 'boundaryData', 'inlet') # noqa: PTH118
+
+ tar_p = read_openFoam_vector_field(os.path.join(tar_path, 'points')) # noqa: PTH118
+ tar_U = read_openFoam_scalar_field(os.path.join(tar_path, 'U')) # noqa: PTH118, N806
+ tar_R = read_openFoam_symmetric_tensor_field(os.path.join(tar_path, 'R')) # noqa: PTH118, N806
+ tar_L = read_openFoam_tensor_field(os.path.join(tar_path, 'L')) # noqa: PTH118, N806
+
+ tar_U_ref = np.interp(ref_h, tar_p[:, 2], tar_U) # noqa: N806, F841
+
+ tar_Iu = np.sqrt(tar_R[:, 0]) / tar_U # noqa: N806
+ tar_Iv = np.sqrt(tar_R[:, 3]) / tar_U # noqa: N806
+ tar_Iw = np.sqrt(tar_R[:, 5]) / tar_U # noqa: N806
tar_uw = tar_R[:, 2]
-
- tar_Lu = tar_L[:, 0]
- tar_Lv = tar_L[:, 3]
- tar_Lw = tar_L[:, 6]
-
- tar_I = np.zeros((3, len(tar_Iu)))
- tar_L = np.zeros((3, len(tar_Lu)))
-
- tar_I[0,:] = tar_Iu
- tar_I[1,:] = tar_Iv
- tar_I[2,:] = tar_Iw
-
- tar_L[0,:] = tar_Lu
- tar_L[1,:] = tar_Lv
- tar_L[2,:] = tar_Lw
-
-
- subplot_titles = ("Mean Velocity", "Turbulence Intensity, Iu", "Turbulence Intensity, Iv", "Turbulence Intensity, Iw",
- "Shear Stress", "Length Scale, Lu", "Length Scale, Lv", "Length Scale, Lw")
-
- fig = make_subplots(rows=2, cols=4, start_cell="top-left", subplot_titles=subplot_titles, vertical_spacing=0.15)
-
-
- fig.add_trace(go.Scatter(x=tar_U, y=tar_p[:,2], line=dict(color='black', width=3.0, dash='dot'),
- mode='lines', name='Target', ), row=1, col=1)
- fig.add_trace(go.Scatter(x=prof_np[:,1], y=prof_np[:,0], line=dict(color='firebrick', width=2.5),
- mode='lines+markers', name=prof_name, ), row=1, col=1)
-
- fig.update_xaxes(title_text="$U_{av} [m/s]$", range=[0, 1.25*np.max(prof_np[:,1])],
- showline=True, linewidth=1.5, linecolor='black',ticks='outside', row=1, col=1)
- fig.update_yaxes(title_text="$z [m]$", range=[0, 1.01*np.max(prof_np[:,0])], showline=True,
- linewidth=1.5, linecolor='black',ticks='outside', row=1, col=1)
-
+
+ tar_Lu = tar_L[:, 0] # noqa: N806
+ tar_Lv = tar_L[:, 3] # noqa: N806
+ tar_Lw = tar_L[:, 6] # noqa: N806
+
+ tar_I = np.zeros((3, len(tar_Iu))) # noqa: N806
+ tar_L = np.zeros((3, len(tar_Lu))) # noqa: N806
+
+ tar_I[0, :] = tar_Iu
+ tar_I[1, :] = tar_Iv
+ tar_I[2, :] = tar_Iw
+
+ tar_L[0, :] = tar_Lu
+ tar_L[1, :] = tar_Lv
+ tar_L[2, :] = tar_Lw
+
+ subplot_titles = (
+ 'Mean Velocity',
+ 'Turbulence Intensity, Iu',
+ 'Turbulence Intensity, Iv',
+ 'Turbulence Intensity, Iw',
+ 'Shear Stress',
+ 'Length Scale, Lu',
+ 'Length Scale, Lv',
+ 'Length Scale, Lw',
+ )
+
+ fig = make_subplots(
+ rows=2,
+ cols=4,
+ start_cell='top-left',
+ subplot_titles=subplot_titles,
+ vertical_spacing=0.15,
+ )
+
+ fig.add_trace(
+ go.Scatter(
+ x=tar_U,
+ y=tar_p[:, 2],
+ line=dict(color='black', width=3.0, dash='dot'), # noqa: C408
+ mode='lines',
+ name='Target',
+ ),
+ row=1,
+ col=1,
+ )
+ fig.add_trace(
+ go.Scatter(
+ x=prof_np[:, 1],
+ y=prof_np[:, 0],
+ line=dict(color='firebrick', width=2.5), # noqa: C408
+ mode='lines+markers',
+ name=prof_name,
+ ),
+ row=1,
+ col=1,
+ )
+
+ fig.update_xaxes(
+ title_text='$U_{av} [m/s]$',
+ range=[0, 1.25 * np.max(prof_np[:, 1])],
+ showline=True,
+ linewidth=1.5,
+ linecolor='black',
+ ticks='outside',
+ row=1,
+ col=1,
+ )
+ fig.update_yaxes(
+ title_text='$z [m]$',
+ range=[0, 1.01 * np.max(prof_np[:, 0])],
+ showline=True,
+ linewidth=1.5,
+ linecolor='black',
+ ticks='outside',
+ row=1,
+ col=1,
+ )
# Turbulence Intensity Iu
- fig.add_trace(go.Scatter(x=tar_Iu, y=tar_p[:,2], line=dict(color='black', width=3.0, dash='dot'),
- mode='lines', name='Target', ), row=1, col=2)
- fig.add_trace(go.Scatter(x=prof_np[:,2], y=prof_np[:,0], line=dict(color='firebrick', width=2.5),
- mode='lines+markers', name=prof_name, ), row=1, col=2)
- fig.update_xaxes(title_text="$I_{u}$", range=[0, 1.3*np.max(prof_np[:,2])],
- showline=True, linewidth=1.5, linecolor='black',ticks='outside', row=1, col=2)
- fig.update_yaxes(title_text="", range=[0, 1.01*np.max(prof_np[:,0])], showline=True,
- linewidth=1.5, linecolor='black',ticks='outside', row=1, col=2)
+ fig.add_trace(
+ go.Scatter(
+ x=tar_Iu,
+ y=tar_p[:, 2],
+ line=dict(color='black', width=3.0, dash='dot'), # noqa: C408
+ mode='lines',
+ name='Target',
+ ),
+ row=1,
+ col=2,
+ )
+ fig.add_trace(
+ go.Scatter(
+ x=prof_np[:, 2],
+ y=prof_np[:, 0],
+ line=dict(color='firebrick', width=2.5), # noqa: C408
+ mode='lines+markers',
+ name=prof_name,
+ ),
+ row=1,
+ col=2,
+ )
+ fig.update_xaxes(
+ title_text='$I_{u}$',
+ range=[0, 1.3 * np.max(prof_np[:, 2])],
+ showline=True,
+ linewidth=1.5,
+ linecolor='black',
+ ticks='outside',
+ row=1,
+ col=2,
+ )
+ fig.update_yaxes(
+ title_text='',
+ range=[0, 1.01 * np.max(prof_np[:, 0])],
+ showline=True,
+ linewidth=1.5,
+ linecolor='black',
+ ticks='outside',
+ row=1,
+ col=2,
+ )
# Turbulence Intensity Iv
- fig.add_trace(go.Scatter(x=tar_Iw, y=tar_p[:,2], line=dict(color='black', width=3.0, dash='dot'),
- mode='lines', name='Target', ), row=1, col=3)
- fig.add_trace(go.Scatter(x=prof_np[:,3], y=prof_np[:,0], line=dict(color='firebrick', width=2.5),
- mode='lines+markers', name=prof_name, ), row=1, col=3)
- fig.update_xaxes(title_text="$I_{v}$", range=[0, 1.3*np.max(prof_np[:,3])],
- showline=True, linewidth=1.5, linecolor='black',ticks='outside', row=1, col=3)
- fig.update_yaxes(title_text="", range=[0, 1.01*np.max(prof_np[:,0])], showline=True,
- linewidth=1.5, linecolor='black',ticks='outside', row=1, col=3)
+ fig.add_trace(
+ go.Scatter(
+ x=tar_Iv,
+ y=tar_p[:, 2],
+ line=dict(color='black', width=3.0, dash='dot'), # noqa: C408
+ mode='lines',
+ name='Target',
+ ),
+ row=1,
+ col=3,
+ )
+ fig.add_trace(
+ go.Scatter(
+ x=prof_np[:, 3],
+ y=prof_np[:, 0],
+ line=dict(color='firebrick', width=2.5), # noqa: C408
+ mode='lines+markers',
+ name=prof_name,
+ ),
+ row=1,
+ col=3,
+ )
+ fig.update_xaxes(
+ title_text='$I_{v}$',
+ range=[0, 1.3 * np.max(prof_np[:, 3])],
+ showline=True,
+ linewidth=1.5,
+ linecolor='black',
+ ticks='outside',
+ row=1,
+ col=3,
+ )
+ fig.update_yaxes(
+ title_text='',
+ range=[0, 1.01 * np.max(prof_np[:, 0])],
+ showline=True,
+ linewidth=1.5,
+ linecolor='black',
+ ticks='outside',
+ row=1,
+ col=3,
+ )
# Turbulence Intensity Iw
- fig.add_trace(go.Scatter(x=tar_Iw, y=tar_p[:,2], line=dict(color='black', width=3.0, dash='dot'),
- mode='lines', name='Target', ), row=1, col=4)
- fig.add_trace(go.Scatter(x=prof_np[:,4], y=prof_np[:,0], line=dict(color='firebrick', width=2.5),
- mode='lines+markers', name=prof_name, ), row=1, col=4)
- fig.update_xaxes(title_text="$I_{w}$", range=[0, 1.3*np.max(prof_np[:,4])],
- showline=True, linewidth=1.5, linecolor='black',ticks='outside', row=1, col=4)
- fig.update_yaxes(title_text="", range=[0, 1.01*np.max(prof_np[:,0])], showline=True,
- linewidth=1.5, linecolor='black',ticks='outside', row=1, col=4)
-
-
- # Shear Stress Profile
- fig.add_trace(go.Scatter(x=tar_uw, y=tar_p[:,2], line=dict(color='black', width=3.0, dash='dot'),
- mode='lines', name='Target', ), row=2, col=1)
- fig.add_trace(go.Scatter(x=prof_np[:,5], y=prof_np[:,0], line=dict(color='firebrick', width=2.5),
- mode='lines+markers', name=prof_name, ), row=2, col=1)
- fig.update_xaxes(title_text=r'$\overline{uw}$', range=[1.3*np.min(prof_np[:,5]), 1.5*np.max(prof_np[:,5])],
- showline=True, linewidth=1.5, linecolor='black',ticks='outside', row=2, col=1)
- fig.update_yaxes(title_text="$z [m]$", range=[0, 1.01*np.max(prof_np[:,0])], showline=True,
- linewidth=1.5, linecolor='black',ticks='outside', row=2, col=1)
-
+ fig.add_trace(
+ go.Scatter(
+ x=tar_Iw,
+ y=tar_p[:, 2],
+ line=dict(color='black', width=3.0, dash='dot'), # noqa: C408
+ mode='lines',
+ name='Target',
+ ),
+ row=1,
+ col=4,
+ )
+ fig.add_trace(
+ go.Scatter(
+ x=prof_np[:, 4],
+ y=prof_np[:, 0],
+ line=dict(color='firebrick', width=2.5), # noqa: C408
+ mode='lines+markers',
+ name=prof_name,
+ ),
+ row=1,
+ col=4,
+ )
+ fig.update_xaxes(
+ title_text='$I_{w}$',
+ range=[0, 1.3 * np.max(prof_np[:, 4])],
+ showline=True,
+ linewidth=1.5,
+ linecolor='black',
+ ticks='outside',
+ row=1,
+ col=4,
+ )
+ fig.update_yaxes(
+ title_text='',
+ range=[0, 1.01 * np.max(prof_np[:, 0])],
+ showline=True,
+ linewidth=1.5,
+ linecolor='black',
+ ticks='outside',
+ row=1,
+ col=4,
+ )
+
+ # Shear Stress Profile
+ fig.add_trace(
+ go.Scatter(
+ x=tar_uw,
+ y=tar_p[:, 2],
+ line=dict(color='black', width=3.0, dash='dot'), # noqa: C408
+ mode='lines',
+ name='Target',
+ ),
+ row=2,
+ col=1,
+ )
+ fig.add_trace(
+ go.Scatter(
+ x=prof_np[:, 5],
+ y=prof_np[:, 0],
+ line=dict(color='firebrick', width=2.5), # noqa: C408
+ mode='lines+markers',
+ name=prof_name,
+ ),
+ row=2,
+ col=1,
+ )
+ fig.update_xaxes(
+ title_text=r'$\overline{uw}$',
+ range=[1.3 * np.min(prof_np[:, 5]), 1.5 * np.max(prof_np[:, 5])],
+ showline=True,
+ linewidth=1.5,
+ linecolor='black',
+ ticks='outside',
+ row=2,
+ col=1,
+ )
+ fig.update_yaxes(
+ title_text='$z [m]$',
+ range=[0, 1.01 * np.max(prof_np[:, 0])],
+ showline=True,
+ linewidth=1.5,
+ linecolor='black',
+ ticks='outside',
+ row=2,
+ col=1,
+ )
# Length scale Lu
- fig.add_trace(go.Scatter(x=tar_Lu, y=tar_p[:,2], line=dict(color='black', width=3.0, dash='dot'),
- mode='lines', name='Target', ), row=2, col=2)
- fig.add_trace(go.Scatter(x=prof_np[:,6], y=prof_np[:,0], line=dict(color='firebrick', width=2.5),
- mode='lines+markers', name=prof_name, ), row=2, col=2)
- fig.update_xaxes(title_text="$L_{u} [m]$", range=[0, 1.5*np.max(prof_np[:,6])],
- showline=True, linewidth=1.5, linecolor='black',ticks='outside', row=2, col=2)
- fig.update_yaxes(title_text="", range=[0, 1.01*np.max(prof_np[:,0])], showline=True,
- linewidth=1.5, linecolor='black',ticks='outside', row=2, col=2)
-
+ fig.add_trace(
+ go.Scatter(
+ x=tar_Lu,
+ y=tar_p[:, 2],
+ line=dict(color='black', width=3.0, dash='dot'), # noqa: C408
+ mode='lines',
+ name='Target',
+ ),
+ row=2,
+ col=2,
+ )
+ fig.add_trace(
+ go.Scatter(
+ x=prof_np[:, 6],
+ y=prof_np[:, 0],
+ line=dict(color='firebrick', width=2.5), # noqa: C408
+ mode='lines+markers',
+ name=prof_name,
+ ),
+ row=2,
+ col=2,
+ )
+ fig.update_xaxes(
+ title_text='$L_{u} [m]$',
+ range=[0, 1.5 * np.max(prof_np[:, 6])],
+ showline=True,
+ linewidth=1.5,
+ linecolor='black',
+ ticks='outside',
+ row=2,
+ col=2,
+ )
+ fig.update_yaxes(
+ title_text='',
+ range=[0, 1.01 * np.max(prof_np[:, 0])],
+ showline=True,
+ linewidth=1.5,
+ linecolor='black',
+ ticks='outside',
+ row=2,
+ col=2,
+ )
# Length scale Lv
- fig.add_trace(go.Scatter(x=tar_Lv, y=tar_p[:,2], line=dict(color='black', width=3.0, dash='dot'),
- mode='lines', name='Target', ), row=2, col=3)
- fig.add_trace(go.Scatter(x=prof_np[:,7], y=prof_np[:,0], line=dict(color='firebrick', width=2.5),
- mode='lines+markers', name=prof_name, ), row=2, col=3)
- fig.update_xaxes(title_text="$L_{v} [m]$", range=[0, 1.5*np.max(prof_np[:,7])],
- showline=True, linewidth=1.5, linecolor='black',ticks='outside', row=2, col=3)
- fig.update_yaxes(title_text="", range=[0, 1.01*np.max(prof_np[:,0])], showline=True,
- linewidth=1.5, linecolor='black',ticks='outside', row=2, col=3)
-
+ fig.add_trace(
+ go.Scatter(
+ x=tar_Lv,
+ y=tar_p[:, 2],
+ line=dict(color='black', width=3.0, dash='dot'), # noqa: C408
+ mode='lines',
+ name='Target',
+ ),
+ row=2,
+ col=3,
+ )
+ fig.add_trace(
+ go.Scatter(
+ x=prof_np[:, 7],
+ y=prof_np[:, 0],
+ line=dict(color='firebrick', width=2.5), # noqa: C408
+ mode='lines+markers',
+ name=prof_name,
+ ),
+ row=2,
+ col=3,
+ )
+ fig.update_xaxes(
+ title_text='$L_{v} [m]$',
+ range=[0, 1.5 * np.max(prof_np[:, 7])],
+ showline=True,
+ linewidth=1.5,
+ linecolor='black',
+ ticks='outside',
+ row=2,
+ col=3,
+ )
+ fig.update_yaxes(
+ title_text='',
+ range=[0, 1.01 * np.max(prof_np[:, 0])],
+ showline=True,
+ linewidth=1.5,
+ linecolor='black',
+ ticks='outside',
+ row=2,
+ col=3,
+ )
# Length scale Lw
- fig.add_trace(go.Scatter(x=tar_Lw, y=tar_p[:,2], line=dict(color='black', width=3.0, dash='dot'),
- mode='lines', name='Target', ), row=2, col=4)
- fig.add_trace(go.Scatter(x=prof_np[:,8], y=prof_np[:,0], line=dict(color='firebrick', width=2.5),
- mode='lines+markers', name=prof_name, ), row=2, col=4)
- fig.update_xaxes(title_text="$L_{w} [m]$", range=[0, 1.5*np.max(prof_np[:,8])],
- showline=True, linewidth=1.5, linecolor='black',ticks='outside', row=2, col=4)
- fig.update_yaxes(title_text="", range=[0, 1.01*np.max(prof_np[:,0])], showline=True,
- linewidth=1.5, linecolor='black',ticks='outside', row=2, col=4)
-
-
- fig.update_layout(height=850, width=1200, title_text="",showlegend=False)
+ fig.add_trace(
+ go.Scatter(
+ x=tar_Lw,
+ y=tar_p[:, 2],
+ line=dict(color='black', width=3.0, dash='dot'), # noqa: C408
+ mode='lines',
+ name='Target',
+ ),
+ row=2,
+ col=4,
+ )
+ fig.add_trace(
+ go.Scatter(
+ x=prof_np[:, 8],
+ y=prof_np[:, 0],
+ line=dict(color='firebrick', width=2.5), # noqa: C408
+ mode='lines+markers',
+ name=prof_name,
+ ),
+ row=2,
+ col=4,
+ )
+ fig.update_xaxes(
+ title_text='$L_{w} [m]$',
+ range=[0, 1.5 * np.max(prof_np[:, 8])],
+ showline=True,
+ linewidth=1.5,
+ linecolor='black',
+ ticks='outside',
+ row=2,
+ col=4,
+ )
+ fig.update_yaxes(
+ title_text='',
+ range=[0, 1.01 * np.max(prof_np[:, 0])],
+ showline=True,
+ linewidth=1.5,
+ linecolor='black',
+ ticks='outside',
+ row=2,
+ col=4,
+ )
+
+ fig.update_layout(height=850, width=1200, title_text='', showlegend=False)
fig.show()
- fig.write_html(os.path.join(output_path, prof_name + ".html"), include_mathjax="cdn")
-
+ fig.write_html(
+ os.path.join(output_path, prof_name + '.html'), # noqa: PTH118
+ include_mathjax='cdn',
+ )
+ # Plot the spectra at four locations
- #Plot the spectra at four locations
-
- spec_h = ref_h*np.array([0.25, 0.50, 1.00, 2.00])
+ spec_h = ref_h * np.array([0.25, 0.50, 1.00, 2.00])
n_spec = len(spec_h)
nseg = 5
ncomp = 3
- ylabel = ['$fS_{u}/\sigma^2_{u}$',
- '$fS_{v}/\sigma^2_{v}$',
- '$fS_{w}/\sigma^2_{w}$']
-
+ ylabel = [
+ r'$fS_{u}/\sigma^2_{u}$',
+ r'$fS_{v}/\sigma^2_{v}$',
+ r'$fS_{w}/\sigma^2_{w}$',
+ ]
for i in range(n_spec):
- loc = np.argmin(np.abs(prof_np[:,0] - spec_h[i]))
-
- loc_tar = np.argmin(np.abs(tar_p[:,2] - spec_h[i]))
-
- subplot_titles = ("u-component", "v-component", "w-component")
- fig = make_subplots(rows=1, cols=3, start_cell="top-left", subplot_titles=subplot_titles, vertical_spacing=0.15)
-
- U_ref_prof = np.interp(spec_h[i], prof_np[:,0], prof_np[:,1])
- U_ref_tar = np.interp(spec_h[i], tar_p[:,2], tar_U)
-
- #Plot each component
+ loc = np.argmin(np.abs(prof_np[:, 0] - spec_h[i]))
+
+ loc_tar = np.argmin(np.abs(tar_p[:, 2] - spec_h[i]))
+
+ subplot_titles = ('u-component', 'v-component', 'w-component')
+ fig = make_subplots(
+ rows=1,
+ cols=3,
+ start_cell='top-left',
+ subplot_titles=subplot_titles,
+ vertical_spacing=0.15,
+ )
+
+ U_ref_prof = np.interp(spec_h[i], prof_np[:, 0], prof_np[:, 1]) # noqa: N806
+ U_ref_tar = np.interp(spec_h[i], tar_p[:, 2], tar_U) # noqa: N806
+
+ # Plot each component
for j in range(ncomp):
- freq, spec = psd(prof.u[loc, j,:], prof.dt, nseg)
-
- f_min = np.min(freq)/1.5
- f_max = 1.5*np.max(freq)
-
- u_var = np.var(prof.u[loc, j,:])
-
- spec = freq*spec/u_var
- freq = freq*spec_h[i]/U_ref_prof
-
-
- tar_Iz = tar_I[j,loc_tar]
- tar_Lz = tar_L[j,loc_tar]
-
-
- vonk_f = np.logspace(np.log10(f_min), np.log10(f_max), 200)
- vonk_psd = von_karman_spectrum(vonk_f, U_ref_tar, tar_Iz, tar_Lz, j)
-
- vonk_psd = vonk_f*vonk_psd/np.square(U_ref_tar*tar_Iz)
- vonk_f = vonk_f*spec_h[i]/U_ref_tar
-
-
- fig.add_trace(go.Scatter(x=freq, y=spec, line=dict(color='firebrick', width=1.5),
- mode='lines', name=prof_name, ), row=1, col=1+j)
- fig.add_trace(go.Scatter(x=vonk_f, y=vonk_psd, line=dict(color='black', width=3.0, dash='dot'),
- mode='lines', name='Target(von Karman)', ), row=1, col=1+j)
- fig.update_xaxes(type="log", title_text="$fz/U$",
- showline=True, linewidth=1.5, linecolor='black',ticks='outside', row=1, col=1+j)
- fig.update_yaxes(type="log", title_text=ylabel[j], showline=True,
- linewidth=1.5, linecolor='black',ticks='outside', row=1, col=1+j)
-
- fig.update_layout(height=450, width=1500, title_text="",showlegend=False)
- fig.show()
- fig.write_html(os.path.join(output_path, "spectra_" + prof_name + "_H" + str(1 + i) + ".html"), include_mathjax="cdn")
-
+ freq, spec = psd(prof.u[loc, j, :], prof.dt, nseg)
+
+ f_min = np.min(freq) / 1.5
+ f_max = 1.5 * np.max(freq)
+
+ u_var = np.var(prof.u[loc, j, :])
+ spec = freq * spec / u_var
+ freq = freq * spec_h[i] / U_ref_prof
+ tar_Iz = tar_I[j, loc_tar] # noqa: N806
+ tar_Lz = tar_L[j, loc_tar] # noqa: N806
-def plot_pressure_profile(case_path, output_path, prof_name):
+ vonk_f = np.logspace(np.log10(f_min), np.log10(f_max), 200)
+ vonk_psd = von_karman_spectrum(vonk_f, U_ref_tar, tar_Iz, tar_Lz, j)
- prof_path = os.path.join(case_path, "postProcessing", prof_name)
-
+ vonk_psd = vonk_f * vonk_psd / np.square(U_ref_tar * tar_Iz)
+ vonk_f = vonk_f * spec_h[i] / U_ref_tar
+
+ fig.add_trace(
+ go.Scatter(
+ x=freq,
+ y=spec,
+ line=dict(color='firebrick', width=1.5), # noqa: C408
+ mode='lines',
+ name=prof_name,
+ ),
+ row=1,
+ col=1 + j,
+ )
+ fig.add_trace(
+ go.Scatter(
+ x=vonk_f,
+ y=vonk_psd,
+ line=dict(color='black', width=3.0, dash='dot'), # noqa: C408
+ mode='lines',
+ name='Target (von Karman)',
+ ),
+ row=1,
+ col=1 + j,
+ )
+ fig.update_xaxes(
+ type='log',
+ title_text='$fz/U$',
+ showline=True,
+ linewidth=1.5,
+ linecolor='black',
+ ticks='outside',
+ row=1,
+ col=1 + j,
+ )
+ fig.update_yaxes(
+ type='log',
+ title_text=ylabel[j],
+ showline=True,
+ linewidth=1.5,
+ linecolor='black',
+ ticks='outside',
+ row=1,
+ col=1 + j,
+ )
+
+ fig.update_layout(height=450, width=1500, title_text='', showlegend=False)
+ fig.show()
+ fig.write_html(
+ os.path.join( # noqa: PTH118
+ output_path, 'spectra_' + prof_name + '_H' + str(1 + i) + '.html'
+ ),
+ include_mathjax='cdn',
+ )
- prof = PressureData(prof_path, start_time=1.0, end_time=None, u_ref=0.0, rho=1.25, p_ref=0.0)
+def plot_pressure_profile(case_path, output_path, prof_name): # noqa: D103
+ prof_path = os.path.join(case_path, 'postProcessing', prof_name) # noqa: PTH118
+
+ prof = PressureData(
+ prof_path, start_time=1.0, end_time=None, u_ref=0.0, rho=1.25, p_ref=0.0
+ )
std_p = np.std(prof.p, axis=1)
-
-
- subplot_titles = ("Pressure Fluctuation",)
-
- fig = make_subplots(rows=1, cols=1, start_cell="top-left", subplot_titles=subplot_titles, vertical_spacing=0.15)
+ subplot_titles = ('Pressure Fluctuation',)
+
+ fig = make_subplots(
+ rows=1,
+ cols=1,
+ start_cell='top-left',
+ subplot_titles=subplot_titles,
+ vertical_spacing=0.15,
+ )
# Plot pressure fluctuation Velocity
- fig.add_trace(go.Scatter(x=prof.x-np.min(prof.x), y=std_p, line=dict(color='firebrick', width=2.5),
- mode='lines+markers', name=prof_name, ), row=1, col=1)
-
- fig.update_xaxes(title_text="Distance from inlet (x) [m]", range=[np.min(prof.x-np.min(prof.x)), np.max(prof.x-np.min(prof.x))],
- showline=True, linewidth=1.5, linecolor='black',ticks='outside', row=1, col=1)
- fig.update_yaxes(title_text=r"Pressure R.M.S", range=[0, 1.15*np.max(std_p)], showline=True,
- linewidth=1.5, linecolor='black',ticks='outside', row=1, col=1)
-
+ fig.add_trace(
+ go.Scatter(
+ x=prof.x - np.min(prof.x),
+ y=std_p,
+ line=dict(color='firebrick', width=2.5), # noqa: C408
+ mode='lines+markers',
+ name=prof_name,
+ ),
+ row=1,
+ col=1,
+ )
+
+ fig.update_xaxes(
+ title_text='Distance from inlet (x) [m]',
+ range=[np.min(prof.x - np.min(prof.x)), np.max(prof.x - np.min(prof.x))],
+ showline=True,
+ linewidth=1.5,
+ linecolor='black',
+ ticks='outside',
+ row=1,
+ col=1,
+ )
+ fig.update_yaxes(
+ title_text=r'Pressure R.M.S',
+ range=[0, 1.15 * np.max(std_p)],
+ showline=True,
+ linewidth=1.5,
+ linecolor='black',
+ ticks='outside',
+ row=1,
+ col=1,
+ )
+
+ fig.update_layout(height=400, width=800, title_text='', showlegend=False)
+ fig.show()
+ fig.write_html(
+ os.path.join(output_path, 'pressure_' + prof_name + '.html'), # noqa: PTH118
+ include_mathjax='cdn',
+ )
- fig.update_layout(height=400, width=800, title_text="",showlegend=False)
- fig.show()
- fig.write_html(os.path.join(output_path, "pressure_" + prof_name + ".html"), include_mathjax="cdn")
-
-if __name__ == '__main__':
+if __name__ == '__main__':
""""
Entry point to read the simulation results from OpenFOAM case and post-process it.
"""
- #CLI parser
- parser = argparse.ArgumentParser(description="Get EVENT file from OpenFOAM output")
- parser.add_argument('-c', '--case', help="OpenFOAM case directory", required=True)
+ # CLI parser
+ parser = argparse.ArgumentParser(
+ description='Get EVENT file from OpenFOAM output'
+ )
+ parser.add_argument(
+ '-c', '--case', help='OpenFOAM case directory', required=True
+ )
arguments, unknowns = parser.parse_known_args()
-
- case_path = arguments.case
-
- print("Case full path: ", case_path)
+ case_path = arguments.case
+
+ print('Case full path: ', case_path) # noqa: T201
# prof_name = sys.argv[2]
- #Read JSON data
- json_path = os.path.join(case_path, "constant", "simCenter", "input", "EmptyDomainCFD.json")
- with open(json_path) as json_file:
- json_data = json.load(json_file)
-
+ # Read JSON data
+ json_path = os.path.join( # noqa: PTH118
+ case_path, 'constant', 'simCenter', 'input', 'EmptyDomainCFD.json'
+ )
+ with open(json_path) as json_file: # noqa: PLW1514, PTH123
+ json_data = json.load(json_file)
+
# Returns JSON object as a dictionary
- rm_data = json_data["resultMonitoring"]
-
+ rm_data = json_data['resultMonitoring']
+
wind_profiles = rm_data['windProfiles']
vtk_planes = rm_data['vtkPlanes']
-
- prof_output_path = os.path.join(case_path, "constant", "simCenter", "output", "windProfiles")
- #Check if it exists and remove files
- if os.path.exists(prof_output_path):
+ prof_output_path = os.path.join( # noqa: PTH118
+ case_path, 'constant', 'simCenter', 'output', 'windProfiles'
+ )
+
+ # Check if it exists and remove files
+ if os.path.exists(prof_output_path): # noqa: PTH110
shutil.rmtree(prof_output_path)
-
- #Create new path
+
+ # Create new path
Path(prof_output_path).mkdir(parents=True, exist_ok=True)
-
- #Plot velocity and pressure profiles
+
+ # Plot velocity and pressure profiles
for prof in wind_profiles:
- name = prof["name"]
- field = prof["field"]
- print(name)
- print(field)
-
- if field=="Velocity":
+ name = prof['name']
+ field = prof['field']
+ print(name) # noqa: T201
+ print(field) # noqa: T201
+
+ if field == 'Velocity':
plot_wind_profiles_and_spectra(case_path, prof_output_path, name)
-
- if field=="Pressure":
+
+ if field == 'Pressure':
plot_pressure_profile(case_path, prof_output_path, name)
-
-
+
# Copy the VTK files renamed
for pln in vtk_planes:
- name = pln["name"]
- field = pln["field"]
-
- vtk_path = os.path.join(case_path, "postProcessing", name)
- vtk_path_renamed = os.path.join(case_path, "postProcessing", name + "_renamed")
+ name = pln['name']
+ field = pln['field']
+
+ vtk_path = os.path.join(case_path, 'postProcessing', name) # noqa: PTH118
+ vtk_path_renamed = os.path.join( # noqa: PTH118
+ case_path, 'postProcessing', name + '_renamed'
+ )
Path(vtk_path_renamed).mkdir(parents=True, exist_ok=True)
copy_vtk_planes_and_order(vtk_path, vtk_path_renamed, field)
-
- #Check if it exists and remove files
- if os.path.exists(vtk_path):
- shutil.rmtree(vtk_path)
\ No newline at end of file
+
+ # Check if it exists and remove files
+ if os.path.exists(vtk_path): # noqa: PTH110
+ shutil.rmtree(vtk_path)
diff --git a/modules/createEVENT/MPM/post_process_sensors.py b/modules/createEVENT/MPM/post_process_sensors.py
index 577a64bbb..ccba2f779 100644
--- a/modules/createEVENT/MPM/post_process_sensors.py
+++ b/modules/createEVENT/MPM/post_process_sensors.py
@@ -1,5 +1,4 @@
-# -*- coding: utf-8 -*-
-# Copyright (c) 2016-2017, The Regents of the University of California (Regents).
+# Copyright (c) 2016-2017, The Regents of the University of California (Regents). # noqa: INP001
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
@@ -42,38 +41,42 @@
# Plots are saved to a specified directory.
#
-"""
-Entry point to read the simulation results from MPM case and post-processes it.
+"""Entry point to read the simulation results from MPM case and post-processes it."""
-"""
-import numpy as np
-import pandas as pd
-import matplotlib.pyplot as plt
-import os
+import os
import sys
-import argparse
+
+import matplotlib.pyplot as plt
+import pandas as pd
+
# import json
# from pathlib import Path
# import plotly.graph_objects as go
# from plotly.subplots import make_subplots
-if __name__ == '__main__':
-
- #CLI parser
+if __name__ == '__main__':
+ # CLI parser
input_args = sys.argv[1:]
- print("post_process_sensors.py - Backend-script post_process_sensors.py reached main. Starting...")
- print("post_process_sensors.py - Backend-script post_process_sensors.py running: " + str(sys.argv[0]))
- print("post_process_sensors.py - Backend-script post_process_sensors.py recieved input args: " + str(input_args))
+ print( # noqa: T201
+ 'post_process_sensors.py - Backend-script post_process_sensors.py reached main. Starting...'
+ )
+ print( # noqa: T201
+ 'post_process_sensors.py - Backend-script post_process_sensors.py running: '
+ + str(sys.argv[0])
+ )
+ print( # noqa: T201
+ 'post_process_sensors.py - Backend-script post_process_sensors.py received input args: '
+ + str(input_args)
+ )
# parser = argparse.ArgumentParser(description="Get sensor measurements from output, process them, plot them, and then save the figures.")
# parser.add_argument('-i', '--input_directory', help="Sensor Measurement Input Directory", required=True)
# parser.add_argument('-o', '--output_directory', help="Sensor Plot Output Directory", required=True)
# parser.add_argument('-f', '--files', help="Sensor Measurement Files", required=True)
# arguments, unknowns = parser.parse_known_args()
- # print("post_process_sensors.py - Backend-script post_process_sensors.py recieved: " + str(arguments))
- # print("post_process_sensors.py - Backend-script post_process_sensors.py recieved: " + str(unknowns))
-
+ # print("post_process_sensors.py - Backend-script post_process_sensors.py received: " + str(arguments))
+ # print("post_process_sensors.py - Backend-script post_process_sensors.py received: " + str(unknowns))
# Get the directory of the sensor data
# Get the directory to save the plots
@@ -84,66 +87,106 @@
# sensor_data_dir = arguments.input_directory
# output_dir = arguments.output_directory
- # sensor_files = (arguments.files).split(',')
- print("Sensor data directory: ", sensor_data_dir)
- print("Output directory: ", output_dir)
- print("Sensor files: ", sensor_files)
+ # sensor_files = (arguments.files).split(',')
+ print('Sensor data directory: ', sensor_data_dir) # noqa: T201
+ print('Output directory: ', output_dir) # noqa: T201
+ print('Sensor files: ', sensor_files) # noqa: T201
# json_path = os.path.join(case_path, "constant", "simCenter", "input", "MPM.json")
# with open(json_path) as json_file:
# json_data = json.load(json_file)
- # print("Backend-script post_process_sensors.py recieved: " + sys.argv[1] + " " + sys.argv[2] + " " + sys.argv[3] "")
-
+ # print("Backend-script post_process_sensors.py received: " + sys.argv[1] + " " + sys.argv[2] + " " + sys.argv[3] "")
+
# Get the list of sensor names
- sensor_names = [(sensor_file.split('.')[0]).lstrip('/').strip() for sensor_file in sensor_files]
+ sensor_names = [
+ (sensor_file.split('.')[0]).lstrip('/').strip()
+ for sensor_file in sensor_files
+ ]
# Load the sensor data
sensor_data = {}
for sensor_file in sensor_files:
# Remove any leading '/' from the sensor file
- sensor_file = sensor_file.lstrip('/')
- sensor_file = sensor_file.strip() # Remove whitespace from the sensor file
- sensor_file = sensor_file.split('.') # Split the sensor file by the '.' character
- if (sensor_file[-1] != 'csv'):
- print("Error: Sensor file is not a csv file. Please provide a csv file. Will skip this file: " + sensor_file[0] + '.' + sensor_file[-1])
+ sensor_file = sensor_file.lstrip('/') # noqa: PLW2901
+ # Remove whitespace from the sensor file
+ sensor_file = sensor_file.strip() # noqa: PLW2901
+ sensor_file = sensor_file.split( # noqa: PLW2901
+ '.'
+ ) # Split the sensor file by the '.' character
+ if sensor_file[-1] != 'csv':
+ print( # noqa: T201
+ 'Error: Sensor file is not a csv file. Please provide a csv file. Will skip this file: '
+ + sensor_file[0]
+ + '.'
+ + sensor_file[-1]
+ )
continue
- sensor_file = sensor_file[0] # Get the first part of the sensor file, which is the sensor name
- sensor_data[sensor_file] = pd.read_csv(os.path.join(sensor_data_dir, sensor_file + '.csv'), header=None, skiprows=1, delimiter=',', usecols=[0, 1])
+ sensor_file = sensor_file[ # noqa: PLW2901
+ 0
+ ] # Get the first part of the sensor file, which is the sensor name
+ sensor_data[sensor_file] = pd.read_csv(
+ os.path.join(sensor_data_dir, sensor_file + '.csv'), # noqa: PTH118
+ header=None,
+ skiprows=1,
+ delimiter=',',
+ usecols=[0, 1],
+ )
# Assume that the header is row 0, and that the time is in the first column, and the value is in the second column
sensor_data[sensor_file].columns = ['time', 'value']
-
- please_convert_to_date_time = False # May want to use this later, as wave-flumes tend to report time in date-time formats
- if (please_convert_to_date_time == True and sensor_data[sensor_file]['time'].dtype != 'datetime64[ns]'):
- sensor_data[sensor_file]['time'] = pd.to_datetime(sensor_data[sensor_file]['time'])
-
- # Make sure the output directory exists, and save the sensor raw data to the output directory if they aren't already there
- if not os.path.exists(output_dir):
- print("Output directory not found... Creating output directory: " + output_dir + "...")
- os.makedirs(output_dir)
- if (output_dir != sensor_data_dir):
+
+ please_convert_to_date_time = False # May want to use this later, as wave-flumes tend to report time in date-time formats
+ if (
+ please_convert_to_date_time == True # noqa: E712
+ and sensor_data[sensor_file]['time'].dtype != 'datetime64[ns]'
+ ):
+ sensor_data[sensor_file]['time'] = pd.to_datetime(
+ sensor_data[sensor_file]['time']
+ )
+
+ # Make sure the output directory exists, and save the sensor raw data to the output directory if they aren't already there
+ if not os.path.exists(output_dir): # noqa: PTH110
+ print( # noqa: T201
+ 'Output directory not found... Creating output directory: '
+ + output_dir
+ + '...'
+ )
+ os.makedirs(output_dir) # noqa: PTH103
+ if output_dir != sensor_data_dir:
for sensor_name in sensor_names:
- print("Save " + os.path.join(output_dir, sensor_name) + '.csv' + "...")
- sensor_data[sensor_name].to_csv(os.path.join(output_dir, sensor_name + '.csv'), index=False)
-
+ print('Save ' + os.path.join(output_dir, sensor_name) + '.csv' + '...') # noqa: T201, PTH118
+ sensor_data[sensor_name].to_csv(
+ os.path.join(output_dir, sensor_name + '.csv'), # noqa: PTH118
+ index=False,
+ )
+
# Plot the sensor data, and save the plots to the output directory (html and png files)
for sensor_name in sensor_names:
- print("Plotting " + sensor_name + "...")
+ print('Plotting ' + sensor_name + '...') # noqa: T201
fig, axes = plt.subplots(1, 1)
sensor_name_png = sensor_name + '.png'
sensor_name_html = sensor_name + '.webp'
# axes.set_title(sensor_name)
- axes.plot(sensor_data[sensor_name]['time'], sensor_data[sensor_name]['value'])
+ axes.plot(
+ sensor_data[sensor_name]['time'], sensor_data[sensor_name]['value']
+ )
axes.set_xlabel('Time [s]')
axes.set_ylabel('Sensor Measurement')
- print("Save " + os.path.join(output_dir, sensor_name_png) + "...")
- plt.savefig(os.path.join(output_dir, sensor_name_png), dpi=300, bbox_inches='tight') # save the plot as a png file
- print("Save " + os.path.join(output_dir, sensor_name_html) + "...")
- plt.savefig(os.path.join(output_dir, sensor_name_html), dpi=300, bbox_inches='tight') # save the plot as an html file
- plt.show()
- plt.close()
-
- print("post_process_sensors.py - Backend-script post_process_sensors.py reached end of main. Finished.")
-
-
-
\ No newline at end of file
+ print('Save ' + os.path.join(output_dir, sensor_name_png) + '...') # noqa: T201, PTH118
+ plt.savefig(
+ os.path.join(output_dir, sensor_name_png), # noqa: PTH118
+ dpi=300,
+ bbox_inches='tight',
+ ) # save the plot as a png file
+ print('Save ' + os.path.join(output_dir, sensor_name_html) + '...') # noqa: T201, PTH118
+ plt.savefig(
+ os.path.join(output_dir, sensor_name_html), # noqa: PTH118
+ dpi=300,
+ bbox_inches='tight',
+ ) # save the plot as a webp file
+ plt.show()
+ plt.close()
+
+ print( # noqa: T201
+ 'post_process_sensors.py - Backend-script post_process_sensors.py reached end of main. Finished.'
+ )
diff --git a/modules/createEVENT/MPM/setup_case.py b/modules/createEVENT/MPM/setup_case.py
index 2517acdf3..a3b803b75 100644
--- a/modules/createEVENT/MPM/setup_case.py
+++ b/modules/createEVENT/MPM/setup_case.py
@@ -1,1090 +1,1059 @@
-"""
-This script writes BC and initial condition, and setups the OpenFoam case
+"""This script writes BC and initial condition, and setups the OpenFoam case
directory.
-"""
-import numpy as np
-import sys
-import os
+""" # noqa: CPY001, D205, D404, INP001
+
import json
-import numpy as np
+import os
+import sys
+
import foam_file_processor as foam
-from stl import mesh
+import numpy as np
-def write_block_mesh_dict(input_json_path, template_dict_path, case_path):
+def write_block_mesh_dict(input_json_path, template_dict_path, case_path): # noqa: D103
+ # Read JSON data
+ with open(input_json_path + '/EmptyDomainCFD.json') as json_file: # noqa: PLW1514, PTH123
+ json_data = json.load(json_file)
- #Read JSON data
- with open(input_json_path + "/EmptyDomainCFD.json") as json_file:
- json_data = json.load(json_file)
-
# Returns JSON object as a dictionary
- mesh_data = json_data["blockMeshParameters"]
+ mesh_data = json_data['blockMeshParameters']
geom_data = json_data['GeometricData']
- boundary_data = json_data["boundaryConditions"]
+ boundary_data = json_data['boundaryConditions']
origin = np.array(geom_data['origin'])
- scale = geom_data['geometricScale']
-
- Lx = geom_data['domainLength']
- Ly = geom_data['domainWidth']
- Lz = geom_data['domainHeight']
- Lf = geom_data['fetchLength']
-
+ scale = geom_data['geometricScale'] # noqa: F841
+
+ Lx = geom_data['domainLength'] # noqa: N806
+ Ly = geom_data['domainWidth'] # noqa: N806
+ Lz = geom_data['domainHeight'] # noqa: N806
+ Lf = geom_data['fetchLength'] # noqa: N806
+
x_cells = mesh_data['xNumCells']
y_cells = mesh_data['yNumCells']
z_cells = mesh_data['zNumCells']
-
+
x_grading = mesh_data['xGrading']
y_grading = mesh_data['yGrading']
z_grading = mesh_data['zGrading']
-
- bc_map = {"slip": 'wall', "cyclic": 'cyclic', "noSlip": 'wall',
- "symmetry": 'symmetry', "empty": 'empty', "TInf": 'patch',
- "MeanABL": 'patch', "Uniform": 'patch', "zeroPressureOutlet": 'patch',
- "roughWallFunction": 'wall',"smoothWallFunction": 'wall'}
+ bc_map = {
+ 'slip': 'wall',
+ 'cyclic': 'cyclic',
+ 'noSlip': 'wall',
+ 'symmetry': 'symmetry',
+ 'empty': 'empty',
+ 'TInf': 'patch',
+ 'MeanABL': 'patch',
+ 'Uniform': 'patch',
+ 'zeroPressureOutlet': 'patch',
+ 'roughWallFunction': 'wall',
+ 'smoothWallFunction': 'wall',
+ }
inlet_type = bc_map[boundary_data['inletBoundaryCondition']]
outlet_type = bc_map[boundary_data['outletBoundaryCondition']]
- ground_type = bc_map[boundary_data['groundBoundaryCondition']]
+ ground_type = bc_map[boundary_data['groundBoundaryCondition']]
top_type = bc_map[boundary_data['topBoundaryCondition']]
front_type = bc_map[boundary_data['sidesBoundaryCondition']]
back_type = bc_map[boundary_data['sidesBoundaryCondition']]
length_unit = json_data['lengthUnit']
-
x_min = -Lf - origin[0]
- y_min = -Ly/2.0 - origin[1]
- z_min = 0.0 - origin[2]
+ y_min = -Ly / 2.0 - origin[1]
+ z_min = 0.0 - origin[2]
x_max = x_min + Lx
y_max = y_min + Ly
z_max = z_min + Lz
- #Open the template blockMeshDict (OpenFOAM file) for manipulation
- dict_file = open(template_dict_path + "/blockMeshDictTemplate", "r")
+ # Open the template blockMeshDict (OpenFOAM file) for manipulation
+ dict_file = open(template_dict_path + '/blockMeshDictTemplate') # noqa: PLW1514, PTH123, SIM115
- #Export to OpenFOAM probe format
+ # Export to OpenFOAM probe format
dict_lines = dict_file.readlines()
dict_file.close()
-
-
- dict_lines[17] = "\txMin\t\t{:.4f};\n".format(x_min)
- dict_lines[18] = "\tyMin\t\t{:.4f};\n".format(y_min)
- dict_lines[19] = "\tzMin\t\t{:.4f};\n".format(z_min)
- dict_lines[20] = "\txMax\t\t{:.4f};\n".format(x_max)
- dict_lines[21] = "\tyMax\t\t{:.4f};\n".format(y_max)
- dict_lines[22] = "\tzMax\t\t{:.4f};\n".format(z_max)
+ dict_lines[17] = f'\txMin\t\t{x_min:.4f};\n'
+ dict_lines[18] = f'\tyMin\t\t{y_min:.4f};\n'
+ dict_lines[19] = f'\tzMin\t\t{z_min:.4f};\n'
+ dict_lines[20] = f'\txMax\t\t{x_max:.4f};\n'
+ dict_lines[21] = f'\tyMax\t\t{y_max:.4f};\n'
+ dict_lines[22] = f'\tzMax\t\t{z_max:.4f};\n'
- dict_lines[23] = "\txCells\t\t{:d};\n".format(x_cells)
- dict_lines[24] = "\tyCells\t\t{:d};\n".format(y_cells)
- dict_lines[25] = "\tzCells\t\t{:d};\n".format(z_cells)
+ dict_lines[23] = f'\txCells\t\t{x_cells:d};\n'
+ dict_lines[24] = f'\tyCells\t\t{y_cells:d};\n'
+ dict_lines[25] = f'\tzCells\t\t{z_cells:d};\n'
- dict_lines[26] = "\txGrading\t{:.4f};\n".format(x_grading)
- dict_lines[27] = "\tyGrading\t{:.4f};\n".format(y_grading)
- dict_lines[28] = "\tzGrading\t{:.4f};\n".format(z_grading)
+ dict_lines[26] = f'\txGrading\t{x_grading:.4f};\n'
+ dict_lines[27] = f'\tyGrading\t{y_grading:.4f};\n'
+ dict_lines[28] = f'\tzGrading\t{z_grading:.4f};\n'
convert_to_meters = 1.0
- if length_unit=='m':
+ if length_unit == 'm':
convert_to_meters = 1.0
- elif length_unit=='cm':
+ elif length_unit == 'cm':
convert_to_meters = 0.01
- elif length_unit=='mm':
+ elif length_unit == 'mm':
convert_to_meters = 0.001
- elif length_unit=='ft':
+ elif length_unit == 'ft':
convert_to_meters = 0.3048
- elif length_unit=='in':
+ elif length_unit == 'in':
convert_to_meters = 0.0254
- dict_lines[31] = "convertToMeters {:.4f};\n".format(convert_to_meters)
- dict_lines[61] = " type {};\n".format(inlet_type)
- dict_lines[70] = " type {};\n".format(outlet_type)
- dict_lines[79] = " type {};\n".format(ground_type)
- dict_lines[88] = " type {};\n".format(top_type)
- dict_lines[97] = " type {};\n".format(front_type)
- dict_lines[106] = " type {};\n".format(back_type)
-
-
- write_file_name = case_path + "/system/blockMeshDict"
-
- if os.path.exists(write_file_name):
- os.remove(write_file_name)
-
- output_file = open(write_file_name, "w+")
+ dict_lines[31] = f'convertToMeters {convert_to_meters:.4f};\n'
+ dict_lines[61] = f' type {inlet_type};\n'
+ dict_lines[70] = f' type {outlet_type};\n'
+ dict_lines[79] = f' type {ground_type};\n'
+ dict_lines[88] = f' type {top_type};\n'
+ dict_lines[97] = f' type {front_type};\n'
+ dict_lines[106] = f' type {back_type};\n'
+
+ write_file_name = case_path + '/system/blockMeshDict'
+
+ if os.path.exists(write_file_name): # noqa: PTH110
+ os.remove(write_file_name) # noqa: PTH107
+
+ output_file = open(write_file_name, 'w+') # noqa: PLW1514, PTH123, SIM115
for line in dict_lines:
output_file.write(line)
output_file.close()
+def write_snappy_hex_mesh_dict(input_json_path, template_dict_path, case_path): # noqa: D103
+ # Read JSON data
+ with open(input_json_path + '/EmptyDomainCFD.json') as json_file: # noqa: PLW1514, PTH123
+ json_data = json.load(json_file)
-def write_snappy_hex_mesh_dict(input_json_path, template_dict_path, case_path):
-
- #Read JSON data
- with open(input_json_path + "/EmptyDomainCFD.json") as json_file:
- json_data = json.load(json_file)
-
# Returns JSON object as a dictionary
- mesh_data = json_data["snappyHexMeshParameters"]
+ mesh_data = json_data['snappyHexMeshParameters']
geom_data = json_data['GeometricData']
-
- Lx = geom_data['domainLength']
- Ly = geom_data['domainWidth']
- Lz = geom_data['domainHeight']
- Lf = geom_data['fetchLength']
-
+
+ Lx = geom_data['domainLength'] # noqa: N806
+ Ly = geom_data['domainWidth'] # noqa: N806
+ Lz = geom_data['domainHeight'] # noqa: N806
+ Lf = geom_data['fetchLength'] # noqa: N806
+
origin = np.array(geom_data['origin'])
-
+
num_cells_between_levels = mesh_data['numCellsBetweenLevels']
resolve_feature_angle = mesh_data['resolveFeatureAngle']
- num_processors = mesh_data['numProcessors']
-
+ num_processors = mesh_data['numProcessors'] # noqa: F841
+
refinement_boxes = mesh_data['refinementBoxes']
-
+
x_min = -Lf - origin[0]
- y_min = -Ly/2.0 - origin[1]
- z_min = 0.0 - origin[2]
+ y_min = -Ly / 2.0 - origin[1]
+ z_min = 0.0 - origin[2]
- x_max = x_min + Lx
+ x_max = x_min + Lx # noqa: F841
y_max = y_min + Ly
- z_max = z_min + Lz
-
- inside_point = [x_min + Lf/2.0, (y_min + y_max)/2.0, (z_min + z_max)/2.0]
+ z_max = z_min + Lz
+ inside_point = [x_min + Lf / 2.0, (y_min + y_max) / 2.0, (z_min + z_max) / 2.0]
- #Open the template blockMeshDict (OpenFOAM file) for manipulation
- dict_file = open(template_dict_path + "/snappyHexMeshDictTemplate", "r")
+ # Open the template snappyHexMeshDict (OpenFOAM file) for manipulation
+ dict_file = open(template_dict_path + '/snappyHexMeshDictTemplate') # noqa: PLW1514, PTH123, SIM115
- #Export to OpenFOAM probe format
+ # Read the template file contents line by line
dict_lines = dict_file.readlines()
dict_file.close()
-
-
- #Write 'addLayers' switch
- start_index = foam.find_keyword_line(dict_lines, "addLayers")
- dict_lines[start_index] = "addLayers\t{};\n".format("off")
-
- ###################### Edit Geometry Section ##############################
-
- #Add refinement box geometry
- start_index = foam.find_keyword_line(dict_lines, "geometry") + 2
- added_part = ""
- n_boxes = len(refinement_boxes)
- for i in range(n_boxes):
- added_part += " {}\n".format(refinement_boxes[i][0])
- added_part += " {\n"
- added_part += " type searchableBox;\n"
- added_part += " min ({:.4f} {:.4f} {:.4f});\n".format(refinement_boxes[i][2], refinement_boxes[i][3], refinement_boxes[i][4])
- added_part += " max ({:.4f} {:.4f} {:.4f});\n".format(refinement_boxes[i][5], refinement_boxes[i][6], refinement_boxes[i][7])
- added_part += " }\n"
-
- dict_lines.insert(start_index, added_part)
-
-
- ################# Edit castellatedMeshControls Section ####################
-
- #Write 'nCellsBetweenLevels'
- start_index = foam.find_keyword_line(dict_lines, "nCellsBetweenLevels")
- dict_lines[start_index] = " nCellsBetweenLevels {:d};\n".format(num_cells_between_levels)
- #Write 'resolveFeatureAngle'
- start_index = foam.find_keyword_line(dict_lines, "resolveFeatureAngle")
- dict_lines[start_index] = " resolveFeatureAngle {:d};\n".format(resolve_feature_angle)
+ # Write 'addLayers' switch
+ start_index = foam.find_keyword_line(dict_lines, 'addLayers')
+ dict_lines[start_index] = 'addLayers\t{};\n'.format('off')
- #Write 'insidePoint'
- start_index = foam.find_keyword_line(dict_lines, "insidePoint")
- dict_lines[start_index] = " insidePoint ({:.4f} {:.4f} {:.4f});\n".format(inside_point[0], inside_point[1], inside_point[2])
+ # Edit Geometry Section ##############################
- #For compatibility with OpenFOAM-9 and older
- start_index = foam.find_keyword_line(dict_lines, "locationInMesh")
- dict_lines[start_index] = " locationInMesh ({:.4f} {:.4f} {:.4f});\n".format(inside_point[0], inside_point[1], inside_point[2])
-
- #Write 'outsidePoint' on Frontera snappyHex will fail without this keyword
- start_index = foam.find_keyword_line(dict_lines, "outsidePoint")
- dict_lines[start_index] = " outsidePoint ({:.4e} {:.4e} {:.4e});\n".format(-1e-20, -1e-20, -1e-20)
+ # Add refinement box geometry
+ start_index = foam.find_keyword_line(dict_lines, 'geometry') + 2
+ added_part = ''
+ n_boxes = len(refinement_boxes)
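+ # Each refinement box entry is assumed to be laid out as
+ # [name, level, xMin, yMin, zMin, xMax, yMax, zMax], matching the indices used below.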
+ for i in range(n_boxes):
+ added_part += f' {refinement_boxes[i][0]}\n'
+ added_part += ' {\n'
+ added_part += ' type searchableBox;\n'
+ added_part += f' min ({refinement_boxes[i][2]:.4f} {refinement_boxes[i][3]:.4f} {refinement_boxes[i][4]:.4f});\n'
+ added_part += f' max ({refinement_boxes[i][5]:.4f} {refinement_boxes[i][6]:.4f} {refinement_boxes[i][7]:.4f});\n'
+ added_part += ' }\n'
+ dict_lines.insert(start_index, added_part)
-
- #Add box refinements
- added_part = ""
+ # Edit castellatedMeshControls Section ####################
+
+ # Write 'nCellsBetweenLevels'
+ start_index = foam.find_keyword_line(dict_lines, 'nCellsBetweenLevels')
+ dict_lines[start_index] = (
+ f' nCellsBetweenLevels {num_cells_between_levels:d};\n'
+ )
+
+ # Write 'resolveFeatureAngle'
+ start_index = foam.find_keyword_line(dict_lines, 'resolveFeatureAngle')
+ dict_lines[start_index] = f' resolveFeatureAngle {resolve_feature_angle:d};\n'
+
+ # Write 'insidePoint'
+ start_index = foam.find_keyword_line(dict_lines, 'insidePoint')
+ dict_lines[start_index] = (
+ f' insidePoint ({inside_point[0]:.4f} {inside_point[1]:.4f} {inside_point[2]:.4f});\n'
+ )
+
+ # For compatibility with OpenFOAM-9 and older
+ start_index = foam.find_keyword_line(dict_lines, 'locationInMesh')
+ dict_lines[start_index] = (
+ f' locationInMesh ({inside_point[0]:.4f} {inside_point[1]:.4f} {inside_point[2]:.4f});\n'
+ )
+
+ # Write 'outsidePoint'; on Frontera, snappyHexMesh will fail without this keyword
+ start_index = foam.find_keyword_line(dict_lines, 'outsidePoint')
+ dict_lines[start_index] = (
+ f' outsidePoint ({-1e-20:.4e} {-1e-20:.4e} {-1e-20:.4e});\n'
+ )
+
+ # Add box refinements
+ added_part = ''
for i in range(n_boxes):
- added_part += " {}\n".format(refinement_boxes[i][0])
- added_part += " {\n"
- added_part += " mode inside;\n"
- added_part += " level {};\n".format(refinement_boxes[i][1])
- added_part += " }\n"
-
- start_index = foam.find_keyword_line(dict_lines, "refinementRegions") + 2
+ added_part += f' {refinement_boxes[i][0]}\n'
+ added_part += ' {\n'
+ added_part += ' mode inside;\n'
+ added_part += f' level {refinement_boxes[i][1]};\n'
+ added_part += ' }\n'
+
+ start_index = foam.find_keyword_line(dict_lines, 'refinementRegions') + 2
dict_lines.insert(start_index, added_part)
-
-
- #Write edited dict to file
- write_file_name = case_path + "/system/snappyHexMeshDict"
-
- if os.path.exists(write_file_name):
- os.remove(write_file_name)
-
- output_file = open(write_file_name, "w+")
+
+ # Write edited dict to file
+ write_file_name = case_path + '/system/snappyHexMeshDict'
+
+ if os.path.exists(write_file_name): # noqa: PTH110
+ os.remove(write_file_name) # noqa: PTH107
+
+ output_file = open(write_file_name, 'w+') # noqa: PLW1514, PTH123, SIM115
for line in dict_lines:
output_file.write(line)
output_file.close()
def write_boundary_data_files(input_json_path, case_path):
- """
- This functions writes wind profile files in "constant/boundaryData/inlet"
- if TInf options are used for the simulation.
- """
- #Read JSON data
- with open(input_json_path + "/EmptyDomainCFD.json") as json_file:
- json_data = json.load(json_file)
+ """This functions writes wind profile files in "constant/boundaryData/inlet"
+ if TInf options are used for the simulation.
+ """ # noqa: D205, D401, D404
+ # Read JSON data
+ with open(input_json_path + '/EmptyDomainCFD.json') as json_file: # noqa: PLW1514, PTH123
+ json_data = json.load(json_file)
# Returns JSON object as a dictionary
- boundary_data = json_data["boundaryConditions"]
+ boundary_data = json_data['boundaryConditions']
geom_data = json_data['GeometricData']
- wind_profiles = np.array(boundary_data["inflowProperties"]['windProfiles'])
+ wind_profiles = np.array(boundary_data['inflowProperties']['windProfiles'])
- bd_path = case_path + "/constant/boundaryData/inlet/"
+ bd_path = case_path + '/constant/boundaryData/inlet/'
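+ # The points/U/R/L files written below form the boundaryData set that the
+ # TInf (turbulentDFMInlet) inflow option reads at run time.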
- #Write points file
+ # Write points file
n_pts = np.shape(wind_profiles)[0]
- points = np.zeros((n_pts, 3))
-
+ points = np.zeros((n_pts, 3))
origin = np.array(geom_data['origin'])
-
- Ly = geom_data['domainWidth']
- Lf = geom_data['fetchLength']
-
+
+ Ly = geom_data['domainWidth'] # noqa: N806
+ Lf = geom_data['fetchLength'] # noqa: N806
+
x_min = -Lf - origin[0]
- y_min = -Ly/2.0 - origin[1]
+ y_min = -Ly / 2.0 - origin[1]
y_max = y_min + Ly
- points[:,0] = x_min
- points[:,1] = (y_min + y_max)/2.0
- points[:,2] = wind_profiles[:, 0]
+ points[:, 0] = x_min
+ points[:, 1] = (y_min + y_max) / 2.0
+ points[:, 2] = wind_profiles[:, 0]
- #Shift the last element of the y coordinate
- #a bit to make planer interpolation easier
+ # Shift the last element of the y coordinate
+ # a bit to make planar interpolation easier
points[-1:, 1] = y_max
- foam.write_foam_field(points, bd_path + "points")
-
- #Write wind speed file as a scalar field
- foam.write_scalar_field(wind_profiles[:, 1], bd_path + "U")
+ foam.write_foam_field(points, bd_path + 'points')
- #Write Reynolds stress profile (6 columns -> it's a symmetric tensor field)
- foam.write_foam_field(wind_profiles[:, 2:8], bd_path + "R")
+ # Write wind speed file as a scalar field
+ foam.write_scalar_field(wind_profiles[:, 1], bd_path + 'U')
- #Write length scale file (8 columns -> it's a tensor field)
- foam.write_foam_field(wind_profiles[:, 8:17], bd_path + "L")
+ # Write Reynolds stress profile (6 columns -> it's a symmetric tensor field)
+ foam.write_foam_field(wind_profiles[:, 2:8], bd_path + 'R')
+ # Write length scale file (9 columns -> it's a tensor field)
+ foam.write_foam_field(wind_profiles[:, 8:17], bd_path + 'L')
-def write_U_file(input_json_path, template_dict_path, case_path):
- #Read JSON data
- with open(input_json_path + "/EmptyDomainCFD.json") as json_file:
- json_data = json.load(json_file)
+def write_U_file(input_json_path, template_dict_path, case_path): # noqa: N802, D103
+ # Read JSON data
+ with open(input_json_path + '/EmptyDomainCFD.json') as json_file: # noqa: PLW1514, PTH123
+ json_data = json.load(json_file)
# Returns JSON object as a dictionary
- boundary_data = json_data["boundaryConditions"]
- wind_data = json_data["windCharacteristics"]
-
-
- inlet_BC_type = boundary_data['inletBoundaryCondition']
- top_BC_type = boundary_data['topBoundaryCondition']
- sides_BC_type = boundary_data['sidesBoundaryCondition']
-
+ boundary_data = json_data['boundaryConditions']
+ wind_data = json_data['windCharacteristics']
+
+ inlet_BC_type = boundary_data['inletBoundaryCondition'] # noqa: N806
+ top_BC_type = boundary_data['topBoundaryCondition'] # noqa: N806
+ sides_BC_type = boundary_data['sidesBoundaryCondition'] # noqa: N806
+
wind_speed = wind_data['referenceWindSpeed']
building_height = wind_data['referenceHeight']
roughness_length = wind_data['aerodynamicRoughnessLength']
-
- #Open the template file (OpenFOAM file) for manipulation
- dict_file = open(template_dict_path + "/UFileTemplate", "r")
+ # Open the template file (OpenFOAM file) for manipulation
+ dict_file = open(template_dict_path + '/UFileTemplate') # noqa: PLW1514, PTH123, SIM115
dict_lines = dict_file.readlines()
dict_file.close()
-
- ##################### Internal Field #########################
- #Initialize the internal fields frow a lower velocity to avoid Courant number
- #instability when the solver starts. Now %10 of roof-height wind speed is set
- start_index = foam.find_keyword_line(dict_lines, "internalField")
+
+ # Internal Field #########################
+ # Initialize the internal field from a lower velocity to avoid Courant number
+ # instability when the solver starts. Now 10% of the roof-height wind speed is set
+ start_index = foam.find_keyword_line(dict_lines, 'internalField')
# dict_lines[start_index] = "internalField uniform ({:.4f} 0 0);\n".format(1.0*wind_speed)
- #Set the internal field to zero to make it easy for the solver to start
- dict_lines[start_index] = "internalField uniform (0 0 0);\n"
-
-
- ###################### Inlet BC ##############################
- #Write uniform
- start_index = foam.find_keyword_line(dict_lines, "inlet") + 2
-
- if inlet_BC_type == "Uniform":
- added_part = ""
- added_part += "\t type \t fixedValue;\n"
- added_part += "\t value \t uniform ({:.4f} 0 0);\n".format(wind_speed)
-
- if inlet_BC_type == "MeanABL":
- added_part = ""
- added_part += "\t type \t atmBoundaryLayerInletVelocity;\n"
- added_part += "\t Uref \t {:.4f};\n".format(wind_speed)
- added_part += "\t Zref \t {:.4f};\n".format(building_height)
- added_part += "\t zDir \t (0.0 0.0 1.0);\n"
- added_part += "\t flowDir \t (1.0 0.0 0.0);\n"
- added_part += "\t z0 uniform \t {:.4e};\n".format(roughness_length)
- added_part += "\t zGround \t uniform 0.0;\n"
-
- if inlet_BC_type == "TInf":
- added_part = ""
- added_part += "\t type \t turbulentDFMInlet;\n"
- added_part += "\t filterType \t exponential;\n"
- added_part += "\t filterFactor \t {};\n".format(4)
- added_part += "\t value \t uniform ({:.4f} 0 0);\n".format(wind_speed)
- added_part += "\t periodicInY \t {};\n".format("true")
- added_part += "\t periodicInZ \t {};\n".format("false")
- added_part += "\t constMeanU \t {};\n".format("true")
- added_part += "\t Uref \t {:.4f};\n".format(wind_speed)
+ # Set the internal field to zero to make it easy for the solver to start
+ dict_lines[start_index] = 'internalField uniform (0 0 0);\n'
+
+ # Inlet BC ##############################
+ # Write uniform
+ start_index = foam.find_keyword_line(dict_lines, 'inlet') + 2
+
+ if inlet_BC_type == 'Uniform':
+ added_part = ''
+ added_part += '\t type \t fixedValue;\n'
+ added_part += f'\t value \t uniform ({wind_speed:.4f} 0 0);\n'
+
+ if inlet_BC_type == 'MeanABL':
+ added_part = ''
+ added_part += '\t type \t atmBoundaryLayerInletVelocity;\n'
+ added_part += f'\t Uref \t {wind_speed:.4f};\n'
+ added_part += f'\t Zref \t {building_height:.4f};\n'
+ added_part += '\t zDir \t (0.0 0.0 1.0);\n'
+ added_part += '\t flowDir \t (1.0 0.0 0.0);\n'
+ added_part += f'\t z0 uniform \t {roughness_length:.4e};\n'
+ added_part += '\t zGround \t uniform 0.0;\n'
+
+ if inlet_BC_type == 'TInf':
+ added_part = ''
+ added_part += '\t type \t turbulentDFMInlet;\n'
+ added_part += '\t filterType \t exponential;\n'
+ added_part += f'\t filterFactor \t {4};\n'
+ added_part += f'\t value \t uniform ({wind_speed:.4f} 0 0);\n'
+ added_part += '\t periodicInY \t {};\n'.format('true')
+ added_part += '\t periodicInZ \t {};\n'.format('false')
+ added_part += '\t constMeanU \t {};\n'.format('true')
+ added_part += f'\t Uref \t {wind_speed:.4f};\n'
dict_lines.insert(start_index, added_part)
- ###################### Outlet BC ##############################
-
- start_index = foam.find_keyword_line(dict_lines, "outlet") + 2
- added_part = ""
- added_part += "\t type \t inletOutlet;\n"
- added_part += "\t inletValue \t uniform (0 0 0);\n"
+ # Outlet BC ##############################
+
+ start_index = foam.find_keyword_line(dict_lines, 'outlet') + 2
+ added_part = ''
+ added_part += '\t type \t inletOutlet;\n'
+ added_part += '\t inletValue \t uniform (0 0 0);\n'
# added_part += "\t value \t uniform ({:.4f} 0 0);\n".format(wind_speed)
- added_part += "\t value \t uniform (0 0 0);\n"
-
- # added_part += "\t type zeroGradient;\n"
+ added_part += '\t value \t uniform (0 0 0);\n'
+ # added_part += "\t type zeroGradient;\n"
dict_lines.insert(start_index, added_part)
-
- ###################### Ground BC ##############################
-
- start_index = foam.find_keyword_line(dict_lines, "ground") + 2
- added_part = ""
- added_part += "\t type \t uniformFixedValue;\n"
- added_part += "\t value \t uniform (0 0 0);\n"
- added_part += "\t uniformValue \t constant (0 0 0);\n"
-
+
+ # Ground BC ##############################
+
+ start_index = foam.find_keyword_line(dict_lines, 'ground') + 2
+ added_part = ''
+ added_part += '\t type \t uniformFixedValue;\n'
+ added_part += '\t value \t uniform (0 0 0);\n'
+ added_part += '\t uniformValue \t constant (0 0 0);\n'
+
dict_lines.insert(start_index, added_part)
-
-
- ###################### Top BC ##############################
-
- start_index = foam.find_keyword_line(dict_lines, "top") + 2
- added_part = ""
- added_part += "\t type {};\n".format(top_BC_type)
-
+
+ # Top BC ##############################
+
+ start_index = foam.find_keyword_line(dict_lines, 'top') + 2
+ added_part = ''
+ added_part += f'\t type {top_BC_type};\n'
+
dict_lines.insert(start_index, added_part)
-
- ###################### Front BC ##############################
-
- start_index = foam.find_keyword_line(dict_lines, "front") + 2
- added_part = ""
- added_part += "\t type \t {};\n".format(sides_BC_type)
-
+
+ # Front BC ##############################
+
+ start_index = foam.find_keyword_line(dict_lines, 'front') + 2
+ added_part = ''
+ added_part += f'\t type \t {sides_BC_type};\n'
+
dict_lines.insert(start_index, added_part)
-
- ###################### Back BC ##############################
-
- start_index = foam.find_keyword_line(dict_lines, "back") + 2
- added_part = ""
- added_part += "\t type {};\n".format(sides_BC_type)
-
+
+ # Back BC ##############################
+
+ start_index = foam.find_keyword_line(dict_lines, 'back') + 2
+ added_part = ''
+ added_part += f'\t type {sides_BC_type};\n'
+
dict_lines.insert(start_index, added_part)
-
- #Write edited dict to file
- write_file_name = case_path + "/0/U"
-
- if os.path.exists(write_file_name):
- os.remove(write_file_name)
-
- output_file = open(write_file_name, "w+")
+ # Write edited dict to file
+ write_file_name = case_path + '/0/U'
+
+ if os.path.exists(write_file_name): # noqa: PTH110
+ os.remove(write_file_name) # noqa: PTH107
+
+ output_file = open(write_file_name, 'w+') # noqa: PLW1514, PTH123, SIM115
for line in dict_lines:
output_file.write(line)
output_file.close()
-
-def write_p_file(input_json_path, template_dict_path, case_path):
- #Read JSON data
- with open(input_json_path + "/EmptyDomainCFD.json") as json_file:
- json_data = json.load(json_file)
+def write_p_file(input_json_path, template_dict_path, case_path): # noqa: D103
+ # Read JSON data
+ with open(input_json_path + '/EmptyDomainCFD.json') as json_file: # noqa: PLW1514, PTH123
+ json_data = json.load(json_file)
# Returns JSON object as a dictionary
- boundary_data = json_data["boundaryConditions"]
-
- sides_BC_type = boundary_data['sidesBoundaryCondition']
- top_BC_type = boundary_data['topBoundaryCondition']
+ boundary_data = json_data['boundaryConditions']
+ sides_BC_type = boundary_data['sidesBoundaryCondition'] # noqa: N806
+ top_BC_type = boundary_data['topBoundaryCondition'] # noqa: N806
- #Open the template file (OpenFOAM file) for manipulation
- dict_file = open(template_dict_path + "/pFileTemplate", "r")
+ # Open the template file (OpenFOAM file) for manipulation
+ dict_file = open(template_dict_path + '/pFileTemplate') # noqa: PLW1514, PTH123, SIM115
dict_lines = dict_file.readlines()
dict_file.close()
-
-
- #BC and initial condition
- p0 = 0.0;
+ # BC and initial condition
+ p0 = 0.0
+ # Internal Field #########################
- ##################### Internal Field #########################
-
- start_index = foam.find_keyword_line(dict_lines, "internalField")
- dict_lines[start_index] = "internalField uniform {:.4f};\n".format(p0)
+ start_index = foam.find_keyword_line(dict_lines, 'internalField')
+ dict_lines[start_index] = f'internalField uniform {p0:.4f};\n'
+ # Inlet BC ##############################
+ # Write uniform
+ start_index = foam.find_keyword_line(dict_lines, 'inlet') + 2
+ added_part = ''
+ added_part += '\t type \t zeroGradient;\n'
- ###################### Inlet BC ##############################
- #Write uniform
- start_index = foam.find_keyword_line(dict_lines, "inlet") + 2
- added_part = ""
- added_part += "\t type \t zeroGradient;\n"
-
dict_lines.insert(start_index, added_part)
- ###################### Outlet BC ##############################
-
- start_index = foam.find_keyword_line(dict_lines, "outlet") + 2
- added_part = ""
- added_part += "\t type \t uniformFixedValue;\n"
- added_part += "\t uniformValue \t constant {:.4f};\n".format(p0)
-
+ # Outlet BC ##############################
+
+ start_index = foam.find_keyword_line(dict_lines, 'outlet') + 2
+ added_part = ''
+ added_part += '\t type \t uniformFixedValue;\n'
+ added_part += f'\t uniformValue \t constant {p0:.4f};\n'
+
dict_lines.insert(start_index, added_part)
-
- ###################### Ground BC ##############################
-
- start_index = foam.find_keyword_line(dict_lines, "ground") + 2
- added_part = ""
- added_part += "\t type \t zeroGradient;\n"
-
+
+ # Ground BC ##############################
+
+ start_index = foam.find_keyword_line(dict_lines, 'ground') + 2
+ added_part = ''
+ added_part += '\t type \t zeroGradient;\n'
+
dict_lines.insert(start_index, added_part)
-
-
- ###################### Top BC ##############################
-
- start_index = foam.find_keyword_line(dict_lines, "top") + 2
- added_part = ""
- added_part += "\t type \t {};\n".format(top_BC_type)
-
+
+ # Top BC ##############################
+
+ start_index = foam.find_keyword_line(dict_lines, 'top') + 2
+ added_part = ''
+ added_part += f'\t type \t {top_BC_type};\n'
+
dict_lines.insert(start_index, added_part)
-
- ###################### Front BC ##############################
-
- start_index = foam.find_keyword_line(dict_lines, "front") + 2
- added_part = ""
- added_part += "\t type \t {};\n".format(sides_BC_type)
-
+
+ # Front BC ##############################
+
+ start_index = foam.find_keyword_line(dict_lines, 'front') + 2
+ added_part = ''
+ added_part += f'\t type \t {sides_BC_type};\n'
+
dict_lines.insert(start_index, added_part)
-
- ###################### Back BC ##############################
-
- start_index = foam.find_keyword_line(dict_lines, "back") + 2
- added_part = ""
- added_part += "\t type \t {};\n".format(sides_BC_type)
-
+
+ # Back BC ##############################
+
+ start_index = foam.find_keyword_line(dict_lines, 'back') + 2
+ added_part = ''
+ added_part += f'\t type \t {sides_BC_type};\n'
+
dict_lines.insert(start_index, added_part)
-
-
- #Write edited dict to file
- write_file_name = case_path + "/0/p"
-
- if os.path.exists(write_file_name):
- os.remove(write_file_name)
-
- output_file = open(write_file_name, "w+")
+
+ # Write edited dict to file
+ write_file_name = case_path + '/0/p'
+
+ if os.path.exists(write_file_name): # noqa: PTH110
+ os.remove(write_file_name) # noqa: PTH107
+
+ output_file = open(write_file_name, 'w+') # noqa: PLW1514, PTH123, SIM115
for line in dict_lines:
output_file.write(line)
output_file.close()
-
-def write_nut_file(input_json_path, template_dict_path, case_path):
- #Read JSON data
- with open(input_json_path + "/EmptyDomainCFD.json") as json_file:
- json_data = json.load(json_file)
+
+def write_nut_file(input_json_path, template_dict_path, case_path): # noqa: D103
+ # Read JSON data
+ with open(input_json_path + '/EmptyDomainCFD.json') as json_file: # noqa: PLW1514, PTH123
+ json_data = json.load(json_file)
# Returns JSON object as a dictionary
- boundary_data = json_data["boundaryConditions"]
- wind_data = json_data["windCharacteristics"]
-
- sides_BC_type = boundary_data['sidesBoundaryCondition']
- top_BC_type = boundary_data['topBoundaryCondition']
- ground_BC_type = boundary_data['groundBoundaryCondition']
+ boundary_data = json_data['boundaryConditions']
+ wind_data = json_data['windCharacteristics']
+ sides_BC_type = boundary_data['sidesBoundaryCondition'] # noqa: N806
+ top_BC_type = boundary_data['topBoundaryCondition'] # noqa: N806
+ ground_BC_type = boundary_data['groundBoundaryCondition'] # noqa: N806
# wind_speed = wind_data['roofHeightWindSpeed']
# building_height = wind_data['buildingHeight']
roughness_length = wind_data['aerodynamicRoughnessLength']
-
- #Open the template file (OpenFOAM file) for manipulation
- dict_file = open(template_dict_path + "/nutFileTemplate", "r")
+
+ # Open the template file (OpenFOAM file) for manipulation
+ dict_file = open(template_dict_path + '/nutFileTemplate') # noqa: PLW1514, PTH123, SIM115
dict_lines = dict_file.readlines()
dict_file.close()
-
-
- #BC and initial condition
- nut0 = 0.0
-
- ##################### Internal Field #########################
-
- start_index = foam.find_keyword_line(dict_lines, "internalField")
- dict_lines[start_index] = "internalField uniform {:.4f};\n".format(nut0)
-
-
- ###################### Inlet BC ##############################
- #Write uniform
- start_index = foam.find_keyword_line(dict_lines, "inlet") + 2
- added_part = ""
- added_part += "\t type \t zeroGradient;\n"
-
+
+ # BC and initial condition
+ nut0 = 0.0
+
+ # Internal Field #########################
+
+ start_index = foam.find_keyword_line(dict_lines, 'internalField')
+ dict_lines[start_index] = f'internalField uniform {nut0:.4f};\n'
+
+ # Inlet BC ##############################
+ # Write uniform
+ start_index = foam.find_keyword_line(dict_lines, 'inlet') + 2
+ added_part = ''
+ added_part += '\t type \t zeroGradient;\n'
+
dict_lines.insert(start_index, added_part)
- ###################### Outlet BC ##############################
-
- start_index = foam.find_keyword_line(dict_lines, "outlet") + 2
- added_part = ""
- added_part += "\t type \t uniformFixedValue;\n"
- added_part += "\t uniformValue \t constant {:.4f};\n".format(nut0)
-
+ # Outlet BC ##############################
+
+ start_index = foam.find_keyword_line(dict_lines, 'outlet') + 2
+ added_part = ''
+ added_part += '\t type \t uniformFixedValue;\n'
+ added_part += f'\t uniformValue \t constant {nut0:.4f};\n'
+
dict_lines.insert(start_index, added_part)
-
- ###################### Ground BC ##############################
-
- start_index = foam.find_keyword_line(dict_lines, "ground") + 2
-
- if ground_BC_type == "noSlip":
- added_part = ""
- added_part += "\t type \t zeroGradient;\n"
-
- if ground_BC_type == "roughWallFunction":
- added_part = ""
- added_part += "\t type \t nutkAtmRoughWallFunction;\n"
- added_part += "\t z0 \t uniform {:.4e};\n".format(roughness_length)
- added_part += "\t value \t uniform 0.0;\n"
-
- if ground_BC_type == "smoothWallFunction":
- added_part = ""
- added_part += "\t type \t nutUSpaldingWallFunction;\n"
- added_part += "\t value \t uniform 0;\n"
+ # Ground BC ##############################
+
+ start_index = foam.find_keyword_line(dict_lines, 'ground') + 2
+
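+ # nutkAtmRoughWallFunction models an aerodynamically rough ground via z0;
+ # nutUSpaldingWallFunction is the smooth-wall counterpart.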
+ if ground_BC_type == 'noSlip':
+ added_part = ''
+ added_part += '\t type \t zeroGradient;\n'
+
+ if ground_BC_type == 'roughWallFunction':
+ added_part = ''
+ added_part += '\t type \t nutkAtmRoughWallFunction;\n'
+ added_part += f'\t z0 \t uniform {roughness_length:.4e};\n'
+ added_part += '\t value \t uniform 0.0;\n'
+
+ if ground_BC_type == 'smoothWallFunction':
+ added_part = ''
+ added_part += '\t type \t nutUSpaldingWallFunction;\n'
+ added_part += '\t value \t uniform 0;\n'
dict_lines.insert(start_index, added_part)
-
-
- ###################### Top BC ##############################
-
- start_index = foam.find_keyword_line(dict_lines, "top") + 2
- added_part = ""
- added_part += "\t type \t {};\n".format(top_BC_type)
-
+
+ # Top BC ##############################
+
+ start_index = foam.find_keyword_line(dict_lines, 'top') + 2
+ added_part = ''
+ added_part += f'\t type \t {top_BC_type};\n'
+
dict_lines.insert(start_index, added_part)
-
- ###################### Front BC ##############################
-
- start_index = foam.find_keyword_line(dict_lines, "front") + 2
- added_part = ""
- added_part += "\t type \t {};\n".format(sides_BC_type)
-
+
+ # Front BC ##############################
+
+ start_index = foam.find_keyword_line(dict_lines, 'front') + 2
+ added_part = ''
+ added_part += f'\t type \t {sides_BC_type};\n'
+
dict_lines.insert(start_index, added_part)
-
- ###################### Back BC ################################
-
- start_index = foam.find_keyword_line(dict_lines, "back") + 2
- added_part = ""
- added_part += "\t type \t {};\n".format(sides_BC_type)
-
+
+ # Back BC ################################
+
+ start_index = foam.find_keyword_line(dict_lines, 'back') + 2
+ added_part = ''
+ added_part += f'\t type \t {sides_BC_type};\n'
+
dict_lines.insert(start_index, added_part)
-
-
- #Write edited dict to file
- write_file_name = case_path + "/0/nut"
-
- if os.path.exists(write_file_name):
- os.remove(write_file_name)
-
- output_file = open(write_file_name, "w+")
+
+ # Write edited dict to file
+ write_file_name = case_path + '/0/nut'
+
+ if os.path.exists(write_file_name): # noqa: PTH110
+ os.remove(write_file_name) # noqa: PTH107
+
+ output_file = open(write_file_name, 'w+') # noqa: PLW1514, PTH123, SIM115
for line in dict_lines:
output_file.write(line)
output_file.close()
-def write_epsilon_file(input_json_path, template_dict_path, case_path):
- #Read JSON data
- with open(input_json_path + "/EmptyDomainCFD.json") as json_file:
- json_data = json.load(json_file)
+def write_epsilon_file(input_json_path, template_dict_path, case_path): # noqa: D103
+ # Read JSON data
+ with open(input_json_path + '/EmptyDomainCFD.json') as json_file: # noqa: PLW1514, PTH123
+ json_data = json.load(json_file)
# Returns JSON object as a dictionary
- boundary_data = json_data["boundaryConditions"]
- wind_data = json_data["windCharacteristics"]
-
-
- sides_BC_type = boundary_data['sidesBoundaryCondition']
- top_BC_type = boundary_data['topBoundaryCondition']
- ground_BC_type = boundary_data['groundBoundaryCondition']
+ boundary_data = json_data['boundaryConditions']
+ wind_data = json_data['windCharacteristics']
+
+ sides_BC_type = boundary_data['sidesBoundaryCondition'] # noqa: N806
+ top_BC_type = boundary_data['topBoundaryCondition'] # noqa: N806
+ ground_BC_type = boundary_data['groundBoundaryCondition'] # noqa: N806
wind_speed = wind_data['referenceWindSpeed']
building_height = wind_data['referenceHeight']
roughness_length = wind_data['aerodynamicRoughnessLength']
-
- #Open the template file (OpenFOAM file) for manipulation
- dict_file = open(template_dict_path + "/epsilonFileTemplate", "r")
+
+ # Open the template file (OpenFOAM file) for manipulation
+ dict_file = open(template_dict_path + '/epsilonFileTemplate') # noqa: PLW1514, PTH123, SIM115
dict_lines = dict_file.readlines()
dict_file.close()
-
-
- #BC and initial condition
- epsilon0 = 0.01
-
- ##################### Internal Field #########################
-
- start_index = foam.find_keyword_line(dict_lines, "internalField")
- dict_lines[start_index] = "internalField uniform {:.4f};\n".format(epsilon0)
-
-
- ###################### Inlet BC ##############################
- #Write uniform
- start_index = foam.find_keyword_line(dict_lines, "inlet") + 2
- added_part = ""
- added_part += "\t type \t atmBoundaryLayerInletEpsilon;\n"
- added_part += "\t Uref \t {:.4f};\n".format(wind_speed)
- added_part += "\t Zref \t {:.4f};\n".format(building_height)
- added_part += "\t zDir \t (0.0 0.0 1.0);\n"
- added_part += "\t flowDir \t (1.0 0.0 0.0);\n"
- added_part += "\t z0 \t uniform {:.4e};\n".format(roughness_length)
- added_part += "\t zGround \t uniform 0.0;\n"
-
+
+ # BC and initial condition
+ epsilon0 = 0.01
+
+ # Internal Field #########################
+
+ start_index = foam.find_keyword_line(dict_lines, 'internalField')
+ dict_lines[start_index] = f'internalField uniform {epsilon0:.4f};\n'
+
+ # Inlet BC ##############################
+ # Write uniform
+ start_index = foam.find_keyword_line(dict_lines, 'inlet') + 2
+ added_part = ''
+ added_part += '\t type \t atmBoundaryLayerInletEpsilon;\n'
+ added_part += f'\t Uref \t {wind_speed:.4f};\n'
+ added_part += f'\t Zref \t {building_height:.4f};\n'
+ added_part += '\t zDir \t (0.0 0.0 1.0);\n'
+ added_part += '\t flowDir \t (1.0 0.0 0.0);\n'
+ added_part += f'\t z0 \t uniform {roughness_length:.4e};\n'
+ added_part += '\t zGround \t uniform 0.0;\n'
+
dict_lines.insert(start_index, added_part)
- ###################### Outlet BC ##############################
-
- start_index = foam.find_keyword_line(dict_lines, "outlet") + 2
- added_part = ""
- added_part += "\t type \t inletOutlet;\n"
- added_part += "\t inletValue \t uniform {:.4f};\n".format(epsilon0)
- added_part += "\t value \t uniform {:.4f};\n".format(epsilon0)
-
+ # Outlet BC ##############################
+
+ start_index = foam.find_keyword_line(dict_lines, 'outlet') + 2
+ added_part = ''
+ added_part += '\t type \t inletOutlet;\n'
+ added_part += f'\t inletValue \t uniform {epsilon0:.4f};\n'
+ added_part += f'\t value \t uniform {epsilon0:.4f};\n'
+
dict_lines.insert(start_index, added_part)
-
- ###################### Ground BC ##############################
-
- start_index = foam.find_keyword_line(dict_lines, "ground") + 2
-
- if ground_BC_type == "noSlip":
- added_part = ""
- added_part += "\t type \t zeroGradient;\n"
-
- if ground_BC_type == "roughWallFunction":
- added_part = ""
- added_part += "\t type \t epsilonWallFunction;\n"
- added_part += "\t Cmu \t {:.4f};\n".format(0.09)
- added_part += "\t kappa \t {:.4f};\n".format(0.41)
- added_part += "\t E \t {:.4f};\n".format(9.8)
- added_part += "\t value \t uniform {:.4f};\n".format(epsilon0)
-
- #Note: Should be replaced with smooth wall function for epsilon,
+
+ # Ground BC ##############################
+
+ start_index = foam.find_keyword_line(dict_lines, 'ground') + 2
+
+ if ground_BC_type == 'noSlip':
+ added_part = ''
+ added_part += '\t type \t zeroGradient;\n'
+
+ if ground_BC_type == 'roughWallFunction':
+ added_part = ''
+ added_part += '\t type \t epsilonWallFunction;\n'
+ added_part += f'\t Cmu \t {0.09:.4f};\n'
+ added_part += f'\t kappa \t {0.41:.4f};\n'
+ added_part += f'\t E \t {9.8:.4f};\n'
+ added_part += f'\t value \t uniform {epsilon0:.4f};\n'
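+ # Cmu, kappa and E above are the standard epsilonWallFunction constants
+ # (0.09, 0.41 and 9.8, respectively).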
+
+ # Note: Should be replaced with smooth wall function for epsilon,
 # for now it is the same as the rough wall function.
- if ground_BC_type == "smoothWallFunction":
- added_part = ""
- added_part += "\t type \t epsilonWallFunction;\n"
- added_part += "\t Cmu \t {:.4f};\n".format(0.09)
- added_part += "\t kappa \t {:.4f};\n".format(0.41)
- added_part += "\t E \t {:.4f};\n".format(9.8)
- added_part += "\t value \t uniform {:.4f};\n".format(epsilon0)
+ if ground_BC_type == 'smoothWallFunction':
+ added_part = ''
+ added_part += '\t type \t epsilonWallFunction;\n'
+ added_part += f'\t Cmu \t {0.09:.4f};\n'
+ added_part += f'\t kappa \t {0.41:.4f};\n'
+ added_part += f'\t E \t {9.8:.4f};\n'
+ added_part += f'\t value \t uniform {epsilon0:.4f};\n'
dict_lines.insert(start_index, added_part)
-
-
- ###################### Top BC ##############################
-
- start_index = foam.find_keyword_line(dict_lines, "top") + 2
- added_part = ""
- added_part += "\t type \t {};\n".format(top_BC_type)
-
+
+ # Top BC ##############################
+
+ start_index = foam.find_keyword_line(dict_lines, 'top') + 2
+ added_part = ''
+ added_part += f'\t type \t {top_BC_type};\n'
+
dict_lines.insert(start_index, added_part)
-
- ###################### Front BC ##############################
-
- start_index = foam.find_keyword_line(dict_lines, "front") + 2
- added_part = ""
- added_part += "\t type \t {};\n".format(sides_BC_type)
-
+
+ # Front BC ##############################
+
+ start_index = foam.find_keyword_line(dict_lines, 'front') + 2
+ added_part = ''
+ added_part += f'\t type \t {sides_BC_type};\n'
+
dict_lines.insert(start_index, added_part)
-
- ###################### Back BC ################################
-
- start_index = foam.find_keyword_line(dict_lines, "back") + 2
- added_part = ""
- added_part += "\t type \t {};\n".format(sides_BC_type)
-
+
+ # Back BC ################################
+
+ start_index = foam.find_keyword_line(dict_lines, 'back') + 2
+ added_part = ''
+ added_part += f'\t type \t {sides_BC_type};\n'
+
dict_lines.insert(start_index, added_part)
-
- #Write edited dict to file
- write_file_name = case_path + "/0/epsilon"
-
- if os.path.exists(write_file_name):
- os.remove(write_file_name)
-
- output_file = open(write_file_name, "w+")
+
+ # Write edited dict to file
+ write_file_name = case_path + '/0/epsilon'
+
+ if os.path.exists(write_file_name): # noqa: PTH110
+ os.remove(write_file_name) # noqa: PTH107
+
+ output_file = open(write_file_name, 'w+') # noqa: PLW1514, PTH123, SIM115
for line in dict_lines:
output_file.write(line)
output_file.close()
-def write_k_file(input_json_path, template_dict_path, case_path):
- #Read JSON data
- with open(input_json_path + "/EmptyDomainCFD.json") as json_file:
- json_data = json.load(json_file)
+def write_k_file(input_json_path, template_dict_path, case_path): # noqa: D103
+ # Read JSON data
+ with open(input_json_path + '/EmptyDomainCFD.json') as json_file: # noqa: PLW1514, PTH123
+ json_data = json.load(json_file)
# Returns JSON object as a dictionary
- boundary_data = json_data["boundaryConditions"]
- wind_data = json_data["windCharacteristics"]
-
-
- sides_BC_type = boundary_data['sidesBoundaryCondition']
- top_BC_type = boundary_data['topBoundaryCondition']
- ground_BC_type = boundary_data['groundBoundaryCondition']
+ boundary_data = json_data['boundaryConditions']
+ wind_data = json_data['windCharacteristics']
+
+ sides_BC_type = boundary_data['sidesBoundaryCondition'] # noqa: N806
+ top_BC_type = boundary_data['topBoundaryCondition'] # noqa: N806
+ ground_BC_type = boundary_data['groundBoundaryCondition'] # noqa: N806
wind_speed = wind_data['referenceWindSpeed']
building_height = wind_data['referenceHeight']
roughness_length = wind_data['aerodynamicRoughnessLength']
-
- #Open the template file (OpenFOAM file) for manipulation
- dict_file = open(template_dict_path + "/kFileTemplate", "r")
+
+ # Open the template file (OpenFOAM file) for manipulation
+ dict_file = open(template_dict_path + '/kFileTemplate') # noqa: PLW1514, PTH123, SIM115
dict_lines = dict_file.readlines()
dict_file.close()
-
-
- #BC and initial condition (you may need to scale to model scale)
+
+ # BC and initial condition (you may need to scale to model scale)
# k0 = 1.3 #not in model scale
-
- I = 0.1
- k0 = 1.5*(I*wind_speed)**2
-
- ##################### Internal Field #########################
-
- start_index = foam.find_keyword_line(dict_lines, "internalField")
- dict_lines[start_index] = "internalField \t uniform {:.4f};\n".format(k0)
-
-
- ###################### Inlet BC ##############################
- #Write uniform
- start_index = foam.find_keyword_line(dict_lines, "inlet") + 2
- added_part = ""
- added_part += "\t type \t atmBoundaryLayerInletK;\n"
- added_part += "\t Uref \t {:.4f};\n".format(wind_speed)
- added_part += "\t Zref \t {:.4f};\n".format(building_height)
- added_part += "\t zDir \t (0.0 0.0 1.0);\n"
- added_part += "\t flowDir \t (1.0 0.0 0.0);\n"
- added_part += "\t z0 \t uniform {:.4e};\n".format(roughness_length)
- added_part += "\t zGround \t uniform 0.0;\n"
-
+
+ I = 0.1 # noqa: N806, E741
+ k0 = 1.5 * (I * wind_speed) ** 2
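+ # Isotropic-turbulence estimate k = 1.5 * (I * U)^2 with an assumed
+ # turbulence intensity I of 10% at the reference wind speed.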
+
+ # Internal Field #########################
+
+ start_index = foam.find_keyword_line(dict_lines, 'internalField')
+ dict_lines[start_index] = f'internalField \t uniform {k0:.4f};\n'
+
+ # Inlet BC ##############################
+ # Write uniform
+ start_index = foam.find_keyword_line(dict_lines, 'inlet') + 2
+ added_part = ''
+ added_part += '\t type \t atmBoundaryLayerInletK;\n'
+ added_part += f'\t Uref \t {wind_speed:.4f};\n'
+ added_part += f'\t Zref \t {building_height:.4f};\n'
+ added_part += '\t zDir \t (0.0 0.0 1.0);\n'
+ added_part += '\t flowDir \t (1.0 0.0 0.0);\n'
+ added_part += f'\t z0 \t uniform {roughness_length:.4e};\n'
+ added_part += '\t zGround \t uniform 0.0;\n'
+
dict_lines.insert(start_index, added_part)
- ###################### Outlet BC ##############################
-
- start_index = foam.find_keyword_line(dict_lines, "outlet") + 2
- added_part = ""
- added_part += "\t type \t inletOutlet;\n"
- added_part += "\t inletValue \t uniform {:.4f};\n".format(k0)
- added_part += "\t value \t uniform {:.4f};\n".format(k0)
-
+ # Outlet BC ##############################
+
+ start_index = foam.find_keyword_line(dict_lines, 'outlet') + 2
+ added_part = ''
+ added_part += '\t type \t inletOutlet;\n'
+ added_part += f'\t inletValue \t uniform {k0:.4f};\n'
+ added_part += f'\t value \t uniform {k0:.4f};\n'
+
dict_lines.insert(start_index, added_part)
-
- ###################### Ground BC ##############################
-
- start_index = foam.find_keyword_line(dict_lines, "ground") + 2
-
- if ground_BC_type == "noSlip":
- added_part = ""
- added_part += "\t type \t zeroGradient;\n"
-
- if ground_BC_type == "smoothWallFunction":
- added_part = ""
- added_part += "\t type \t kqRWallFunction;\n"
- added_part += "\t value \t uniform {:.4f};\n".format(0.0)
-
- if ground_BC_type == "roughWallFunction":
- added_part = ""
- added_part += "\t type \t kqRWallFunction;\n"
- added_part += "\t value \t uniform {:.4f};\n".format(0.0)
+
+ # Ground BC ##############################
+
+ start_index = foam.find_keyword_line(dict_lines, 'ground') + 2
+
+ if ground_BC_type == 'noSlip':
+ added_part = ''
+ added_part += '\t type \t zeroGradient;\n'
+
+ if ground_BC_type == 'smoothWallFunction':
+ added_part = ''
+ added_part += '\t type \t kqRWallFunction;\n'
+ added_part += f'\t value \t uniform {0.0:.4f};\n'
+
+ if ground_BC_type == 'roughWallFunction':
+ added_part = ''
+ added_part += '\t type \t kqRWallFunction;\n'
+ added_part += f'\t value \t uniform {0.0:.4f};\n'
dict_lines.insert(start_index, added_part)
-
-
- ###################### Top BC ##############################
-
- start_index = foam.find_keyword_line(dict_lines, "top") + 2
- added_part = ""
- added_part += "\t type \t {};\n".format(top_BC_type)
-
+
+ # Top BC ##############################
+
+ start_index = foam.find_keyword_line(dict_lines, 'top') + 2
+ added_part = ''
+ added_part += f'\t type \t {top_BC_type};\n'
+
dict_lines.insert(start_index, added_part)
-
- ###################### Front BC ##############################
-
- start_index = foam.find_keyword_line(dict_lines, "front") + 2
- added_part = ""
- added_part += "\t type \t {};\n".format(sides_BC_type)
-
+
+ # Front BC ##############################
+
+ start_index = foam.find_keyword_line(dict_lines, 'front') + 2
+ added_part = ''
+ added_part += f'\t type \t {sides_BC_type};\n'
+
dict_lines.insert(start_index, added_part)
-
- ###################### Back BC ################################
-
- start_index = foam.find_keyword_line(dict_lines, "back") + 2
- added_part = ""
- added_part += "\t type \t {};\n".format(sides_BC_type)
-
+
+ # Back BC ################################
+
+ start_index = foam.find_keyword_line(dict_lines, 'back') + 2
+ added_part = ''
+ added_part += f'\t type \t {sides_BC_type};\n'
+
dict_lines.insert(start_index, added_part)
-
- #Write edited dict to file
- write_file_name = case_path + "/0/k"
-
- if os.path.exists(write_file_name):
- os.remove(write_file_name)
-
- output_file = open(write_file_name, "w+")
+
+ # Write edited dict to file
+ write_file_name = case_path + '/0/k'
+
+ if os.path.exists(write_file_name): # noqa: PTH110
+ os.remove(write_file_name) # noqa: PTH107
+
+ output_file = open(write_file_name, 'w+') # noqa: PLW1514, PTH123, SIM115
for line in dict_lines:
output_file.write(line)
output_file.close()
-
-
-def write_controlDict_file(input_json_path, template_dict_path, case_path):
- #Read JSON data
- with open(input_json_path + "/EmptyDomainCFD.json") as json_file:
- json_data = json.load(json_file)
+
+def write_controlDict_file(input_json_path, template_dict_path, case_path): # noqa: N802, D103
+ # Read JSON data
+ with open(input_json_path + '/EmptyDomainCFD.json') as json_file: # noqa: PLW1514, PTH123
+ json_data = json.load(json_file)
# Returns JSON object as a dictionary
- ns_data = json_data["numericalSetup"]
- rm_data = json_data["resultMonitoring"]
-
+ ns_data = json_data['numericalSetup']
+ rm_data = json_data['resultMonitoring']
+
solver_type = ns_data['solverType']
duration = ns_data['duration']
time_step = ns_data['timeStep']
max_courant_number = ns_data['maxCourantNumber']
adjust_time_step = ns_data['adjustTimeStep']
-
+
monitor_wind_profiles = rm_data['monitorWindProfile']
monitor_vtk_planes = rm_data['monitorVTKPlane']
wind_profiles = rm_data['windProfiles']
vtk_planes = rm_data['vtkPlanes']
-
- # Need to change this for
- max_delta_t = 10*time_step
-
+ # Cap the adjustable time step at 10x the base time step (may need revisiting)
+ max_delta_t = 10 * time_step
+
write_interval = 1000
- purge_write = 3
-
- #Open the template file (OpenFOAM file) for manipulation
- dict_file = open(template_dict_path + "/controlDictTemplate", "r")
+ purge_write = 3
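+ # purgeWrite keeps only the most recent 3 written time directories; the write
+ # interval is interpreted in simulated seconds for pimpleFoam (adjustable dt)
+ # and in time steps otherwise (see the writeControl/writeInterval branches below).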
+
+ # Open the template file (OpenFOAM file) for manipulation
+ dict_file = open(template_dict_path + '/controlDictTemplate') # noqa: PLW1514, PTH123, SIM115
dict_lines = dict_file.readlines()
dict_file.close()
-
- #Write application type
- start_index = foam.find_keyword_line(dict_lines, "application")
- dict_lines[start_index] = "application \t{};\n".format(solver_type)
-
- #Write end time
- start_index = foam.find_keyword_line(dict_lines, "endTime")
- dict_lines[start_index] = "endTime \t{:.6f};\n".format(duration)
-
- #Write time step time
- start_index = foam.find_keyword_line(dict_lines, "deltaT")
- dict_lines[start_index] = "deltaT \t\t{:.6f};\n".format(time_step)
-
- #Write writeControl
- start_index = foam.find_keyword_line(dict_lines, "writeControl")
- if solver_type=="pimpleFoam":
- dict_lines[start_index] = "writeControl \t{};\n".format("adjustableRunTime")
+
+ # Write application type
+ start_index = foam.find_keyword_line(dict_lines, 'application')
+ dict_lines[start_index] = f'application \t{solver_type};\n'
+
+ # Write end time
+ start_index = foam.find_keyword_line(dict_lines, 'endTime')
+ dict_lines[start_index] = f'endTime \t{duration:.6f};\n'
+
+ # Write time step
+ start_index = foam.find_keyword_line(dict_lines, 'deltaT')
+ dict_lines[start_index] = f'deltaT \t\t{time_step:.6f};\n'
+
+ # Write writeControl
+ start_index = foam.find_keyword_line(dict_lines, 'writeControl')
+ if solver_type == 'pimpleFoam':
+ dict_lines[start_index] = 'writeControl \t{};\n'.format('adjustableRunTime')
else:
- dict_lines[start_index] = "writeControl \t\t{};\n".format("timeStep")
-
- #Write adjustable time step or not
- start_index = foam.find_keyword_line(dict_lines, "adjustTimeStep")
- dict_lines[start_index] = "adjustTimeStep \t\t{};\n".format("yes" if adjust_time_step else "no")
-
- #Write writeInterval
- start_index = foam.find_keyword_line(dict_lines, "writeInterval")
- if solver_type=="pimpleFoam":
- dict_lines[start_index] = "writeInterval \t{:.6f};\n".format(write_interval*time_step)
+ dict_lines[start_index] = 'writeControl \t\t{};\n'.format('timeStep')
+
+ # Write adjustable time step or not
+ start_index = foam.find_keyword_line(dict_lines, 'adjustTimeStep')
+ dict_lines[start_index] = 'adjustTimeStep \t\t{};\n'.format(
+ 'yes' if adjust_time_step else 'no'
+ )
+
+ # Write writeInterval
+ start_index = foam.find_keyword_line(dict_lines, 'writeInterval')
+ if solver_type == 'pimpleFoam':
+ dict_lines[start_index] = (
+ f'writeInterval \t{write_interval * time_step:.6f};\n'
+ )
else:
- dict_lines[start_index] = "writeInterval \t{};\n".format(write_interval)
-
- #Write maxCo
- start_index = foam.find_keyword_line(dict_lines, "maxCo")
- dict_lines[start_index] = "maxCo \t{:.2f};\n".format(max_courant_number)
-
- #Write maximum time step
- start_index = foam.find_keyword_line(dict_lines, "maxDeltaT")
- dict_lines[start_index] = "maxDeltaT \t{:.6f};\n".format(max_delta_t)
-
-
- #Write purge write interval
- start_index = foam.find_keyword_line(dict_lines, "purgeWrite")
- dict_lines[start_index] = "purgeWrite \t{};\n".format(purge_write)
-
- ########################### Function Objects ##############################
-
- #Find function object location
- start_index = foam.find_keyword_line(dict_lines, "functions") + 2
-
- #Write wind profile monitoring functionObjects
+ dict_lines[start_index] = f'writeInterval \t{write_interval};\n'
+
+ # Write maxCo
+ start_index = foam.find_keyword_line(dict_lines, 'maxCo')
+ dict_lines[start_index] = f'maxCo \t{max_courant_number:.2f};\n'
+
+ # Write maximum time step
+ start_index = foam.find_keyword_line(dict_lines, 'maxDeltaT')
+ dict_lines[start_index] = f'maxDeltaT \t{max_delta_t:.6f};\n'
+
+ # Write purge write interval
+ start_index = foam.find_keyword_line(dict_lines, 'purgeWrite')
+ dict_lines[start_index] = f'purgeWrite \t{purge_write};\n'
+
+ # Function Objects ##############################
+
+ # Find function object location
+ start_index = foam.find_keyword_line(dict_lines, 'functions') + 2
+
+ # Write wind profile monitoring functionObjects
if monitor_wind_profiles:
- added_part = ""
+ added_part = ''
for prof in wind_profiles:
- added_part += " #includeFunc {}\n".format(prof["name"])
+ added_part += ' #includeFunc {}\n'.format(prof['name'])
dict_lines.insert(start_index, added_part)
-
- #Write VTK sampling sampling points
+
+ # Write VTK sampling points
if monitor_vtk_planes:
- added_part = ""
+ added_part = ''
for pln in vtk_planes:
- added_part += " #includeFunc {}\n".format(pln["name"])
+ added_part += ' #includeFunc {}\n'.format(pln['name'])
dict_lines.insert(start_index, added_part)
-
-
- #Write edited dict to file
- write_file_name = case_path + "/system/controlDict"
-
- if os.path.exists(write_file_name):
- os.remove(write_file_name)
-
- output_file = open(write_file_name, "w+")
+
+ # Write edited dict to file
+ write_file_name = case_path + '/system/controlDict'
+
+ if os.path.exists(write_file_name): # noqa: PTH110
+ os.remove(write_file_name) # noqa: PTH107
+
+ output_file = open(write_file_name, 'w+') # noqa: PLW1514, PTH123, SIM115
for line in dict_lines:
output_file.write(line)
output_file.close()
-
-def write_fvSolution_file(input_json_path, template_dict_path, case_path):
- #Read JSON data
- with open(input_json_path + "/EmptyDomainCFD.json") as json_file:
- json_data = json.load(json_file)
+
+def write_fvSolution_file(input_json_path, template_dict_path, case_path): # noqa: N802, D103
+ # Read JSON data
+ with open(input_json_path + '/EmptyDomainCFD.json') as json_file: # noqa: PLW1514, PTH123
+ json_data = json.load(json_file)
# Returns JSON object as a dictionary
- ns_data = json_data["numericalSetup"]
-
+ ns_data = json_data['numericalSetup']
+
json_file.close()
-
+
num_non_orthogonal_correctors = ns_data['numNonOrthogonalCorrectors']
num_correctors = ns_data['numCorrectors']
num_outer_correctors = ns_data['numOuterCorrectors']
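+ # The same corrector counts are written into the SIMPLE, PIMPLE and PISO
+ # sub-dictionaries below; the solver selected in controlDict reads only its own block.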
-
- #Open the template file (OpenFOAM file) for manipulation
- dict_file = open(template_dict_path + "/fvSolutionTemplate", "r")
+
+ # Open the template file (OpenFOAM file) for manipulation
+ dict_file = open(template_dict_path + '/fvSolutionTemplate') # noqa: PLW1514, PTH123, SIM115
dict_lines = dict_file.readlines()
dict_file.close()
-
- #Write simpleFoam options
- start_index = foam.find_keyword_line(dict_lines, "SIMPLE") + 2
- added_part = ""
- added_part += " nNonOrthogonalCorrectors \t{};\n".format(num_non_orthogonal_correctors)
+ # Write simpleFoam options
+ start_index = foam.find_keyword_line(dict_lines, 'SIMPLE') + 2
+ added_part = ''
+ added_part += (
+ f' nNonOrthogonalCorrectors \t{num_non_orthogonal_correctors};\n'
+ )
dict_lines.insert(start_index, added_part)
-
- #Write pimpleFoam options
- start_index = foam.find_keyword_line(dict_lines, "PIMPLE") + 2
- added_part = ""
- added_part += " nOuterCorrectors \t{};\n".format(num_outer_correctors)
- added_part += " nCorrectors \t{};\n".format(num_correctors)
- added_part += " nNonOrthogonalCorrectors \t{};\n".format(num_non_orthogonal_correctors)
+ # Write pimpleFoam options
+ start_index = foam.find_keyword_line(dict_lines, 'PIMPLE') + 2
+ added_part = ''
+ added_part += f' nOuterCorrectors \t{num_outer_correctors};\n'
+ added_part += f' nCorrectors \t{num_correctors};\n'
+ added_part += (
+ f' nNonOrthogonalCorrectors \t{num_non_orthogonal_correctors};\n'
+ )
dict_lines.insert(start_index, added_part)
-
- #Write pisoFoam options
- start_index = foam.find_keyword_line(dict_lines, "PISO") + 2
- added_part = ""
- added_part += " nCorrectors \t{};\n".format(num_correctors)
- added_part += " nNonOrthogonalCorrectors \t{};\n".format(num_non_orthogonal_correctors)
+ # Write pisoFoam options
+ start_index = foam.find_keyword_line(dict_lines, 'PISO') + 2
+ added_part = ''
+ added_part += f' nCorrectors \t{num_correctors};\n'
+ added_part += (
+ f' nNonOrthogonalCorrectors \t{num_non_orthogonal_correctors};\n'
+ )
dict_lines.insert(start_index, added_part)
-
-
- #Write edited dict to file
- write_file_name = case_path + "/system/fvSolution"
-
- if os.path.exists(write_file_name):
- os.remove(write_file_name)
-
- output_file = open(write_file_name, "w+")
+
+ # Write edited dict to file
+ write_file_name = case_path + '/system/fvSolution'
+
+ if os.path.exists(write_file_name): # noqa: PTH110
+ os.remove(write_file_name) # noqa: PTH107
+
+ output_file = open(write_file_name, 'w+') # noqa: PLW1514, PTH123, SIM115
for line in dict_lines:
output_file.write(line)
- output_file.close()
-
+ output_file.close()
-def write_pressure_probes_file(input_json_path, template_dict_path, case_path):
- #Read JSON data
- with open(input_json_path + "/EmptyDomainCFD.json") as json_file:
+def write_pressure_probes_file(input_json_path, template_dict_path, case_path): # noqa: D103
+ # Read JSON data
+ with open(input_json_path + '/EmptyDomainCFD.json') as json_file: # noqa: PLW1514, PTH123
json_data = json.load(json_file)
# Returns JSON object as a dictionary
- rm_data = json_data["resultMonitoring"]
+ rm_data = json_data['resultMonitoring']
pressure_sampling_points = rm_data['pressureSamplingPoints']
pressure_write_interval = rm_data['pressureWriteInterval']
- #Open the template file (OpenFOAM file) for manipulation
- dict_file = open(template_dict_path + "/probeTemplate", "r")
+ # Open the template file (OpenFOAM file) for manipulation
+ dict_file = open(template_dict_path + '/probeTemplate') # noqa: PLW1514, PTH123, SIM115
dict_lines = dict_file.readlines()
dict_file.close()
-
-
- #Write writeInterval
- start_index = foam.find_keyword_line(dict_lines, "writeInterval")
- dict_lines[start_index] = "writeInterval \t{};\n".format(pressure_write_interval)
-
- #Write fields to be motored
- start_index = foam.find_keyword_line(dict_lines, "fields")
- dict_lines[start_index] = "fields \t\t(p);\n"
-
- start_index = foam.find_keyword_line(dict_lines, "probeLocations") + 2
-
- added_part = ""
-
+
+ # Write writeInterval
+ start_index = foam.find_keyword_line(dict_lines, 'writeInterval')
+ dict_lines[start_index] = f'writeInterval \t{pressure_write_interval};\n'
+
+ # Write fields to be motored
+ start_index = foam.find_keyword_line(dict_lines, 'fields')
+ dict_lines[start_index] = 'fields \t\t(p);\n'
+
+ start_index = foam.find_keyword_line(dict_lines, 'probeLocations') + 2
+
+ added_part = ''
+
for i in range(len(pressure_sampling_points)):
- added_part += " ({:.6f} {:.6f} {:.6f})\n".format(pressure_sampling_points[i][0], pressure_sampling_points[i][1], pressure_sampling_points[i][2])
-
+ added_part += f' ({pressure_sampling_points[i][0]:.6f} {pressure_sampling_points[i][1]:.6f} {pressure_sampling_points[i][2]:.6f})\n'
+
dict_lines.insert(start_index, added_part)
- #Write edited dict to file
- write_file_name = case_path + "/system/pressureSamplingPoints"
-
- if os.path.exists(write_file_name):
- os.remove(write_file_name)
-
- output_file = open(write_file_name, "w+")
+ # Write edited dict to file
+ write_file_name = case_path + '/system/pressureSamplingPoints'
+
+ if os.path.exists(write_file_name): # noqa: PTH110
+ os.remove(write_file_name) # noqa: PTH107
+
+ output_file = open(write_file_name, 'w+') # noqa: PLW1514, PTH123, SIM115
for line in dict_lines:
output_file.write(line)
output_file.close()
-
-
-
-def write_wind_profiles_file(input_json_path, template_dict_path, case_path):
- #Read JSON data
- with open(input_json_path + "/EmptyDomainCFD.json") as json_file:
- json_data = json.load(json_file)
+
+def write_wind_profiles_file(input_json_path, template_dict_path, case_path): # noqa: C901, D103
+ # Read JSON data
+ with open(input_json_path + '/EmptyDomainCFD.json') as json_file: # noqa: PLW1514, PTH123
+ json_data = json.load(json_file)
# Returns JSON object as a dictionary
- rm_data = json_data["resultMonitoring"]
+ rm_data = json_data['resultMonitoring']
- ns_data = json_data["numericalSetup"]
+ ns_data = json_data['numericalSetup']
solver_type = ns_data['solverType']
time_step = ns_data['timeStep']
@@ -1092,516 +1061,506 @@ def write_wind_profiles_file(input_json_path, template_dict_path, case_path):
write_interval = rm_data['profileWriteInterval']
start_time = rm_data['profileStartTime']
- if rm_data['monitorWindProfile'] == False:
- return
-
- if len(wind_profiles)==0:
+ if rm_data['monitorWindProfile'] == False: # noqa: E712
+ return
+
+ if len(wind_profiles) == 0:
return
- #Write dict files for wind profiles
+ # Write dict files for wind profiles
for prof in wind_profiles:
- #Open the template file (OpenFOAM file) for manipulation
- dict_file = open(template_dict_path + "/probeTemplate", "r")
+ # Open the template file (OpenFOAM file) for manipulation
+ dict_file = open(template_dict_path + '/probeTemplate') # noqa: PLW1514, PTH123, SIM115
dict_lines = dict_file.readlines()
dict_file.close()
-
- #Write writeControl
- start_index = foam.find_keyword_line(dict_lines, "writeControl")
- if solver_type=="pimpleFoam":
- dict_lines[start_index] = " writeControl \t{};\n".format("adjustableRunTime")
- else:
- dict_lines[start_index] = " writeControl \t{};\n".format("timeStep")
- #Write writeInterval
- start_index = foam.find_keyword_line(dict_lines, "writeInterval")
- if solver_type=="pimpleFoam":
- dict_lines[start_index] = " writeInterval \t{:.6f};\n".format(write_interval*time_step)
+ # Write writeControl
+ start_index = foam.find_keyword_line(dict_lines, 'writeControl')
+ if solver_type == 'pimpleFoam':
+ dict_lines[start_index] = ' writeControl \t{};\n'.format(
+ 'adjustableRunTime'
+ )
+ else:
+ dict_lines[start_index] = ' writeControl \t{};\n'.format('timeStep')
+
+ # Write writeInterval
+ start_index = foam.find_keyword_line(dict_lines, 'writeInterval')
+ if solver_type == 'pimpleFoam':
+ dict_lines[start_index] = (
+ f' writeInterval \t{write_interval * time_step:.6f};\n'
+ )
else:
- dict_lines[start_index] = " writeInterval \t{};\n".format(write_interval)
-
- #Write start time for the probes
- start_index = foam.find_keyword_line(dict_lines, "timeStart")
- dict_lines[start_index] = " timeStart \t\t{:.6f};\n".format(start_time)
-
- #Write name of the profile
- name = prof["name"]
- start_index = foam.find_keyword_line(dict_lines, "profileName")
- dict_lines[start_index] = "{}\n".format(name)
-
- #Write field type
- field_type = prof["field"]
- start_index = foam.find_keyword_line(dict_lines, "fields")
-
- if field_type=="Velocity":
- dict_lines[start_index] = " fields \t\t({});\n".format("U")
- if field_type=="Pressure":
- dict_lines[start_index] = " fields \t\t({});\n".format("p")
-
- #Write point coordinates
- start_x = prof["startX"]
- start_y = prof["startY"]
- start_z = prof["startZ"]
-
- end_x = prof["endX"]
- end_y = prof["endY"]
- end_z = prof["endZ"]
- n_points = prof["nPoints"]
-
- dx = (end_x - start_x)/n_points
- dy = (end_y - start_y)/n_points
- dz = (end_z - start_z)/n_points
-
- #Write locations of the probes
- start_index = foam.find_keyword_line(dict_lines, "probeLocations") + 2
- added_part = ""
-
- for pi in range(n_points):
- added_part += " ({:.6f} {:.6f} {:.6f})\n".format(start_x + pi*dx, start_y + pi*dy, start_z + pi*dz)
-
- dict_lines.insert(start_index, added_part)
-
- #Write edited dict to file
- write_file_name = case_path + "/system/" + name
-
- if os.path.exists(write_file_name):
- os.remove(write_file_name)
-
- output_file = open(write_file_name, "w+")
+ dict_lines[start_index] = f' writeInterval \t{write_interval};\n'
+
+ # Write start time for the probes
+ start_index = foam.find_keyword_line(dict_lines, 'timeStart')
+ dict_lines[start_index] = f' timeStart \t\t{start_time:.6f};\n'
+
+ # Write name of the profile
+ name = prof['name']
+ start_index = foam.find_keyword_line(dict_lines, 'profileName')
+ dict_lines[start_index] = f'{name}\n'
+
+ # Write field type
+ field_type = prof['field']
+ start_index = foam.find_keyword_line(dict_lines, 'fields')
+
+ if field_type == 'Velocity':
+ dict_lines[start_index] = ' fields \t\t({});\n'.format('U')
+ if field_type == 'Pressure':
+ dict_lines[start_index] = ' fields \t\t({});\n'.format('p')
+
+ # Write point coordinates
+ start_x = prof['startX']
+ start_y = prof['startY']
+ start_z = prof['startZ']
+
+ end_x = prof['endX']
+ end_y = prof['endY']
+ end_z = prof['endZ']
+ n_points = prof['nPoints']
+
+ dx = (end_x - start_x) / n_points
+ dy = (end_y - start_y) / n_points
+ dz = (end_z - start_z) / n_points
+
+ # Write locations of the probes
+ start_index = foam.find_keyword_line(dict_lines, 'probeLocations') + 2
+ added_part = ''
+
+ for pi in range(n_points):
+ added_part += f' ({start_x + pi * dx:.6f} {start_y + pi * dy:.6f} {start_z + pi * dz:.6f})\n'
+
+ dict_lines.insert(start_index, added_part)
+
+ # Write edited dict to file
+ write_file_name = case_path + '/system/' + name
+
+ if os.path.exists(write_file_name): # noqa: PTH110
+ os.remove(write_file_name) # noqa: PTH107
+
+ output_file = open(write_file_name, 'w+') # noqa: PLW1514, PTH123, SIM115
for line in dict_lines:
output_file.write(line)
output_file.close()
-
-def write_vtk_plane_file(input_json_path, template_dict_path, case_path):
- #Read JSON data
- with open(input_json_path + "/EmptyDomainCFD.json") as json_file:
- json_data = json.load(json_file)
+
+def write_vtk_plane_file(input_json_path, template_dict_path, case_path): # noqa: C901, D103
+ # Read JSON data
+ with open(input_json_path + '/EmptyDomainCFD.json') as json_file: # noqa: PLW1514, PTH123
+ json_data = json.load(json_file)
# Returns JSON object as a dictionary
- rm_data = json_data["resultMonitoring"]
- ns_data = json_data["numericalSetup"]
+ rm_data = json_data['resultMonitoring']
+ ns_data = json_data['numericalSetup']
solver_type = ns_data['solverType']
time_step = ns_data['timeStep']
-
vtk_planes = rm_data['vtkPlanes']
write_interval = rm_data['vtkWriteInterval']
- if rm_data['monitorVTKPlane'] == False:
- return
-
- if len(vtk_planes)==0:
+ if rm_data['monitorVTKPlane'] == False: # noqa: E712
return
- #Write dict files for wind profiles
+ if len(vtk_planes) == 0:
+ return
+
+ # Write dict files for wind profiles
for pln in vtk_planes:
- #Open the template file (OpenFOAM file) for manipulation
- dict_file = open(template_dict_path + "/vtkPlaneTemplate", "r")
+ # Open the template file (OpenFOAM file) for manipulation
+ dict_file = open(template_dict_path + '/vtkPlaneTemplate') # noqa: PLW1514, PTH123, SIM115
dict_lines = dict_file.readlines()
dict_file.close()
-
- #Write writeControl
- start_index = foam.find_keyword_line(dict_lines, "writeControl")
- if solver_type=="pimpleFoam":
- dict_lines[start_index] = " writeControl \t{};\n".format("adjustableRunTime")
- else:
- dict_lines[start_index] = " writeControl \t{};\n".format("timeStep")
- #Write writeInterval
- start_index = foam.find_keyword_line(dict_lines, "writeInterval")
- if solver_type=="pimpleFoam":
- dict_lines[start_index] = " writeInterval \t{:.6f};\n".format(write_interval*time_step)
+ # Write writeControl
+ start_index = foam.find_keyword_line(dict_lines, 'writeControl')
+ if solver_type == 'pimpleFoam':
+ dict_lines[start_index] = ' writeControl \t{};\n'.format(
+ 'adjustableRunTime'
+ )
+ else:
+ dict_lines[start_index] = ' writeControl \t{};\n'.format('timeStep')
+
+ # Write writeInterval
+ start_index = foam.find_keyword_line(dict_lines, 'writeInterval')
+ if solver_type == 'pimpleFoam':
+ dict_lines[start_index] = (
+ f' writeInterval \t{write_interval * time_step:.6f};\n'
+ )
else:
- dict_lines[start_index] = " writeInterval \t{};\n".format(write_interval)
+ dict_lines[start_index] = f' writeInterval \t{write_interval};\n'
- #Write start and end time for the section
+ # Write start and end time for the section
start_time = pln['startTime']
end_time = pln['endTime']
- start_index = foam.find_keyword_line(dict_lines, "timeStart")
- dict_lines[start_index] = " timeStart \t\t{:.6f};\n".format(start_time)
-
- start_index = foam.find_keyword_line(dict_lines, "timeEnd")
- dict_lines[start_index] = " timeEnd \t\t{:.6f};\n".format(end_time)
-
- #Write name of the profile
- name = pln["name"]
- start_index = foam.find_keyword_line(dict_lines, "planeName")
- dict_lines[start_index] = "{}\n".format(name)
-
- #Write field type
- field_type = pln["field"]
- start_index = foam.find_keyword_line(dict_lines, "fields")
-
- if field_type=="Velocity":
- dict_lines[start_index] = " fields \t\t({});\n".format("U")
- if field_type=="Pressure":
- dict_lines[start_index] = " fields \t\t({});\n".format("p")
-
- #Write normal and point coordinates
- point_x = pln["pointX"]
- point_y = pln["pointY"]
- point_z = pln["pointZ"]
-
- normal_axis = pln["normalAxis"]
-
- start_index = foam.find_keyword_line(dict_lines, "point")
- dict_lines[start_index] = "\t point\t\t({:.6f} {:.6f} {:.6f});\n".format(point_x, point_y, point_z)
-
- start_index = foam.find_keyword_line(dict_lines, "normal")
- if normal_axis=="X":
- dict_lines[start_index] = "\t normal\t\t({} {} {});\n".format(1, 0, 0)
- if normal_axis=="Y":
- dict_lines[start_index] = "\t normal\t\t({} {} {});\n".format(0, 1, 0)
- if normal_axis=="Z":
- dict_lines[start_index] = "\t normal\t\t({} {} {});\n".format(0, 0, 1)
-
- #Write edited dict to file
- write_file_name = case_path + "/system/" + name
-
- if os.path.exists(write_file_name):
- os.remove(write_file_name)
-
- output_file = open(write_file_name, "w+")
+ start_index = foam.find_keyword_line(dict_lines, 'timeStart')
+ dict_lines[start_index] = f' timeStart \t\t{start_time:.6f};\n'
+
+ start_index = foam.find_keyword_line(dict_lines, 'timeEnd')
+ dict_lines[start_index] = f' timeEnd \t\t{end_time:.6f};\n'
+
+ # Write name of the profile
+ name = pln['name']
+ start_index = foam.find_keyword_line(dict_lines, 'planeName')
+ dict_lines[start_index] = f'{name}\n'
+
+ # Write field type
+ field_type = pln['field']
+ start_index = foam.find_keyword_line(dict_lines, 'fields')
+
+ if field_type == 'Velocity':
+ dict_lines[start_index] = ' fields \t\t({});\n'.format('U')
+ if field_type == 'Pressure':
+ dict_lines[start_index] = ' fields \t\t({});\n'.format('p')
+
+ # Write normal and point coordinates
+ point_x = pln['pointX']
+ point_y = pln['pointY']
+ point_z = pln['pointZ']
+
+ normal_axis = pln['normalAxis']
+
+ start_index = foam.find_keyword_line(dict_lines, 'point')
+ dict_lines[start_index] = (
+ f'\t point\t\t({point_x:.6f} {point_y:.6f} {point_z:.6f});\n'
+ )
+
+ start_index = foam.find_keyword_line(dict_lines, 'normal')
+ if normal_axis == 'X':
+ dict_lines[start_index] = f'\t normal\t\t({1} {0} {0});\n'
+ if normal_axis == 'Y':
+ dict_lines[start_index] = f'\t normal\t\t({0} {1} {0});\n'
+ if normal_axis == 'Z':
+ dict_lines[start_index] = f'\t normal\t\t({0} {0} {1});\n'
+
+ # Write edited dict to file
+ write_file_name = case_path + '/system/' + name
+
+ if os.path.exists(write_file_name): # noqa: PTH110
+ os.remove(write_file_name) # noqa: PTH107
+
+ output_file = open(write_file_name, 'w+') # noqa: PLW1514, PTH123, SIM115
for line in dict_lines:
output_file.write(line)
output_file.close()
-
-
-def write_momentumTransport_file(input_json_path, template_dict_path, case_path):
- #Read JSON data
- with open(input_json_path + "/EmptyDomainCFD.json") as json_file:
- json_data = json.load(json_file)
+
+def write_momentumTransport_file(input_json_path, template_dict_path, case_path): # noqa: N802, D103
+ # Read JSON data
+ with open(input_json_path + '/EmptyDomainCFD.json') as json_file: # noqa: PLW1514, PTH123
+ json_data = json.load(json_file)
# Returns JSON object as a dictionary
- turb_data = json_data["turbulenceModeling"]
-
+ turb_data = json_data['turbulenceModeling']
+
simulation_type = turb_data['simulationType']
- RANS_type = turb_data['RANSModelType']
- LES_type = turb_data['LESModelType']
- DES_type = turb_data['DESModelType']
+ RANS_type = turb_data['RANSModelType'] # noqa: N806
+ LES_type = turb_data['LESModelType'] # noqa: N806
+ DES_type = turb_data['DESModelType'] # noqa: N806
-
- #Open the template file (OpenFOAM file) for manipulation
- dict_file = open(template_dict_path + "/momentumTransportTemplate", "r")
+ # Open the template file (OpenFOAM file) for manipulation
+ dict_file = open(template_dict_path + '/momentumTransportTemplate') # noqa: PLW1514, PTH123, SIM115
dict_lines = dict_file.readlines()
dict_file.close()
-
- #Write type of the simulation
- start_index = foam.find_keyword_line(dict_lines, "simulationType")
- dict_lines[start_index] = "simulationType \t{};\n".format("RAS" if simulation_type=="RANS" else simulation_type)
-
- if simulation_type=="RANS":
- #Write RANS model type
- start_index = foam.find_keyword_line(dict_lines, "RAS") + 2
- added_part = " model \t{};\n".format(RANS_type)
+
+ # Write type of the simulation
+ start_index = foam.find_keyword_line(dict_lines, 'simulationType')
+ dict_lines[start_index] = 'simulationType \t{};\n'.format(
+ 'RAS' if simulation_type == 'RANS' else simulation_type
+ )
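+    # OpenFOAM's momentumTransport dictionary expects the keyword 'RAS' for RANS-type models, hence the mapping above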
+
+ if simulation_type == 'RANS':
+ # Write RANS model type
+ start_index = foam.find_keyword_line(dict_lines, 'RAS') + 2
+ added_part = f' model \t{RANS_type};\n'
dict_lines.insert(start_index, added_part)
-
- elif simulation_type=="LES":
- #Write LES SGS model type
- start_index = foam.find_keyword_line(dict_lines, "LES") + 2
- added_part = " model \t{};\n".format(LES_type)
+
+ elif simulation_type == 'LES':
+ # Write LES SGS model type
+ start_index = foam.find_keyword_line(dict_lines, 'LES') + 2
+ added_part = f' model \t{LES_type};\n'
dict_lines.insert(start_index, added_part)
-
- elif simulation_type=="DES":
- #Write DES model type
- start_index = foam.find_keyword_line(dict_lines, "LES") + 2
- added_part = " model \t{};\n".format(DES_type)
+
+ elif simulation_type == 'DES':
+ # Write DES model type
+ start_index = foam.find_keyword_line(dict_lines, 'LES') + 2
+ added_part = f' model \t{DES_type};\n'
dict_lines.insert(start_index, added_part)
- #Write edited dict to file
- write_file_name = case_path + "/constant/momentumTransport"
-
- if os.path.exists(write_file_name):
- os.remove(write_file_name)
-
- output_file = open(write_file_name, "w+")
+ # Write edited dict to file
+ write_file_name = case_path + '/constant/momentumTransport'
+
+ if os.path.exists(write_file_name): # noqa: PTH110
+ os.remove(write_file_name) # noqa: PTH107
+
+ output_file = open(write_file_name, 'w+') # noqa: PLW1514, PTH123, SIM115
for line in dict_lines:
output_file.write(line)
output_file.close()
-
-def write_physicalProperties_file(input_json_path, template_dict_path, case_path):
- #Read JSON data
- with open(input_json_path + "/EmptyDomainCFD.json") as json_file:
- json_data = json.load(json_file)
+
+def write_physicalProperties_file(input_json_path, template_dict_path, case_path): # noqa: N802, D103
+ # Read JSON data
+ with open(input_json_path + '/EmptyDomainCFD.json') as json_file: # noqa: PLW1514, PTH123
+ json_data = json.load(json_file)
# Returns JSON object as a dictionary
- wc_data = json_data["windCharacteristics"]
-
-
+ wc_data = json_data['windCharacteristics']
+
kinematic_viscosity = wc_data['kinematicViscosity']
-
- #Open the template file (OpenFOAM file) for manipulation
- dict_file = open(template_dict_path + "/physicalPropertiesTemplate", "r")
+ # Open the template file (OpenFOAM file) for manipulation
+ dict_file = open(template_dict_path + '/physicalPropertiesTemplate') # noqa: PLW1514, PTH123, SIM115
dict_lines = dict_file.readlines()
dict_file.close()
-
- #Write type of the simulation
- start_index = foam.find_keyword_line(dict_lines, "nu")
- dict_lines[start_index] = "nu\t\t[0 2 -1 0 0 0 0] {:.4e};\n".format(kinematic_viscosity)
+    # Write the kinematic viscosity
+ start_index = foam.find_keyword_line(dict_lines, 'nu')
+ dict_lines[start_index] = f'nu\t\t[0 2 -1 0 0 0 0] {kinematic_viscosity:.4e};\n'
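+    # the bracketed values are OpenFOAM dimension exponents (mass, length, time, temperature, moles, current, luminous intensity), so nu is given in m^2/s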
+
+ # Write edited dict to file
+ write_file_name = case_path + '/constant/physicalProperties'
+ if os.path.exists(write_file_name): # noqa: PTH110
+ os.remove(write_file_name) # noqa: PTH107
- #Write edited dict to file
- write_file_name = case_path + "/constant/physicalProperties"
-
- if os.path.exists(write_file_name):
- os.remove(write_file_name)
-
- output_file = open(write_file_name, "w+")
+ output_file = open(write_file_name, 'w+') # noqa: PLW1514, PTH123, SIM115
for line in dict_lines:
output_file.write(line)
output_file.close()
-
-def write_transportProperties_file(input_json_path, template_dict_path, case_path):
- #Read JSON data
- with open(input_json_path + "/EmptyDomainCFD.json") as json_file:
- json_data = json.load(json_file)
+def write_transportProperties_file(input_json_path, template_dict_path, case_path): # noqa: N802, D103
+ # Read JSON data
+ with open(input_json_path + '/EmptyDomainCFD.json') as json_file: # noqa: PLW1514, PTH123
+ json_data = json.load(json_file)
# Returns JSON object as a dictionary
- wc_data = json_data["windCharacteristics"]
-
-
+ wc_data = json_data['windCharacteristics']
+
kinematic_viscosity = wc_data['kinematicViscosity']
-
- #Open the template file (OpenFOAM file) for manipulation
- dict_file = open(template_dict_path + "/transportPropertiesTemplate", "r")
+ # Open the template file (OpenFOAM file) for manipulation
+ dict_file = open(template_dict_path + '/transportPropertiesTemplate') # noqa: PLW1514, PTH123, SIM115
dict_lines = dict_file.readlines()
dict_file.close()
-
- #Write type of the simulation
- start_index = foam.find_keyword_line(dict_lines, "nu")
- dict_lines[start_index] = "nu\t\t[0 2 -1 0 0 0 0] {:.3e};\n".format(kinematic_viscosity)
+    # Write the kinematic viscosity
+ start_index = foam.find_keyword_line(dict_lines, 'nu')
+ dict_lines[start_index] = f'nu\t\t[0 2 -1 0 0 0 0] {kinematic_viscosity:.3e};\n'
+
+ # Write edited dict to file
+ write_file_name = case_path + '/constant/transportProperties'
+ if os.path.exists(write_file_name): # noqa: PTH110
+ os.remove(write_file_name) # noqa: PTH107
- #Write edited dict to file
- write_file_name = case_path + "/constant/transportProperties"
-
- if os.path.exists(write_file_name):
- os.remove(write_file_name)
-
- output_file = open(write_file_name, "w+")
+ output_file = open(write_file_name, 'w+') # noqa: PLW1514, PTH123, SIM115
for line in dict_lines:
output_file.write(line)
output_file.close()
-def write_fvSchemes_file(input_json_path, template_dict_path, case_path):
- #Read JSON data
- with open(input_json_path + "/EmptyDomainCFD.json") as json_file:
- json_data = json.load(json_file)
+def write_fvSchemes_file(input_json_path, template_dict_path, case_path): # noqa: N802, D103
+ # Read JSON data
+ with open(input_json_path + '/EmptyDomainCFD.json') as json_file: # noqa: PLW1514, PTH123
+ json_data = json.load(json_file)
# Returns JSON object as a dictionary
- turb_data = json_data["turbulenceModeling"]
-
-
+ turb_data = json_data['turbulenceModeling']
+
simulation_type = turb_data['simulationType']
-
- #Open the template file (OpenFOAM file) for manipulation
- dict_file = open(template_dict_path + "/fvSchemesTemplate{}".format(simulation_type), "r")
+ # Open the template file (OpenFOAM file) for manipulation
+ dict_file = open(template_dict_path + f'/fvSchemesTemplate{simulation_type}') # noqa: PLW1514, PTH123, SIM115
dict_lines = dict_file.readlines()
dict_file.close()
-
-
- #Write edited dict to file
- write_file_name = case_path + "/system/fvSchemes"
-
- if os.path.exists(write_file_name):
- os.remove(write_file_name)
-
- output_file = open(write_file_name, "w+")
+
+ # Write edited dict to file
+ write_file_name = case_path + '/system/fvSchemes'
+
+ if os.path.exists(write_file_name): # noqa: PTH110
+ os.remove(write_file_name) # noqa: PTH107
+
+ output_file = open(write_file_name, 'w+') # noqa: PLW1514, PTH123, SIM115
for line in dict_lines:
output_file.write(line)
- output_file.close()
-
-def write_decomposeParDict_file(input_json_path, template_dict_path, case_path):
+ output_file.close()
+
- #Read JSON data
- with open(input_json_path + "/EmptyDomainCFD.json") as json_file:
- json_data = json.load(json_file)
+def write_decomposeParDict_file(input_json_path, template_dict_path, case_path): # noqa: N802, D103
+ # Read JSON data
+ with open(input_json_path + '/EmptyDomainCFD.json') as json_file: # noqa: PLW1514, PTH123
+ json_data = json.load(json_file)
# Returns JSON object as a dictionary
- ns_data = json_data["numericalSetup"]
-
+ ns_data = json_data['numericalSetup']
+
num_processors = ns_data['numProcessors']
-
- #Open the template file (OpenFOAM file) for manipulation
- dict_file = open(template_dict_path + "/decomposeParDictTemplate", "r")
+ # Open the template file (OpenFOAM file) for manipulation
+ dict_file = open(template_dict_path + '/decomposeParDictTemplate') # noqa: PLW1514, PTH123, SIM115
dict_lines = dict_file.readlines()
dict_file.close()
-
- #Write number of sub-domains
- start_index = foam.find_keyword_line(dict_lines, "numberOfSubdomains")
- dict_lines[start_index] = "numberOfSubdomains\t{};\n".format(num_processors)
-
- #Write method of decomposition
- start_index = foam.find_keyword_line(dict_lines, "decomposer")
- dict_lines[start_index] = "decomposer\t\t{};\n".format("scotch")
-
- #Write method of decomposition for OF-V9 and lower compatability
- start_index = foam.find_keyword_line(dict_lines, "method")
- dict_lines[start_index] = "method\t\t{};\n".format("scotch")
-
-
- #Write edited dict to file
- write_file_name = case_path + "/system/decomposeParDict"
-
- if os.path.exists(write_file_name):
- os.remove(write_file_name)
-
- output_file = open(write_file_name, "w+")
+
+ # Write number of sub-domains
+ start_index = foam.find_keyword_line(dict_lines, 'numberOfSubdomains')
+ dict_lines[start_index] = f'numberOfSubdomains\t{num_processors};\n'
+
+ # Write method of decomposition
+ start_index = foam.find_keyword_line(dict_lines, 'decomposer')
+ dict_lines[start_index] = 'decomposer\t\t{};\n'.format('scotch')
+
+ # Write method of decomposition for OF-V9 and lower compatibility
+ start_index = foam.find_keyword_line(dict_lines, 'method')
+ dict_lines[start_index] = 'method\t\t{};\n'.format('scotch')
+
+ # Write edited dict to file
+ write_file_name = case_path + '/system/decomposeParDict'
+
+ if os.path.exists(write_file_name): # noqa: PTH110
+ os.remove(write_file_name) # noqa: PTH107
+
+ output_file = open(write_file_name, 'w+') # noqa: PLW1514, PTH123, SIM115
for line in dict_lines:
output_file.write(line)
- output_file.close()
-
-def write_DFSRTurbDict_file(input_json_path, template_dict_path, case_path):
-
- #Read JSON data
- with open(input_json_path + "/EmptyDomainCFD.json") as json_file:
- json_data = json.load(json_file)
-
+ output_file.close()
+
+
+def write_DFSRTurbDict_file(input_json_path, template_dict_path, case_path): # noqa: N802, D103
+ # Read JSON data
+ with open(input_json_path + '/EmptyDomainCFD.json') as json_file: # noqa: PLW1514, PTH123
+ json_data = json.load(json_file)
+
fmax = 200.0
# Returns JSON object as a dictionary
- wc_data = json_data["windCharacteristics"]
- ns_data = json_data["numericalSetup"]
-
+ wc_data = json_data['windCharacteristics']
+ ns_data = json_data['numericalSetup']
+
wind_speed = wc_data['referenceWindSpeed']
duration = ns_data['duration']
-
- #Generate a little longer duration to be safe
- duration = duration*1.010
- #Open the template file (OpenFOAM file) for manipulation
- dict_file = open(template_dict_path + "/DFSRTurbDictTemplate", "r")
+ # Generate a little longer duration to be safe
+ duration = duration * 1.010 # noqa: PLR6104
+
+ # Open the template file (OpenFOAM file) for manipulation
+ dict_file = open(template_dict_path + '/DFSRTurbDictTemplate') # noqa: PLW1514, PTH123, SIM115
dict_lines = dict_file.readlines()
dict_file.close()
-
- #Write the end time
- start_index = foam.find_keyword_line(dict_lines, "endTime")
- dict_lines[start_index] = "endTime\t\t\t{:.4f};\n".format(duration)
-
- #Write patch name
- start_index = foam.find_keyword_line(dict_lines, "patchName")
- dict_lines[start_index] = "patchName\t\t\"{}\";\n".format("inlet")
-
- #Write cohUav
- start_index = foam.find_keyword_line(dict_lines, "cohUav")
- dict_lines[start_index] = "cohUav\t\t\t{:.4f};\n".format(wind_speed)
-
- #Write fmax
- start_index = foam.find_keyword_line(dict_lines, "fMax")
- dict_lines[start_index] = "fMax\t\t\t{:.4f};\n".format(fmax)
-
- #Write time step
- start_index = foam.find_keyword_line(dict_lines, "timeStep")
- dict_lines[start_index] = "timeStep\t\t{:.4f};\n".format(1.0/fmax)
-
- #Write edited dict to file
- write_file_name = case_path + "/constant/DFSRTurbDict"
-
- if os.path.exists(write_file_name):
- os.remove(write_file_name)
-
- output_file = open(write_file_name, "w+")
+
+ # Write the end time
+ start_index = foam.find_keyword_line(dict_lines, 'endTime')
+ dict_lines[start_index] = f'endTime\t\t\t{duration:.4f};\n'
+
+ # Write patch name
+ start_index = foam.find_keyword_line(dict_lines, 'patchName')
+ dict_lines[start_index] = 'patchName\t\t"{}";\n'.format('inlet')
+
+ # Write cohUav
+ start_index = foam.find_keyword_line(dict_lines, 'cohUav')
+ dict_lines[start_index] = f'cohUav\t\t\t{wind_speed:.4f};\n'
+
+ # Write fmax
+ start_index = foam.find_keyword_line(dict_lines, 'fMax')
+ dict_lines[start_index] = f'fMax\t\t\t{fmax:.4f};\n'
+
+ # Write time step
+ start_index = foam.find_keyword_line(dict_lines, 'timeStep')
+ dict_lines[start_index] = f'timeStep\t\t{1.0 / fmax:.4f};\n'
+
+ # Write edited dict to file
+ write_file_name = case_path + '/constant/DFSRTurbDict'
+
+ if os.path.exists(write_file_name): # noqa: PTH110
+ os.remove(write_file_name) # noqa: PTH107
+
+ output_file = open(write_file_name, 'w+') # noqa: PLW1514, PTH123, SIM115
for line in dict_lines:
output_file.write(line)
- output_file.close()
-
+ output_file.close()
-if __name__ == '__main__':
-
+
+if __name__ == '__main__':
input_args = sys.argv
# Set filenames
input_json_path = sys.argv[1]
template_dict_path = sys.argv[2]
case_path = sys.argv[3]
-
-
+
# input_json_path = "/home/abiy/Documents/WE-UQ/LocalWorkDir/EmptyDomainCFD/constant/simCenter/input"
# template_dict_path = "/home/abiy/SimCenter/SourceCode/NHERI-SimCenter/SimCenterBackendApplications/applications/createEVENT/EmptyDomainCFD/templateOF10Dicts"
# case_path = "/home/abiy/Documents/WE-UQ/LocalWorkDir/EmptyDomainCFD"
-
+
# data_path = os.getcwd()
# script_path = os.path.dirname(os.path.realpath(__file__))
-
-
- #Create case director
- # set up goes here
-
-
- #Read JSON data
- with open(input_json_path + "/EmptyDomainCFD.json") as json_file:
- json_data = json.load(json_file)
+    # Create case directory
+ # set up goes here
+
+ # Read JSON data
+ with open(input_json_path + '/EmptyDomainCFD.json') as json_file: # noqa: PLW1514, PTH123
+ json_data = json.load(json_file)
# Returns JSON object as a dictionary
- turb_data = json_data["turbulenceModeling"]
-
+ turb_data = json_data['turbulenceModeling']
+
simulation_type = turb_data['simulationType']
RANS_type = turb_data['RANSModelType']
LES_type = turb_data['LESModelType']
-
- #Write blockMesh
+
+ # Write blockMesh
write_block_mesh_dict(input_json_path, template_dict_path, case_path)
- #Create and write the SnappyHexMeshDict file
+ # Create and write the SnappyHexMeshDict file
write_snappy_hex_mesh_dict(input_json_path, template_dict_path, case_path)
-
- #Write files in "0" directory
+
+ # Write files in "0" directory
write_U_file(input_json_path, template_dict_path, case_path)
write_p_file(input_json_path, template_dict_path, case_path)
write_nut_file(input_json_path, template_dict_path, case_path)
write_k_file(input_json_path, template_dict_path, case_path)
-
- if simulation_type == "RANS" and RANS_type=="kEpsilon":
+
+ if simulation_type == 'RANS' and RANS_type == 'kEpsilon':
write_epsilon_file(input_json_path, template_dict_path, case_path)
- #Write control dict
+ # Write control dict
write_controlDict_file(input_json_path, template_dict_path, case_path)
-
- #Write results to be monitored
+
+ # Write results to be monitored
write_wind_profiles_file(input_json_path, template_dict_path, case_path)
write_vtk_plane_file(input_json_path, template_dict_path, case_path)
-
- #Write fvSolution dict
+
+ # Write fvSolution dict
write_fvSolution_file(input_json_path, template_dict_path, case_path)
- #Write fvSchemes dict
+ # Write fvSchemes dict
write_fvSchemes_file(input_json_path, template_dict_path, case_path)
- #Write momentumTransport dict
+ # Write momentumTransport dict
write_momentumTransport_file(input_json_path, template_dict_path, case_path)
-
- #Write physicalProperties dict
+
+ # Write physicalProperties dict
write_physicalProperties_file(input_json_path, template_dict_path, case_path)
-
- #Write transportProperties (physicalProperties in OF-10) dict for OpenFOAM-9 and below
+
+ # Write transportProperties (physicalProperties in OF-10) dict for OpenFOAM-9 and below
write_transportProperties_file(input_json_path, template_dict_path, case_path)
-
- #Write decomposeParDict
+
+ # Write decomposeParDict
write_decomposeParDict_file(input_json_path, template_dict_path, case_path)
-
- #Write DFSRTurb dict
+
+ # Write DFSRTurb dict
# write_DFSRTurbDict_file(input_json_path, template_dict_path, case_path)
-
- #Write TInf files
+
+ # Write TInf files
write_boundary_data_files(input_json_path, case_path)
diff --git a/modules/createEVENT/NNGM/NNGM.py b/modules/createEVENT/NNGM/NNGM.py
index df9e802c3..4cf7739ba 100644
--- a/modules/createEVENT/NNGM/NNGM.py
+++ b/modules/createEVENT/NNGM/NNGM.py
@@ -1,142 +1,168 @@
-
+import argparse # noqa: CPY001, D100, INP001
import json
import os
-import sys
-import subprocess
-import hashlib
-from scipy import spatial
-import glob
-import re
-import argparse
from textwrap import wrap
-def ReadSMC(smcFilePath):
- with open(smcFilePath, 'r+') as smcFile:
+from scipy import spatial
+
+
+def ReadSMC(smcFilePath): # noqa: N802, N803, D103
+ with open(smcFilePath, 'r+') as smcFile: # noqa: N806, PLW1514, PTH123
series = []
- smcLines = smcFile.readlines()
- dT = 1.0/float(smcLines[17].strip().split()[1])
- nCommentLines = int(smcLines[12].strip().split()[7])
- for line in smcLines[(27+nCommentLines):]:
+ smcLines = smcFile.readlines() # noqa: N806
+ dT = 1.0 / float(smcLines[17].strip().split()[1]) # noqa: N806
+ nCommentLines = int(smcLines[12].strip().split()[7]) # noqa: N806
+ for line in smcLines[(27 + nCommentLines) :]:
for value in wrap(line, 10, drop_whitespace=False):
if value.strip():
- series.append(float(value)/100.0)
+ series.append(float(value) / 100.0) # noqa: PERF401
return [series, dT]
-def ReadCOSMOS(cosmosFilePath):
- with open(cosmosFilePath, 'r+') as cosmosFile:
+
+def ReadCOSMOS(cosmosFilePath): # noqa: N802, N803, D103
+ with open(cosmosFilePath, 'r+') as cosmosFile: # noqa: N806, PLW1514, PTH123
series = []
- cosmosLines = cosmosFile.readlines()
- headerSize = int(cosmosLines[0][46:48])
- intSize = int(cosmosLines[headerSize][37:40])
- realSize = int(cosmosLines[headerSize + intSize + 1][34:37])
- commentSize = int(cosmosLines[headerSize + intSize + realSize + 2][0:4])
- totalHeader = headerSize + intSize + realSize + commentSize + 3
- recordSize = int(cosmosLines[totalHeader].strip().split()[0])
- dT = float(cosmosLines[37].strip().split()[1])/1000.0
-
- for line in cosmosLines[totalHeader + 1:totalHeader + recordSize + 1]:
- series.append(float(line.strip())/100.0)
+ cosmosLines = cosmosFile.readlines() # noqa: N806
+ headerSize = int(cosmosLines[0][46:48]) # noqa: N806
+ intSize = int(cosmosLines[headerSize][37:40]) # noqa: N806
+ realSize = int(cosmosLines[headerSize + intSize + 1][34:37]) # noqa: N806
+ commentSize = int(cosmosLines[headerSize + intSize + realSize + 2][0:4]) # noqa: N806
+ totalHeader = headerSize + intSize + realSize + commentSize + 3 # noqa: N806
+ recordSize = int(cosmosLines[totalHeader].strip().split()[0]) # noqa: N806
+ dT = float(cosmosLines[37].strip().split()[1]) / 1000.0 # noqa: N806
+
+ for line in cosmosLines[totalHeader + 1 : totalHeader + recordSize + 1]:
+ series.append(float(line.strip()) / 100.0) # noqa: PERF401
return [series, dT]
-def createEvent(recordsFolder, h1File, h2File, eventFilePath):
- if h1File.endswith(".smc"):
- h1, dt1 = ReadSMC(os.path.join(recordsFolder, h1File))
+def createEvent(recordsFolder, h1File, h2File, eventFilePath): # noqa: N802, N803, D103
+ if h1File.endswith('.smc'):
+ h1, dt1 = ReadSMC(os.path.join(recordsFolder, h1File)) # noqa: PTH118
else:
- h1, dt1 = ReadCOSMOS(os.path.join(recordsFolder, h1File))
+ h1, dt1 = ReadCOSMOS(os.path.join(recordsFolder, h1File)) # noqa: PTH118
- if h2File.endswith(".smc"):
- h2, dt2 = ReadSMC(os.path.join(recordsFolder, h2File))
+ if h2File.endswith('.smc'):
+ h2, dt2 = ReadSMC(os.path.join(recordsFolder, h2File)) # noqa: PTH118
else:
- h2, dt2 = ReadCOSMOS(os.path.join(recordsFolder, h2File))
-
- patternH1 = {}
- patternH1["type"] = "UniformAcceleration"
- patternH1["timeSeries"] = "accel_X"
- patternH1["dof"] = 1
-
- patternH2 = {}
- patternH2["type"] = "UniformAcceleration"
- patternH2["timeSeries"] = "accel_Y"
- patternH2["dof"] = 2
-
- seriesH1 = {}
- seriesH1["name"] = "accel_X"
- seriesH1["type"] = "Value"
- seriesH1["dT"] = dt1
- seriesH1["data"] = h1
-
- seriesH2 = {}
- seriesH2["name"] = "accel_Y"
- seriesH2["type"] = "Value"
- seriesH2["dT"] = dt2
- seriesH2["data"] = h2
+ h2, dt2 = ReadCOSMOS(os.path.join(recordsFolder, h2File)) # noqa: PTH118
+
+ patternH1 = {} # noqa: N806
+ patternH1['type'] = 'UniformAcceleration'
+ patternH1['timeSeries'] = 'accel_X'
+ patternH1['dof'] = 1
+
+ patternH2 = {} # noqa: N806
+ patternH2['type'] = 'UniformAcceleration'
+ patternH2['timeSeries'] = 'accel_Y'
+ patternH2['dof'] = 2
+
+ seriesH1 = {} # noqa: N806
+ seriesH1['name'] = 'accel_X'
+ seriesH1['type'] = 'Value'
+ seriesH1['dT'] = dt1
+ seriesH1['data'] = h1
+
+ seriesH2 = {} # noqa: N806
+ seriesH2['name'] = 'accel_Y'
+ seriesH2['type'] = 'Value'
+ seriesH2['dT'] = dt2
+ seriesH2['data'] = h2
event = {}
- event["name"] = h1File
- event["type"] = "Seismic"
- event["description"] = h1File
- event["dT"] = dt1
- event["numSteps"] = len(h1)
- event["timeSeries"] = [seriesH1, seriesH2]
- event["pattern"] = [patternH1, patternH2]
-
- eventsDict = {}
- eventsDict["Events"] = [event]
- eventsDict["RandomVariables"] = []
-
- with open(eventFilePath, 'w') as eventFile:
- json.dump(eventsDict, eventFile, indent=4)
-
-
-def main():
- #Input Argument Specifications
- gmArgsParser = argparse.ArgumentParser("Characterize ground motion using seismic hazard analysis and record selection")
- gmArgsParser.add_argument("-filenameAIM", "--filenameAIM", required=True, help="Path to the BIM file")
- gmArgsParser.add_argument("-filenameEVENT", "--filenameEVENT", required=True, help="Path to the EVENT file")
- gmArgsParser.add_argument("-groundMotions", "--groundMotions", required=True, help="Path to the ground motions configuration file")
- gmArgsParser.add_argument("-recordsFolder", "--recordsFolder", required=True, help="Path to the ground motions records folder")
- gmArgsParser.add_argument("-getRV", "--getRV", action='store_true', help="Flag showing whether or not this call is to get the random variables definition")
-
- #Parse the arguments
- gmArgs = gmArgsParser.parse_args()
-
-
- #Check getRV flag
+ event['name'] = h1File
+ event['type'] = 'Seismic'
+ event['description'] = h1File
+ event['dT'] = dt1
+ event['numSteps'] = len(h1)
+ event['timeSeries'] = [seriesH1, seriesH2]
+ event['pattern'] = [patternH1, patternH2]
+
+ eventsDict = {} # noqa: N806
+ eventsDict['Events'] = [event]
+ eventsDict['RandomVariables'] = []
+
+ with open(eventFilePath, 'w') as eventFile: # noqa: N806, PLW1514, PTH123
+ json.dump(eventsDict, eventFile, indent=4)
+
+
+def main(): # noqa: D103
+ # Input Argument Specifications
+ gmArgsParser = argparse.ArgumentParser( # noqa: N806
+ 'Characterize ground motion using seismic hazard analysis and record selection'
+ )
+ gmArgsParser.add_argument(
+ '-filenameAIM', '--filenameAIM', required=True, help='Path to the BIM file'
+ )
+ gmArgsParser.add_argument(
+ '-filenameEVENT',
+ '--filenameEVENT',
+ required=True,
+ help='Path to the EVENT file',
+ )
+ gmArgsParser.add_argument(
+ '-groundMotions',
+ '--groundMotions',
+ required=True,
+ help='Path to the ground motions configuration file',
+ )
+ gmArgsParser.add_argument(
+ '-recordsFolder',
+ '--recordsFolder',
+ required=True,
+ help='Path to the ground motions records folder',
+ )
+ gmArgsParser.add_argument(
+ '-getRV',
+ '--getRV',
+ action='store_true',
+ help='Flag showing whether or not this call is to get the random variables definition',
+ )
+
+ # Parse the arguments
+ gmArgs = gmArgsParser.parse_args() # noqa: N806
+
+ # Check getRV flag
if not gmArgs.getRV:
- #We will use the template files so no changes are needed
- #We do not have any random variables for this event for now
+ # We will use the template files so no changes are needed
+ # We do not have any random variables for this event for now
return 0
- #First let's process the arguments
- bimFilePath = gmArgs.filenameAIM
- eventFilePath = gmArgs.filenameEVENT
- gmConfigPath = gmArgs.groundMotions
- recordsFolder = gmArgs.recordsFolder
-
- with open(gmConfigPath, 'r') as gmConfigFile:
- gmConfig = json.load(gmConfigFile)
-
- #We need to read the building location
- with open(bimFilePath, 'r') as bimFile:
+ # First let's process the arguments
+ bimFilePath = gmArgs.filenameAIM # noqa: N806
+ eventFilePath = gmArgs.filenameEVENT # noqa: N806
+ gmConfigPath = gmArgs.groundMotions # noqa: N806
+ recordsFolder = gmArgs.recordsFolder # noqa: N806
+
+ with open(gmConfigPath) as gmConfigFile: # noqa: N806, PLW1514, PTH123
+ gmConfig = json.load(gmConfigFile) # noqa: N806
+
+ # We need to read the building location
+ with open(bimFilePath) as bimFile: # noqa: N806, PLW1514, PTH123
bim = json.load(bimFile)
- location = [bim["GI"]["location"]["latitude"], bim["GI"]["location"]["longitude"]]
+ location = [
+ bim['GI']['location']['latitude'],
+ bim['GI']['location']['longitude'],
+ ]
- siteLocations = []
- for gm in gmConfig["GroundMotion"]:
- siteLocations.append([gm["Location"]["Latitude"], gm["Location"]["Longitude"]])
+ siteLocations = [] # noqa: N806
+ for gm in gmConfig['GroundMotion']:
+ siteLocations.append( # noqa: PERF401
+ [gm['Location']['Latitude'], gm['Location']['Longitude']]
+ )
# we need to find the nearest neighbor
- sitesTree = spatial.KDTree(siteLocations)
+ sitesTree = spatial.KDTree(siteLocations) # noqa: N806
nearest = sitesTree.query(location)
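+    # KDTree.query returns (distance, index); the index selects the closest ground motion site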
- nearestGM = gmConfig["GroundMotion"][nearest[1]]
- h1File = nearestGM["Records"]["Horizontal1"]
- h2File = nearestGM["Records"]["Horizontal2"]
-
- createEvent(os.path.abspath(recordsFolder), h1File, h2File, eventFilePath)
+ nearestGM = gmConfig['GroundMotion'][nearest[1]] # noqa: N806
+ h1File = nearestGM['Records']['Horizontal1'] # noqa: N806
+ h2File = nearestGM['Records']['Horizontal2'] # noqa: N806
+
+ createEvent(os.path.abspath(recordsFolder), h1File, h2File, eventFilePath) # noqa: RET503, PTH100
+
-if __name__== "__main__":
+if __name__ == '__main__':
main()
diff --git a/modules/createEVENT/NonisolatedLowRiseTPU/NonIsolatedLowRiseTPU.cpp b/modules/createEVENT/NonisolatedLowRiseTPU/NonIsolatedLowRiseTPU.cpp
index fdd8a0d13..c2caaa352 100644
--- a/modules/createEVENT/NonisolatedLowRiseTPU/NonIsolatedLowRiseTPU.cpp
+++ b/modules/createEVENT/NonisolatedLowRiseTPU/NonIsolatedLowRiseTPU.cpp
@@ -94,7 +94,7 @@ main(int argc, char **argv) {
json_t *generalInformation = json_object_get(input, "GeneralInformation");
json_t *inputEventsArray = json_object_get(input, "Events");
if (generalInformation == NULL || inputEventsArray == NULL) {
- std::cerr << "FATAL ERROR - input file conatins no Events key-pair\n";
+ std::cerr << "FATAL ERROR - input file contains no Events key-pair\n";
exit(-1);
}
@@ -128,7 +128,7 @@ main(int argc, char **argv) {
json_object_set(units,"time",json_string("sec"));
json_object_set(outputEvent,"units",units);
- // call function to fill in event details .. depends on getRV flag what is acually done
+ // call function to fill in event details .. depends on getRV flag what is actually done
addEvent(generalInformation, inputEvent, outputEvent, doRV);
json_array_append(outputEventsArray, outputEvent);
@@ -180,7 +180,7 @@ int addEvent(json_t *generalInfo, json_t *currentEvent, json_t *outputEvent, boo
widthJO == NULL ||
depthJO == NULL ||
storiesJO == NULL ) {
- std::cerr << "ERROR missing Information from GeneralInformation (height, width, stories all neeed)\n";
+ std::cerr << "ERROR missing Information from GeneralInformation (height, width, stories all needed)\n";
return -2;
}
@@ -337,7 +337,7 @@ int addEvent(json_t *generalInfo, json_t *currentEvent, json_t *outputEvent, boo
}
//
- // for each tap determine factors fr moments and forces for the buiding asuming a mesh discretization
+    // for each tap determine factors for moments and forces for the building assuming a mesh discretization
//
int numDivisionX = 10;
@@ -525,7 +525,7 @@ int addEvent(json_t *generalInfo, json_t *currentEvent, json_t *outputEvent, boo
json_t *storiesJO = json_object_get(generalInfo,"stories");
if (storiesJO == NULL ) {
- std::cerr << "ERROR missing Information from GeneralInformation (height, width, stories all neeed)\n";
+ std::cerr << "ERROR missing Information from GeneralInformation (height, width, stories all needed)\n";
return -2;
}
@@ -628,7 +628,7 @@ int addEvent(json_t *generalInfo, json_t *currentEvent, json_t *outputEvent, boo
//
// function to add factors for forces and moment contribution coefficients for taps to building floor
-// determine coeffiecients for each tap for a building face. This is done by going over each story of
+// determine coefficients for each tap for a building face. This is done by going over each story of
// For each story break into numDiv X numDiv segments. For each segment assume point load at center
// segment and equal in mag to area of segment and using simply supported beam formula determine force
 // at floor below and floor above. based on distance from center line of story determine acting moments
@@ -718,9 +718,9 @@ int addForcesFace(TAP *theTaps, int numTaps,
 // function to find nearest tap
// inputs: theTAPS: array of Taps,
// numTaps: number of taps in array
-// xLoc, yLoc: is location of inut point
+// xLoc, yLoc: is location of input point
 // face: id of face
-// output: pinter to nearest TAp in the array, NULL if no taps with face
+// output: Pointer to nearest TAP in the array, NULL if no taps with face
//
TAP *findNearestTAP(TAP *theTAPS, int numTaps, double locX, double locY, int face) {
diff --git a/modules/createEVENT/NonisolatedLowRiseTPU/NonIsolatedLowRiseTPU.py b/modules/createEVENT/NonisolatedLowRiseTPU/NonIsolatedLowRiseTPU.py
index a3dbffa48..1845bb535 100644
--- a/modules/createEVENT/NonisolatedLowRiseTPU/NonIsolatedLowRiseTPU.py
+++ b/modules/createEVENT/NonisolatedLowRiseTPU/NonIsolatedLowRiseTPU.py
@@ -1,121 +1,113 @@
-# python code to open the TPU .mat file
+# python code to open the TPU .mat file # noqa: CPY001, D100, INP001
# and put data into a SimCenter JSON file for
# wind tunnel data
-import sys
import os
-import subprocess
-import json
-import stat
-import shutil
-import numpy as np
+import sys
+
import scipy.io as sio
-from pprint import pprint
-inputArgs = sys.argv
+inputArgs = sys.argv # noqa: N816
-print ("Number of arguments: %d" % len(sys.argv))
-print ("The arguments are: %s" %str(sys.argv))
+print('Number of arguments: %d' % len(sys.argv)) # noqa: T201
+print('The arguments are: %s' % str(sys.argv)) # noqa: T201, UP031
# set filenames
-matFileIN = sys.argv[1]
-jsonFileOUT = sys.argv[2]
-
-dataDir = os.getcwd()
-scriptDir = os.path.dirname(os.path.realpath(__file__))
-
-def parseTPU_LowRise_MatFile(matFileIn, windFileOutName):
-
- file = open(windFileOutName,"w");
- file.write("{\n");
-
- mat_contents = sio.loadmat(matFileIn);
- breadth = mat_contents['Building_breadth'][0][0];
-
- depth = mat_contents['Building_depth'][0][0];
- height = mat_contents['Building_height'][0][0];
- breadth = mat_contents['Building_breadth'][0][0];
- pitch = mat_contents['Roof_pitch'][0][0];
- period = mat_contents['Sample_period'][0][0];
- frequency = mat_contents['Sample_frequency'][0][0];
- angle = mat_contents['Wind_azimuth'][0][0];
- roofType = mat_contents['Roof_type'][0];
- if (roofType == 'flat roof'):
- roofType = 'Flat'
- elif (roofType == 'gable roof'):
- roofType = 'Gable'
-
- file.write("\"roofType\":\"" + roofType + "\",")
- file.write("\"windSpeed\":%f," % 22.0);
- file.write("\"depth\":%f," % depth);
- file.write("\"height\":%f," % height);
- file.write("\"breadth\":%f," % breadth);
- file.write("\"pitch\":%f," % pitch);
- file.write("\"period\":%f," % period);
- file.write("\"units\":{\"length\":\"m\",\"time\":\"sec\"},");
- file.write("\"frequency\":%f," % frequency);
- file.write("\"incidenceAngle\":%f," % angle);
- file.write("\"tapLocations\": [");
- locations = mat_contents['Location_of_measured_points'];
- numLocations = locations.shape[1];
-
+matFileIN = sys.argv[1] # noqa: N816
+jsonFileOUT = sys.argv[2] # noqa: N816
+
+dataDir = os.getcwd() # noqa: PTH109, N816
+scriptDir = os.path.dirname(os.path.realpath(__file__)) # noqa: PTH120, N816
+
+
+def parseTPU_LowRise_MatFile(matFileIn, windFileOutName): # noqa: C901, N802, N803, D103
+ file = open(windFileOutName, 'w') # noqa: PLW1514, PTH123, SIM115
+ file.write('{\n')
+ mat_contents = sio.loadmat(matFileIn)
+ breadth = mat_contents['Building_breadth'][0][0]
+ depth = mat_contents['Building_depth'][0][0]
+ height = mat_contents['Building_height'][0][0]
+ breadth = mat_contents['Building_breadth'][0][0]
+ pitch = mat_contents['Roof_pitch'][0][0]
+ period = mat_contents['Sample_period'][0][0]
+ frequency = mat_contents['Sample_frequency'][0][0]
+ angle = mat_contents['Wind_azimuth'][0][0]
+ roofType = mat_contents['Roof_type'][0] # noqa: N806
+ if roofType == 'flat roof':
+ roofType = 'Flat' # noqa: N806
+ elif roofType == 'gable roof':
+ roofType = 'Gable' # noqa: N806
+
+ file.write('"roofType":"' + roofType + '",')
+ file.write('"windSpeed":%f,' % 22.0) # noqa: UP031
+ file.write('"depth":%f,' % depth) # noqa: UP031
+ file.write('"height":%f,' % height) # noqa: UP031
+ file.write('"breadth":%f,' % breadth) # noqa: UP031
+ file.write('"pitch":%f,' % pitch) # noqa: UP031
+ file.write('"period":%f,' % period) # noqa: UP031
+ file.write('"units":{"length":"m","time":"sec"},')
+ file.write('"frequency":%f,' % frequency) # noqa: UP031
+ file.write('"incidenceAngle":%f,' % angle) # noqa: UP031
+ file.write('"tapLocations": [')
+ locations = mat_contents['Location_of_measured_points']
+ numLocations = locations.shape[1] # noqa: N806
# get xMax and yMax .. assuming first sensor is 1m from building edge
- # location on faces cannot be obtained from the inputs, at least not with
+ # location on faces cannot be obtained from the inputs, at least not with
     # current documentation, awaiting email from TPU
- xMax = max(locations[0])+1
- yMax = max(locations[1])+1
-
- for loc in range(0, numLocations):
+ xMax = max(locations[0]) + 1 # noqa: N806
+ yMax = max(locations[1]) + 1 # noqa: N806
+
+ for loc in range(numLocations):
tag = locations[2][loc]
- xLoc = locations[0][loc]
- yLoc = locations[1][loc]
+ xLoc = locations[0][loc] # noqa: N806
+ yLoc = locations[1][loc] # noqa: N806
face = locations[3][loc]
- if (roofType == 'Flat'):
-
- X = xLoc
- Y = yLoc
- if (face == 1):
- xLoc = -(Y - breadth/2.0)
- yLoc = X + xMax
- elif (face == 2):
- xLoc = X + depth/2.0
- yLoc = Y + yMax
- elif (face == 3):
- xLoc = Y + breadth/2.0;
- yLoc = -(X - xMax)
- elif (face == 4):
- xLoc = -(X - depth/2.0)
- yLoc = -(Y - yMax)
+ if roofType == 'Flat':
+ X = xLoc # noqa: N806
+ Y = yLoc # noqa: N806
+ if face == 1:
+ xLoc = -(Y - breadth / 2.0) # noqa: N806
+ yLoc = X + xMax # noqa: N806
+ elif face == 2: # noqa: PLR2004
+ xLoc = X + depth / 2.0 # noqa: N806
+ yLoc = Y + yMax # noqa: N806
+ elif face == 3: # noqa: PLR2004
+ xLoc = Y + breadth / 2.0 # noqa: N806
+ yLoc = -(X - xMax) # noqa: N806
+ elif face == 4: # noqa: PLR2004
+ xLoc = -(X - depth / 2.0) # noqa: N806
+ yLoc = -(Y - yMax) # noqa: N806
else:
- xLoc = X + depth/2
- yLoc = Y + breadth/2
+ xLoc = X + depth / 2 # noqa: N806
+ yLoc = Y + breadth / 2 # noqa: N806
-
- if (loc == numLocations-1):
- file.write("{\"id\":%d,\"xLoc\":%f,\"yLoc\":%f,\"face\":%d}]" % (tag, xLoc, yLoc, face))
+ if loc == numLocations - 1:
+ file.write(
+ '{"id":%d,"xLoc":%f,"yLoc":%f,"face":%d}]' % (tag, xLoc, yLoc, face)
+ )
else:
- file.write("{\"id\":%d,\"xLoc\":%f,\"yLoc\":%f,\"face\":%d}," % (tag, xLoc, yLoc, face))
-
-
- file.write(",\"pressureCoefficients\": [");
- coefficients = mat_contents['Wind_pressure_coefficients'];
- numLocations = coefficients.shape[1];
- numValues = coefficients.shape[0];
- for loc in range(0, numLocations):
- file.write("{\"id\": %d , \"data\":[" % (loc+1))
- for i in range(0, numValues-1):
- file.write("%f," % coefficients[i,loc])
- if (loc != numLocations-1):
- file.write("%f]}," % coefficients[numValues-1,loc])
+ file.write(
+ '{"id":%d,"xLoc":%f,"yLoc":%f,"face":%d},' % (tag, xLoc, yLoc, face)
+ )
+
+ file.write(',"pressureCoefficients": [')
+ coefficients = mat_contents['Wind_pressure_coefficients']
+ numLocations = coefficients.shape[1] # noqa: N806
+ numValues = coefficients.shape[0] # noqa: N806
+ for loc in range(numLocations):
+ file.write('{"id": %d , "data":[' % (loc + 1))
+ for i in range(numValues - 1):
+ file.write('%f,' % coefficients[i, loc]) # noqa: UP031
+ if loc != numLocations - 1:
+ file.write('%f]},' % coefficients[numValues - 1, loc]) # noqa: UP031
else:
- file.write("%f]}]" % coefficients[numValues-1,loc])
+ file.write('%f]}]' % coefficients[numValues - 1, loc]) # noqa: UP031
- file.write("}")
+ file.write('}')
file.close()
-if __name__ == '__main__':
- parseTPU_LowRise_MatFile(matFileIN,jsonFileOUT)
-
+if __name__ == '__main__':
+ parseTPU_LowRise_MatFile(matFileIN, jsonFileOUT)
diff --git a/modules/createEVENT/SimCenterEvent/SimCenterEvent.py b/modules/createEVENT/SimCenterEvent/SimCenterEvent.py
index d0e8be829..3fffeee32 100644
--- a/modules/createEVENT/SimCenterEvent/SimCenterEvent.py
+++ b/modules/createEVENT/SimCenterEvent/SimCenterEvent.py
@@ -1,5 +1,4 @@
-# -*- coding: utf-8 -*-
-#
+# # noqa: INP001, D100
# Copyright (c) 2018 Leland Stanford Junior University
# Copyright (c) 2018 The Regents of the University of California
#
@@ -38,26 +37,30 @@
# Adam Zsarnóczay
#
-import argparse, json, sys, os
-import numpy as np
+import argparse
+import json
+import os
+import sys
from pathlib import Path
+import numpy as np
+
# import the common constants and methods
-this_dir = Path(os.path.dirname(os.path.abspath(__file__))).resolve()
+this_dir = Path(os.path.dirname(os.path.abspath(__file__))).resolve() # noqa: PTH100, PTH120
main_dir = this_dir.parents[1]
sys.path.insert(0, str(main_dir / 'common'))
-from simcenter_common import get_scale_factors, get_unit_bases
+from simcenter_common import get_scale_factors, get_unit_bases # noqa: E402
-def write_RV(AIM_file, EVENT_file):
+def write_RV(AIM_file, EVENT_file): # noqa: N802, N803, D103
# load the AIM file to get information about the assigned events
- with open(AIM_file, 'r', encoding="utf-8") as f:
+ with open(AIM_file, encoding='utf-8') as f: # noqa: PTH123
aim_file = json.load(f)
input_units = None
- if 'RegionalEvent' in aim_file.keys():
+ if 'RegionalEvent' in aim_file.keys(): # noqa: SIM118
input_units = aim_file['RegionalEvent'].get('units', None)
output_units = aim_file.get('units', None)
@@ -69,7 +72,7 @@ def write_RV(AIM_file, EVENT_file):
input_unit_bases = get_unit_bases(input_units)
# get the location of the event input files
- # TODO: assuming a single event for now
+ # TODO: assuming a single event for now # noqa: TD002
aim_event_input = aim_file['Events'][0]
data_dir = Path(aim_event_input['EventFolderPath'])
@@ -77,37 +80,38 @@ def write_RV(AIM_file, EVENT_file):
events = aim_event_input['Events']
# initialize the dictionary that will become EVENT.json
- event_file = {
- 'randomVariables': [],
- 'Events': []
- }
+ event_file = {'randomVariables': [], 'Events': []}
if len(events) > 1:
# if there is more than one event then we need random variables
# initialize the randomVariables part of the EVENT file
- event_file['randomVariables'].append({
- 'distribution': 'discrete_design_set_string',
- 'name': 'eventID',
- 'value': 'RV.eventID',
- 'elements': []
- })
+ event_file['randomVariables'].append(
+ {
+ 'distribution': 'discrete_design_set_string',
+ 'name': 'eventID',
+ 'value': 'RV.eventID',
+ 'elements': [],
+ }
+ )
# initialize the Events part of the EVENT file
- event_file['Events'].append({
- # 'type': 'Seismic', I am pretty sure we are not using this now
- # or we are using it incorrectly, so I removed it for the time being
- # and replaced it with the information that is actually used
- 'type': aim_event_input['type'],
- 'event_id': 'RV.eventID',
- 'unitScaleFactor': f_scale_units,
- 'units': input_unit_bases,
- 'data_dir': str(data_dir)
- })
+ event_file['Events'].append(
+ {
+ # 'type': 'Seismic', I am pretty sure we are not using this now
+ # or we are using it incorrectly, so I removed it for the time being
+ # and replaced it with the information that is actually used
+ 'type': aim_event_input['type'],
+ 'event_id': 'RV.eventID',
+ 'unitScaleFactor': f_scale_units,
+ 'units': input_unit_bases,
+ 'data_dir': str(data_dir),
+ }
+ )
# collect the filenames
- RV_elements = np.array(events).T[0].tolist()
- #for event in events:
+ RV_elements = np.array(events).T[0].tolist() # noqa: N806
+ # for event in events:
# #if event['EventClassification'] in ['Earthquake', 'Hurricane',
# # 'Flood']:
# #RV_elements.append(event['fileName'])
@@ -117,37 +121,45 @@ def write_RV(AIM_file, EVENT_file):
event_file['randomVariables'][0]['elements'] = RV_elements
else:
-
# if there is only one event, then we do not need random variables
# initialize the Events part of the EVENT file
- event_file['Events'].append({
- #'type': 'Seismic',
- 'type': aim_event_input['type'],
- 'event_id': events[0][0],
- 'unitScaleFactor': f_scale_units,
- 'units': input_unit_bases,
- 'data_dir': str(data_dir)
- })
+ event_file['Events'].append(
+ {
+ # 'type': 'Seismic',
+ 'type': aim_event_input['type'],
+ 'event_id': events[0][0],
+ 'unitScaleFactor': f_scale_units,
+ 'units': input_unit_bases,
+ 'data_dir': str(data_dir),
+ }
+ )
# if time histories are used, then load the first event
- # TODO: this is needed by some other code that should be fixed and this
+ # TODO: this is needed by some other code that should be fixed and this # noqa: TD002
# part should be removed.
if aim_event_input['type'] == 'timeHistory':
event_file['Events'][0].update(
- load_record(events[0][0], data_dir, empty=len(events) > 1))
- #, event_class = event_class))
+ load_record(events[0][0], data_dir, empty=len(events) > 1)
+ )
+ # , event_class = event_class))
# save the EVENT dictionary to a json file
- with open(EVENT_file, 'w', encoding="utf-8") as f:
+ with open(EVENT_file, 'w', encoding='utf-8') as f: # noqa: PTH123
json.dump(event_file, f, indent=2)
-def load_record(file_name, data_dir, f_scale_user=1.0,
- f_scale_units={'ALL':1.0}, empty=False):
- #event_class=None):
- #just in case
+def load_record( # noqa: D103
+ file_name,
+ data_dir,
+ f_scale_user=1.0,
+ f_scale_units={'ALL': 1.0}, # noqa: B006
+ empty=False, # noqa: FBT002
+):
+ # event_class=None):
+
+ # just in case
data_dir = Path(data_dir)
# extract the file name (the part after "x" is only for bookkeeping)
@@ -155,87 +167,88 @@ def load_record(file_name, data_dir, f_scale_user=1.0,
# open the input event data file
# (SimCenter json format is assumed here)
- with open(data_dir / '{}.json'.format(file_name), 'r', encoding="utf-8") as f:
+ with open(data_dir / f'{file_name}.json', encoding='utf-8') as f: # noqa: PTH123
event_data = json.load(f)
# check if Event File is already in EVENT format
- isEventFile = False
- if event_data.__contains__('Events'):
+ isEventFile = False # noqa: N806
+ if event_data.__contains__('Events'): # noqa: PLC2801
event_dic = event_data['Events'][0]
- #event_dic['dT'] = event_data['Events'][0]['dT']
- #event_dic['numSteps'] = event_data['Events'][0]['numSteps']
- #event_dic['timeSeries'] = event_data['Events'][0]['timeSeries']
- #event_dic['pattern'] = event_data['Events'][0]['pattern']
- return event_dic
+ # event_dic['dT'] = event_data['Events'][0]['dT']
+ # event_dic['numSteps'] = event_data['Events'][0]['numSteps']
+ # event_dic['timeSeries'] = event_data['Events'][0]['timeSeries']
+ # event_dic['pattern'] = event_data['Events'][0]['pattern']
+ return event_dic # noqa: RET504
- isEventFile = True
+ isEventFile = True # noqa: N806
- else:
+ else: # noqa: RET505
# initialize the internal EVENT file structure
event_dic = {
- 'name': file_name,
- 'dT' : event_data['dT'],
- 'numSteps': len(event_data['data_x']),
- 'timeSeries': [],
- 'pattern': []
- }
+ 'name': file_name,
+ 'dT': event_data['dT'],
+ 'numSteps': len(event_data['data_x']),
+ 'timeSeries': [],
+ 'pattern': [],
+ }
if not isEventFile:
- f_scale_units = f_scale_units.get('TH_file',f_scale_units.get('ALL', None))
+ f_scale_units = f_scale_units.get('TH_file', f_scale_units.get('ALL', None))
if f_scale_units is None:
- raise ValueError("No unit scaling is defined for time history data.")
+ raise ValueError('No unit scaling is defined for time history data.') # noqa: EM101, TRY003
f_scale = float(f_scale_units) * float(f_scale_user)
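+    # combined scale factor: unit conversion times any user-specified amplitude scaling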
# generate the event files
- # TODO: add 'z' later
+ # TODO: add 'z' later # noqa: TD002
for i, dir_ in enumerate(['x', 'y']):
-
- src_label = 'data_'+dir_
+ src_label = 'data_' + dir_
tar_label = src_label
# if there is data in the given direction in the input file
- if src_label in event_data.keys():
-
+ if src_label in event_data.keys(): # noqa: SIM118
# then load that data into the output EVENT file and scale it
- event_dic['timeSeries'].append({
- 'name': tar_label,
- 'type': 'Value',
- 'dT': event_data['dT'],
- 'data': list(np.array(event_data[src_label]) * f_scale)
- })
+ event_dic['timeSeries'].append(
+ {
+ 'name': tar_label,
+ 'type': 'Value',
+ 'dT': event_data['dT'],
+ 'data': list(np.array(event_data[src_label]) * f_scale),
+ }
+ )
# (empty is used when generating only random variables in write_RV)
if empty:
event_dic['timeSeries'][-1]['data'] = []
- # TODO: We will need to generalize this as soon as we add
+ # TODO: We will need to generalize this as soon as we add # noqa: TD002
# different types of time histories
# Assuming acceleration time history for now.
- event_dic['pattern'].append({
- 'type': 'UniformAcceleration',
- 'timeSeries': tar_label,
- 'dof': i + 1
- })
+ event_dic['pattern'].append(
+ {
+ 'type': 'UniformAcceleration',
+ 'timeSeries': tar_label,
+ 'dof': i + 1,
+ }
+ )
return event_dic
-def get_records(AIM_file, EVENT_file):
- """
- This function is only called if UQ is part of the workflow. That is, it is
- not called if we are using IMasEDP and skipping the response simulation.
- """
+def get_records(AIM_file, EVENT_file): # noqa: N803
+ """This function is only called if UQ is part of the workflow. That is, it is
+ not called if we are using IMasEDP and skipping the response simulation.
+ """ # noqa: D205, D401, D404
# load the AIM file
- with open(AIM_file, 'r', encoding="utf-8") as f:
- AIM_file = json.load(f)
+ with open(AIM_file, encoding='utf-8') as f: # noqa: PTH123
+ AIM_file = json.load(f) # noqa: N806
# load the EVENT file
- with open(EVENT_file, 'r', encoding="utf-8") as f:
+ with open(EVENT_file, encoding='utf-8') as f: # noqa: PTH123
event_file = json.load(f)
- #event_class = AIM_file['Events']['Events'][0]['EventClassification']
+ # event_class = AIM_file['Events']['Events'][0]['EventClassification']
# get the event_id to identify which event to load
# (the event id might have been randomly generated earlier)
@@ -246,11 +259,11 @@ def get_records(AIM_file, EVENT_file):
# get the scale factor if a user specified it
- event_data = np.array(AIM_file["Events"][0]["Events"]).T
+ event_data = np.array(AIM_file['Events'][0]['Events']).T
event_loc = np.where(event_data == event_id)[1][0]
f_scale_user = event_data.T[event_loc][1]
- #f_scale_user = dict([(evt['fileName'], evt.get('factor', 1.0))
+ # f_scale_user = dict([(evt['fileName'], evt.get('factor', 1.0))
# for evt in AIM_file["Events"]["Events"]])[event_id]
# get the location of the event data
@@ -258,32 +271,34 @@ def get_records(AIM_file, EVENT_file):
# load the event data and scale it
event_file['Events'][0].update(
- load_record(event_id, data_dir, f_scale_user, f_scale_units)) #, event_class = event_class))
+ load_record(event_id, data_dir, f_scale_user, f_scale_units)
+ ) # , event_class = event_class))
# save the updated EVENT file
- with open(EVENT_file, 'w', encoding="utf-8") as f:
+ with open(EVENT_file, 'w', encoding='utf-8') as f: # noqa: PTH123
json.dump(event_file, f, indent=2)
-if __name__ == '__main__':
+if __name__ == '__main__':
parser = argparse.ArgumentParser(
- "Read event input files (e.g. time history data, intensity measure "
- "fields and convert them to standard SimCenter EVENT.json files",
- allow_abbrev=False
+ 'Read event input files (e.g. time history data, intensity measure '
+        'fields) and convert them to standard SimCenter EVENT.json files',
+ allow_abbrev=False,
)
- parser.add_argument('--filenameAIM',
- help = "Name of the AIM file")
- parser.add_argument('--filenameEVENT',
- help = "Name of the EVENT file")
- parser.add_argument('--inputUnit',
- help = "Units of the data in the input file",
- default = None)
- parser.add_argument('--getRV',
- help = "If True, the application prepares on the RandomVariables in "
- "the EVENT file; otherwise it loads the appropriate EVENT data.",
+ parser.add_argument('--filenameAIM', help='Name of the AIM file')
+ parser.add_argument('--filenameEVENT', help='Name of the EVENT file')
+ parser.add_argument(
+ '--inputUnit', help='Units of the data in the input file', default=None
+ )
+ parser.add_argument(
+ '--getRV',
+        help='If True, the application only prepares the RandomVariables in '
+ 'the EVENT file; otherwise it loads the appropriate EVENT data.',
default=False,
- nargs='?', const=True)
+ nargs='?',
+ const=True,
+ )
args = parser.parse_args()
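
For context when reviewing the hunk above: the reformatted load_record still builds the same SimCenter EVENT structure as before. A minimal sketch of that layout, with made-up values and only the two translational components handled by the loop over ['x', 'y'] (the field names are taken from the code above; nothing here is new behaviour):

# Illustrative only: the shape of the dictionary that load_record returns.
example_event = {
    'name': 'record_0001x',  # hypothetical record name
    'dT': 0.01,
    'numSteps': 3,
    'timeSeries': [
        {'name': 'data_x', 'type': 'Value', 'dT': 0.01, 'data': [0.0, 0.02, -0.01]},
        {'name': 'data_y', 'type': 'Value', 'dT': 0.01, 'data': [0.0, 0.01, 0.00]},
    ],
    'pattern': [
        {'type': 'UniformAcceleration', 'timeSeries': 'data_x', 'dof': 1},
        {'type': 'UniformAcceleration', 'timeSeries': 'data_y', 'dof': 2},
    ],
}
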
diff --git a/modules/createEVENT/SurroundedBuildingCFD/SurroundedBuildingCFD.py b/modules/createEVENT/SurroundedBuildingCFD/SurroundedBuildingCFD.py
index 556c65796..d48fbddb6 100644
--- a/modules/createEVENT/SurroundedBuildingCFD/SurroundedBuildingCFD.py
+++ b/modules/createEVENT/SurroundedBuildingCFD/SurroundedBuildingCFD.py
@@ -1,103 +1,88 @@
-from __future__ import print_function
-import os, sys
-import re
+import argparse # noqa: CPY001, D100, INP001
import json
-import argparse
-class FloorForces:
+
+class FloorForces: # noqa: D101
def __init__(self):
self.X = [0]
self.Y = [0]
self.Z = [0]
-def directionToDof(direction):
- """
- Converts direction to degree of freedom
- """
- directioMap = {
- "X": 1,
- "Y": 2,
- "Z": 3
- }
+
+def directionToDof(direction): # noqa: N802
+ """Converts direction to degree of freedom""" # noqa: D400, D401
+ directioMap = {'X': 1, 'Y': 2, 'Z': 3} # noqa: N806
return directioMap[direction]
-def addFloorForceToEvent(patternsArray, force, direction, floor):
- """
- Add force (one component) time series and pattern in the event file
- """
- seriesName = "WindForceSeries_" + str(floor) + direction
- patternName = "WindForcePattern_" + str(floor) + direction
+def addFloorForceToEvent(patternsArray, force, direction, floor): # noqa: ARG001, N802, N803
+ """Add force (one component) time series and pattern in the event file""" # noqa: D400
+ seriesName = 'WindForceSeries_' + str(floor) + direction # noqa: N806
+ patternName = 'WindForcePattern_' + str(floor) + direction # noqa: N806
pattern = {
- "name": patternName,
- "timeSeries": seriesName,
- "type": "WindFloorLoad",
- "floor": str(floor),
- "dof": directionToDof(direction)
+ 'name': patternName,
+ 'timeSeries': seriesName,
+ 'type': 'WindFloorLoad',
+ 'floor': str(floor),
+ 'dof': directionToDof(direction),
}
patternsArray.append(pattern)
-def writeEVENT(forces, eventFilePath):
- """
- This method writes the EVENT.json file
- """
- patternsArray = []
- windEventJson = {
- "type" : "Wind",
- "subtype": "SurroundedBuildingCFD",
- "pattern": patternsArray,
- "pressure": [],
- "numSteps": len(forces[0].X),
- "units": {
- "force": "Newton",
- "length": "Meter",
- "time": "Sec"
- }
+def writeEVENT(forces, eventFilePath): # noqa: N802, N803
+ """This method writes the EVENT.json file""" # noqa: D400, D401, D404
+ patternsArray = [] # noqa: N806
+ windEventJson = { # noqa: N806
+ 'type': 'Wind',
+ 'subtype': 'SurroundedBuildingCFD',
+ 'pattern': patternsArray,
+ 'pressure': [],
+ 'numSteps': len(forces[0].X),
+ 'units': {'force': 'Newton', 'length': 'Meter', 'time': 'Sec'},
}
- #Creating the event dictionary that will be used to export the EVENT json file
- eventDict = {"randomVariables":[], "Events": [windEventJson]}
+ # Creating the event dictionary that will be used to export the EVENT json file
+ eventDict = {'randomVariables': [], 'Events': [windEventJson]} # noqa: N806
- #Adding floor forces
- for floorForces in forces:
+ # Adding floor forces
+ for floorForces in forces: # noqa: N806
floor = forces.index(floorForces) + 1
- addFloorForceToEvent(patternsArray, floorForces.X, "X", floor)
- addFloorForceToEvent(patternsArray, floorForces.Y, "Y", floor)
+ addFloorForceToEvent(patternsArray, floorForces.X, 'X', floor)
+ addFloorForceToEvent(patternsArray, floorForces.Y, 'Y', floor)
- with open(eventFilePath, "w") as eventsFile:
+ with open(eventFilePath, 'w') as eventsFile: # noqa: N806, PLW1514, PTH123
json.dump(eventDict, eventsFile)
-def GetFloorsCount(BIMFilePath):
- with open(BIMFilePath,'r') as BIMFile:
- bim = json.load(BIMFile)
+def GetFloorsCount(BIMFilePath): # noqa: N802, N803, D103
+ with open(BIMFilePath) as BIMFile: # noqa: N806, PLW1514, PTH123
+ bim = json.load(BIMFile)
- return int(bim["GeneralInformation"]["stories"])
-
-if __name__ == "__main__":
+ return int(bim['GeneralInformation']['stories'])
+
+
+if __name__ == '__main__':
"""
Entry point to generate event file using CFD
"""
- #CLI parser
- parser = argparse.ArgumentParser(description="Get sample EVENT file produced by CFD")
- parser.add_argument('-b', '--filenameAIM', help="BIM File", required=True)
- parser.add_argument('-e', '--filenameEVENT', help= "Event File", required=True)
- parser.add_argument('--getRV', help= "getRV", required=False, action='store_true')
-
- #parsing arguments
+ # CLI parser
+ parser = argparse.ArgumentParser(
+ description='Get sample EVENT file produced by CFD'
+ )
+ parser.add_argument('-b', '--filenameAIM', help='BIM File', required=True)
+ parser.add_argument('-e', '--filenameEVENT', help='Event File', required=True)
+ parser.add_argument('--getRV', help='getRV', required=False, action='store_true')
+
+ # parsing arguments
arguments, unknowns = parser.parse_known_args()
- if arguments.getRV == True:
- #Read the number of floors
- floorsCount = GetFloorsCount(arguments.filenameAIM)
+ if arguments.getRV == True: # noqa: E712
+ # Read the number of floors
+ floorsCount = GetFloorsCount(arguments.filenameAIM) # noqa: N816
forces = []
- for i in range(floorsCount):
- forces.append(FloorForces())
- #write the event file
+ for i in range(floorsCount): # noqa: B007
+ forces.append(FloorForces()) # noqa: PERF401
+ # write the event file
writeEVENT(forces, arguments.filenameEVENT)
-
-
-
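
A small self-contained sketch of the naming convention used by addFloorForceToEvent above (floor 2, X direction shown; the 'WindForceSeries_'/'WindForcePattern_' prefixes and the dof mapping come from the hunk, while the chosen floor and direction are illustrative):

# Reproduces the series/pattern naming and the direction-to-dof lookup.
direction_to_dof = {'X': 1, 'Y': 2, 'Z': 3}

floor, direction = 2, 'X'
series_name = 'WindForceSeries_' + str(floor) + direction
pattern_name = 'WindForcePattern_' + str(floor) + direction

print(series_name, pattern_name, direction_to_dof[direction])
# -> WindForceSeries_2X WindForcePattern_2X 1
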
diff --git a/modules/createEVENT/SurroundedBuildingCFD/foam_file_processor.py b/modules/createEVENT/SurroundedBuildingCFD/foam_file_processor.py
index 5d6e7a758..202998636 100644
--- a/modules/createEVENT/SurroundedBuildingCFD/foam_file_processor.py
+++ b/modules/createEVENT/SurroundedBuildingCFD/foam_file_processor.py
@@ -1,71 +1,72 @@
-# This script contains functions for reading and writing
-# OpenFoam dictionaries and filses.
+# This script contains functions for reading and writing # noqa: CPY001, D100, INP001
+# OpenFOAM dictionaries and files.
#
-import numpy as np
import os
-def find_keyword_line(dict_lines, keyword):
-
+import numpy as np
+
+
+def find_keyword_line(dict_lines, keyword): # noqa: D103
start_line = -1
-
+
count = 0
for line in dict_lines:
- l = line.lstrip(" ")
-
+ l = line.lstrip(' ') # noqa: E741
+
if l.startswith(keyword):
start_line = count
break
-
- count += 1
+
+ count += 1 # noqa: SIM113
return start_line
-
+
+
def write_foam_field(field, file_name):
- """
- Writes a given numpy two dimensional array to OpenFOAM
- field format. It can handel the following formats:
+ """Writes a given numpy two dimensional array to OpenFOAM
+ field format. It can handle the following formats:
pointField,
vectorField,
tensorField,
symmTensorField
- """
- if os.path.exists(file_name):
- os.remove(file_name)
+ """ # noqa: D205, D400, D401
+ if os.path.exists(file_name): # noqa: PTH110
+ os.remove(file_name) # noqa: PTH107
- foam_file = open(file_name, "w+")
+ foam_file = open(file_name, 'w+') # noqa: PLW1514, PTH123, SIM115
size = np.shape(field)
- foam_file.write("{}".format(size[0]))
+ foam_file.write(f'{size[0]}')
foam_file.write('\n(')
-
+
for i in range(size[0]):
- line = "\n("
+ line = '\n('
for j in range(size[1]):
- line += " {:.6e}".format(field[i,j])
- line += ")"
+ line += f' {field[i, j]:.6e}'
+ line += ')'
foam_file.write(line)
-
- foam_file.write('\n);')
+
+ foam_file.write('\n);')
foam_file.close()
+
def write_scalar_field(field, file_name):
- """
- Writes a given one dimensional numpy array to OpenFOAM
+ """Writes a given one dimensional numpy array to OpenFOAM
scalar field format.
- """
- if os.path.exists(file_name):
- os.remove(file_name)
+ """ # noqa: D205, D401
+ if os.path.exists(file_name): # noqa: PTH110
+ os.remove(file_name) # noqa: PTH107
- foam_file = open(file_name,"w+")
+ foam_file = open(file_name, 'w+') # noqa: PLW1514, PTH123, SIM115
size = np.shape(field)
- foam_file.write("{}".format(size[0]))
+ foam_file.write(f'{size[0]}')
foam_file.write('\n(')
-
+
for i in range(size[0]):
- foam_file.write("\n {:.6e}".format(field.flatten()[i]))
-
- foam_file.write('\n);')
- foam_file.close()
\ No newline at end of file
+ foam_file.write(f'\n {field.flatten()[i]:.6e}')
+
+ foam_file.write('\n);')
+ foam_file.close()
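
To make the vectorField layout produced by write_foam_field above easier to review, here is a short, self-contained sketch that prints the same text to stdout instead of writing a file (the two-row array is made up; the formatting mirrors the writer in the hunk):

import numpy as np

field = np.array([[1.0, 0.0, 0.0],
                  [0.5, 0.2, 0.0]])

text = f'{field.shape[0]}' + '\n('
for row in field:
    text += '\n(' + ''.join(f' {v:.6e}' for v in row) + ')'
text += '\n);'

print(text)
# 2
# (
# ( 1.000000e+00 0.000000e+00 0.000000e+00)
# ( 5.000000e-01 2.000000e-01 0.000000e+00)
# );
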
diff --git a/modules/createEVENT/SurroundedBuildingCFD/post_process_output.py b/modules/createEVENT/SurroundedBuildingCFD/post_process_output.py
index 49b04f4d8..f11de48cf 100644
--- a/modules/createEVENT/SurroundedBuildingCFD/post_process_output.py
+++ b/modules/createEVENT/SurroundedBuildingCFD/post_process_output.py
@@ -1,5 +1,4 @@
-# -*- coding: utf-8 -*-
-# Copyright (c) 2016-2017, The Regents of the University of California (Regents).
+# Copyright (c) 2016-2017, The Regents of the University of California (Regents). # noqa: D100, INP001
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
@@ -38,126 +37,124 @@
#
-# This script reads OpenFOAM output and plot the characteristics of the
-# approaching wind. For now, it read and plots only velocity field data and
-# pressure on predicted set of probes.
+# This script reads OpenFOAM output and plots the characteristics of the
+# approaching wind. For now, it reads and plots only velocity field data and
+# pressure on a predefined set of probes.
#
-import sys
-import os
-import subprocess
+import argparse
import json
-import stat
+import os
import shutil
from pathlib import Path
+
import numpy as np
-import matplotlib.pyplot as plt
-import matplotlib.gridspec as gridspec
-from scipy import signal
-from scipy.interpolate import interp1d
-from scipy.interpolate import UnivariateSpline
-from scipy import stats
-import pandas as pd
import plotly.graph_objects as go
from plotly.subplots import make_subplots
-import argparse
+from scipy import signal
+def readPressureProbes(fileName): # noqa: N802, N803
+ """Created on Wed May 16 14:31:42 2018
-def readPressureProbes(fileName):
- """
- Created on Wed May 16 14:31:42 2018
-
    Reads pressure probe data from OpenFOAM and returns the probe location, time, and the pressure
for each time step.
-
+
@author: Abiy
- """
+ """ # noqa: D400, D401
probes = []
p = []
- time = []
-
- with open(fileName, "r") as f:
+ time = []
+
+ with open(fileName) as f: # noqa: PLW1514, PTH123
for line in f:
if line.startswith('#'):
if line.startswith('# Probe'):
- line = line.replace('(','')
- line = line.replace(')','')
- line = line.split()
- probes.append([float(line[3]),float(line[4]),float(line[5])])
+ line = line.replace('(', '') # noqa: PLW2901
+ line = line.replace(')', '') # noqa: PLW2901
+ line = line.split() # noqa: PLW2901
+ probes.append([float(line[3]), float(line[4]), float(line[5])])
else:
continue
- else:
- line = line.split()
+ else:
+ line = line.split() # noqa: PLW2901
time.append(float(line[0]))
p_probe_i = np.zeros([len(probes)])
- for i in range(len(probes)):
+ for i in range(len(probes)):
p_probe_i[i] = float(line[i + 1])
p.append(p_probe_i)
-
+
probes = np.asarray(probes, dtype=np.float32)
time = np.asarray(time, dtype=np.float32)
p = np.asarray(p, dtype=np.float32)
-
+
return probes, time, p
+
def read_pressure_data(file_names):
- """
- This functions takes names of different OpenFOAM pressure measurements and connect
- them into one file removing overlaps if any. All the probes must be in the same
- location, otherwise an error might show up.
+    """This function takes names of different OpenFOAM pressure measurements and connects
+    them into one file, removing overlaps if any. All the probes must be in the same
+ location, otherwise an error might show up.
Parameters
----------
- *args
- List of file pashes of pressure data to be connected together.
+ *args
+        List of file paths of pressure data to be connected together.
Returns
-------
time, pressure
Returns the pressure time and pressure data of the connected file.
- """
- no_files = len(file_names)
- connected_time = [] # Connected array of time
+
+ """ # noqa: D205, D401, D404
+ no_files = len(file_names)
+ connected_time = [] # Connected array of time
connected_p = [] # connected array of pressure.
time1 = []
- p1 = []
+ p1 = []
time2 = []
- p2 = []
- probes= []
-
- for i in range(no_files):
- probes, time2, p2 = readPressureProbes(file_names[i])
-
- if i==0:
+ p2 = []
+ probes = []
+
+ for i in range(no_files):
+ probes, time2, p2 = readPressureProbes(file_names[i])
+
+ if i == 0:
connected_time = time2
- connected_p = p2
+ connected_p = p2
else:
try:
index = np.where(time2 > time1[-1])[0][0]
- # index += 1
+ # index += 1
- except:
- # sys.exit('Fatal Error!: the pressure filese have time gap')
- index = 0 # Joint them even if they have a time gap
+ except: # noqa: E722
+                # sys.exit('Fatal Error!: the pressure files have a time gap')
+                index = 0  # Join them even if they have a time gap
connected_time = np.concatenate((connected_time, time2[index:]))
connected_p = np.concatenate((connected_p, p2[index:]))
time1 = time2
- p1 = p2
+ p1 = p2 # noqa: F841
return probes, connected_time, connected_p
class PressureData:
- """
- A class that holds a pressure data and performs the following operations:
- - mean and rms pressure coefficients
- - peak pressure coefficients
- """
- def __init__(self, path, u_ref=0.0, rho=1.25, p_ref=0.0,
- start_time=None, end_time=None):
+    """A class that holds pressure data and performs the following operations:
+ - mean and rms pressure coefficients
+ - peak pressure coefficients
+ """ # noqa: D205, D400
+
+ def __init__(
+ self,
+ path,
+ u_ref=0.0,
+ rho=1.25,
+ p_ref=0.0,
+ start_time=None,
+ end_time=None,
+ ):
self.path = path
self.u_ref = u_ref
self.p_ref = p_ref
@@ -168,375 +165,369 @@ def __init__(self, path, u_ref=0.0, rho=1.25, p_ref=0.0,
self.__set_time()
self.Nt = len(self.time)
self.T = self.time[-1]
- self.z = self.probes[:,2]
- self.y = self.probes[:,1]
- self.x = self.probes[:,0]
- self.dt = np.mean(np.diff(self.time))
+ self.z = self.probes[:, 2]
+ self.y = self.probes[:, 1]
+ self.x = self.probes[:, 0]
+ self.dt = np.mean(np.diff(self.time))
self.probe_count = np.shape(self.probes)[0]
- def __read_cfd_data (self):
- if os.path.isdir(self.path):
- print("Reading from path : %s" % (self.path))
+ def __read_cfd_data(self):
+ if os.path.isdir(self.path): # noqa: PTH112
+ print('Reading from path : %s' % (self.path)) # noqa: T201, UP031
time_names = os.listdir(self.path)
- sorted_index = np.argsort(np.float_(time_names)).tolist()
+ sorted_index = np.argsort(np.float64(time_names)).tolist()
# print(sorted_index)
# print("\tTime directories: %s" %(time_names))
- file_names = []
-
+ file_names = []
+
for i in range(len(sorted_index)):
- file_name = os.path.join(self.path, time_names[sorted_index[i]],'p')
+ file_name = os.path.join(self.path, time_names[sorted_index[i]], 'p') # noqa: PTH118
file_names.append(file_name)
-
+
# print(file_names)
self.probes, self.time, self.p = read_pressure_data(file_names)
- self.p = self.rho*np.transpose(self.p) # OpenFOAM gives p/rho
+ self.p = self.rho * np.transpose(self.p) # OpenFOAM gives p/rho
# self.p = np.transpose(self.p) # OpenFOAM gives p/rho
else:
- print("Cannot find the file path: %s" % (self.path))
-
-
- def __set_time (self):
- if(self.start_time != None):
+ print('Cannot find the file path: %s' % (self.path)) # noqa: T201, UP031
+
+ def __set_time(self):
+ if self.start_time != None: # noqa: E711
start_index = int(np.argmax(self.time > self.start_time))
self.time = self.time[start_index:]
# self.cp = self.cp[:,start_index:]
- try:
- self.p = self.p[:,start_index:]
- except:
+ try: # noqa: SIM105
+ self.p = self.p[:, start_index:]
+ except: # noqa: S110, E722
pass
-
- if(self.end_time != None):
+ if self.end_time != None: # noqa: E711
end_index = int(np.argmax(self.time > self.end_time))
self.time = self.time[:end_index]
# self.cp = self.cp[:,:end_index]
- try:
- self.p = self.p[:,:end_index]
- except:
+ try: # noqa: SIM105
+ self.p = self.p[:, :end_index]
+ except: # noqa: S110, E722
pass
-
-
+def von_karman_spectrum(f, Uav, I, L, comp=0): # noqa: N803, E741, D103
+ psd = np.zeros(len(f)) # noqa: F841
-def von_karman_spectrum(f, Uav, I, L, comp=0):
-
- psd = np.zeros(len(f))
+ if comp == 0:
+ return (
+ 4.0
+ * np.power(I * Uav, 2.0)
+ * (L / Uav)
+ / np.power(1.0 + 70.8 * np.power(f * L / Uav, 2.0), 5.0 / 6.0)
+ )
- if comp==0:
- return 4.0*np.power(I*Uav, 2.0)*(L/Uav)/np.power(1.0 + 70.8*np.power(f*L/ Uav, 2.0), 5.0 / 6.0)
+ if comp == 1 or comp == 2: # noqa: RET503, PLR1714, PLR2004
+ return (
+ 4.0
+ * np.power(I * Uav, 2.0)
+ * (L / Uav)
+ * (1.0 + 188.4 * np.power(2.0 * f * L / Uav, 2.0))
+ / np.power(1.0 + 70.8 * np.power(2.0 * f * L / Uav, 2.0), 11.0 / 6.0)
+ )
- if comp==1 or comp==2:
- return 4.0*np.power(I*Uav, 2.0)*(L/Uav)*(1.0 + 188.4*np.power(2.0*f*L/Uav, 2.0)) /np.power(1.0 + 70.8*np.power(2.0*f*L/Uav, 2.0), 11.0/6.0)
def psd(x, dt, nseg):
- """
- Calculates the power spectral density of a given signal using the welch
- method.
+    """Calculates the power spectral density of a given signal using the Welch
+ method.
Parameters
----------
- x
- The time history of the signal.
+ x
+ The time history of the signal.
dt
- The time step .
+        The time step.
nseg
- The the number of segments to average the time series.
+        The number of segments to average the time series.
Returns
-------
freq, spectra
Returns the frequency and spectra of the signal
-
- """
+
+ """ # noqa: D205, D401
x_no_mean = x - np.mean(x)
- freq, spectra = signal.welch(x_no_mean, fs=1.0/dt, nperseg=len(x_no_mean)/nseg)
-
+ freq, spectra = signal.welch(
+ x_no_mean, fs=1.0 / dt, nperseg=len(x_no_mean) / nseg
+ )
+
return freq[1:], spectra[1:]
def write_open_foam_vector_field(p, file_name):
-
- """
- Writes a given vector-field (n x 3) array to OpenFOAM 'vectorField'
- format.
-
- """
- f = open(file_name,"w+")
- f.write('%d' % len(p[:,2]))
+ """Writes a given vector-field (n x 3) array to OpenFOAM 'vectorField'
+ format.
+
+ """ # noqa: D205, D401
+ f = open(file_name, 'w+') # noqa: PLW1514, PTH123, SIM115
+ f.write('%d' % len(p[:, 2]))
f.write('\n(')
- for i in range(len(p[:,2])):
- f.write('\n ({:.7e} {:.7e} {:.7e})'.format(p[i,0], p[i,1], p[i,2]))
-
- f.write('\n);')
- f.close()
+ for i in range(len(p[:, 2])):
+ f.write(f'\n ({p[i, 0]:.7e} {p[i, 1]:.7e} {p[i, 2]:.7e})')
+ f.write('\n);')
+ f.close()
-def read_openFoam_scalar_field(file_name):
-
- """
- Reads a given vectorField OpenFOAM into numpy (n x 3) array format.
- """
+def read_openFoam_scalar_field(file_name): # noqa: N802
+    """Reads a given scalarField OpenFOAM into numpy (n) array format."""  # noqa: D401
+ sField = [] # noqa: N806
- sField = []
-
- with open(file_name, "r") as f:
+ with open(file_name) as f: # noqa: PLW1514, PTH123
itrf = iter(f)
next(itrf)
for line in itrf:
- if line.startswith('(') or line.startswith(')'):
- continue
- else:
- line = line.split()
+ if line.startswith('(') or line.startswith(')'): # noqa: PIE810
+ continue
+ else: # noqa: RET507
+ line = line.split() # noqa: PLW2901
sField.append(float(line[0]))
-
- sField = np.asarray(sField, dtype=np.float32)
-
- return sField
+ sField = np.asarray(sField, dtype=np.float32) # noqa: N806
+
+ return sField # noqa: RET504
-def read_openFoam_vector_field(file_name):
-
- """
- Reads a given vectorField OpenFOAM into numpy (n x 3) array format.
- """
+def read_openFoam_vector_field(file_name): # noqa: N802
+ """Reads a given vectorField OpenFOAM into numpy (n x 3) array format.""" # noqa: D401
+ vField = [] # noqa: N806
- vField = []
-
- with open(file_name, "r") as f:
+ with open(file_name) as f: # noqa: PLW1514, PTH123
for line in f:
if line.startswith('('):
- line = line.replace('(','')
- line = line.replace(')','')
- line = line.split()
-
- if len(line) < 3:
+ line = line.replace('(', '') # noqa: PLW2901
+ line = line.replace(')', '') # noqa: PLW2901
+ line = line.split() # noqa: PLW2901
+
+ if len(line) < 3: # noqa: PLR2004
continue
-
- vField.append([float(line[0]),float(line[1]),float(line[2])])
-
- vField = np.asarray(vField, dtype=np.float32)
-
- return vField
+ vField.append([float(line[0]), float(line[1]), float(line[2])])
+ vField = np.asarray(vField, dtype=np.float32) # noqa: N806
-def read_openFoam_tensor_field(file_name):
-
- """
- Reads a given vectorField OpenFOAM into numpy (n x 3) array format.
- """
+ return vField # noqa: RET504
+
+
+def read_openFoam_tensor_field(file_name): # noqa: N802
+    """Reads a given tensorField OpenFOAM into numpy (n x 9) array format."""  # noqa: D401
+ vField = [] # noqa: N806
- vField = []
-
row_count = 9
- with open(file_name, "r") as f:
+ with open(file_name) as f: # noqa: PLW1514, PTH123
for line in f:
if line.startswith('('):
- line = line.replace('(','')
- line = line.replace(')','')
- line = line.split()
-
+ line = line.replace('(', '') # noqa: PLW2901
+ line = line.replace(')', '') # noqa: PLW2901
+ line = line.split() # noqa: PLW2901
+
if len(line) < row_count:
continue
-
+
row = np.zeros(row_count)
for i in range(row_count):
row[i] = float(line[i])
-
+
vField.append(row)
-
- vField = np.asarray(vField, dtype=np.float32)
-
- return vField
+ vField = np.asarray(vField, dtype=np.float32) # noqa: N806
-def read_openFoam_symmetric_tensor_field(file_name):
-
- """
- Reads a given vectorField OpenFOAM into numpy (n x 3) array format.
- """
+ return vField # noqa: RET504
+
+
+def read_openFoam_symmetric_tensor_field(file_name): # noqa: N802
+    """Reads a given symmTensorField OpenFOAM into numpy (n x 6) array format."""  # noqa: D401
+ vField = [] # noqa: N806
- vField = []
-
row_count = 6
- with open(file_name, "r") as f:
+ with open(file_name) as f: # noqa: PLW1514, PTH123
for line in f:
if line.startswith('('):
- line = line.replace('(','')
- line = line.replace(')','')
- line = line.split()
-
+ line = line.replace('(', '') # noqa: PLW2901
+ line = line.replace(')', '') # noqa: PLW2901
+ line = line.split() # noqa: PLW2901
+
if len(line) < row_count:
continue
-
+
row = np.zeros(row_count)
for i in range(row_count):
row[i] = float(line[i])
-
+
vField.append(row)
-
- vField = np.asarray(vField, dtype=np.float32)
-
- return vField
+ vField = np.asarray(vField, dtype=np.float32) # noqa: N806
+ return vField # noqa: RET504
def read_velocity_data(path):
- """
- This functions takes names of different OpenFOAM velocity measurements and connect
- them into one file removing overlaps if any. All the probes must be in the same
- location, otherwise an error might showup.
+    """This function takes names of different OpenFOAM velocity measurements and connects
+    them into one file, removing overlaps if any. All the probes must be in the same
+    location, otherwise an error might show up.
Parameters
----------
- *args
- List of file paths of velocity data to be connected together.
+ *args
+ List of file paths of velocity data to be connected together.
Returns
-------
    time, velocity
Returns the velocity time and velocity data of the connected file.
- """
- num_files = len(path)
- connected_time = [] # Connected array of time
- connected_U = [] # connected array of pressure.
+ """ # noqa: D205, D401, D404
+ num_files = len(path)
+ connected_time = [] # Connected array of time
+    connected_U = []  # connected array of velocity.  # noqa: N806
- time1 = []
- U1 = []
+ time1 = []
+ U1 = [] # noqa: N806
time2 = []
- U2 = []
+ U2 = [] # noqa: N806
probes = []
-
- for i in range(num_files):
- probes, time2, U2 = read_velocity_probes(path[i])
- if i != 0:
+
+ for i in range(num_files):
+ probes, time2, U2 = read_velocity_probes(path[i]) # noqa: N806
+ if i != 0:
try:
index = np.where(time2 > time1[-1])[0][0]
- except:
+ except: # noqa: E722
# sys.exit('Fatal Error!: the pressure files have time gap')
- index = 0 # Join them even if they have a time gap
+ index = 0 # Join them even if they have a time gap
connected_time = np.concatenate((connected_time, time2[index:]))
- connected_U = np.concatenate((connected_U, U2[index:]))
+ connected_U = np.concatenate((connected_U, U2[index:])) # noqa: N806
else:
connected_time = time2
- connected_U = U2
+ connected_U = U2 # noqa: N806
time1 = time2
- U1 = U2
+ U1 = U2 # noqa: N806, F841
shape = np.shape(connected_U)
- U = np.zeros((shape[1], shape[2], shape[0]))
-
+ U = np.zeros((shape[1], shape[2], shape[0])) # noqa: N806
+
for i in range(shape[1]):
for j in range(shape[2]):
- U[i,j,:] = connected_U[:,i,j]
+ U[i, j, :] = connected_U[:, i, j]
return probes, connected_time, U
-def read_velocity_probes(fileName):
- """
- Created on Wed May 16 14:31:42 2018
-
- Reads velocity probe data from OpenFOAM and return the probe location, time,
+
+def read_velocity_probes(fileName): # noqa: N803
+ """Created on Wed May 16 14:31:42 2018
+
+    Reads velocity probe data from OpenFOAM and returns the probe location, time,
and the velocity vector for each time step.
- """
+ """ # noqa: D400, D401
probes = []
- U = []
- time = []
-
- with open(fileName, "r") as f:
+ U = [] # noqa: N806
+ time = []
+
+ with open(fileName) as f: # noqa: PLW1514, PTH123
for line in f:
if line.startswith('#'):
if line.startswith('# Probe'):
- line = line.replace('(','')
- line = line.replace(')','')
- line = line.split()
+ line = line.replace('(', '') # noqa: PLW2901
+ line = line.replace(')', '') # noqa: PLW2901
+ line = line.split() # noqa: PLW2901
probes.append([float(line[3]), float(line[4]), float(line[5])])
else:
continue
- else:
- line = line.replace('(','')
- line = line.replace(')','')
- line = line.split()
+ else:
+ line = line.replace('(', '') # noqa: PLW2901
+ line = line.replace(')', '') # noqa: PLW2901
+ line = line.split() # noqa: PLW2901
try:
time.append(float(line[0]))
- except:
+ except: # noqa: S112, E722
continue
- u_probe_i = np.zeros([len(probes),3])
- for i in range(len(probes)):
- u_probe_i[i,:] = [float(line[3*i + 1]), float(line[3*i + 2]), float(line[3*i + 3])]
+ u_probe_i = np.zeros([len(probes), 3])
+ for i in range(len(probes)):
+ u_probe_i[i, :] = [
+ float(line[3 * i + 1]),
+ float(line[3 * i + 2]),
+ float(line[3 * i + 3]),
+ ]
U.append(u_probe_i)
-
+
probes = np.asarray(probes, dtype=np.float32)
time = np.asarray(time, dtype=np.float32)
- U = np.asarray(U, dtype=np.float32)
+ U = np.asarray(U, dtype=np.float32) # noqa: N806
return probes, time, U
+
def calculate_length_scale(u, uav, dt, min_corr=0.0):
-
- """
- Calculates the length scale of a velocity time history given.
-
- """
-
- u = u - np.mean(u)
-
- corr = signal.correlate(u, u, mode='full')
-
- u_std = np.std(u)
-
- corr = corr[int(len(corr)/2):]/(u_std**2*len(u))
-
- loc = np.argmax(corr < min_corr)
-
- corr = corr[:loc]
-
- L = uav*np.trapz(corr, dx=dt)
-
- return L
+    """Calculates the length scale of a given velocity time history."""  # noqa: D401
+ u = u - np.mean(u) # noqa: PLR6104
+
+ corr = signal.correlate(u, u, mode='full')
+
+ u_std = np.std(u)
+
+ corr = corr[int(len(corr) / 2) :] / (u_std**2 * len(u))
+
+ loc = np.argmax(corr < min_corr)
+
+ corr = corr[:loc]
+
+ L = uav * np.trapz(corr, dx=dt) # noqa: NPY201, N806
+
+ return L # noqa: RET504
-def psd(x, dt, nseg):
- """
- Calculates the power spectral density of a given signal using the welch
- method.
+
+def psd(x, dt, nseg): # noqa: F811
+    """Calculates the power spectral density of a given signal using the Welch
+ method.
Parameters
----------
- x
- The time history of the signal.
+ x
+ The time history of the signal.
dt
- The time step .
+        The time step.
nseg
- The the number of segments to average the time series.
+        The number of segments to average the time series.
Returns
-------
freq, spectra
Returns the frequency and spectra of the signal
-
- """
+
+ """ # noqa: D205, D401
x_no_mean = x - np.mean(x)
- freq, spectra = signal.welch(x_no_mean, fs=1.0/dt, nperseg=len(x_no_mean)/nseg)
-
+ freq, spectra = signal.welch(
+ x_no_mean, fs=1.0 / dt, nperseg=len(x_no_mean) / nseg
+ )
+
return freq[1:], spectra[1:]
+
class VelocityData:
- """
- A class that holds a velocity data and performs the following operations:
- - mean velocity profile
- - turbulence intensity profiles
- - integral scale of turbulence profiles
- """
- def __init__(self, path,sampling_rate=400, filter_data=False, filter_freq=400,
- start_time=None, end_time=None, resample_dt = None):
+    """A class that holds velocity data and performs the following operations:
+ - mean velocity profile
+ - turbulence intensity profiles
+ - integral scale of turbulence profiles
+ """ # noqa: D205, D400
+
+ def __init__(
+ self,
+ path,
+ sampling_rate=400,
+ filter_data=False, # noqa: FBT002
+ filter_freq=400,
+ start_time=None,
+ end_time=None,
+ resample_dt=None,
+ ):
self.path = path
self.sampling_rate = sampling_rate
self.filter_data = filter_data
@@ -545,502 +536,850 @@ def __init__(self, path,sampling_rate=400, filter_data=False, filter_freq=400,
self.end_time = end_time
self.component_count = 3
self.resample_dt = resample_dt
- self.__read_cfd_data()
+ self.__read_cfd_data()
self.__set_time()
self.Nt = len(self.time)
- self.T = self.time[-1]
- self.dt = np.mean(np.diff(self.time))
- self.f_max = 1.0/(2.0*self.dt)
+ self.T = self.time[-1]
+ self.dt = np.mean(np.diff(self.time))
+ self.f_max = 1.0 / (2.0 * self.dt)
self.probe_count = np.shape(self.probes)[0]
self.Np = self.probe_count
- self.z = self.probes[:,2]
- self.y = self.probes[:,1]
- self.x = self.probes[:,0]
+ self.z = self.probes[:, 2]
+ self.y = self.probes[:, 1]
+ self.x = self.probes[:, 0]
self.__filter_signal()
self.__calculate_all()
- def __read_cfd_data (self):
- if os.path.isdir(self.path):
- print("Reading from path : %s" % (self.path))
+ def __read_cfd_data(self):
+ if os.path.isdir(self.path): # noqa: PTH112
+ print('Reading from path : %s' % (self.path)) # noqa: T201, UP031
time_names = os.listdir(self.path)
- sorted_index = np.argsort(np.float_(time_names)).tolist()
- file_names = []
-
+ sorted_index = np.argsort(np.float64(time_names)).tolist()
+ file_names = []
+
for i in range(len(sorted_index)):
- file_name = os.path.join(self.path, time_names[sorted_index[i]], "U")
- file_names.append( file_name)
-
-
- self.probes, self.time, self.U = read_velocity_data(file_names)
-
- #Distance along the path of the profile
+ file_name = os.path.join(self.path, time_names[sorted_index[i]], 'U') # noqa: PTH118
+ file_names.append(file_name)
+
+ self.probes, self.time, self.U = read_velocity_data(file_names)
+
+ # Distance along the path of the profile
n_points = np.shape(self.probes)[0]
self.dist = np.zeros(n_points)
- for i in range(n_points-1):
- self.dist[i + 1] = self.dist[i] + np.linalg.norm(self.probes[i + 1, :] - self.probes[i, :])
-
+ for i in range(n_points - 1):
+ self.dist[i + 1] = self.dist[i] + np.linalg.norm(
+ self.probes[i + 1, :] - self.probes[i, :]
+ )
# Coefficient of variation
- cv = np.std(np.diff(self.time))/np.mean(np.diff(self.time))
-
- if cv > 1.0e-4:
+ cv = np.std(np.diff(self.time)) / np.mean(np.diff(self.time))
+
+ if cv > 1.0e-4: # noqa: PLR2004
self.__adjust_time_step()
else:
- print("Cannot find the file path: %s" % (self.path))
-
-
-
- def __adjust_time_step (self):
-
- if self.resample_dt == None:
- dt = np.mean(np.diff(self.time))
- else:
- dt = self.resample_dt
-
+ print('Cannot find the file path: %s' % (self.path)) # noqa: T201, UP031
+
+ def __adjust_time_step(self):
+ if self.resample_dt == None: # noqa: E711
+ dt = np.mean(np.diff(self.time))
+ else:
+ dt = self.resample_dt
+
time = np.arange(start=self.time[0], stop=self.time[-1], step=dt)
-
+
shape = np.shape(self.U)
-
- U = np.zeros((shape[0],shape[1],len(time)))
+
+ U = np.zeros((shape[0], shape[1], len(time))) # noqa: N806
for i in range(shape[0]):
for j in range(shape[1]):
U[i, j, :] = np.interp(time, self.time, self.U[i, j, :])
-
- self.time = time
- self.U = U
-
+ self.time = time
+ self.U = U
+
def __filter_signal(self):
if self.filter_data:
- low_pass = signal.butter(10, self.filter_freq,'lowpass', fs=self.sampling_rate, output='sos')
+ low_pass = signal.butter(
+ 10, self.filter_freq, 'lowpass', fs=self.sampling_rate, output='sos'
+ )
for i in range(self.probe_count):
for j in range(self.component_count):
- self.U[i,j,:] = signal.sosfilt(low_pass, self.U[i,j,:])
+ self.U[i, j, :] = signal.sosfilt(low_pass, self.U[i, j, :])
- def __set_time (self):
- if(self.start_time != None):
+ def __set_time(self):
+ if self.start_time != None: # noqa: E711
start_index = int(np.argmax(self.time > self.start_time))
self.time = self.time[start_index:]
- self.U = self.U[:,:,start_index:]
-
- if(self.end_time != None):
+ self.U = self.U[:, :, start_index:]
+
+ if self.end_time != None: # noqa: E711
end_index = int(np.argmax(self.time > self.end_time))
self.time = self.time[:end_index]
- self.U = self.U[:,:,:end_index]
+ self.U = self.U[:, :, :end_index]
def __calculate_all(self):
-
self.u = np.zeros((self.probe_count, self.component_count, self.Nt))
- #Calculate the mean velocity profile.
+ # Calculate the mean velocity profile.
- self.Uav = np.mean(self.U[:,0,:], axis=1)
+ self.Uav = np.mean(self.U[:, 0, :], axis=1)
+
+ # Calculate the turbulence intensity.
+ self.I = np.std(self.U, axis=2) # gets the standard deviation
+ self.Ru = np.var(self.U[:, 0, :], axis=1) # gets reynolds stress
+ self.Rv = np.var(self.U[:, 1, :], axis=1) # gets reynolds stress
+ self.Rw = np.var(self.U[:, 2, :], axis=1) # gets reynolds stress
-
- #Calculate the turbulence intensity.
- self.I = np.std(self.U, axis=2) # gets the standard deviation
- self.Ru = np.var(self.U[:, 0, :], axis=1) # gets reynolds stress
- self.Rv = np.var(self.U[:, 1, :], axis=1) # gets reynolds stress
- self.Rw = np.var(self.U[:, 2, :], axis=1) # gets reynolds stress
-
for i in range(self.component_count):
- self.I[:,i] = self.I[:,i]/self.Uav
-
-
- #Calculate the length scale profiles.
+ self.I[:, i] = self.I[:, i] / self.Uav # noqa: PLR6104
+
+ # Calculate the length scale profiles.
self.L = np.zeros((self.probe_count, self.component_count))
for i in range(self.probe_count):
for j in range(self.component_count):
- self.u[i,j,:] = self.U[i,j,:] - np.mean(self.U[i,j,:])
- self.L[i,j] = calculate_length_scale(self.u[i,j,:], self.Uav[i], self.dt, 0.05)
+ self.u[i, j, :] = self.U[i, j, :] - np.mean(self.U[i, j, :])
+ self.L[i, j] = calculate_length_scale(
+ self.u[i, j, :], self.Uav[i], self.dt, 0.05
+ )
-
- #Calculate the shear stress profiles.
+ # Calculate the shear stress profiles.
self.uv_bar = np.zeros(self.Np)
self.uw_bar = np.zeros(self.Np)
-
+
for i in range(self.Np):
- self.uv_bar[i] = np.cov(self.U[i,0,:], self.U[i,1,:])[0,1]
- self.uw_bar[i] = np.cov(self.U[i,0,:], self.U[i,2,:])[0,1]
+ self.uv_bar[i] = np.cov(self.U[i, 0, :], self.U[i, 1, :])[0, 1]
+ self.uw_bar[i] = np.cov(self.U[i, 0, :], self.U[i, 2, :])[0, 1]
+
+ def get_Uav(self, z): # noqa: N802, D102
+ from scipy import interpolate # noqa: PLC0415
- def get_Uav(self, z):
- from scipy import interpolate
-
f = interpolate.interp1d(self.z, self.Uav)
-
- return f(z)
+ return f(z)
def copy_vtk_planes_and_order(input_path, output_path, field):
- """
- This code reads VTK sample plane data from OpenFOAM case directory and
- copies them into other directory with all vtks files ordered in their
- respective time sequence in one directory.
-
+    """This code reads VTK sample plane data from an OpenFOAM case directory and
+    copies them into another directory, with all vtk files ordered in their
+    respective time sequence.
+
input_path: path of the vtk files in the postProcessing directory
    output_path: path to write the vtk files in order
- """
+ """ # noqa: D205, D401, D404
+ if not os.path.isdir(input_path): # noqa: PTH112
+ print(f'Cannot find the path for: {input_path}') # noqa: T201
+ return
- if not os.path.isdir(input_path):
- print("Cannot find the path for: {}".format(input_path))
- return
-
- if not os.path.isdir(output_path):
- print("Cannot find the path for: {}".format(output_path))
- return
-
-
- print("Reading from path: {}".format(input_path))
+ if not os.path.isdir(output_path): # noqa: PTH112
+ print(f'Cannot find the path for: {output_path}') # noqa: T201
+ return
+
+ print(f'Reading from path: {input_path}') # noqa: T201
time_names = os.listdir(input_path)
- times = np.float_(time_names)
+ times = np.float64(time_names)
sorted_index = np.argsort(times).tolist()
-
- n_times = len(times)
-
- print("\tNumber of time direcories: {} ".format(n_times))
- print("\tTime step: {:.4f} s".format(np.mean(np.diff(times))))
- print("\tTotal duration: {:.4f} s".format(times[sorted_index[-1]] - times[sorted_index[0]]))
-
-
+
+ n_times = len(times)
+
+ print(f'\tNumber of time directories: {n_times} ') # noqa: T201
+ print(f'\tTime step: {np.mean(np.diff(times)):.4f} s') # noqa: T201
+ print( # noqa: T201
+ f'\tTotal duration: {times[sorted_index[-1]] - times[sorted_index[0]]:.4f} s'
+ )
+
for i in range(n_times):
index = sorted_index[i]
- pathi = os.path.join(input_path, time_names[index])
+ pathi = os.path.join(input_path, time_names[index]) # noqa: PTH118
os.listdir(pathi)
-
- new_name = "{}_T{:04d}.vtk".format(field, i + 1)
+
+ new_name = f'{field}_T{i + 1:04d}.vtk'
for f in os.listdir(pathi):
- if f.endswith(".vtk"):
- new_path = os.path.join(output_path, new_name)
- old_path = os.path.join(pathi, f)
+ if f.endswith('.vtk'):
+ new_path = os.path.join(output_path, new_name) # noqa: PTH118
+ old_path = os.path.join(pathi, f) # noqa: PTH118
shutil.copyfile(old_path, new_path)
- print("Copied path: {}".format(old_path))
+ print(f'Copied path: {old_path}') # noqa: T201
+
-def plot_wind_profiles_and_spectra(case_path, output_path, prof_name):
-
- #Read JSON data
- json_path = os.path.join(case_path, "constant", "simCenter", "input", "EmptyDomainCFD.json")
- with open(json_path) as json_file:
+def plot_wind_profiles_and_spectra(case_path, output_path, prof_name): # noqa: D103
+ # Read JSON data
+ json_path = os.path.join( # noqa: PTH118
+ case_path, 'constant', 'simCenter', 'input', 'EmptyDomainCFD.json'
+ )
+ with open(json_path) as json_file: # noqa: PLW1514, PTH123
json_data = json.load(json_file)
-
+
# Returns JSON object as a dictionary
- wc_data = json_data["windCharacteristics"]
-
- ref_h = wc_data["referenceHeight"]
-
+ wc_data = json_data['windCharacteristics']
+
+ ref_h = wc_data['referenceHeight']
+
+ prof_path = os.path.join(case_path, 'postProcessing', prof_name) # noqa: PTH118
- prof_path = os.path.join(case_path, "postProcessing", prof_name)
-
prof = VelocityData(prof_path, start_time=None, end_time=None)
-
- #Create wind profile data profile z, Uav, Iu ..., Lu ...,
+    # Create wind profile data: z, Uav, Iu ..., Lu ...
prof_np = np.zeros((len(prof.z), 9))
- prof_np[:,0] = prof.z
- prof_np[:,1] = prof.Uav
- prof_np[:,2] = prof.I[:,0]
- prof_np[:,3] = prof.I[:,1]
- prof_np[:,4] = prof.I[:,2]
- prof_np[:,5] = prof.uw_bar
- prof_np[:,6] = prof.L[:,0]
- prof_np[:,7] = prof.L[:,1]
- prof_np[:,8] = prof.L[:,2]
-
-
-
- #Read the target wind profile data
- tar_path = os.path.join(case_path, "constant", "boundaryData", "inlet")
-
- tar_p = read_openFoam_vector_field(os.path.join(tar_path, "points"))
- tar_U = read_openFoam_scalar_field(os.path.join(tar_path, "U"))
- tar_R = read_openFoam_symmetric_tensor_field(os.path.join(tar_path, "R"))
- tar_L = read_openFoam_tensor_field(os.path.join(tar_path, "L"))
-
- tar_U_ref = np.interp(ref_h, tar_p[:,2], tar_U)
-
-
- tar_Iu = np.sqrt(tar_R[:, 0])/tar_U
- tar_Iv = np.sqrt(tar_R[:, 3])/tar_U
- tar_Iw = np.sqrt(tar_R[:, 5])/tar_U
+ prof_np[:, 0] = prof.z
+ prof_np[:, 1] = prof.Uav
+ prof_np[:, 2] = prof.I[:, 0]
+ prof_np[:, 3] = prof.I[:, 1]
+ prof_np[:, 4] = prof.I[:, 2]
+ prof_np[:, 5] = prof.uw_bar
+ prof_np[:, 6] = prof.L[:, 0]
+ prof_np[:, 7] = prof.L[:, 1]
+ prof_np[:, 8] = prof.L[:, 2]
+
+ # Read the target wind profile data
+ tar_path = os.path.join(case_path, 'constant', 'boundaryData', 'inlet') # noqa: PTH118
+
+ tar_p = read_openFoam_vector_field(os.path.join(tar_path, 'points')) # noqa: PTH118
+ tar_U = read_openFoam_scalar_field(os.path.join(tar_path, 'U')) # noqa: PTH118, N806
+ tar_R = read_openFoam_symmetric_tensor_field(os.path.join(tar_path, 'R')) # noqa: PTH118, N806
+ tar_L = read_openFoam_tensor_field(os.path.join(tar_path, 'L')) # noqa: PTH118, N806
+
+ tar_U_ref = np.interp(ref_h, tar_p[:, 2], tar_U) # noqa: N806, F841
+
+ tar_Iu = np.sqrt(tar_R[:, 0]) / tar_U # noqa: N806
+ tar_Iv = np.sqrt(tar_R[:, 3]) / tar_U # noqa: N806
+ tar_Iw = np.sqrt(tar_R[:, 5]) / tar_U # noqa: N806
tar_uw = tar_R[:, 2]
-
- tar_Lu = tar_L[:, 0]
- tar_Lv = tar_L[:, 3]
- tar_Lw = tar_L[:, 6]
-
- tar_I = np.zeros((3, len(tar_Iu)))
- tar_L = np.zeros((3, len(tar_Lu)))
-
- tar_I[0,:] = tar_Iu
- tar_I[1,:] = tar_Iv
- tar_I[2,:] = tar_Iw
-
- tar_L[0,:] = tar_Lu
- tar_L[1,:] = tar_Lv
- tar_L[2,:] = tar_Lw
-
-
- subplot_titles = ("Mean Velocity", "Turbulence Intensity, Iu", "Turbulence Intensity, Iv", "Turbulence Intensity, Iw",
- "Shear Stress", "Length Scale, Lu", "Length Scale, Lv", "Length Scale, Lw")
-
- fig = make_subplots(rows=2, cols=4, start_cell="top-left", subplot_titles=subplot_titles, vertical_spacing=0.15)
-
-
- fig.add_trace(go.Scatter(x=tar_U, y=tar_p[:,2], line=dict(color='black', width=3.0, dash='dot'),
- mode='lines', name='Target', ), row=1, col=1)
- fig.add_trace(go.Scatter(x=prof_np[:,1], y=prof_np[:,0], line=dict(color='firebrick', width=2.5),
- mode='lines+markers', name=prof_name, ), row=1, col=1)
-
- fig.update_xaxes(title_text="$U_{av} [m/s]$", range=[0, 1.25*np.max(prof_np[:,1])],
- showline=True, linewidth=1.5, linecolor='black',ticks='outside', row=1, col=1)
- fig.update_yaxes(title_text="$z [m]$", range=[0, 1.01*np.max(prof_np[:,0])], showline=True,
- linewidth=1.5, linecolor='black',ticks='outside', row=1, col=1)
-
+
+ tar_Lu = tar_L[:, 0] # noqa: N806
+ tar_Lv = tar_L[:, 3] # noqa: N806
+ tar_Lw = tar_L[:, 6] # noqa: N806
+
+ tar_I = np.zeros((3, len(tar_Iu))) # noqa: N806
+ tar_L = np.zeros((3, len(tar_Lu))) # noqa: N806
+
+ tar_I[0, :] = tar_Iu
+ tar_I[1, :] = tar_Iv
+ tar_I[2, :] = tar_Iw
+
+ tar_L[0, :] = tar_Lu
+ tar_L[1, :] = tar_Lv
+ tar_L[2, :] = tar_Lw
+
+ subplot_titles = (
+ 'Mean Velocity',
+ 'Turbulence Intensity, Iu',
+ 'Turbulence Intensity, Iv',
+ 'Turbulence Intensity, Iw',
+ 'Shear Stress',
+ 'Length Scale, Lu',
+ 'Length Scale, Lv',
+ 'Length Scale, Lw',
+ )
+
+ fig = make_subplots(
+ rows=2,
+ cols=4,
+ start_cell='top-left',
+ subplot_titles=subplot_titles,
+ vertical_spacing=0.15,
+ )
+
+ fig.add_trace(
+ go.Scatter(
+ x=tar_U,
+ y=tar_p[:, 2],
+ line=dict(color='black', width=3.0, dash='dot'), # noqa: C408
+ mode='lines',
+ name='Target',
+ ),
+ row=1,
+ col=1,
+ )
+ fig.add_trace(
+ go.Scatter(
+ x=prof_np[:, 1],
+ y=prof_np[:, 0],
+ line=dict(color='firebrick', width=2.5), # noqa: C408
+ mode='lines+markers',
+ name=prof_name,
+ ),
+ row=1,
+ col=1,
+ )
+
+ fig.update_xaxes(
+ title_text='$U_{av} [m/s]$',
+ range=[0, 1.25 * np.max(prof_np[:, 1])],
+ showline=True,
+ linewidth=1.5,
+ linecolor='black',
+ ticks='outside',
+ row=1,
+ col=1,
+ )
+ fig.update_yaxes(
+ title_text='$z [m]$',
+ range=[0, 1.01 * np.max(prof_np[:, 0])],
+ showline=True,
+ linewidth=1.5,
+ linecolor='black',
+ ticks='outside',
+ row=1,
+ col=1,
+ )
# Turbulence Intensity Iu
- fig.add_trace(go.Scatter(x=tar_Iu, y=tar_p[:,2], line=dict(color='black', width=3.0, dash='dot'),
- mode='lines', name='Target', ), row=1, col=2)
- fig.add_trace(go.Scatter(x=prof_np[:,2], y=prof_np[:,0], line=dict(color='firebrick', width=2.5),
- mode='lines+markers', name=prof_name, ), row=1, col=2)
- fig.update_xaxes(title_text="$I_{u}$", range=[0, 1.3*np.max(prof_np[:,2])],
- showline=True, linewidth=1.5, linecolor='black',ticks='outside', row=1, col=2)
- fig.update_yaxes(title_text="", range=[0, 1.01*np.max(prof_np[:,0])], showline=True,
- linewidth=1.5, linecolor='black',ticks='outside', row=1, col=2)
+ fig.add_trace(
+ go.Scatter(
+ x=tar_Iu,
+ y=tar_p[:, 2],
+ line=dict(color='black', width=3.0, dash='dot'), # noqa: C408
+ mode='lines',
+ name='Target',
+ ),
+ row=1,
+ col=2,
+ )
+ fig.add_trace(
+ go.Scatter(
+ x=prof_np[:, 2],
+ y=prof_np[:, 0],
+ line=dict(color='firebrick', width=2.5), # noqa: C408
+ mode='lines+markers',
+ name=prof_name,
+ ),
+ row=1,
+ col=2,
+ )
+ fig.update_xaxes(
+ title_text='$I_{u}$',
+ range=[0, 1.3 * np.max(prof_np[:, 2])],
+ showline=True,
+ linewidth=1.5,
+ linecolor='black',
+ ticks='outside',
+ row=1,
+ col=2,
+ )
+ fig.update_yaxes(
+ title_text='',
+ range=[0, 1.01 * np.max(prof_np[:, 0])],
+ showline=True,
+ linewidth=1.5,
+ linecolor='black',
+ ticks='outside',
+ row=1,
+ col=2,
+ )
# Turbulence Intensity Iv
- fig.add_trace(go.Scatter(x=tar_Iw, y=tar_p[:,2], line=dict(color='black', width=3.0, dash='dot'),
- mode='lines', name='Target', ), row=1, col=3)
- fig.add_trace(go.Scatter(x=prof_np[:,3], y=prof_np[:,0], line=dict(color='firebrick', width=2.5),
- mode='lines+markers', name=prof_name, ), row=1, col=3)
- fig.update_xaxes(title_text="$I_{v}$", range=[0, 1.3*np.max(prof_np[:,3])],
- showline=True, linewidth=1.5, linecolor='black',ticks='outside', row=1, col=3)
- fig.update_yaxes(title_text="", range=[0, 1.01*np.max(prof_np[:,0])], showline=True,
- linewidth=1.5, linecolor='black',ticks='outside', row=1, col=3)
+ fig.add_trace(
+ go.Scatter(
+ x=tar_Iw,
+ y=tar_p[:, 2],
+ line=dict(color='black', width=3.0, dash='dot'), # noqa: C408
+ mode='lines',
+ name='Target',
+ ),
+ row=1,
+ col=3,
+ )
+ fig.add_trace(
+ go.Scatter(
+ x=prof_np[:, 3],
+ y=prof_np[:, 0],
+ line=dict(color='firebrick', width=2.5), # noqa: C408
+ mode='lines+markers',
+ name=prof_name,
+ ),
+ row=1,
+ col=3,
+ )
+ fig.update_xaxes(
+ title_text='$I_{v}$',
+ range=[0, 1.3 * np.max(prof_np[:, 3])],
+ showline=True,
+ linewidth=1.5,
+ linecolor='black',
+ ticks='outside',
+ row=1,
+ col=3,
+ )
+ fig.update_yaxes(
+ title_text='',
+ range=[0, 1.01 * np.max(prof_np[:, 0])],
+ showline=True,
+ linewidth=1.5,
+ linecolor='black',
+ ticks='outside',
+ row=1,
+ col=3,
+ )
# Turbulence Intensity Iw
- fig.add_trace(go.Scatter(x=tar_Iw, y=tar_p[:,2], line=dict(color='black', width=3.0, dash='dot'),
- mode='lines', name='Target', ), row=1, col=4)
- fig.add_trace(go.Scatter(x=prof_np[:,4], y=prof_np[:,0], line=dict(color='firebrick', width=2.5),
- mode='lines+markers', name=prof_name, ), row=1, col=4)
- fig.update_xaxes(title_text="$I_{w}$", range=[0, 1.3*np.max(prof_np[:,4])],
- showline=True, linewidth=1.5, linecolor='black',ticks='outside', row=1, col=4)
- fig.update_yaxes(title_text="", range=[0, 1.01*np.max(prof_np[:,0])], showline=True,
- linewidth=1.5, linecolor='black',ticks='outside', row=1, col=4)
-
-
- # Shear Stress Profile
- fig.add_trace(go.Scatter(x=tar_uw, y=tar_p[:,2], line=dict(color='black', width=3.0, dash='dot'),
- mode='lines', name='Target', ), row=2, col=1)
- fig.add_trace(go.Scatter(x=prof_np[:,5], y=prof_np[:,0], line=dict(color='firebrick', width=2.5),
- mode='lines+markers', name=prof_name, ), row=2, col=1)
- fig.update_xaxes(title_text=r'$\overline{uw}$', range=[1.3*np.min(prof_np[:,5]), 1.5*np.max(prof_np[:,5])],
- showline=True, linewidth=1.5, linecolor='black',ticks='outside', row=2, col=1)
- fig.update_yaxes(title_text="$z [m]$", range=[0, 1.01*np.max(prof_np[:,0])], showline=True,
- linewidth=1.5, linecolor='black',ticks='outside', row=2, col=1)
-
+ fig.add_trace(
+ go.Scatter(
+ x=tar_Iw,
+ y=tar_p[:, 2],
+ line=dict(color='black', width=3.0, dash='dot'), # noqa: C408
+ mode='lines',
+ name='Target',
+ ),
+ row=1,
+ col=4,
+ )
+ fig.add_trace(
+ go.Scatter(
+ x=prof_np[:, 4],
+ y=prof_np[:, 0],
+ line=dict(color='firebrick', width=2.5), # noqa: C408
+ mode='lines+markers',
+ name=prof_name,
+ ),
+ row=1,
+ col=4,
+ )
+ fig.update_xaxes(
+ title_text='$I_{w}$',
+ range=[0, 1.3 * np.max(prof_np[:, 4])],
+ showline=True,
+ linewidth=1.5,
+ linecolor='black',
+ ticks='outside',
+ row=1,
+ col=4,
+ )
+ fig.update_yaxes(
+ title_text='',
+ range=[0, 1.01 * np.max(prof_np[:, 0])],
+ showline=True,
+ linewidth=1.5,
+ linecolor='black',
+ ticks='outside',
+ row=1,
+ col=4,
+ )
+
+ # Shear Stress Profile
+ fig.add_trace(
+ go.Scatter(
+ x=tar_uw,
+ y=tar_p[:, 2],
+ line=dict(color='black', width=3.0, dash='dot'), # noqa: C408
+ mode='lines',
+ name='Target',
+ ),
+ row=2,
+ col=1,
+ )
+ fig.add_trace(
+ go.Scatter(
+ x=prof_np[:, 5],
+ y=prof_np[:, 0],
+ line=dict(color='firebrick', width=2.5), # noqa: C408
+ mode='lines+markers',
+ name=prof_name,
+ ),
+ row=2,
+ col=1,
+ )
+ fig.update_xaxes(
+ title_text=r'$\overline{uw}$',
+ range=[1.3 * np.min(prof_np[:, 5]), 1.5 * np.max(prof_np[:, 5])],
+ showline=True,
+ linewidth=1.5,
+ linecolor='black',
+ ticks='outside',
+ row=2,
+ col=1,
+ )
+ fig.update_yaxes(
+ title_text='$z [m]$',
+ range=[0, 1.01 * np.max(prof_np[:, 0])],
+ showline=True,
+ linewidth=1.5,
+ linecolor='black',
+ ticks='outside',
+ row=2,
+ col=1,
+ )
# Length scale Lu
- fig.add_trace(go.Scatter(x=tar_Lu, y=tar_p[:,2], line=dict(color='black', width=3.0, dash='dot'),
- mode='lines', name='Target', ), row=2, col=2)
- fig.add_trace(go.Scatter(x=prof_np[:,6], y=prof_np[:,0], line=dict(color='firebrick', width=2.5),
- mode='lines+markers', name=prof_name, ), row=2, col=2)
- fig.update_xaxes(title_text="$L_{u} [m]$", range=[0, 1.5*np.max(prof_np[:,6])],
- showline=True, linewidth=1.5, linecolor='black',ticks='outside', row=2, col=2)
- fig.update_yaxes(title_text="", range=[0, 1.01*np.max(prof_np[:,0])], showline=True,
- linewidth=1.5, linecolor='black',ticks='outside', row=2, col=2)
-
+ fig.add_trace(
+ go.Scatter(
+ x=tar_Lu,
+ y=tar_p[:, 2],
+ line=dict(color='black', width=3.0, dash='dot'), # noqa: C408
+ mode='lines',
+ name='Target',
+ ),
+ row=2,
+ col=2,
+ )
+ fig.add_trace(
+ go.Scatter(
+ x=prof_np[:, 6],
+ y=prof_np[:, 0],
+ line=dict(color='firebrick', width=2.5), # noqa: C408
+ mode='lines+markers',
+ name=prof_name,
+ ),
+ row=2,
+ col=2,
+ )
+ fig.update_xaxes(
+ title_text='$L_{u} [m]$',
+ range=[0, 1.5 * np.max(prof_np[:, 6])],
+ showline=True,
+ linewidth=1.5,
+ linecolor='black',
+ ticks='outside',
+ row=2,
+ col=2,
+ )
+ fig.update_yaxes(
+ title_text='',
+ range=[0, 1.01 * np.max(prof_np[:, 0])],
+ showline=True,
+ linewidth=1.5,
+ linecolor='black',
+ ticks='outside',
+ row=2,
+ col=2,
+ )
# Length scale Lv
- fig.add_trace(go.Scatter(x=tar_Lv, y=tar_p[:,2], line=dict(color='black', width=3.0, dash='dot'),
- mode='lines', name='Target', ), row=2, col=3)
- fig.add_trace(go.Scatter(x=prof_np[:,7], y=prof_np[:,0], line=dict(color='firebrick', width=2.5),
- mode='lines+markers', name=prof_name, ), row=2, col=3)
- fig.update_xaxes(title_text="$L_{v} [m]$", range=[0, 1.5*np.max(prof_np[:,7])],
- showline=True, linewidth=1.5, linecolor='black',ticks='outside', row=2, col=3)
- fig.update_yaxes(title_text="", range=[0, 1.01*np.max(prof_np[:,0])], showline=True,
- linewidth=1.5, linecolor='black',ticks='outside', row=2, col=3)
-
+ fig.add_trace(
+ go.Scatter(
+ x=tar_Lv,
+ y=tar_p[:, 2],
+ line=dict(color='black', width=3.0, dash='dot'), # noqa: C408
+ mode='lines',
+ name='Target',
+ ),
+ row=2,
+ col=3,
+ )
+ fig.add_trace(
+ go.Scatter(
+ x=prof_np[:, 7],
+ y=prof_np[:, 0],
+ line=dict(color='firebrick', width=2.5), # noqa: C408
+ mode='lines+markers',
+ name=prof_name,
+ ),
+ row=2,
+ col=3,
+ )
+ fig.update_xaxes(
+ title_text='$L_{v} [m]$',
+ range=[0, 1.5 * np.max(prof_np[:, 7])],
+ showline=True,
+ linewidth=1.5,
+ linecolor='black',
+ ticks='outside',
+ row=2,
+ col=3,
+ )
+ fig.update_yaxes(
+ title_text='',
+ range=[0, 1.01 * np.max(prof_np[:, 0])],
+ showline=True,
+ linewidth=1.5,
+ linecolor='black',
+ ticks='outside',
+ row=2,
+ col=3,
+ )
# Length scale Lw
- fig.add_trace(go.Scatter(x=tar_Lw, y=tar_p[:,2], line=dict(color='black', width=3.0, dash='dot'),
- mode='lines', name='Target', ), row=2, col=4)
- fig.add_trace(go.Scatter(x=prof_np[:,8], y=prof_np[:,0], line=dict(color='firebrick', width=2.5),
- mode='lines+markers', name=prof_name, ), row=2, col=4)
- fig.update_xaxes(title_text="$L_{w} [m]$", range=[0, 1.5*np.max(prof_np[:,8])],
- showline=True, linewidth=1.5, linecolor='black',ticks='outside', row=2, col=4)
- fig.update_yaxes(title_text="", range=[0, 1.01*np.max(prof_np[:,0])], showline=True,
- linewidth=1.5, linecolor='black',ticks='outside', row=2, col=4)
-
-
- fig.update_layout(height=850, width=1200, title_text="",showlegend=False)
+ fig.add_trace(
+ go.Scatter(
+ x=tar_Lw,
+ y=tar_p[:, 2],
+ line=dict(color='black', width=3.0, dash='dot'), # noqa: C408
+ mode='lines',
+ name='Target',
+ ),
+ row=2,
+ col=4,
+ )
+ fig.add_trace(
+ go.Scatter(
+ x=prof_np[:, 8],
+ y=prof_np[:, 0],
+ line=dict(color='firebrick', width=2.5), # noqa: C408
+ mode='lines+markers',
+ name=prof_name,
+ ),
+ row=2,
+ col=4,
+ )
+ fig.update_xaxes(
+ title_text='$L_{w} [m]$',
+ range=[0, 1.5 * np.max(prof_np[:, 8])],
+ showline=True,
+ linewidth=1.5,
+ linecolor='black',
+ ticks='outside',
+ row=2,
+ col=4,
+ )
+ fig.update_yaxes(
+ title_text='',
+ range=[0, 1.01 * np.max(prof_np[:, 0])],
+ showline=True,
+ linewidth=1.5,
+ linecolor='black',
+ ticks='outside',
+ row=2,
+ col=4,
+ )
+
+ fig.update_layout(height=850, width=1200, title_text='', showlegend=False)
fig.show()
- fig.write_html(os.path.join(output_path, prof_name + ".html"), include_mathjax="cdn")
-
+ fig.write_html(
+ os.path.join(output_path, prof_name + '.html'), # noqa: PTH118
+ include_mathjax='cdn',
+ )
+ # Plot the spectra at four locations
- #Plot the spectra at four locations
-
- spec_h = ref_h*np.array([0.25, 0.50, 1.00, 2.00])
+ spec_h = ref_h * np.array([0.25, 0.50, 1.00, 2.00])
n_spec = len(spec_h)
nseg = 5
ncomp = 3
- ylabel = ['$fS_{u}/\sigma^2_{u}$',
- '$fS_{v}/\sigma^2_{v}$',
- '$fS_{w}/\sigma^2_{w}$']
-
+ ylabel = [
+ r'$fS_{u}/\sigma^2_{u}$',
+ r'$fS_{v}/\sigma^2_{v}$',
+ r'$fS_{w}/\sigma^2_{w}$',
+ ]
for i in range(n_spec):
- loc = np.argmin(np.abs(prof_np[:,0] - spec_h[i]))
-
- loc_tar = np.argmin(np.abs(tar_p[:,2] - spec_h[i]))
-
- subplot_titles = ("u-component", "v-component", "w-component")
- fig = make_subplots(rows=1, cols=3, start_cell="top-left", subplot_titles=subplot_titles, vertical_spacing=0.15)
-
- U_ref_prof = np.interp(spec_h[i], prof_np[:,0], prof_np[:,1])
- U_ref_tar = np.interp(spec_h[i], tar_p[:,2], tar_U)
-
- #Plot each component
+ loc = np.argmin(np.abs(prof_np[:, 0] - spec_h[i]))
+
+ loc_tar = np.argmin(np.abs(tar_p[:, 2] - spec_h[i]))
+
+ subplot_titles = ('u-component', 'v-component', 'w-component')
+ fig = make_subplots(
+ rows=1,
+ cols=3,
+ start_cell='top-left',
+ subplot_titles=subplot_titles,
+ vertical_spacing=0.15,
+ )
+
+ U_ref_prof = np.interp(spec_h[i], prof_np[:, 0], prof_np[:, 1]) # noqa: N806
+ U_ref_tar = np.interp(spec_h[i], tar_p[:, 2], tar_U) # noqa: N806
+
+ # Plot each component
for j in range(ncomp):
- freq, spec = psd(prof.u[loc, j,:], prof.dt, nseg)
-
- f_min = np.min(freq)/1.5
- f_max = 1.5*np.max(freq)
-
- u_var = np.var(prof.u[loc, j,:])
-
- spec = freq*spec/u_var
- freq = freq*spec_h[i]/U_ref_prof
-
-
- tar_Iz = tar_I[j,loc_tar]
- tar_Lz = tar_L[j,loc_tar]
-
-
- vonk_f = np.logspace(np.log10(f_min), np.log10(f_max), 200)
- vonk_psd = von_karman_spectrum(vonk_f, U_ref_tar, tar_Iz, tar_Lz, j)
-
- vonk_psd = vonk_f*vonk_psd/np.square(U_ref_tar*tar_Iz)
- vonk_f = vonk_f*spec_h[i]/U_ref_tar
-
-
- fig.add_trace(go.Scatter(x=freq, y=spec, line=dict(color='firebrick', width=1.5),
- mode='lines', name=prof_name, ), row=1, col=1+j)
- fig.add_trace(go.Scatter(x=vonk_f, y=vonk_psd, line=dict(color='black', width=3.0, dash='dot'),
- mode='lines', name='Target(von Karman)', ), row=1, col=1+j)
- fig.update_xaxes(type="log", title_text="$fz/U$",
- showline=True, linewidth=1.5, linecolor='black',ticks='outside', row=1, col=1+j)
- fig.update_yaxes(type="log", title_text=ylabel[j], showline=True,
- linewidth=1.5, linecolor='black',ticks='outside', row=1, col=1+j)
-
- fig.update_layout(height=450, width=1500, title_text="",showlegend=False)
- fig.show()
- fig.write_html(os.path.join(output_path, "spectra_" + prof_name + "_H" + str(1 + i) + ".html"), include_mathjax="cdn")
-
+ freq, spec = psd(prof.u[loc, j, :], prof.dt, nseg)
+
+ f_min = np.min(freq) / 1.5
+ f_max = 1.5 * np.max(freq)
+
+ u_var = np.var(prof.u[loc, j, :])
+ spec = freq * spec / u_var
+ freq = freq * spec_h[i] / U_ref_prof
+            tar_Iz = tar_I[j, loc_tar] # noqa: N806
+            tar_Lz = tar_L[j, loc_tar] # noqa: N806
+            vonk_f = np.logspace(np.log10(f_min), np.log10(f_max), 200)
+            vonk_psd = von_karman_spectrum(vonk_f, U_ref_tar, tar_Iz, tar_Lz, j)
+            vonk_psd = vonk_f * vonk_psd / np.square(U_ref_tar * tar_Iz)
+            vonk_f = vonk_f * spec_h[i] / U_ref_tar
+
+ fig.add_trace(
+ go.Scatter(
+ x=freq,
+ y=spec,
+ line=dict(color='firebrick', width=1.5), # noqa: C408
+ mode='lines',
+ name=prof_name,
+ ),
+ row=1,
+ col=1 + j,
+ )
+ fig.add_trace(
+ go.Scatter(
+ x=vonk_f,
+ y=vonk_psd,
+ line=dict(color='black', width=3.0, dash='dot'), # noqa: C408
+ mode='lines',
+ name='Target(von Karman)',
+ ),
+ row=1,
+ col=1 + j,
+ )
+ fig.update_xaxes(
+ type='log',
+ title_text='$fz/U$',
+ showline=True,
+ linewidth=1.5,
+ linecolor='black',
+ ticks='outside',
+ row=1,
+ col=1 + j,
+ )
+ fig.update_yaxes(
+ type='log',
+ title_text=ylabel[j],
+ showline=True,
+ linewidth=1.5,
+ linecolor='black',
+ ticks='outside',
+ row=1,
+ col=1 + j,
+ )
+
+ fig.update_layout(height=450, width=1500, title_text='', showlegend=False)
+ fig.show()
+ fig.write_html(
+ os.path.join( # noqa: PTH118
+ output_path, 'spectra_' + prof_name + '_H' + str(1 + i) + '.html'
+ ),
+ include_mathjax='cdn',
+ )
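The loop above reduces each velocity component to a non-dimensional spectrum, fS/sigma^2 plotted against fz/U, and overlays a von Karman target built from the target intensity and length scale. A minimal sketch of that normalization, assuming a one-sided PSD estimate (the in-file psd() and von_karman_spectrum() helpers are not shown in this hunk; scipy's Welch estimator stands in for psd() here, and all sample values are made up):

import numpy as np
from scipy.signal import welch

dt, z, U = 0.01, 10.0, 8.0                   # made-up time step, height and mean speed
u = np.random.randn(8192)                    # stand-in velocity fluctuation record
freq, spec = welch(u, fs=1.0 / dt, nperseg=len(u) // 5)
spec_nd = freq * spec / np.var(u)            # fS_u / sigma_u^2, the y-axis above
freq_nd = freq * z / U                       # fz/U, the x-axis above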
-def plot_pressure_profile(case_path, output_path, prof_name):
-
-    prof_path = os.path.join(case_path, "postProcessing", prof_name)
-    prof = PressureData(prof_path, start_time=1.0, end_time=None, u_ref=0.0, rho=1.25, p_ref=0.0)
+def plot_pressure_profile(case_path, output_path, prof_name): # noqa: D103
+ prof_path = os.path.join(case_path, 'postProcessing', prof_name) # noqa: PTH118
+
+ prof = PressureData(
+ prof_path, start_time=1.0, end_time=None, u_ref=0.0, rho=1.25, p_ref=0.0
+ )
std_p = np.std(prof.p, axis=1)
-
-
- subplot_titles = ("Pressure Fluctuation",)
-
- fig = make_subplots(rows=1, cols=1, start_cell="top-left", subplot_titles=subplot_titles, vertical_spacing=0.15)
+ subplot_titles = ('Pressure Fluctuation',)
+
+ fig = make_subplots(
+ rows=1,
+ cols=1,
+ start_cell='top-left',
+ subplot_titles=subplot_titles,
+ vertical_spacing=0.15,
+ )
# Plot pressure fluctuation Velocity
- fig.add_trace(go.Scatter(x=prof.x-np.min(prof.x), y=std_p, line=dict(color='firebrick', width=2.5),
- mode='lines+markers', name=prof_name, ), row=1, col=1)
-
- fig.update_xaxes(title_text="Distance from inlet (x) [m]", range=[np.min(prof.x-np.min(prof.x)), np.max(prof.x-np.min(prof.x))],
- showline=True, linewidth=1.5, linecolor='black',ticks='outside', row=1, col=1)
- fig.update_yaxes(title_text=r"Pressure R.M.S", range=[0, 1.15*np.max(std_p)], showline=True,
- linewidth=1.5, linecolor='black',ticks='outside', row=1, col=1)
-
+ fig.add_trace(
+ go.Scatter(
+ x=prof.x - np.min(prof.x),
+ y=std_p,
+ line=dict(color='firebrick', width=2.5), # noqa: C408
+ mode='lines+markers',
+ name=prof_name,
+ ),
+ row=1,
+ col=1,
+ )
+
+ fig.update_xaxes(
+ title_text='Distance from inlet (x) [m]',
+ range=[np.min(prof.x - np.min(prof.x)), np.max(prof.x - np.min(prof.x))],
+ showline=True,
+ linewidth=1.5,
+ linecolor='black',
+ ticks='outside',
+ row=1,
+ col=1,
+ )
+ fig.update_yaxes(
+ title_text=r'Pressure R.M.S',
+ range=[0, 1.15 * np.max(std_p)],
+ showline=True,
+ linewidth=1.5,
+ linecolor='black',
+ ticks='outside',
+ row=1,
+ col=1,
+ )
+
+ fig.update_layout(height=400, width=800, title_text='', showlegend=False)
+ fig.show()
+ fig.write_html(
+ os.path.join(output_path, 'pressure_' + prof_name + '.html'), # noqa: PTH118
+ include_mathjax='cdn',
+ )
- fig.update_layout(height=400, width=800, title_text="",showlegend=False)
- fig.show()
- fig.write_html(os.path.join(output_path, "pressure_" + prof_name + ".html"), include_mathjax="cdn")
-
-if __name__ == '__main__':
+if __name__ == '__main__':
""""
Entry point to read the simulation results from OpenFOAM case and post-process it.
"""
- #CLI parser
- parser = argparse.ArgumentParser(description="Get EVENT file from OpenFOAM output")
- parser.add_argument('-c', '--case', help="OpenFOAM case directory", required=True)
+ # CLI parser
+ parser = argparse.ArgumentParser(
+ description='Get EVENT file from OpenFOAM output'
+ )
+ parser.add_argument(
+ '-c', '--case', help='OpenFOAM case directory', required=True
+ )
arguments, unknowns = parser.parse_known_args()
-
- case_path = arguments.case
-
- print("Case full path: ", case_path)
+ case_path = arguments.case
+
+ print('Case full path: ', case_path) # noqa: T201
# prof_name = sys.argv[2]
- #Read JSON data
- json_path = os.path.join(case_path, "constant", "simCenter", "input", "EmptyDomainCFD.json")
- with open(json_path) as json_file:
- json_data = json.load(json_file)
-
+ # Read JSON data
+ json_path = os.path.join( # noqa: PTH118
+ case_path, 'constant', 'simCenter', 'input', 'EmptyDomainCFD.json'
+ )
+ with open(json_path) as json_file: # noqa: PLW1514, PTH123
+ json_data = json.load(json_file)
+
# Returns JSON object as a dictionary
- rm_data = json_data["resultMonitoring"]
-
+ rm_data = json_data['resultMonitoring']
+
wind_profiles = rm_data['windProfiles']
vtk_planes = rm_data['vtkPlanes']
-
- prof_output_path = os.path.join(case_path, "constant", "simCenter", "output", "windProfiles")
- #Check if it exists and remove files
- if os.path.exists(prof_output_path):
+ prof_output_path = os.path.join( # noqa: PTH118
+ case_path, 'constant', 'simCenter', 'output', 'windProfiles'
+ )
+
+ # Check if it exists and remove files
+ if os.path.exists(prof_output_path): # noqa: PTH110
shutil.rmtree(prof_output_path)
-
- #Create new path
+
+ # Create new path
Path(prof_output_path).mkdir(parents=True, exist_ok=True)
-
- #Plot velocity and pressure profiles
+
+ # Plot velocity and pressure profiles
for prof in wind_profiles:
- name = prof["name"]
- field = prof["field"]
- print(name)
- print(field)
-
- if field=="Velocity":
+ name = prof['name']
+ field = prof['field']
+ print(name) # noqa: T201
+ print(field) # noqa: T201
+
+ if field == 'Velocity':
plot_wind_profiles_and_spectra(case_path, prof_output_path, name)
-
- if field=="Pressure":
+
+ if field == 'Pressure':
plot_pressure_profile(case_path, prof_output_path, name)
-
-
+
# Copy the VTK files renamed
for pln in vtk_planes:
- name = pln["name"]
- field = pln["field"]
-
- vtk_path = os.path.join(case_path, "postProcessing", name)
- vtk_path_renamed = os.path.join(case_path, "postProcessing", name + "_renamed")
+ name = pln['name']
+ field = pln['field']
+
+ vtk_path = os.path.join(case_path, 'postProcessing', name) # noqa: PTH118
+ vtk_path_renamed = os.path.join( # noqa: PTH118
+ case_path, 'postProcessing', name + '_renamed'
+ )
Path(vtk_path_renamed).mkdir(parents=True, exist_ok=True)
copy_vtk_planes_and_order(vtk_path, vtk_path_renamed, field)
-
- #Check if it exists and remove files
- if os.path.exists(vtk_path):
- shutil.rmtree(vtk_path)
\ No newline at end of file
+
+ # Check if it exists and remove files
+ if os.path.exists(vtk_path): # noqa: PTH110
+ shutil.rmtree(vtk_path)
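For reference, the __main__ block above only accepts the case directory through -c/--case. A hedged sketch of driving it from another Python process (the script file name is assumed here, it is not visible in this hunk):

import subprocess

subprocess.run(
    ['python', 'post_process_output.py', '-c', '/path/to/openfoam/case'],
    check=True,
)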
diff --git a/modules/createEVENT/SurroundedBuildingCFD/setup_case.py b/modules/createEVENT/SurroundedBuildingCFD/setup_case.py
index b3545c0f6..786386e4a 100644
--- a/modules/createEVENT/SurroundedBuildingCFD/setup_case.py
+++ b/modules/createEVENT/SurroundedBuildingCFD/setup_case.py
@@ -1,85 +1,99 @@
-"""
-This script writes BC and initial condition, and setups the OpenFoam case
+"""This script writes BC and initial condition, and setups the OpenFoam case
directory.
-"""
-import numpy as np
-import sys
-import os
+""" # noqa: CPY001, D205, D404, INP001
+
import json
-import numpy as np
+import os
+import sys
+
import foam_file_processor as foam
+import numpy as np
from stl import mesh
-def create_building_geometry(width, depth, height, center):
- epsilon = 0.001*min(width, depth, height)
-
+def create_building_geometry(width, depth, height, center): # noqa: D103
+ epsilon = 0.001 * min(width, depth, height)
+
# Define the 8 vertices of the building
- vertices = np.array([[-depth/2.0, -width/2.0, -epsilon],
- [+depth/2.0, -width/2.0, -epsilon],
- [+depth/2.0, +width/2.0, -epsilon],
- [-depth/2.0, +width/2.0, -epsilon],
- [-depth/2.0, -width/2.0, height],
- [+depth/2.0, -width/2.0, height],
- [+depth/2.0, +width/2.0, height],
- [-depth/2.0, +width/2.0, height]])
-
+ vertices = np.array(
+ [
+ [-depth / 2.0, -width / 2.0, -epsilon],
+ [+depth / 2.0, -width / 2.0, -epsilon],
+ [+depth / 2.0, +width / 2.0, -epsilon],
+ [-depth / 2.0, +width / 2.0, -epsilon],
+ [-depth / 2.0, -width / 2.0, height],
+ [+depth / 2.0, -width / 2.0, height],
+ [+depth / 2.0, +width / 2.0, height],
+ [-depth / 2.0, +width / 2.0, height],
+ ]
+ )
+
vertices += center
# Define the 12 triangles composing the rectangular building
- faces = np.array([\
- [0,3,1],
- [1,3,2],
- [0,4,7],
- [0,7,3],
- [4,5,6],
- [4,6,7],
- [5,1,2],
- [5,2,6],
- [2,3,6],
- [3,7,6],
- [0,1,5],
- [0,5,4]])
-
+ faces = np.array(
+ [
+ [0, 3, 1],
+ [1, 3, 2],
+ [0, 4, 7],
+ [0, 7, 3],
+ [4, 5, 6],
+ [4, 6, 7],
+ [5, 1, 2],
+ [5, 2, 6],
+ [2, 3, 6],
+ [3, 7, 6],
+ [0, 1, 5],
+ [0, 5, 4],
+ ]
+ )
+
# Create the mesh
bldg = mesh.Mesh(np.zeros(faces.shape[0], dtype=mesh.Mesh.dtype))
for i, f in enumerate(faces):
for j in range(3):
- bldg.vectors[i][j] = vertices[f[j],:]
+ bldg.vectors[i][j] = vertices[f[j], :]
return bldg
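A hypothetical usage of create_building_geometry (the dimensions and file name are made up); it returns a numpy-stl Mesh that can be saved directly, which is what write_main_building_stl_file does further below:

import numpy as np

bldg = create_building_geometry(
    width=30.0, depth=20.0, height=60.0, center=np.array([0.0, 0.0, 0.0])
)
bldg.save('building.stl', mode=mesh.stl.Mode.ASCII)  # 'mesh' comes from the stl import above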
-def create_surroundings_geometry(main_bldg_width, main_bldg_depth, sur_bldg_width, sur_bldg_depth, sur_bldg_height,
- street_width_x, street_width_y, bound_radius, randomness=0.0):
-
+def create_surroundings_geometry( # noqa: D103
+ main_bldg_width,
+ main_bldg_depth,
+ sur_bldg_width,
+ sur_bldg_depth,
+ sur_bldg_height,
+ street_width_x,
+ street_width_y,
+ bound_radius,
+ randomness=0.0,
+):
plan_x = max(main_bldg_depth, sur_bldg_depth)
plan_y = max(main_bldg_width, sur_bldg_width)
-
- n_grid_x = int(2.0*bound_radius/(plan_x + street_width_x)) + 1
- n_grid_y = int(2.0*bound_radius/(plan_y + street_width_y)) + 1
+
+ n_grid_x = int(2.0 * bound_radius / (plan_x + street_width_x)) + 1
+ n_grid_y = int(2.0 * bound_radius / (plan_y + street_width_y)) + 1
if (n_grid_x % 2) == 0:
- n_grid_x -=1
+ n_grid_x -= 1
if (n_grid_y % 2) == 0:
- n_grid_y -=1
-
+ n_grid_y -= 1
- mid_ix = int(n_grid_x/2)
- mid_iy = int(n_grid_y/2)
+ mid_ix = int(n_grid_x / 2)
+ mid_iy = int(n_grid_y / 2)
copies = []
- min_h = 1.0 - randomness*0.95
- max_h = 1.0 + randomness*0.95
+ min_h = 1.0 - randomness * 0.95
+ max_h = 1.0 + randomness * 0.95
rand_f = np.random.uniform(min_h, max_h, (n_grid_x, n_grid_y))
- x_max = (street_width_x + plan_x)*n_grid_x - street_width_x
- y_max = (street_width_y + plan_y)*n_grid_y - street_width_y
-
+ x_max = (street_width_x + plan_x) * n_grid_x - street_width_x
+ y_max = (street_width_y + plan_y) * n_grid_y - street_width_y
+
# bound_radius = max(x_max, y_max)/2.0
for ix in range(n_grid_x):
@@ -88,250 +102,255 @@ def create_surroundings_geometry(main_bldg_width, main_bldg_depth, sur_bldg_widt
if ix == mid_ix and iy == mid_iy:
continue
- center_x = -x_max/2.0 + ix*street_width_x + plan_x*(ix + 0.5)
- center_y = -y_max/2.0 + iy*street_width_y + plan_y*(iy + 0.5)
+ center_x = -x_max / 2.0 + ix * street_width_x + plan_x * (ix + 0.5)
+ center_y = -y_max / 2.0 + iy * street_width_y + plan_y * (iy + 0.5)
# bldg_R = np.sqrt((abs(center_x) + sur_bldg_depth)**2.0 + (abs(center_y) + sur_bldg_width)**2.0)
- bldg_R = np.sqrt(center_x**2.0 + center_y**2.0)
+ bldg_R = np.sqrt(center_x**2.0 + center_y**2.0) # noqa: N806
- #Add the building if it's within bounding radius
+ # Add the building if it's within bounding radius
if bldg_R < bound_radius:
- bldg = create_building_geometry(sur_bldg_width, sur_bldg_depth, sur_bldg_height*rand_f[ix, iy],
- np.array([center_x, center_y, 0.0]))
+ bldg = create_building_geometry(
+ sur_bldg_width,
+ sur_bldg_depth,
+ sur_bldg_height * rand_f[ix, iy],
+ np.array([center_x, center_y, 0.0]),
+ )
copies.append(bldg)
- #Merge the buildings together into one geometric data
- combined = mesh.Mesh(np.concatenate([copy.data for copy in copies]), remove_duplicate_polygons=True)
+    # Merge the buildings together into a single mesh
+ combined = mesh.Mesh(
+ np.concatenate([copy.data for copy in copies]),
+ remove_duplicate_polygons=True,
+ )
# vertices = combined.vectors.reshape(-1, 3)
# unique_vertices, indices = np.unique(np.round(vertices, decimals=int(-np.log10(1e-6))), return_inverse=True, axis=0)
# merged_mesh = mesh.Mesh(np.zeros(len(indices) // 3, dtype=combined.dtype))
# merged_mesh.vectors = unique_vertices[indices].reshape(-1, 3, 3)
-
+
# print(combined.is_closed())
- return combined
+ return combined # noqa: RET504
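The grid sizing above keeps the number of surrounding-building cells odd in each direction so the centre cell (mid_ix, mid_iy) can be left empty for the main building. A small sketch of that arithmetic with made-up dimensions:

bound_radius, plan_x, street_width_x = 100.0, 20.0, 10.0  # made-up values
n_grid_x = int(2.0 * bound_radius / (plan_x + street_width_x)) + 1  # 7 for these numbers
if (n_grid_x % 2) == 0:
    n_grid_x -= 1          # force an odd count so a centre cell exists
mid_ix = int(n_grid_x / 2)  # index of the cell skipped for the main building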
-def write_main_building_stl_file(input_json_path, case_path):
-
-    #Read JSON data
-    with open(input_json_path + "/SurroundedBuildingCFD.json") as json_file:
-        json_data = json.load(json_file)
-
+def write_main_building_stl_file(input_json_path, case_path): # noqa: D103
+    # Read JSON data
+    with open(input_json_path + '/SurroundedBuildingCFD.json') as json_file: # noqa: PLW1514, PTH123
+        json_data = json.load(json_file)
geom_data = json_data['GeometricData']
- #Else create the STL file
- scale = geom_data['geometricScale']
- length_unit = json_data['lengthUnit']
+ # Else create the STL file
+ scale = geom_data['geometricScale']
+ length_unit = json_data['lengthUnit']
convert_to_meters = 1.0
- if length_unit=='m':
+ if length_unit == 'm':
convert_to_meters = 1.0
- elif length_unit=='cm':
+ elif length_unit == 'cm':
convert_to_meters = 0.01
- elif length_unit=='mm':
+ elif length_unit == 'mm':
convert_to_meters = 0.001
- elif length_unit=='ft':
+ elif length_unit == 'ft':
convert_to_meters = 0.3048
- elif length_unit=='in':
+ elif length_unit == 'in':
convert_to_meters = 0.0254
-
- #Convert from full-scale to model-scale
- B = convert_to_meters*geom_data['buildingWidth']/scale
- D = convert_to_meters*geom_data['buildingDepth']/scale
- H = convert_to_meters*geom_data['buildingHeight']/scale
-
+
+ # Convert from full-scale to model-scale
+ B = convert_to_meters * geom_data['buildingWidth'] / scale # noqa: N806
+ D = convert_to_meters * geom_data['buildingDepth'] / scale # noqa: N806
+ H = convert_to_meters * geom_data['buildingHeight'] / scale # noqa: N806
+
origin = np.array(geom_data['origin'])
- wind_dxn = geom_data['windDirection']
+ wind_dxn = geom_data['windDirection']
wind_dxn_rad = np.deg2rad(wind_dxn)
-
- bldg = create_building_geometry(B, D, H, origin)
-
- #Account for wind direction
+
+ bldg = create_building_geometry(B, D, H, origin)
+
+ # Account for wind direction
bldg.rotate([0.0, 0.0, 1.0], wind_dxn_rad)
-
+
# Write the mesh to file "building.stl"
- fmt = mesh.stl.Mode.ASCII # binary or ASCII format
+ fmt = mesh.stl.Mode.ASCII # binary or ASCII format
bldg.save(case_path + '/constant/geometry/building.stl', mode=fmt)
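The length-unit ladder above (repeated in several writers below) maps the SimCenter length unit to metres; a compact equivalent, given only as a sketch with the same conversion factors:

LENGTH_TO_METERS = {'m': 1.0, 'cm': 0.01, 'mm': 0.001, 'ft': 0.3048, 'in': 0.0254}
convert_to_meters = LENGTH_TO_METERS.get(length_unit, 1.0)  # default 1.0 mirrors the ladder above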
-def write_surrounding_buildings_stl_file(input_json_path, case_path):
-
- #Read JSON data
- with open(input_json_path + "/SurroundedBuildingCFD.json") as json_file:
- json_data = json.load(json_file)
-
+def write_surrounding_buildings_stl_file(input_json_path, case_path): # noqa: D103
+ # Read JSON data
+ with open(input_json_path + '/SurroundedBuildingCFD.json') as json_file: # noqa: PLW1514, PTH123
+ json_data = json.load(json_file)
+
geom_data = json_data['GeometricData']
sur_data = geom_data['surroundingBuildingsInformation']
- #Else create the STL file
- scale = geom_data['geometricScale']
- length_unit = json_data['lengthUnit']
+ # Else create the STL file
+ scale = geom_data['geometricScale']
+ length_unit = json_data['lengthUnit']
convert_to_meters = 1.0
- if length_unit=='m':
+ if length_unit == 'm':
convert_to_meters = 1.0
- elif length_unit=='cm':
+ elif length_unit == 'cm':
convert_to_meters = 0.01
- elif length_unit=='mm':
+ elif length_unit == 'mm':
convert_to_meters = 0.001
- elif length_unit=='ft':
+ elif length_unit == 'ft':
convert_to_meters = 0.3048
- elif length_unit=='in':
+ elif length_unit == 'in':
convert_to_meters = 0.0254
-
- #Convert from full-scale to model-scale
- B = convert_to_meters*geom_data['buildingWidth']/scale
- D = convert_to_meters*geom_data['buildingDepth']/scale
- Sb = convert_to_meters*sur_data['surroundingBuildingsWidth']/scale
- Sd = convert_to_meters*sur_data['surroundingBuildingsDepth']/scale
- Sh = convert_to_meters*sur_data['surroundingBuildingsHeight']/scale
- Swx = convert_to_meters*sur_data['streetWidthX']/scale
- Swy = convert_to_meters*sur_data['streetWidthY']/scale
- Rb = convert_to_meters*sur_data['boundingRadius']/scale
-
- #Normalize 0 to 1
- rand = sur_data['randomness']/100.0
-
- origin = np.array(geom_data['origin'])
- wind_dxn = geom_data['windDirection']
- wind_dxn_rad = np.deg2rad(wind_dxn)
+ # Convert from full-scale to model-scale
+ B = convert_to_meters * geom_data['buildingWidth'] / scale # noqa: N806
+ D = convert_to_meters * geom_data['buildingDepth'] / scale # noqa: N806
+ Sb = convert_to_meters * sur_data['surroundingBuildingsWidth'] / scale # noqa: N806
+ Sd = convert_to_meters * sur_data['surroundingBuildingsDepth'] / scale # noqa: N806
+ Sh = convert_to_meters * sur_data['surroundingBuildingsHeight'] / scale # noqa: N806
+ Swx = convert_to_meters * sur_data['streetWidthX'] / scale # noqa: N806
+ Swy = convert_to_meters * sur_data['streetWidthY'] / scale # noqa: N806
+ Rb = convert_to_meters * sur_data['boundingRadius'] / scale # noqa: N806
+
+ # Normalize 0 to 1
+ rand = sur_data['randomness'] / 100.0
+
+ origin = np.array(geom_data['origin']) # noqa: F841
+ wind_dxn = geom_data['windDirection']
+
+ wind_dxn_rad = np.deg2rad(wind_dxn)
surroundings = create_surroundings_geometry(B, D, Sb, Sd, Sh, Swx, Swy, Rb, rand)
-
- #Account for wind direction
+
+ # Account for wind direction
surroundings.rotate([0.0, 0.0, 1.0], wind_dxn_rad)
-
+
# Write the mesh to file "surroundings.stl"
- fmt = mesh.stl.Mode.ASCII # binary or ASCII format
+ fmt = mesh.stl.Mode.ASCII # binary or ASCII format
surroundings.save(case_path + '/constant/geometry/surroundings.stl', mode=fmt)
-def write_block_mesh_dict(input_json_path, template_dict_path, case_path):
+def write_block_mesh_dict(input_json_path, template_dict_path, case_path): # noqa: D103
+ # Read JSON data
+ with open(input_json_path + '/SurroundedBuildingCFD.json') as json_file: # noqa: PLW1514, PTH123
+ json_data = json.load(json_file)
- #Read JSON data
- with open(input_json_path + "/SurroundedBuildingCFD.json") as json_file:
- json_data = json.load(json_file)
-
# Returns JSON object as a dictionary
- mesh_data = json_data["blockMeshParameters"]
+ mesh_data = json_data['blockMeshParameters']
geom_data = json_data['GeometricData']
- boundary_data = json_data["boundaryConditions"]
+ boundary_data = json_data['boundaryConditions']
origin = np.array(geom_data['origin'])
- scale = geom_data['geometricScale']
-
- Lx = geom_data['domainLength']
- Ly = geom_data['domainWidth']
- Lz = geom_data['domainHeight']
- Lf = geom_data['fetchLength']
-
+ scale = geom_data['geometricScale'] # noqa: F841
+
+ Lx = geom_data['domainLength'] # noqa: N806
+ Ly = geom_data['domainWidth'] # noqa: N806
+ Lz = geom_data['domainHeight'] # noqa: N806
+ Lf = geom_data['fetchLength'] # noqa: N806
+
x_cells = mesh_data['xNumCells']
y_cells = mesh_data['yNumCells']
z_cells = mesh_data['zNumCells']
-
+
x_grading = mesh_data['xGrading']
y_grading = mesh_data['yGrading']
z_grading = mesh_data['zGrading']
-
- bc_map = {"slip": 'wall', "cyclic": 'cyclic', "noSlip": 'wall',
- "symmetry": 'symmetry', "empty": 'empty', "TInf": 'patch',
- "MeanABL": 'patch', "Uniform": 'patch', "zeroPressureOutlet": 'patch',
- "roughWallFunction": 'wall',"smoothWallFunction": 'wall'}
+ bc_map = {
+ 'slip': 'wall',
+ 'cyclic': 'cyclic',
+ 'noSlip': 'wall',
+ 'symmetry': 'symmetry',
+ 'empty': 'empty',
+ 'TInf': 'patch',
+ 'MeanABL': 'patch',
+ 'Uniform': 'patch',
+ 'zeroPressureOutlet': 'patch',
+ 'roughWallFunction': 'wall',
+ 'smoothWallFunction': 'wall',
+ }
inlet_type = bc_map[boundary_data['inletBoundaryCondition']]
outlet_type = bc_map[boundary_data['outletBoundaryCondition']]
- ground_type = bc_map[boundary_data['groundBoundaryCondition']]
+ ground_type = bc_map[boundary_data['groundBoundaryCondition']]
top_type = bc_map[boundary_data['topBoundaryCondition']]
front_type = bc_map[boundary_data['sidesBoundaryCondition']]
back_type = bc_map[boundary_data['sidesBoundaryCondition']]
length_unit = json_data['lengthUnit']
-
x_min = -Lf - origin[0]
- y_min = -Ly/2.0 - origin[1]
- z_min = 0.0 - origin[2]
+ y_min = -Ly / 2.0 - origin[1]
+ z_min = 0.0 - origin[2]
x_max = x_min + Lx
y_max = y_min + Ly
z_max = z_min + Lz
- #Open the template blockMeshDict (OpenFOAM file) for manipulation
- dict_file = open(template_dict_path + "/blockMeshDictTemplate", "r")
+ # Open the template blockMeshDict (OpenFOAM file) for manipulation
+ dict_file = open(template_dict_path + '/blockMeshDictTemplate') # noqa: PLW1514, PTH123, SIM115
- #Export to OpenFOAM probe format
+    # Read the template file into a list of lines
dict_lines = dict_file.readlines()
dict_file.close()
-
- dict_lines[17] = "\txMin\t\t{:.4f};\n".format(x_min)
- dict_lines[18] = "\tyMin\t\t{:.4f};\n".format(y_min)
- dict_lines[19] = "\tzMin\t\t{:.4f};\n".format(z_min)
+ dict_lines[17] = f'\txMin\t\t{x_min:.4f};\n'
+ dict_lines[18] = f'\tyMin\t\t{y_min:.4f};\n'
+ dict_lines[19] = f'\tzMin\t\t{z_min:.4f};\n'
- dict_lines[20] = "\txMax\t\t{:.4f};\n".format(x_max)
- dict_lines[21] = "\tyMax\t\t{:.4f};\n".format(y_max)
- dict_lines[22] = "\tzMax\t\t{:.4f};\n".format(z_max)
+ dict_lines[20] = f'\txMax\t\t{x_max:.4f};\n'
+ dict_lines[21] = f'\tyMax\t\t{y_max:.4f};\n'
+ dict_lines[22] = f'\tzMax\t\t{z_max:.4f};\n'
+ dict_lines[23] = f'\txCells\t\t{x_cells:d};\n'
+ dict_lines[24] = f'\tyCells\t\t{y_cells:d};\n'
+ dict_lines[25] = f'\tzCells\t\t{z_cells:d};\n'
- dict_lines[23] = "\txCells\t\t{:d};\n".format(x_cells)
- dict_lines[24] = "\tyCells\t\t{:d};\n".format(y_cells)
- dict_lines[25] = "\tzCells\t\t{:d};\n".format(z_cells)
-
- dict_lines[26] = "\txGrading\t{:.4f};\n".format(x_grading)
- dict_lines[27] = "\tyGrading\t{:.4f};\n".format(y_grading)
- dict_lines[28] = "\tzGrading\t{:.4f};\n".format(z_grading)
+ dict_lines[26] = f'\txGrading\t{x_grading:.4f};\n'
+ dict_lines[27] = f'\tyGrading\t{y_grading:.4f};\n'
+ dict_lines[28] = f'\tzGrading\t{z_grading:.4f};\n'
convert_to_meters = 1.0
- if length_unit=='m':
+ if length_unit == 'm':
convert_to_meters = 1.0
- elif length_unit=='cm':
+ elif length_unit == 'cm':
convert_to_meters = 0.01
- elif length_unit=='mm':
+ elif length_unit == 'mm':
convert_to_meters = 0.001
- elif length_unit=='ft':
+ elif length_unit == 'ft':
convert_to_meters = 0.3048
- elif length_unit=='in':
+ elif length_unit == 'in':
convert_to_meters = 0.0254
- dict_lines[31] = "convertToMeters {:.4f};\n".format(convert_to_meters)
- dict_lines[61] = " type {};\n".format(inlet_type)
- dict_lines[70] = " type {};\n".format(outlet_type)
- dict_lines[79] = " type {};\n".format(ground_type)
- dict_lines[88] = " type {};\n".format(top_type)
- dict_lines[97] = " type {};\n".format(front_type)
- dict_lines[106] = " type {};\n".format(back_type)
-
-
- write_file_name = case_path + "/system/blockMeshDict"
-
- if os.path.exists(write_file_name):
- os.remove(write_file_name)
-
- output_file = open(write_file_name, "w+")
+ dict_lines[31] = f'convertToMeters {convert_to_meters:.4f};\n'
+ dict_lines[61] = f' type {inlet_type};\n'
+ dict_lines[70] = f' type {outlet_type};\n'
+ dict_lines[79] = f' type {ground_type};\n'
+ dict_lines[88] = f' type {top_type};\n'
+ dict_lines[97] = f' type {front_type};\n'
+ dict_lines[106] = f' type {back_type};\n'
+
+ write_file_name = case_path + '/system/blockMeshDict'
+
+ if os.path.exists(write_file_name): # noqa: PTH110
+ os.remove(write_file_name) # noqa: PTH107
+
+ output_file = open(write_file_name, 'w+') # noqa: PLW1514, PTH123, SIM115
for line in dict_lines:
output_file.write(line)
output_file.close()
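write_block_mesh_dict patches the template by hard-coded line numbers (dict_lines[17] through dict_lines[106]), so it silently breaks if blockMeshDictTemplate is ever reflowed. A sketch of the keyword lookup already used by the other writers in this file, assuming the template line contains the literal keyword:

idx = foam.find_keyword_line(dict_lines, 'xMin')
dict_lines[idx] = f'\txMin\t\t{x_min:.4f};\n'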
-def write_snappy_hex_mesh_dict(input_json_path, template_dict_path, case_path):
-
-    #Read JSON data
-    with open(input_json_path + "/SurroundedBuildingCFD.json") as json_file:
-        json_data = json.load(json_file)
-
+def write_snappy_hex_mesh_dict(input_json_path, template_dict_path, case_path): # noqa: D103
+    # Read JSON data
+    with open(input_json_path + '/SurroundedBuildingCFD.json') as json_file: # noqa: PLW1514, PTH123
+        json_data = json.load(json_file)
# Returns JSON object as a dictionary
- mesh_data = json_data["snappyHexMeshParameters"]
+ mesh_data = json_data['snappyHexMeshParameters']
add_surface_refinement = mesh_data['addSurfaceRefinements']
building_stl_name = mesh_data['buildingSTLName']
surrounding_stl_name = mesh_data['surroundingsSTLName']
@@ -339,134 +358,139 @@ def write_snappy_hex_mesh_dict(input_json_path, template_dict_path, case_path):
surface_refinements = mesh_data['surfaceRefinements']
edge_refinements = mesh_data['edgeRefinements']
geom_data = json_data['GeometricData']
-
+
add_prism_layers = mesh_data['addPrismLayers']
number_of_prism_layers = mesh_data['numberOfPrismLayers']
prism_layer_expansion_ratio = mesh_data['prismLayerExpansionRatio']
final_prism_layer_thickness = mesh_data['finalPrismLayerThickness']
prism_layer_surface_name = mesh_data['prismLayerSurfaceName']
- prism_layer_relative_size = "on"
+ prism_layer_relative_size = 'on'
+ Lx = geom_data['domainLength'] # noqa: N806
+ Ly = geom_data['domainWidth'] # noqa: N806
+ Lz = geom_data['domainHeight'] # noqa: N806
+ Lf = geom_data['fetchLength'] # noqa: N806
- Lx = geom_data['domainLength']
- Ly = geom_data['domainWidth']
- Lz = geom_data['domainHeight']
- Lf = geom_data['fetchLength']
-
origin = np.array(geom_data['origin'])
-
+
num_cells_between_levels = mesh_data['numCellsBetweenLevels']
resolve_feature_angle = mesh_data['resolveFeatureAngle']
- num_processors = mesh_data['numProcessors']
-
+ num_processors = mesh_data['numProcessors'] # noqa: F841
+
refinement_boxes = mesh_data['refinementBoxes']
-
+
x_min = -Lf - origin[0]
- y_min = -Ly/2.0 - origin[1]
- z_min = 0.0 - origin[2]
+ y_min = -Ly / 2.0 - origin[1]
+ z_min = 0.0 - origin[2]
- x_max = x_min + Lx
+ x_max = x_min + Lx # noqa: F841
y_max = y_min + Ly
- z_max = z_min + Lz
-
- inside_point = [x_min + Lf/2.0, (y_min + y_max)/2.0, (z_min + z_max)/2.0]
+ z_max = z_min + Lz
+ inside_point = [x_min + Lf / 2.0, (y_min + y_max) / 2.0, (z_min + z_max) / 2.0]
- #Open the template blockMeshDict (OpenFOAM file) for manipulation
- dict_file = open(template_dict_path + "/snappyHexMeshDictTemplate", "r")
+    # Open the template snappyHexMeshDict (OpenFOAM file) for manipulation
+ dict_file = open(template_dict_path + '/snappyHexMeshDictTemplate') # noqa: PLW1514, PTH123, SIM115
- #Export to OpenFOAM probe format
+    # Read the template file into a list of lines
dict_lines = dict_file.readlines()
dict_file.close()
-
-
- #Write 'addLayers' switch
- start_index = foam.find_keyword_line(dict_lines, "addLayers")
- dict_lines[start_index] = "addLayers\t{};\n".format("off")
-
- ###################### Edit Geometry Section ##############################
-
- #Add refinement box geometry
- start_index = foam.find_keyword_line(dict_lines, "geometry") + 2
- added_part = ""
- n_boxes = len(refinement_boxes)
+
+ # Write 'addLayers' switch
+ start_index = foam.find_keyword_line(dict_lines, 'addLayers')
+ dict_lines[start_index] = 'addLayers\t{};\n'.format('off')
+
+ # Edit Geometry Section ##############################
+
+ # Add refinement box geometry
+ start_index = foam.find_keyword_line(dict_lines, 'geometry') + 2
+ added_part = ''
+ n_boxes = len(refinement_boxes)
for i in range(n_boxes):
- added_part += " {}\n".format(refinement_boxes[i][0])
- added_part += " {\n"
- added_part += " type searchableBox;\n"
- added_part += " min ({:.4f} {:.4f} {:.4f});\n".format(refinement_boxes[i][2], refinement_boxes[i][3], refinement_boxes[i][4])
- added_part += " max ({:.4f} {:.4f} {:.4f});\n".format(refinement_boxes[i][5], refinement_boxes[i][6], refinement_boxes[i][7])
- added_part += " }\n"
+ added_part += f' {refinement_boxes[i][0]}\n'
+ added_part += ' {\n'
+ added_part += ' type searchableBox;\n'
+ added_part += f' min ({refinement_boxes[i][2]:.4f} {refinement_boxes[i][3]:.4f} {refinement_boxes[i][4]:.4f});\n'
+ added_part += f' max ({refinement_boxes[i][5]:.4f} {refinement_boxes[i][6]:.4f} {refinement_boxes[i][7]:.4f});\n'
+ added_part += ' }\n'
dict_lines.insert(start_index, added_part)
- #Add building and surrounding stl geometry
- start_index = foam.find_keyword_line(dict_lines, "geometry") + 2
- added_part = ""
- added_part += " {}\n".format(building_stl_name)
- added_part += " {\n"
- added_part += " type triSurfaceMesh;\n"
- added_part += " file \"{}.stl\";\n".format(building_stl_name)
- added_part += " }\n"
-
- added_part += " {}\n".format(surrounding_stl_name)
- added_part += " {\n"
- added_part += " type triSurfaceMesh;\n"
- added_part += " file \"{}.stl\";\n".format(surrounding_stl_name)
- added_part += " }\n"
- dict_lines.insert(start_index, added_part)
-
- ################# Edit castellatedMeshControls Section ####################
-
- #Write 'nCellsBetweenLevels'
- start_index = foam.find_keyword_line(dict_lines, "nCellsBetweenLevels")
- dict_lines[start_index] = " nCellsBetweenLevels {:d};\n".format(num_cells_between_levels)
-
- #Write 'resolveFeatureAngle'
- start_index = foam.find_keyword_line(dict_lines, "resolveFeatureAngle")
- dict_lines[start_index] = " resolveFeatureAngle {:d};\n".format(resolve_feature_angle)
-
- #Write 'insidePoint'
- start_index = foam.find_keyword_line(dict_lines, "insidePoint")
- dict_lines[start_index] = " insidePoint ({:.4f} {:.4f} {:.4f});\n".format(inside_point[0], inside_point[1], inside_point[2])
-
- #For compatibility with OpenFOAM-9 and older
- start_index = foam.find_keyword_line(dict_lines, "locationInMesh")
- dict_lines[start_index] = " locationInMesh ({:.4f} {:.4f} {:.4f});\n".format(inside_point[0], inside_point[1], inside_point[2])
-
- # #Write 'outsidePoint' on Frontera snappyHex will fail without this keyword
+ # Add building and surrounding stl geometry
+ start_index = foam.find_keyword_line(dict_lines, 'geometry') + 2
+ added_part = ''
+ added_part += f' {building_stl_name}\n'
+ added_part += ' {\n'
+ added_part += ' type triSurfaceMesh;\n'
+ added_part += f' file "{building_stl_name}.stl";\n'
+ added_part += ' }\n'
+
+ added_part += f' {surrounding_stl_name}\n'
+ added_part += ' {\n'
+ added_part += ' type triSurfaceMesh;\n'
+ added_part += f' file "{surrounding_stl_name}.stl";\n'
+ added_part += ' }\n'
+ dict_lines.insert(start_index, added_part)
+
+ # Edit castellatedMeshControls Section ####################
+
+ # Write 'nCellsBetweenLevels'
+ start_index = foam.find_keyword_line(dict_lines, 'nCellsBetweenLevels')
+ dict_lines[start_index] = (
+ f' nCellsBetweenLevels {num_cells_between_levels:d};\n'
+ )
+
+ # Write 'resolveFeatureAngle'
+ start_index = foam.find_keyword_line(dict_lines, 'resolveFeatureAngle')
+ dict_lines[start_index] = f' resolveFeatureAngle {resolve_feature_angle:d};\n'
+
+ # Write 'insidePoint'
+ start_index = foam.find_keyword_line(dict_lines, 'insidePoint')
+ dict_lines[start_index] = (
+ f' insidePoint ({inside_point[0]:.4f} {inside_point[1]:.4f} {inside_point[2]:.4f});\n'
+ )
+
+ # For compatibility with OpenFOAM-9 and older
+ start_index = foam.find_keyword_line(dict_lines, 'locationInMesh')
+ dict_lines[start_index] = (
+ f' locationInMesh ({inside_point[0]:.4f} {inside_point[1]:.4f} {inside_point[2]:.4f});\n'
+ )
+
+ # #Write 'outsidePoint' on Frontera snappyHex will fail without this keyword
# start_index = foam.find_keyword_line(dict_lines, "outsidePoint")
# dict_lines[start_index] = " outsidePoint ({:.4e} {:.4e} {:.4e});\n".format(-1e-20, -1e-20, -1e20)
- #Add refinement edge
- if add_edge_refinement:
- start_index = foam.find_keyword_line(dict_lines, "features") + 2
- added_part = ""
-
- for edge in edge_refinements:
- added_part += " {\n"
- added_part += " file \"{}.eMesh\";\n".format(edge["name"])
- added_part += " level {};\n".format(edge["level"])
- added_part += " }\n"
-
+ # Add refinement edge
+ if add_edge_refinement:
+ start_index = foam.find_keyword_line(dict_lines, 'features') + 2
+ added_part = ''
+
+ for edge in edge_refinements:
+ added_part += ' {\n'
+ added_part += ' file "{}.eMesh";\n'.format(edge['name'])
+ added_part += ' level {};\n'.format(edge['level'])
+ added_part += ' }\n'
+
dict_lines.insert(start_index, added_part)
-
- #Add refinement surface
- if add_surface_refinement:
- start_index = foam.find_keyword_line(dict_lines, "refinementSurfaces") + 2
- added_part = ""
+
+ # Add refinement surface
+ if add_surface_refinement:
+ start_index = foam.find_keyword_line(dict_lines, 'refinementSurfaces') + 2
+ added_part = ''
for surf in surface_refinements:
- added_part += " {}\n".format(surf["name"])
- added_part += " {\n"
- added_part += " level ({} {});\n".format(surf["minLevel"], surf["maxLevel"])
- added_part += " patchInfo\n"
- added_part += " {\n"
- added_part += " type wall;\n"
- added_part += " }\n"
- added_part += " }\n"
-
+ added_part += ' {}\n'.format(surf['name'])
+ added_part += ' {\n'
+ added_part += ' level ({} {});\n'.format(
+ surf['minLevel'], surf['maxLevel']
+ )
+ added_part += ' patchInfo\n'
+ added_part += ' {\n'
+ added_part += ' type wall;\n'
+ added_part += ' }\n'
+ added_part += ' }\n'
+
dict_lines.insert(start_index, added_part)
-
+
# #Add surface refinement around the building as a refinement region
# if surface_refinements[-1]["minLevel"] > refinement_boxes[-1][1]:
# added_part = ""
@@ -475,930 +499,895 @@ def write_snappy_hex_mesh_dict(input_json_path, template_dict_path, case_path):
# added_part += " mode distance;\n"
# added_part += " levels (({:.4f} {}));\n".format(surface_refinement_distance, refinement_boxes[-1][1] + 1)
# added_part += " }\n"
-
- # start_index = foam.find_keyword_line(dict_lines, "refinementRegions") + 2
+
+ # start_index = foam.find_keyword_line(dict_lines, "refinementRegions") + 2
# dict_lines.insert(start_index, added_part)
- #Add box refinements
- added_part = ""
+ # Add box refinements
+ added_part = ''
for i in range(n_boxes):
- added_part += " {}\n".format(refinement_boxes[i][0])
- added_part += " {\n"
- added_part += " mode inside;\n"
- added_part += " level {};\n".format(refinement_boxes[i][1])
- added_part += " }\n"
-
- start_index = foam.find_keyword_line(dict_lines, "refinementRegions") + 2
+ added_part += f' {refinement_boxes[i][0]}\n'
+ added_part += ' {\n'
+ added_part += ' mode inside;\n'
+ added_part += f' level {refinement_boxes[i][1]};\n'
+ added_part += ' }\n'
+
+ start_index = foam.find_keyword_line(dict_lines, 'refinementRegions') + 2
dict_lines.insert(start_index, added_part)
-
- ####################### Edit PrismLayer Section ##########################
+ # Edit PrismLayer Section ##########################
if add_prism_layers:
- #Add surface layers (prism layers)
- added_part = ""
- added_part += " \"{}\"\n".format(prism_layer_surface_name)
- added_part += " {\n"
- added_part += " nSurfaceLayers {};\n".format(number_of_prism_layers)
- added_part += " }\n"
-
- start_index = foam.find_keyword_line(dict_lines, "layers") + 2
+ # Add surface layers (prism layers)
+ added_part = ''
+ added_part += f' "{prism_layer_surface_name}"\n'
+ added_part += ' {\n'
+ added_part += f' nSurfaceLayers {number_of_prism_layers};\n'
+ added_part += ' }\n'
+
+ start_index = foam.find_keyword_line(dict_lines, 'layers') + 2
dict_lines.insert(start_index, added_part)
- #Write 'relativeSizes'
- start_index = foam.find_keyword_line(dict_lines, "relativeSizes")
- dict_lines[start_index] = " relativeSizes {};\n".format(prism_layer_relative_size)
-
- #Write 'expansionRatio'
- start_index = foam.find_keyword_line(dict_lines, "expansionRatio")
- dict_lines[start_index] = " expansionRatio {:.4f};\n".format(prism_layer_expansion_ratio)
-
- #Write 'finalLayerThickness'
- start_index = foam.find_keyword_line(dict_lines, "finalLayerThickness")
- dict_lines[start_index] = " finalLayerThickness {:.4f};\n".format(final_prism_layer_thickness)
-
-
- #Write edited dict to file
- write_file_name = case_path + "/system/snappyHexMeshDict"
-
- if os.path.exists(write_file_name):
- os.remove(write_file_name)
-
- output_file = open(write_file_name, "w+")
+ # Write 'relativeSizes'
+ start_index = foam.find_keyword_line(dict_lines, 'relativeSizes')
+ dict_lines[start_index] = f' relativeSizes {prism_layer_relative_size};\n'
+
+ # Write 'expansionRatio'
+ start_index = foam.find_keyword_line(dict_lines, 'expansionRatio')
+ dict_lines[start_index] = (
+ f' expansionRatio {prism_layer_expansion_ratio:.4f};\n'
+ )
+
+ # Write 'finalLayerThickness'
+ start_index = foam.find_keyword_line(dict_lines, 'finalLayerThickness')
+ dict_lines[start_index] = (
+ f' finalLayerThickness {final_prism_layer_thickness:.4f};\n'
+ )
+
+ # Write edited dict to file
+ write_file_name = case_path + '/system/snappyHexMeshDict'
+
+ if os.path.exists(write_file_name): # noqa: PTH110
+ os.remove(write_file_name) # noqa: PTH107
+
+ output_file = open(write_file_name, 'w+') # noqa: PLW1514, PTH123, SIM115
for line in dict_lines:
output_file.write(line)
output_file.close()
-def write_surfaceFeaturesDict_file(input_json_path, template_dict_path, case_path):
-
- #Read JSON data
- with open(input_json_path + "/SurroundedBuildingCFD.json") as json_file:
- json_data = json.load(json_file)
-
- # Returns JSON object as a dictionary
- domain_data = json_data["snappyHexMeshParameters"]
- building_stl_name = domain_data['buildingSTLName']
- surroundings_stl_name = domain_data['surroundingsSTLName']
-
- #Open the template blockMeshDict (OpenFOAM file) for manipulation
- dict_file = open(template_dict_path + "/surfaceFeaturesDictTemplate", "r")
-
- #Export to OpenFOAM probe format
- dict_lines = dict_file.readlines()
- dict_file.close()
-
-
- #Write main building and surrounding buildings surface names
- start_index = foam.find_keyword_line(dict_lines, "surfaces")
- dict_lines[start_index] = "surfaces (\"{}.stl\" \"{}.stl\");\n".format(building_stl_name, surroundings_stl_name)
-
-
- #Write edited dict to file
- write_file_name = case_path + "/system/surfaceFeaturesDict"
-
- if os.path.exists(write_file_name):
- os.remove(write_file_name)
-
- output_file = open(write_file_name, "w+")
- for line in dict_lines:
- output_file.write(line)
- output_file.close()
+
+def write_surfaceFeaturesDict_file(input_json_path, template_dict_path, case_path): # noqa: N802, D103
+ # Read JSON data
+ with open(input_json_path + '/SurroundedBuildingCFD.json') as json_file: # noqa: PLW1514, PTH123
+ json_data = json.load(json_file)
+
+ # Returns JSON object as a dictionary
+ domain_data = json_data['snappyHexMeshParameters']
+ building_stl_name = domain_data['buildingSTLName']
+ surroundings_stl_name = domain_data['surroundingsSTLName']
+
+    # Open the template surfaceFeaturesDict (OpenFOAM file) for manipulation
+ dict_file = open(template_dict_path + '/surfaceFeaturesDictTemplate') # noqa: PLW1514, PTH123, SIM115
+
+    # Read the template file into a list of lines
+ dict_lines = dict_file.readlines()
+ dict_file.close()
+
+ # Write main building and surrounding buildings surface names
+ start_index = foam.find_keyword_line(dict_lines, 'surfaces')
+ dict_lines[start_index] = (
+ f'surfaces ("{building_stl_name}.stl" "{surroundings_stl_name}.stl");\n'
+ )
+
+ # Write edited dict to file
+ write_file_name = case_path + '/system/surfaceFeaturesDict'
+
+ if os.path.exists(write_file_name): # noqa: PTH110
+ os.remove(write_file_name) # noqa: PTH107
+
+ output_file = open(write_file_name, 'w+') # noqa: PLW1514, PTH123, SIM115
+ for line in dict_lines:
+ output_file.write(line)
+ output_file.close()
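Each writer above follows the same read-template / patch-lines / rewrite-file pattern. A sketch of a shared helper (the helper name is made up; it only combines calls already used in this file):

def patch_template(template_path, write_path, edits):
    with open(template_path) as f:
        lines = f.readlines()
    for keyword, new_line in edits:
        lines[foam.find_keyword_line(lines, keyword)] = new_line
    if os.path.exists(write_path):
        os.remove(write_path)
    with open(write_path, 'w+') as f:
        f.writelines(lines)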
def write_boundary_data_files(input_json_path, case_path):
- """
- This functions writes wind profile files in "constant/boundaryData/inlet"
- if TInf options are used for the simulation.
- """
- #Read JSON data
- with open(input_json_path + "/SurroundedBuildingCFD.json") as json_file:
- json_data = json.load(json_file)
+ """This functions writes wind profile files in "constant/boundaryData/inlet"
+ if TInf options are used for the simulation.
+ """ # noqa: D205, D401, D404
+ # Read JSON data
+ with open(input_json_path + '/SurroundedBuildingCFD.json') as json_file: # noqa: PLW1514, PTH123
+ json_data = json.load(json_file)
# Returns JSON object as a dictionary
- boundary_data = json_data["boundaryConditions"]
+ boundary_data = json_data['boundaryConditions']
- if boundary_data['inletBoundaryCondition']=="TInf":
-
+ if boundary_data['inletBoundaryCondition'] == 'TInf':
geom_data = json_data['GeometricData']
- wind_profiles = np.array(boundary_data["inflowProperties"]['windProfiles'])
+ wind_profiles = np.array(boundary_data['inflowProperties']['windProfiles'])
- bd_path = case_path + "/constant/boundaryData/inlet/"
+ bd_path = case_path + '/constant/boundaryData/inlet/'
- #Write points file
+ # Write points file
n_pts = np.shape(wind_profiles)[0]
- points = np.zeros((n_pts, 3))
-
+ points = np.zeros((n_pts, 3))
origin = np.array(geom_data['origin'])
-
- Ly = geom_data['domainWidth']
- Lf = geom_data['fetchLength']
-
+
+ Ly = geom_data['domainWidth'] # noqa: N806
+ Lf = geom_data['fetchLength'] # noqa: N806
+
x_min = -Lf - origin[0]
- y_min = -Ly/2.0 - origin[1]
+ y_min = -Ly / 2.0 - origin[1]
y_max = y_min + Ly
- points[:,0] = x_min
- points[:,1] = (y_min + y_max)/2.0
- points[:,2] = wind_profiles[:, 0]
+ points[:, 0] = x_min
+ points[:, 1] = (y_min + y_max) / 2.0
+ points[:, 2] = wind_profiles[:, 0]
- #Shift the last element of the y coordinate
- #a bit to make planer interpolation easier
+ # Shift the last element of the y coordinate
+    # a bit to make planar interpolation easier
points[-1:, 1] = y_max
-    foam.write_foam_field(points, bd_path + "points")
-
-    #Write wind speed file as a scalar field
-    foam.write_scalar_field(wind_profiles[:, 1], bd_path + "U")
-    #Write Reynolds stress profile (6 columns -> it's a symmetric tensor field)
-    foam.write_foam_field(wind_profiles[:, 2:8], bd_path + "R")
-    #Write length scale file (8 columns -> it's a tensor field)
-    foam.write_foam_field(wind_profiles[:, 8:17], bd_path + "L")
+    foam.write_foam_field(points, bd_path + 'points')
+    # Write wind speed file as a scalar field
+    foam.write_scalar_field(wind_profiles[:, 1], bd_path + 'U')
+    # Write Reynolds stress profile (6 columns -> it's a symmetric tensor field)
+    foam.write_foam_field(wind_profiles[:, 2:8], bd_path + 'R')
+    # Write length scale file (9 columns -> it's a tensor field)
+    foam.write_foam_field(wind_profiles[:, 8:17], bd_path + 'L')
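The slices above imply a fixed column layout for each row of windProfiles. A sketch of that assumed layout (the exact ordering of the tensor components is an assumption; only the column counts follow from the code):

z = wind_profiles[:, 0]      # height of each inlet point
U = wind_profiles[:, 1]      # mean wind speed, written as the scalar field 'U'
R = wind_profiles[:, 2:8]    # 6 components of the symmetric Reynolds-stress tensor
L = wind_profiles[:, 8:17]   # 9 components of the length-scale tensor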
-def write_U_file(input_json_path, template_dict_path, case_path):
- #Read JSON data
- with open(input_json_path + "/SurroundedBuildingCFD.json") as json_file:
- json_data = json.load(json_file)
+def write_U_file(input_json_path, template_dict_path, case_path): # noqa: N802, D103
+ # Read JSON data
+ with open(input_json_path + '/SurroundedBuildingCFD.json') as json_file: # noqa: PLW1514, PTH123
+ json_data = json.load(json_file)
# Returns JSON object as a dictionary
- boundary_data = json_data["boundaryConditions"]
- wind_data = json_data["windCharacteristics"]
-
-
- inlet_BC_type = boundary_data['inletBoundaryCondition']
- top_BC_type = boundary_data['topBoundaryCondition']
- sides_BC_type = boundary_data['sidesBoundaryCondition']
-
+ boundary_data = json_data['boundaryConditions']
+ wind_data = json_data['windCharacteristics']
+
+ inlet_BC_type = boundary_data['inletBoundaryCondition'] # noqa: N806
+ top_BC_type = boundary_data['topBoundaryCondition'] # noqa: N806
+ sides_BC_type = boundary_data['sidesBoundaryCondition'] # noqa: N806
+
wind_speed = wind_data['referenceWindSpeed']
building_height = wind_data['referenceHeight']
roughness_length = wind_data['aerodynamicRoughnessLength']
-
- #Open the template file (OpenFOAM file) for manipulation
- dict_file = open(template_dict_path + "/UFileTemplate", "r")
+ # Open the template file (OpenFOAM file) for manipulation
+ dict_file = open(template_dict_path + '/UFileTemplate') # noqa: PLW1514, PTH123, SIM115
dict_lines = dict_file.readlines()
dict_file.close()
-
- ##################### Internal Field #########################
- #Initialize the internal fields frow a lower velocity to avoid Courant number
- #instability when the solver starts. Now %10 of roof-height wind speed is set
- start_index = foam.find_keyword_line(dict_lines, "internalField")
+
+ # Internal Field #########################
+    # Initialize the internal fields from a lower velocity to avoid Courant number
+    # instability when the solver starts. Now 10% of the roof-height wind speed is set
+ start_index = foam.find_keyword_line(dict_lines, 'internalField')
# dict_lines[start_index] = "internalField uniform ({:.4f} 0 0);\n".format(1.0*wind_speed)
- #Set the internal field to zero to make it easy for the solver to start
- dict_lines[start_index] = "internalField uniform (0 0 0);\n"
-
-
- ###################### Inlet BC ##############################
- #Write uniform
- start_index = foam.find_keyword_line(dict_lines, "inlet") + 2
-
- if inlet_BC_type == "Uniform":
- added_part = ""
- added_part += "\t type \t fixedValue;\n"
- added_part += "\t value \t uniform ({:.4f} 0 0);\n".format(wind_speed)
-
- if inlet_BC_type == "MeanABL":
- added_part = ""
- added_part += "\t type \t atmBoundaryLayerInletVelocity;\n"
- added_part += "\t Uref \t {:.4f};\n".format(wind_speed)
- added_part += "\t Zref \t {:.4f};\n".format(building_height)
- added_part += "\t zDir \t (0.0 0.0 1.0);\n"
- added_part += "\t flowDir \t (1.0 0.0 0.0);\n"
- added_part += "\t z0 uniform \t {:.4e};\n".format(roughness_length)
- added_part += "\t zGround \t uniform 0.0;\n"
-
- if inlet_BC_type == "TInf":
- added_part = ""
- added_part += "\t type \t turbulentDFMInlet;\n"
- added_part += "\t filterType \t exponential;\n"
- added_part += "\t filterFactor \t {};\n".format(4)
- added_part += "\t value \t uniform ({:.4f} 0 0);\n".format(wind_speed)
- added_part += "\t periodicInY \t {};\n".format("true")
- added_part += "\t periodicInZ \t {};\n".format("false")
- added_part += "\t constMeanU \t {};\n".format("true")
- added_part += "\t Uref \t {:.4f};\n".format(wind_speed)
+ # Set the internal field to zero to make it easy for the solver to start
+ dict_lines[start_index] = 'internalField uniform (0 0 0);\n'
+
+ # Inlet BC ##############################
+ # Write uniform
+ start_index = foam.find_keyword_line(dict_lines, 'inlet') + 2
+
+ if inlet_BC_type == 'Uniform':
+ added_part = ''
+ added_part += '\t type \t fixedValue;\n'
+ added_part += f'\t value \t uniform ({wind_speed:.4f} 0 0);\n'
+
+ if inlet_BC_type == 'MeanABL':
+ added_part = ''
+ added_part += '\t type \t atmBoundaryLayerInletVelocity;\n'
+ added_part += f'\t Uref \t {wind_speed:.4f};\n'
+ added_part += f'\t Zref \t {building_height:.4f};\n'
+ added_part += '\t zDir \t (0.0 0.0 1.0);\n'
+ added_part += '\t flowDir \t (1.0 0.0 0.0);\n'
+ added_part += f'\t z0 uniform \t {roughness_length:.4e};\n'
+ added_part += '\t zGround \t uniform 0.0;\n'
+
+ if inlet_BC_type == 'TInf':
+ added_part = ''
+ added_part += '\t type \t turbulentDFMInlet;\n'
+ added_part += '\t filterType \t exponential;\n'
+ added_part += f'\t filterFactor \t {4};\n'
+ added_part += f'\t value \t uniform ({wind_speed:.4f} 0 0);\n'
+ added_part += '\t periodicInY \t {};\n'.format('true')
+ added_part += '\t periodicInZ \t {};\n'.format('false')
+ added_part += '\t constMeanU \t {};\n'.format('true')
+ added_part += f'\t Uref \t {wind_speed:.4f};\n'
dict_lines.insert(start_index, added_part)
- ###################### Outlet BC ##############################
-
- start_index = foam.find_keyword_line(dict_lines, "outlet") + 2
- added_part = ""
- added_part += "\t type \t inletOutlet;\n"
- added_part += "\t inletValue \t uniform (0 0 0);\n"
+ # Outlet BC ##############################
+
+ start_index = foam.find_keyword_line(dict_lines, 'outlet') + 2
+ added_part = ''
+ added_part += '\t type \t inletOutlet;\n'
+ added_part += '\t inletValue \t uniform (0 0 0);\n'
# added_part += "\t value \t uniform ({:.4f} 0 0);\n".format(wind_speed)
- added_part += "\t value \t uniform (0 0 0);\n"
-
- # added_part += "\t type zeroGradient;\n"
+ added_part += '\t value \t uniform (0 0 0);\n'
+ # added_part += "\t type zeroGradient;\n"
dict_lines.insert(start_index, added_part)
-
- ###################### Ground BC ##############################
-
- start_index = foam.find_keyword_line(dict_lines, "ground") + 2
- added_part = ""
- added_part += "\t type \t uniformFixedValue;\n"
- added_part += "\t value \t uniform (0 0 0);\n"
- added_part += "\t uniformValue \t constant (0 0 0);\n"
-
+
+ # Ground BC ##############################
+
+ start_index = foam.find_keyword_line(dict_lines, 'ground') + 2
+ added_part = ''
+ added_part += '\t type \t uniformFixedValue;\n'
+ added_part += '\t value \t uniform (0 0 0);\n'
+ added_part += '\t uniformValue \t constant (0 0 0);\n'
+
dict_lines.insert(start_index, added_part)
-
-
- ###################### Top BC ##############################
-
- start_index = foam.find_keyword_line(dict_lines, "top") + 2
- added_part = ""
- added_part += "\t type {};\n".format(top_BC_type)
-
+
+ # Top BC ##############################
+
+ start_index = foam.find_keyword_line(dict_lines, 'top') + 2
+ added_part = ''
+ added_part += f'\t type {top_BC_type};\n'
+
dict_lines.insert(start_index, added_part)
-
- ###################### Front BC ##############################
-
- start_index = foam.find_keyword_line(dict_lines, "front") + 2
- added_part = ""
- added_part += "\t type \t {};\n".format(sides_BC_type)
-
+
+ # Front BC ##############################
+
+ start_index = foam.find_keyword_line(dict_lines, 'front') + 2
+ added_part = ''
+ added_part += f'\t type \t {sides_BC_type};\n'
+
dict_lines.insert(start_index, added_part)
-
- ###################### Back BC ##############################
-
- start_index = foam.find_keyword_line(dict_lines, "back") + 2
- added_part = ""
- added_part += "\t type {};\n".format(sides_BC_type)
-
+
+ # Back BC ##############################
+
+ start_index = foam.find_keyword_line(dict_lines, 'back') + 2
+ added_part = ''
+ added_part += f'\t type {sides_BC_type};\n'
+
dict_lines.insert(start_index, added_part)
- ###################### Building BC ##############################
- start_index = foam.find_keyword_line(dict_lines, "building") + 2
- added_part = ""
- added_part += "\t type \t {};\n".format("noSlip")
-
+ # Building BC ##############################
+ start_index = foam.find_keyword_line(dict_lines, 'building') + 2
+ added_part = ''
+ added_part += '\t type \t {};\n'.format('noSlip')
+
dict_lines.insert(start_index, added_part)
- ###################### Surroundings BC ##############################
- start_index = foam.find_keyword_line(dict_lines, "surroundings") + 2
- added_part = ""
- added_part += "\t type \t {};\n".format("noSlip")
-
+ # Surroundings BC ##############################
+ start_index = foam.find_keyword_line(dict_lines, 'surroundings') + 2
+ added_part = ''
+ added_part += '\t type \t {};\n'.format('noSlip')
+
dict_lines.insert(start_index, added_part)
+ # Write edited dict to file
+ write_file_name = case_path + '/0/U'
+
+ if os.path.exists(write_file_name): # noqa: PTH110
+ os.remove(write_file_name) # noqa: PTH107
- #Write edited dict to file
- write_file_name = case_path + "/0/U"
-
- if os.path.exists(write_file_name):
- os.remove(write_file_name)
-
- output_file = open(write_file_name, "w+")
+ output_file = open(write_file_name, 'w+') # noqa: PLW1514, PTH123, SIM115
for line in dict_lines:
output_file.write(line)
output_file.close()
-
-def write_p_file(input_json_path, template_dict_path, case_path):
- #Read JSON data
- with open(input_json_path + "/SurroundedBuildingCFD.json") as json_file:
- json_data = json.load(json_file)
+def write_p_file(input_json_path, template_dict_path, case_path): # noqa: D103
+ # Read JSON data
+ with open(input_json_path + '/SurroundedBuildingCFD.json') as json_file: # noqa: PLW1514, PTH123
+ json_data = json.load(json_file)
# Returns JSON object as a dictionary
- boundary_data = json_data["boundaryConditions"]
-
- sides_BC_type = boundary_data['sidesBoundaryCondition']
- top_BC_type = boundary_data['topBoundaryCondition']
+ boundary_data = json_data['boundaryConditions']
+ sides_BC_type = boundary_data['sidesBoundaryCondition'] # noqa: N806
+ top_BC_type = boundary_data['topBoundaryCondition'] # noqa: N806
- #Open the template file (OpenFOAM file) for manipulation
- dict_file = open(template_dict_path + "/pFileTemplate", "r")
+ # Open the template file (OpenFOAM file) for manipulation
+ dict_file = open(template_dict_path + '/pFileTemplate') # noqa: PLW1514, PTH123, SIM115
dict_lines = dict_file.readlines()
dict_file.close()
-
-
- #BC and initial condition
- p0 = 0.0;
+ # BC and initial condition
+ p0 = 0.0
+ # Internal Field #########################
- ##################### Internal Field #########################
-
- start_index = foam.find_keyword_line(dict_lines, "internalField")
- dict_lines[start_index] = "internalField uniform {:.4f};\n".format(p0)
+ start_index = foam.find_keyword_line(dict_lines, 'internalField')
+ dict_lines[start_index] = f'internalField uniform {p0:.4f};\n'
+ # Inlet BC ##############################
+ # Write uniform
+ start_index = foam.find_keyword_line(dict_lines, 'inlet') + 2
+ added_part = ''
+ added_part += '\t type \t zeroGradient;\n'
- ###################### Inlet BC ##############################
- #Write uniform
- start_index = foam.find_keyword_line(dict_lines, "inlet") + 2
- added_part = ""
- added_part += "\t type \t zeroGradient;\n"
-
dict_lines.insert(start_index, added_part)
- ###################### Outlet BC ##############################
-
- start_index = foam.find_keyword_line(dict_lines, "outlet") + 2
- added_part = ""
- added_part += "\t type \t uniformFixedValue;\n"
- added_part += "\t uniformValue \t constant {:.4f};\n".format(p0)
-
+ # Outlet BC ##############################
+
+ start_index = foam.find_keyword_line(dict_lines, 'outlet') + 2
+ added_part = ''
+ added_part += '\t type \t uniformFixedValue;\n'
+ added_part += f'\t uniformValue \t constant {p0:.4f};\n'
+
dict_lines.insert(start_index, added_part)
-
- ###################### Ground BC ##############################
-
- start_index = foam.find_keyword_line(dict_lines, "ground") + 2
- added_part = ""
- added_part += "\t type \t zeroGradient;\n"
-
+
+ # Ground BC ##############################
+
+ start_index = foam.find_keyword_line(dict_lines, 'ground') + 2
+ added_part = ''
+ added_part += '\t type \t zeroGradient;\n'
+
dict_lines.insert(start_index, added_part)
-
-
- ###################### Top BC ##############################
-
- start_index = foam.find_keyword_line(dict_lines, "top") + 2
- added_part = ""
- added_part += "\t type \t {};\n".format(top_BC_type)
-
+
+ # Top BC ##############################
+
+ start_index = foam.find_keyword_line(dict_lines, 'top') + 2
+ added_part = ''
+ added_part += f'\t type \t {top_BC_type};\n'
+
dict_lines.insert(start_index, added_part)
-
- ###################### Front BC ##############################
-
- start_index = foam.find_keyword_line(dict_lines, "front") + 2
- added_part = ""
- added_part += "\t type \t {};\n".format(sides_BC_type)
-
+
+ # Front BC ##############################
+
+ start_index = foam.find_keyword_line(dict_lines, 'front') + 2
+ added_part = ''
+ added_part += f'\t type \t {sides_BC_type};\n'
+
dict_lines.insert(start_index, added_part)
-
- ###################### Back BC ##############################
-
- start_index = foam.find_keyword_line(dict_lines, "back") + 2
- added_part = ""
- added_part += "\t type \t {};\n".format(sides_BC_type)
-
+
+ # Back BC ##############################
+
+ start_index = foam.find_keyword_line(dict_lines, 'back') + 2
+ added_part = ''
+ added_part += f'\t type \t {sides_BC_type};\n'
+
dict_lines.insert(start_index, added_part)
-
-
- ###################### Building BC ##############################
-
- start_index = foam.find_keyword_line(dict_lines, "building") + 2
- added_part = ""
- added_part += "\t type \t zeroGradient;\n"
-
+
+ # Building BC ##############################
+
+ start_index = foam.find_keyword_line(dict_lines, 'building') + 2
+ added_part = ''
+ added_part += '\t type \t zeroGradient;\n'
+
dict_lines.insert(start_index, added_part)
+ # Surrounding BC ##############################
+
+ start_index = foam.find_keyword_line(dict_lines, 'surroundings') + 2
+ added_part = ''
+ added_part += '\t type \t zeroGradient;\n'
- ###################### Surrounding BC ##############################
-
- start_index = foam.find_keyword_line(dict_lines, "surroundings") + 2
- added_part = ""
- added_part += "\t type \t zeroGradient;\n"
-
dict_lines.insert(start_index, added_part)
- #Write edited dict to file
- write_file_name = case_path + "/0/p"
-
- if os.path.exists(write_file_name):
- os.remove(write_file_name)
-
- output_file = open(write_file_name, "w+")
+ # Write edited dict to file
+ write_file_name = case_path + '/0/p'
+
+ if os.path.exists(write_file_name): # noqa: PTH110
+ os.remove(write_file_name) # noqa: PTH107
+
+ output_file = open(write_file_name, 'w+') # noqa: PLW1514, PTH123, SIM115
for line in dict_lines:
output_file.write(line)
output_file.close()
-
-def write_nut_file(input_json_path, template_dict_path, case_path):
- #Read JSON data
- with open(input_json_path + "/SurroundedBuildingCFD.json") as json_file:
- json_data = json.load(json_file)
+
+def write_nut_file(input_json_path, template_dict_path, case_path): # noqa: C901, D103
+ # Read JSON data
+ with open(input_json_path + '/SurroundedBuildingCFD.json') as json_file: # noqa: PLW1514, PTH123
+ json_data = json.load(json_file)
# Returns JSON object as a dictionary
- boundary_data = json_data["boundaryConditions"]
- wind_data = json_data["windCharacteristics"]
-
- sides_BC_type = boundary_data['sidesBoundaryCondition']
- top_BC_type = boundary_data['topBoundaryCondition']
- ground_BC_type = boundary_data['groundBoundaryCondition']
- building_BC_type = boundary_data['buildingBoundaryCondition']
- surrounding_BC_type = boundary_data['surroundingBoundaryCondition']
+ boundary_data = json_data['boundaryConditions']
+ wind_data = json_data['windCharacteristics']
+
+ sides_BC_type = boundary_data['sidesBoundaryCondition'] # noqa: N806
+ top_BC_type = boundary_data['topBoundaryCondition'] # noqa: N806
+ ground_BC_type = boundary_data['groundBoundaryCondition'] # noqa: N806
+ building_BC_type = boundary_data['buildingBoundaryCondition'] # noqa: N806
+ surrounding_BC_type = boundary_data['surroundingBoundaryCondition'] # noqa: N806
# wind_speed = wind_data['roofHeightWindSpeed']
# building_height = wind_data['buildingHeight']
roughness_length = wind_data['aerodynamicRoughnessLength']
-
- #Open the template file (OpenFOAM file) for manipulation
- dict_file = open(template_dict_path + "/nutFileTemplate", "r")
+
+ # Open the template file (OpenFOAM file) for manipulation
+ dict_file = open(template_dict_path + '/nutFileTemplate') # noqa: PLW1514, PTH123, SIM115
dict_lines = dict_file.readlines()
dict_file.close()
-
-
- #BC and initial condition
- nut0 = 0.0
-
- ##################### Internal Field #########################
-
- start_index = foam.find_keyword_line(dict_lines, "internalField")
- dict_lines[start_index] = "internalField uniform {:.4f};\n".format(nut0)
-
-
- ###################### Inlet BC ##############################
- #Write uniform
- start_index = foam.find_keyword_line(dict_lines, "inlet") + 2
- added_part = ""
- added_part += "\t type \t zeroGradient;\n"
-
+
+ # BC and initial condition
+ nut0 = 0.0
+
+ # Internal Field #########################
+
+ start_index = foam.find_keyword_line(dict_lines, 'internalField')
+ dict_lines[start_index] = f'internalField uniform {nut0:.4f};\n'
+
+ # Inlet BC ##############################
+ # Write uniform
+ start_index = foam.find_keyword_line(dict_lines, 'inlet') + 2
+ added_part = ''
+ added_part += '\t type \t zeroGradient;\n'
+
dict_lines.insert(start_index, added_part)
- ###################### Outlet BC ##############################
-
- start_index = foam.find_keyword_line(dict_lines, "outlet") + 2
- added_part = ""
- added_part += "\t type \t uniformFixedValue;\n"
- added_part += "\t uniformValue \t constant {:.4f};\n".format(nut0)
-
+ # Outlet BC ##############################
+
+ start_index = foam.find_keyword_line(dict_lines, 'outlet') + 2
+ added_part = ''
+ added_part += '\t type \t uniformFixedValue;\n'
+ added_part += f'\t uniformValue \t constant {nut0:.4f};\n'
+
dict_lines.insert(start_index, added_part)
-
- ###################### Ground BC ##############################
-
- start_index = foam.find_keyword_line(dict_lines, "ground") + 2
-
- if ground_BC_type == "noSlip":
- added_part = ""
- added_part += "\t type \t zeroGradient;\n"
-
- if ground_BC_type == "roughWallFunction":
- added_part = ""
- added_part += "\t type \t nutkAtmRoughWallFunction;\n"
- added_part += "\t z0 \t uniform {:.4e};\n".format(roughness_length)
- added_part += "\t value \t uniform 0.0;\n"
-
- if ground_BC_type == "smoothWallFunction":
- added_part = ""
- added_part += "\t type \t nutUSpaldingWallFunction;\n"
- added_part += "\t value \t uniform 0;\n"
+ # Ground BC ##############################
+
+ start_index = foam.find_keyword_line(dict_lines, 'ground') + 2
+
+ if ground_BC_type == 'noSlip':
+ added_part = ''
+ added_part += '\t type \t zeroGradient;\n'
+
+ if ground_BC_type == 'roughWallFunction':
+ added_part = ''
+ added_part += '\t type \t nutkAtmRoughWallFunction;\n'
+ added_part += f'\t z0 \t uniform {roughness_length:.4e};\n'
+ added_part += '\t value \t uniform 0.0;\n'
+
+ if ground_BC_type == 'smoothWallFunction':
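+ # nutUSpaldingWallFunction blends the viscous and log-law regions (Spalding's law),
+ # so it needs no explicit roughness length for a smooth wall.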
+ added_part = ''
+ added_part += '\t type \t nutUSpaldingWallFunction;\n'
+ added_part += '\t value \t uniform 0;\n'
dict_lines.insert(start_index, added_part)
-
-
- ###################### Top BC ##############################
-
- start_index = foam.find_keyword_line(dict_lines, "top") + 2
- added_part = ""
- added_part += "\t type \t {};\n".format(top_BC_type)
-
+
+ # Top BC ##############################
+
+ start_index = foam.find_keyword_line(dict_lines, 'top') + 2
+ added_part = ''
+ added_part += f'\t type \t {top_BC_type};\n'
+
dict_lines.insert(start_index, added_part)
-
- ###################### Front BC ##############################
-
- start_index = foam.find_keyword_line(dict_lines, "front") + 2
- added_part = ""
- added_part += "\t type \t {};\n".format(sides_BC_type)
-
+
+ # Front BC ##############################
+
+ start_index = foam.find_keyword_line(dict_lines, 'front') + 2
+ added_part = ''
+ added_part += f'\t type \t {sides_BC_type};\n'
+
dict_lines.insert(start_index, added_part)
-
- ###################### Back BC ################################
-
- start_index = foam.find_keyword_line(dict_lines, "back") + 2
- added_part = ""
- added_part += "\t type \t {};\n".format(sides_BC_type)
-
+
+ # Back BC ################################
+
+ start_index = foam.find_keyword_line(dict_lines, 'back') + 2
+ added_part = ''
+ added_part += f'\t type \t {sides_BC_type};\n'
+
dict_lines.insert(start_index, added_part)
-
-
- ###################### Building BC ##############################
- start_index = foam.find_keyword_line(dict_lines, "building") + 2
-
- if building_BC_type == "noSlip":
- added_part = ""
- added_part += "\t type \t fixedValue;\n"
- added_part += "\t value \t uniform 0;\n"
-
- if building_BC_type == "smoothWallFunction":
- added_part = ""
- added_part += "\t type \t nutUSpaldingWallFunction;\n"
- added_part += "\t value \t uniform 0;\n"
-
- if building_BC_type == "roughWallFunction":
- added_part = ""
- added_part += "\t type \t nutkRoughWallFunction;\n"
- added_part += "\t Ks \t uniform 1e-5;\n"
- added_part += "\t Cs \t uniform 0.5;\n"
- added_part += "\t value \t uniform 0;\n"
-
+
+ # Building BC ##############################
+ start_index = foam.find_keyword_line(dict_lines, 'building') + 2
+
+ if building_BC_type == 'noSlip':
+ added_part = ''
+ added_part += '\t type \t fixedValue;\n'
+ added_part += '\t value \t uniform 0;\n'
+
+ if building_BC_type == 'smoothWallFunction':
+ added_part = ''
+ added_part += '\t type \t nutUSpaldingWallFunction;\n'
+ added_part += '\t value \t uniform 0;\n'
+
+ if building_BC_type == 'roughWallFunction':
+ added_part = ''
+ added_part += '\t type \t nutkRoughWallFunction;\n'
+ added_part += '\t Ks \t uniform 1e-5;\n'
+ added_part += '\t Cs \t uniform 0.5;\n'
+ added_part += '\t value \t uniform 0;\n'
+
dict_lines.insert(start_index, added_part)
- ###################### Surrounding BC ##############################
- start_index = foam.find_keyword_line(dict_lines, "surroundings") + 2
-
- if surrounding_BC_type == "noSlip":
- added_part = ""
- added_part += "\t type \t fixedValue;\n"
- added_part += "\t value \t uniform 0;\n"
-
- if surrounding_BC_type == "smoothWallFunction":
- added_part = ""
- added_part += "\t type \t nutUSpaldingWallFunction;\n"
- added_part += "\t value \t uniform 0;\n"
-
- if surrounding_BC_type == "roughWallFunction":
- added_part = ""
- added_part += "\t type \t nutkRoughWallFunction;\n"
- added_part += "\t Ks \t uniform 1e-5;\n"
- added_part += "\t Cs \t uniform 0.5;\n"
- added_part += "\t value \t uniform 0;\n"
-
+ # Surrounding BC ##############################
+ start_index = foam.find_keyword_line(dict_lines, 'surroundings') + 2
+
+ if surrounding_BC_type == 'noSlip':
+ added_part = ''
+ added_part += '\t type \t fixedValue;\n'
+ added_part += '\t value \t uniform 0;\n'
+
+ if surrounding_BC_type == 'smoothWallFunction':
+ added_part = ''
+ added_part += '\t type \t nutUSpaldingWallFunction;\n'
+ added_part += '\t value \t uniform 0;\n'
+
+ if surrounding_BC_type == 'roughWallFunction':
+ added_part = ''
+ added_part += '\t type \t nutkRoughWallFunction;\n'
+ added_part += '\t Ks \t uniform 1e-5;\n'
+ added_part += '\t Cs \t uniform 0.5;\n'
+ added_part += '\t value \t uniform 0;\n'
+
dict_lines.insert(start_index, added_part)
+ # Write edited dict to file
+ write_file_name = case_path + '/0/nut'
+
+ if os.path.exists(write_file_name): # noqa: PTH110
+ os.remove(write_file_name) # noqa: PTH107
- #Write edited dict to file
- write_file_name = case_path + "/0/nut"
-
- if os.path.exists(write_file_name):
- os.remove(write_file_name)
-
- output_file = open(write_file_name, "w+")
+ output_file = open(write_file_name, 'w+') # noqa: PLW1514, PTH123, SIM115
for line in dict_lines:
output_file.write(line)
output_file.close()
-def write_epsilon_file(input_json_path, template_dict_path, case_path):
- #Read JSON data
- with open(input_json_path + "/SurroundedBuildingCFD.json") as json_file:
- json_data = json.load(json_file)
+def write_epsilon_file(input_json_path, template_dict_path, case_path): # noqa: D103
+ # Read JSON data
+ with open(input_json_path + '/SurroundedBuildingCFD.json') as json_file: # noqa: PLW1514, PTH123
+ json_data = json.load(json_file)
# Returns JSON object as a dictionary
- boundary_data = json_data["boundaryConditions"]
- wind_data = json_data["windCharacteristics"]
-
-
- sides_BC_type = boundary_data['sidesBoundaryCondition']
- top_BC_type = boundary_data['topBoundaryCondition']
- ground_BC_type = boundary_data['groundBoundaryCondition']
+ boundary_data = json_data['boundaryConditions']
+ wind_data = json_data['windCharacteristics']
+
+ sides_BC_type = boundary_data['sidesBoundaryCondition'] # noqa: N806
+ top_BC_type = boundary_data['topBoundaryCondition'] # noqa: N806
+ ground_BC_type = boundary_data['groundBoundaryCondition'] # noqa: N806
wind_speed = wind_data['referenceWindSpeed']
building_height = wind_data['referenceHeight']
roughness_length = wind_data['aerodynamicRoughnessLength']
-
- #Open the template file (OpenFOAM file) for manipulation
- dict_file = open(template_dict_path + "/epsilonFileTemplate", "r")
+
+ # Open the template file (OpenFOAM file) for manipulation
+ dict_file = open(template_dict_path + '/epsilonFileTemplate') # noqa: PLW1514, PTH123, SIM115
dict_lines = dict_file.readlines()
dict_file.close()
-
-
- #BC and initial condition
- epsilon0 = 0.01
-
- ##################### Internal Field #########################
-
- start_index = foam.find_keyword_line(dict_lines, "internalField")
- dict_lines[start_index] = "internalField uniform {:.4f};\n".format(epsilon0)
-
-
- ###################### Inlet BC ##############################
- #Write uniform
- start_index = foam.find_keyword_line(dict_lines, "inlet") + 2
- added_part = ""
- added_part += "\t type \t atmBoundaryLayerInletEpsilon;\n"
- added_part += "\t Uref \t {:.4f};\n".format(wind_speed)
- added_part += "\t Zref \t {:.4f};\n".format(building_height)
- added_part += "\t zDir \t (0.0 0.0 1.0);\n"
- added_part += "\t flowDir \t (1.0 0.0 0.0);\n"
- added_part += "\t z0 \t uniform {:.4e};\n".format(roughness_length)
- added_part += "\t zGround \t uniform 0.0;\n"
-
+
+ # BC and initial condition
+ epsilon0 = 0.01
+
+ # Internal Field #########################
+
+ start_index = foam.find_keyword_line(dict_lines, 'internalField')
+ dict_lines[start_index] = f'internalField uniform {epsilon0:.4f};\n'
+
+ # Inlet BC ##############################
+ # Write uniform
+ start_index = foam.find_keyword_line(dict_lines, 'inlet') + 2
+ added_part = ''
+ added_part += '\t type \t atmBoundaryLayerInletEpsilon;\n'
+ added_part += f'\t Uref \t {wind_speed:.4f};\n'
+ added_part += f'\t Zref \t {building_height:.4f};\n'
+ added_part += '\t zDir \t (0.0 0.0 1.0);\n'
+ added_part += '\t flowDir \t (1.0 0.0 0.0);\n'
+ added_part += f'\t z0 \t uniform {roughness_length:.4e};\n'
+ added_part += '\t zGround \t uniform 0.0;\n'
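+ # Uref, Zref and z0 parameterize OpenFOAM's atmBoundaryLayerInlet (log-law ABL)
+ # profile applied at the inlet patch.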
+
dict_lines.insert(start_index, added_part)
- ###################### Outlet BC ##############################
-
- start_index = foam.find_keyword_line(dict_lines, "outlet") + 2
- added_part = ""
- added_part += "\t type \t inletOutlet;\n"
- added_part += "\t inletValue \t uniform {:.4f};\n".format(epsilon0)
- added_part += "\t value \t uniform {:.4f};\n".format(epsilon0)
-
+ # Outlet BC ##############################
+
+ start_index = foam.find_keyword_line(dict_lines, 'outlet') + 2
+ added_part = ''
+ added_part += '\t type \t inletOutlet;\n'
+ added_part += f'\t inletValue \t uniform {epsilon0:.4f};\n'
+ added_part += f'\t value \t uniform {epsilon0:.4f};\n'
+
dict_lines.insert(start_index, added_part)
-
- ###################### Ground BC ##############################
-
- start_index = foam.find_keyword_line(dict_lines, "ground") + 2
-
- if ground_BC_type == "noSlip":
- added_part = ""
- added_part += "\t type \t zeroGradient;\n"
-
- if ground_BC_type == "roughWallFunction":
- added_part = ""
- added_part += "\t type \t epsilonWallFunction;\n"
- added_part += "\t Cmu \t {:.4f};\n".format(0.09)
- added_part += "\t kappa \t {:.4f};\n".format(0.41)
- added_part += "\t E \t {:.4f};\n".format(9.8)
- added_part += "\t value \t uniform {:.4f};\n".format(epsilon0)
-
- #Note: Should be replaced with smooth wall function for epsilon,
+
+ # Ground BC ##############################
+
+ start_index = foam.find_keyword_line(dict_lines, 'ground') + 2
+
+ if ground_BC_type == 'noSlip':
+ added_part = ''
+ added_part += '\t type \t zeroGradient;\n'
+
+ if ground_BC_type == 'roughWallFunction':
+ added_part = ''
+ added_part += '\t type \t epsilonWallFunction;\n'
+ added_part += f'\t Cmu \t {0.09:.4f};\n'
+ added_part += f'\t kappa \t {0.41:.4f};\n'
+ added_part += f'\t E \t {9.8:.4f};\n'
+ added_part += f'\t value \t uniform {epsilon0:.4f};\n'
+
+ # Note: should be replaced with a proper smooth wall function for epsilon;
 # for now it is treated the same as the rough wall function.
- if ground_BC_type == "smoothWallFunction":
- added_part = ""
- added_part += "\t type \t epsilonWallFunction;\n"
- added_part += "\t Cmu \t {:.4f};\n".format(0.09)
- added_part += "\t kappa \t {:.4f};\n".format(0.41)
- added_part += "\t E \t {:.4f};\n".format(9.8)
- added_part += "\t value \t uniform {:.4f};\n".format(epsilon0)
+ if ground_BC_type == 'smoothWallFunction':
+ added_part = ''
+ added_part += '\t type \t epsilonWallFunction;\n'
+ added_part += f'\t Cmu \t {0.09:.4f};\n'
+ added_part += f'\t kappa \t {0.41:.4f};\n'
+ added_part += f'\t E \t {9.8:.4f};\n'
+ added_part += f'\t value \t uniform {epsilon0:.4f};\n'
dict_lines.insert(start_index, added_part)
-
-
- ###################### Top BC ##############################
-
- start_index = foam.find_keyword_line(dict_lines, "top") + 2
- added_part = ""
- added_part += "\t type \t {};\n".format(top_BC_type)
-
+
+ # Top BC ##############################
+
+ start_index = foam.find_keyword_line(dict_lines, 'top') + 2
+ added_part = ''
+ added_part += f'\t type \t {top_BC_type};\n'
+
dict_lines.insert(start_index, added_part)
-
- ###################### Front BC ##############################
-
- start_index = foam.find_keyword_line(dict_lines, "front") + 2
- added_part = ""
- added_part += "\t type \t {};\n".format(sides_BC_type)
-
+
+ # Front BC ##############################
+
+ start_index = foam.find_keyword_line(dict_lines, 'front') + 2
+ added_part = ''
+ added_part += f'\t type \t {sides_BC_type};\n'
+
dict_lines.insert(start_index, added_part)
-
- ###################### Back BC ################################
-
- start_index = foam.find_keyword_line(dict_lines, "back") + 2
- added_part = ""
- added_part += "\t type \t {};\n".format(sides_BC_type)
-
+
+ # Back BC ################################
+
+ start_index = foam.find_keyword_line(dict_lines, 'back') + 2
+ added_part = ''
+ added_part += f'\t type \t {sides_BC_type};\n'
+
dict_lines.insert(start_index, added_part)
-
- #Write edited dict to file
- write_file_name = case_path + "/0/epsilon"
-
- if os.path.exists(write_file_name):
- os.remove(write_file_name)
-
- output_file = open(write_file_name, "w+")
+
+ # Write edited dict to file
+ write_file_name = case_path + '/0/epsilon'
+
+ if os.path.exists(write_file_name): # noqa: PTH110
+ os.remove(write_file_name) # noqa: PTH107
+
+ output_file = open(write_file_name, 'w+') # noqa: PLW1514, PTH123, SIM115
for line in dict_lines:
output_file.write(line)
output_file.close()
-def write_k_file(input_json_path, template_dict_path, case_path):
- #Read JSON data
- with open(input_json_path + "/SurroundedBuildingCFD.json") as json_file:
- json_data = json.load(json_file)
+def write_k_file(input_json_path, template_dict_path, case_path): # noqa: C901, D103
+ # Read JSON data
+ with open(input_json_path + '/SurroundedBuildingCFD.json') as json_file: # noqa: PLW1514, PTH123
+ json_data = json.load(json_file)
# Returns JSON object as a dictionary
- boundary_data = json_data["boundaryConditions"]
- wind_data = json_data["windCharacteristics"]
-
-
- sides_BC_type = boundary_data['sidesBoundaryCondition']
- top_BC_type = boundary_data['topBoundaryCondition']
- ground_BC_type = boundary_data['groundBoundaryCondition']
- building_BC_type = boundary_data['buildingBoundaryCondition']
- surrounding_BC_type = boundary_data['surroundingBoundaryCondition']
+ boundary_data = json_data['boundaryConditions']
+ wind_data = json_data['windCharacteristics']
+
+ sides_BC_type = boundary_data['sidesBoundaryCondition'] # noqa: N806
+ top_BC_type = boundary_data['topBoundaryCondition'] # noqa: N806
+ ground_BC_type = boundary_data['groundBoundaryCondition'] # noqa: N806
+ building_BC_type = boundary_data['buildingBoundaryCondition'] # noqa: N806
+ surrounding_BC_type = boundary_data['surroundingBoundaryCondition'] # noqa: N806
wind_speed = wind_data['referenceWindSpeed']
building_height = wind_data['referenceHeight']
roughness_length = wind_data['aerodynamicRoughnessLength']
-
- #Open the template file (OpenFOAM file) for manipulation
- dict_file = open(template_dict_path + "/kFileTemplate", "r")
+
+ # Open the template file (OpenFOAM file) for manipulation
+ dict_file = open(template_dict_path + '/kFileTemplate') # noqa: PLW1514, PTH123, SIM115
dict_lines = dict_file.readlines()
dict_file.close()
-
-
- #BC and initial condition (you may need to scale to model scale)
+
+ # BC and initial condition (may need to be scaled to model scale)
# k0 = 1.3 #not in model scale
-
- I = 0.1
- k0 = 1.5*(I*wind_speed)**2
-
- ##################### Internal Field #########################
-
- start_index = foam.find_keyword_line(dict_lines, "internalField")
- dict_lines[start_index] = "internalField \t uniform {:.4f};\n".format(k0)
-
-
- ###################### Inlet BC ##############################
- #Write uniform
- start_index = foam.find_keyword_line(dict_lines, "inlet") + 2
- added_part = ""
- added_part += "\t type \t atmBoundaryLayerInletK;\n"
- added_part += "\t Uref \t {:.4f};\n".format(wind_speed)
- added_part += "\t Zref \t {:.4f};\n".format(building_height)
- added_part += "\t zDir \t (0.0 0.0 1.0);\n"
- added_part += "\t flowDir \t (1.0 0.0 0.0);\n"
- added_part += "\t z0 \t uniform {:.4e};\n".format(roughness_length)
- added_part += "\t zGround \t uniform 0.0;\n"
-
+
+ I = 0.1 # noqa: N806, E741
+ k0 = 1.5 * (I * wind_speed) ** 2
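+ # Initial k estimated from an assumed turbulence intensity I of 10%:
+ # k = 1.5 * (I * Uref)^2 (isotropic turbulence assumption).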
+
+ # Internal Field #########################
+
+ start_index = foam.find_keyword_line(dict_lines, 'internalField')
+ dict_lines[start_index] = f'internalField \t uniform {k0:.4f};\n'
+
+ # Inlet BC ##############################
+ # Write uniform
+ start_index = foam.find_keyword_line(dict_lines, 'inlet') + 2
+ added_part = ''
+ added_part += '\t type \t atmBoundaryLayerInletK;\n'
+ added_part += f'\t Uref \t {wind_speed:.4f};\n'
+ added_part += f'\t Zref \t {building_height:.4f};\n'
+ added_part += '\t zDir \t (0.0 0.0 1.0);\n'
+ added_part += '\t flowDir \t (1.0 0.0 0.0);\n'
+ added_part += f'\t z0 \t uniform {roughness_length:.4e};\n'
+ added_part += '\t zGround \t uniform 0.0;\n'
+
dict_lines.insert(start_index, added_part)
- ###################### Outlet BC ##############################
-
- start_index = foam.find_keyword_line(dict_lines, "outlet") + 2
- added_part = ""
- added_part += "\t type \t inletOutlet;\n"
- added_part += "\t inletValue \t uniform {:.4f};\n".format(k0)
- added_part += "\t value \t uniform {:.4f};\n".format(k0)
-
+ # Outlet BC ##############################
+
+ start_index = foam.find_keyword_line(dict_lines, 'outlet') + 2
+ added_part = ''
+ added_part += '\t type \t inletOutlet;\n'
+ added_part += f'\t inletValue \t uniform {k0:.4f};\n'
+ added_part += f'\t value \t uniform {k0:.4f};\n'
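+ # inletOutlet acts as zeroGradient for outflow and applies inletValue where backflow occurs.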
+
dict_lines.insert(start_index, added_part)
-
- ###################### Ground BC ##############################
-
- start_index = foam.find_keyword_line(dict_lines, "ground") + 2
-
- if ground_BC_type == "noSlip":
- added_part = ""
- added_part += "\t type \t zeroGradient;\n"
-
- if ground_BC_type == "smoothWallFunction":
- added_part = ""
- added_part += "\t type \t kqRWallFunction;\n"
- added_part += "\t value \t uniform {:.4f};\n".format(0.0)
-
- if ground_BC_type == "roughWallFunction":
- added_part = ""
- added_part += "\t type \t kqRWallFunction;\n"
- added_part += "\t value \t uniform {:.4f};\n".format(0.0)
+
+ # Ground BC ##############################
+
+ start_index = foam.find_keyword_line(dict_lines, 'ground') + 2
+
+ if ground_BC_type == 'noSlip':
+ added_part = ''
+ added_part += '\t type \t zeroGradient;\n'
+
+ if ground_BC_type == 'smoothWallFunction':
+ added_part = ''
+ added_part += '\t type \t kqRWallFunction;\n'
+ added_part += f'\t value \t uniform {0.0:.4f};\n'
+
+ if ground_BC_type == 'roughWallFunction':
+ added_part = ''
+ added_part += '\t type \t kqRWallFunction;\n'
+ added_part += f'\t value \t uniform {0.0:.4f};\n'
dict_lines.insert(start_index, added_part)
-
-
- ###################### Top BC ##############################
-
- start_index = foam.find_keyword_line(dict_lines, "top") + 2
- added_part = ""
- added_part += "\t type \t {};\n".format(top_BC_type)
-
+
+ # Top BC ##############################
+
+ start_index = foam.find_keyword_line(dict_lines, 'top') + 2
+ added_part = ''
+ added_part += f'\t type \t {top_BC_type};\n'
+
dict_lines.insert(start_index, added_part)
-
- ###################### Front BC ##############################
-
- start_index = foam.find_keyword_line(dict_lines, "front") + 2
- added_part = ""
- added_part += "\t type \t {};\n".format(sides_BC_type)
-
+
+ # Front BC ##############################
+
+ start_index = foam.find_keyword_line(dict_lines, 'front') + 2
+ added_part = ''
+ added_part += f'\t type \t {sides_BC_type};\n'
+
dict_lines.insert(start_index, added_part)
-
- ###################### Back BC ################################
-
- start_index = foam.find_keyword_line(dict_lines, "back") + 2
- added_part = ""
- added_part += "\t type \t {};\n".format(sides_BC_type)
-
+
+ # Back BC ################################
+
+ start_index = foam.find_keyword_line(dict_lines, 'back') + 2
+ added_part = ''
+ added_part += f'\t type \t {sides_BC_type};\n'
+
dict_lines.insert(start_index, added_part)
-
-
- ###################### Building BC ##############################
- start_index = foam.find_keyword_line(dict_lines, "building") + 2
-
- if building_BC_type == "noSlip":
- added_part = ""
- added_part += "\t type \t zeroGradient;\n"
-
- if building_BC_type == "smoothWallFunction":
- added_part = ""
- added_part += "\t type \t kqRWallFunction;\n"
- added_part += "\t value \t uniform {:.4f};\n".format(0.0)
-
- if building_BC_type == "roughWallFunction":
- added_part = ""
- added_part += "\t type \t kqRWallFunction;\n"
- added_part += "\t value \t uniform {:.4f};\n".format(0.0)
+
+ # Building BC ##############################
+ start_index = foam.find_keyword_line(dict_lines, 'building') + 2
+
+ if building_BC_type == 'noSlip':
+ added_part = ''
+ added_part += '\t type \t zeroGradient;\n'
+
+ if building_BC_type == 'smoothWallFunction':
+ added_part = ''
+ added_part += '\t type \t kqRWallFunction;\n'
+ added_part += f'\t value \t uniform {0.0:.4f};\n'
+
+ if building_BC_type == 'roughWallFunction':
+ added_part = ''
+ added_part += '\t type \t kqRWallFunction;\n'
+ added_part += f'\t value \t uniform {0.0:.4f};\n'
dict_lines.insert(start_index, added_part)
- ###################### Surroundings BC ##############################
- start_index = foam.find_keyword_line(dict_lines, "surroundings") + 2
-
- if surrounding_BC_type == "noSlip":
- added_part = ""
- added_part += "\t type \t zeroGradient;\n"
-
- if surrounding_BC_type == "smoothWallFunction":
- added_part = ""
- added_part += "\t type \t kqRWallFunction;\n"
- added_part += "\t value \t uniform {:.4f};\n".format(0.0)
-
- if surrounding_BC_type == "roughWallFunction":
- added_part = ""
- added_part += "\t type \t kqRWallFunction;\n"
- added_part += "\t value \t uniform {:.4f};\n".format(0.0)
+ # Surroundings BC ##############################
+ start_index = foam.find_keyword_line(dict_lines, 'surroundings') + 2
+
+ if surrounding_BC_type == 'noSlip':
+ added_part = ''
+ added_part += '\t type \t zeroGradient;\n'
+
+ if surrounding_BC_type == 'smoothWallFunction':
+ added_part = ''
+ added_part += '\t type \t kqRWallFunction;\n'
+ added_part += f'\t value \t uniform {0.0:.4f};\n'
+
+ if surrounding_BC_type == 'roughWallFunction':
+ added_part = ''
+ added_part += '\t type \t kqRWallFunction;\n'
+ added_part += f'\t value \t uniform {0.0:.4f};\n'
dict_lines.insert(start_index, added_part)
+ # Write edited dict to file
+ write_file_name = case_path + '/0/k'
+
+ if os.path.exists(write_file_name): # noqa: PTH110
+ os.remove(write_file_name) # noqa: PTH107
- #Write edited dict to file
- write_file_name = case_path + "/0/k"
-
- if os.path.exists(write_file_name):
- os.remove(write_file_name)
-
- output_file = open(write_file_name, "w+")
+ output_file = open(write_file_name, 'w+') # noqa: PLW1514, PTH123, SIM115
for line in dict_lines:
output_file.write(line)
output_file.close()
-
-
-def write_controlDict_file(input_json_path, template_dict_path, case_path):
- #Read JSON data
- with open(input_json_path + "/SurroundedBuildingCFD.json") as json_file:
- json_data = json.load(json_file)
+
+def write_controlDict_file(input_json_path, template_dict_path, case_path): # noqa: N802, D103
+ # Read JSON data
+ with open(input_json_path + '/SurroundedBuildingCFD.json') as json_file: # noqa: PLW1514, PTH123
+ json_data = json.load(json_file)
# Returns JSON object as a dictionary
- ns_data = json_data["numericalSetup"]
- rm_data = json_data["resultMonitoring"]
-
+ ns_data = json_data['numericalSetup']
+ rm_data = json_data['resultMonitoring'] # noqa: F841
+
solver_type = ns_data['solverType']
duration = ns_data['duration']
time_step = ns_data['timeStep']
max_courant_number = ns_data['maxCourantNumber']
adjust_time_step = ns_data['adjustTimeStep']
-
+
# monitor_wind_profiles = rm_data['monitorWindProfile']
# monitor_vtk_planes = rm_data['monitorVTKPlane']
# wind_profiles = rm_data['windProfiles']
# vtk_planes = rm_data['vtkPlanes']
-
- # Need to change this for
- max_delta_t = 10*time_step
-
- #Write 10 times
+ # Need to change this for
+ max_delta_t = 10 * time_step
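+ # Caps the adaptive time step at 10x the nominal step when adjustTimeStep is enabled.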
+
+ # Write output 10 times over the simulation duration
write_frequency = 10.0
- write_interval_time = duration/write_frequency
- write_interval_count = int(write_interval_time/time_step)
- purge_write = 3
-
- #Open the template file (OpenFOAM file) for manipulation
- dict_file = open(template_dict_path + "/controlDictTemplate", "r")
+ write_interval_time = duration / write_frequency
+ write_interval_count = int(write_interval_time / time_step)
+ purge_write = 3
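+ # Results are written about 10 times over the run; purgeWrite 3 keeps only the
+ # three most recent time directories on disk.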
+
+ # Open the template file (OpenFOAM file) for manipulation
+ dict_file = open(template_dict_path + '/controlDictTemplate') # noqa: PLW1514, PTH123, SIM115
dict_lines = dict_file.readlines()
dict_file.close()
-
- #Write application type
- start_index = foam.find_keyword_line(dict_lines, "application")
- dict_lines[start_index] = "application \t{};\n".format(solver_type)
-
- #Write end time
- start_index = foam.find_keyword_line(dict_lines, "endTime")
- dict_lines[start_index] = "endTime \t{:.6f};\n".format(duration)
-
- #Write time step time
- start_index = foam.find_keyword_line(dict_lines, "deltaT")
- dict_lines[start_index] = "deltaT \t\t{:.6f};\n".format(time_step)
-
- #Write writeControl
- start_index = foam.find_keyword_line(dict_lines, "writeControl")
- if solver_type=="pimpleFoam":
- dict_lines[start_index] = "writeControl \t{};\n".format("adjustableRunTime")
+
+ # Write application type
+ start_index = foam.find_keyword_line(dict_lines, 'application')
+ dict_lines[start_index] = f'application \t{solver_type};\n'
+
+ # Write end time
+ start_index = foam.find_keyword_line(dict_lines, 'endTime')
+ dict_lines[start_index] = f'endTime \t{duration:.6f};\n'
+
+ # Write time step time
+ start_index = foam.find_keyword_line(dict_lines, 'deltaT')
+ dict_lines[start_index] = f'deltaT \t\t{time_step:.6f};\n'
+
+ # Write writeControl
+ start_index = foam.find_keyword_line(dict_lines, 'writeControl')
+ if solver_type == 'pimpleFoam':
+ dict_lines[start_index] = 'writeControl \t{};\n'.format('adjustableRunTime')
else:
- dict_lines[start_index] = "writeControl \t\t{};\n".format("timeStep")
-
- #Write adjustable time step or not
- start_index = foam.find_keyword_line(dict_lines, "adjustTimeStep")
- dict_lines[start_index] = "adjustTimeStep \t\t{};\n".format("yes" if adjust_time_step else "no")
-
- #Write writeInterval
- start_index = foam.find_keyword_line(dict_lines, "writeInterval")
- if solver_type=="pimpleFoam":
- dict_lines[start_index] = "writeInterval \t{:.6f};\n".format(write_interval_time)
+ dict_lines[start_index] = 'writeControl \t\t{};\n'.format('timeStep')
+
+ # Write adjustable time step or not
+ start_index = foam.find_keyword_line(dict_lines, 'adjustTimeStep')
+ dict_lines[start_index] = 'adjustTimeStep \t\t{};\n'.format(
+ 'yes' if adjust_time_step else 'no'
+ )
+
+ # Write writeInterval
+ start_index = foam.find_keyword_line(dict_lines, 'writeInterval')
+ if solver_type == 'pimpleFoam':
+ dict_lines[start_index] = f'writeInterval \t{write_interval_time:.6f};\n'
else:
- dict_lines[start_index] = "writeInterval \t{};\n".format(write_interval_count)
+ dict_lines[start_index] = f'writeInterval \t{write_interval_count};\n'
+
+ # Write maxCo
+ start_index = foam.find_keyword_line(dict_lines, 'maxCo')
+ dict_lines[start_index] = f'maxCo \t{max_courant_number:.2f};\n'
- #Write maxCo
- start_index = foam.find_keyword_line(dict_lines, "maxCo")
- dict_lines[start_index] = "maxCo \t{:.2f};\n".format(max_courant_number)
-
- #Write maximum time step
- start_index = foam.find_keyword_line(dict_lines, "maxDeltaT")
- dict_lines[start_index] = "maxDeltaT \t{:.6f};\n".format(max_delta_t)
-
+ # Write maximum time step
+ start_index = foam.find_keyword_line(dict_lines, 'maxDeltaT')
+ dict_lines[start_index] = f'maxDeltaT \t{max_delta_t:.6f};\n'
- #Write purge write interval
- start_index = foam.find_keyword_line(dict_lines, "purgeWrite")
- dict_lines[start_index] = "purgeWrite \t{};\n".format(purge_write)
+ # Write purge write interval
+ start_index = foam.find_keyword_line(dict_lines, 'purgeWrite')
+ dict_lines[start_index] = f'purgeWrite \t{purge_write};\n'
- ########################### Function Objects ##############################
-
- #Find function object location
- start_index = foam.find_keyword_line(dict_lines, "functions") + 2
+ # Function Objects ##############################
+ # Find function object location
+ start_index = foam.find_keyword_line(dict_lines, 'functions') + 2
- #Write story loads functionObjects
- added_part = " #includeFunc storyForces\n"
+ # Write story loads functionObjects
+ added_part = ' #includeFunc storyForces\n'
dict_lines.insert(start_index, added_part)
# #Write base loads functionObjects
@@ -1412,258 +1401,252 @@ def write_controlDict_file(input_json_path, template_dict_path, case_path):
# for prof in wind_profiles:
# added_part += " #includeFunc {}\n".format(prof["name"])
# dict_lines.insert(start_index, added_part)
-
- # #Write VTK sampling sampling points
+
+ # #Write VTK sampling sampling points
# if monitor_vtk_planes:
# added_part = ""
# for pln in vtk_planes:
# added_part += " #includeFunc {}\n".format(pln["name"])
# dict_lines.insert(start_index, added_part)
-
-
- #Write edited dict to file
- write_file_name = case_path + "/system/controlDict"
-
- if os.path.exists(write_file_name):
- os.remove(write_file_name)
-
- output_file = open(write_file_name, "w+")
+
+ # Write edited dict to file
+ write_file_name = case_path + '/system/controlDict'
+
+ if os.path.exists(write_file_name): # noqa: PTH110
+ os.remove(write_file_name) # noqa: PTH107
+
+ output_file = open(write_file_name, 'w+') # noqa: PLW1514, PTH123, SIM115
for line in dict_lines:
output_file.write(line)
output_file.close()
-
-def write_fvSolution_file(input_json_path, template_dict_path, case_path):
- #Read JSON data
- with open(input_json_path + "/SurroundedBuildingCFD.json") as json_file:
- json_data = json.load(json_file)
+
+def write_fvSolution_file(input_json_path, template_dict_path, case_path): # noqa: N802, D103
+ # Read JSON data
+ with open(input_json_path + '/SurroundedBuildingCFD.json') as json_file: # noqa: PLW1514, PTH123
+ json_data = json.load(json_file)
# Returns JSON object as a dictionary
- ns_data = json_data["numericalSetup"]
-
+ ns_data = json_data['numericalSetup']
+
json_file.close()
-
+
num_non_orthogonal_correctors = ns_data['numNonOrthogonalCorrectors']
num_correctors = ns_data['numCorrectors']
num_outer_correctors = ns_data['numOuterCorrectors']
-
- #Open the template file (OpenFOAM file) for manipulation
- dict_file = open(template_dict_path + "/fvSolutionTemplate", "r")
+
+ # Open the template file (OpenFOAM file) for manipulation
+ dict_file = open(template_dict_path + '/fvSolutionTemplate') # noqa: PLW1514, PTH123, SIM115
dict_lines = dict_file.readlines()
dict_file.close()
-
- #Write simpleFoam options
- start_index = foam.find_keyword_line(dict_lines, "SIMPLE") + 2
- added_part = ""
- added_part += " nNonOrthogonalCorrectors \t{};\n".format(num_non_orthogonal_correctors)
+ # Write simpleFoam options
+ start_index = foam.find_keyword_line(dict_lines, 'SIMPLE') + 2
+ added_part = ''
+ added_part += (
+ f' nNonOrthogonalCorrectors \t{num_non_orthogonal_correctors};\n'
+ )
dict_lines.insert(start_index, added_part)
-
- #Write pimpleFoam options
- start_index = foam.find_keyword_line(dict_lines, "PIMPLE") + 2
- added_part = ""
- added_part += " nOuterCorrectors \t{};\n".format(num_outer_correctors)
- added_part += " nCorrectors \t{};\n".format(num_correctors)
- added_part += " nNonOrthogonalCorrectors \t{};\n".format(num_non_orthogonal_correctors)
+ # Write pimpleFoam options
+ start_index = foam.find_keyword_line(dict_lines, 'PIMPLE') + 2
+ added_part = ''
+ added_part += f' nOuterCorrectors \t{num_outer_correctors};\n'
+ added_part += f' nCorrectors \t{num_correctors};\n'
+ added_part += (
+ f' nNonOrthogonalCorrectors \t{num_non_orthogonal_correctors};\n'
+ )
dict_lines.insert(start_index, added_part)
-
- #Write pisoFoam options
- start_index = foam.find_keyword_line(dict_lines, "PISO") + 2
- added_part = ""
- added_part += " nCorrectors \t{};\n".format(num_correctors)
- added_part += " nNonOrthogonalCorrectors \t{};\n".format(num_non_orthogonal_correctors)
+ # Write pisoFoam options
+ start_index = foam.find_keyword_line(dict_lines, 'PISO') + 2
+ added_part = ''
+ added_part += f' nCorrectors \t{num_correctors};\n'
+ added_part += (
+ f' nNonOrthogonalCorrectors \t{num_non_orthogonal_correctors};\n'
+ )
dict_lines.insert(start_index, added_part)
-
-
- #Write edited dict to file
- write_file_name = case_path + "/system/fvSolution"
-
- if os.path.exists(write_file_name):
- os.remove(write_file_name)
-
- output_file = open(write_file_name, "w+")
+
+ # Write edited dict to file
+ write_file_name = case_path + '/system/fvSolution'
+
+ if os.path.exists(write_file_name): # noqa: PTH110
+ os.remove(write_file_name) # noqa: PTH107
+
+ output_file = open(write_file_name, 'w+') # noqa: PLW1514, PTH123, SIM115
for line in dict_lines:
output_file.write(line)
- output_file.close()
-
+ output_file.close()
-def write_base_forces_file(input_json_path, template_dict_path, case_path):
- #Read JSON data
- with open(input_json_path + "/SurroundedBuildingCFD.json") as json_file:
- json_data = json.load(json_file)
+def write_base_forces_file(input_json_path, template_dict_path, case_path): # noqa: D103
+ # Read JSON data
+ with open(input_json_path + '/SurroundedBuildingCFD.json') as json_file: # noqa: PLW1514, PTH123
+ json_data = json.load(json_file)
air_density = 1.0
# Returns JSON object as a dictionary
- rm_data = json_data["resultMonitoring"]
+ rm_data = json_data['resultMonitoring']
- num_stories = rm_data['numStories']
- floor_height = rm_data['floorHeight']
+ num_stories = rm_data['numStories'] # noqa: F841
+ floor_height = rm_data['floorHeight'] # noqa: F841
center_of_rotation = rm_data['centerOfRotation']
base_load_write_interval = rm_data['baseLoadWriteInterval']
- monitor_base_load = rm_data['monitorBaseLoad']
+ monitor_base_load = rm_data['monitorBaseLoad'] # noqa: F841
-
- #Open the template file (OpenFOAM file) for manipulation
- dict_file = open(template_dict_path + "/baseForcesTemplate", "r")
+ # Open the template file (OpenFOAM file) for manipulation
+ dict_file = open(template_dict_path + '/baseForcesTemplate') # noqa: PLW1514, PTH123, SIM115
dict_lines = dict_file.readlines()
dict_file.close()
-
-
- #Write writeInterval
- start_index = foam.find_keyword_line(dict_lines, "writeInterval")
- dict_lines[start_index] = "writeInterval \t{};\n".format(base_load_write_interval)
-
- #Write patch name to intergrate forces on
- start_index = foam.find_keyword_line(dict_lines, "patches")
- dict_lines[start_index] = "patches \t({});\n".format("building")
-
- #Write air density to rhoInf
- start_index = foam.find_keyword_line(dict_lines, "rhoInf")
- dict_lines[start_index] = "rhoInf \t\t{:.4f};\n".format(air_density)
-
- #Write center of rotation
- start_index = foam.find_keyword_line(dict_lines, "CofR")
- dict_lines[start_index] = "CofR \t\t({:.4f} {:.4f} {:.4f});\n".format(center_of_rotation[0], center_of_rotation[1], center_of_rotation[2])
-
-
- #Write edited dict to file
- write_file_name = case_path + "/system/baseForces"
-
- if os.path.exists(write_file_name):
- os.remove(write_file_name)
-
- output_file = open(write_file_name, "w+")
+
+ # Write writeInterval
+ start_index = foam.find_keyword_line(dict_lines, 'writeInterval')
+ dict_lines[start_index] = f'writeInterval \t{base_load_write_interval};\n'
+
+ # Write patch name to integrate forces on
+ start_index = foam.find_keyword_line(dict_lines, 'patches')
+ dict_lines[start_index] = 'patches \t({});\n'.format('building')
+
+ # Write air density to rhoInf
+ start_index = foam.find_keyword_line(dict_lines, 'rhoInf')
+ dict_lines[start_index] = f'rhoInf \t\t{air_density:.4f};\n'
+
+ # Write center of rotation
+ start_index = foam.find_keyword_line(dict_lines, 'CofR')
+ dict_lines[start_index] = (
+ f'CofR \t\t({center_of_rotation[0]:.4f} {center_of_rotation[1]:.4f} {center_of_rotation[2]:.4f});\n'
+ )
+
+ # Write edited dict to file
+ write_file_name = case_path + '/system/baseForces'
+
+ if os.path.exists(write_file_name): # noqa: PTH110
+ os.remove(write_file_name) # noqa: PTH107
+
+ output_file = open(write_file_name, 'w+') # noqa: PLW1514, PTH123, SIM115
for line in dict_lines:
output_file.write(line)
output_file.close()
-
-def write_story_forces_file(input_json_path, template_dict_path, case_path):
- #Read JSON data
- with open(input_json_path + "/SurroundedBuildingCFD.json") as json_file:
- json_data = json.load(json_file)
+
+def write_story_forces_file(input_json_path, template_dict_path, case_path): # noqa: D103
+ # Read JSON data
+ with open(input_json_path + '/SurroundedBuildingCFD.json') as json_file: # noqa: PLW1514, PTH123
+ json_data = json.load(json_file)
air_density = 1.0
# Returns JSON object as a dictionary
- rm_data = json_data["resultMonitoring"]
+ rm_data = json_data['resultMonitoring']
num_stories = rm_data['numStories']
- floor_height = rm_data['floorHeight']
+ floor_height = rm_data['floorHeight'] # noqa: F841
center_of_rotation = rm_data['centerOfRotation']
story_load_write_interval = rm_data['storyLoadWriteInterval']
- monitor_base_load = rm_data['monitorBaseLoad']
+ monitor_base_load = rm_data['monitorBaseLoad'] # noqa: F841
-
- #Open the template file (OpenFOAM file) for manipulation
- dict_file = open(template_dict_path + "/storyForcesTemplate", "r")
+ # Open the template file (OpenFOAM file) for manipulation
+ dict_file = open(template_dict_path + '/storyForcesTemplate') # noqa: PLW1514, PTH123, SIM115
dict_lines = dict_file.readlines()
dict_file.close()
-
-
- #Write writeInterval
- start_index = foam.find_keyword_line(dict_lines, "writeInterval")
- dict_lines[start_index] = "writeInterval \t{};\n".format(story_load_write_interval)
-
- #Write patch name to intergrate forces on
- start_index = foam.find_keyword_line(dict_lines, "patches")
- dict_lines[start_index] = "patches \t({});\n".format("building")
-
- #Write air density to rhoInf
- start_index = foam.find_keyword_line(dict_lines, "rhoInf")
- dict_lines[start_index] = "rhoInf \t\t{:.4f};\n".format(air_density)
-
- #Write center of rotation
- start_index = foam.find_keyword_line(dict_lines, "CofR")
- dict_lines[start_index] = "CofR \t\t({:.4f} {:.4f} {:.4f});\n".format(center_of_rotation[0], center_of_rotation[1], center_of_rotation[2])
-
- #Number of stories as nBins
- start_index = foam.find_keyword_line(dict_lines, "nBin")
- dict_lines[start_index] = " nBin \t{};\n".format(num_stories)
-
- #Write story direction
- start_index = foam.find_keyword_line(dict_lines, "direction")
- dict_lines[start_index] = " direction \t({:.4f} {:.4f} {:.4f});\n".format(0, 0, 1.0)
-
- #Write edited dict to file
- write_file_name = case_path + "/system/storyForces"
-
- if os.path.exists(write_file_name):
- os.remove(write_file_name)
-
- output_file = open(write_file_name, "w+")
+
+ # Write writeInterval
+ start_index = foam.find_keyword_line(dict_lines, 'writeInterval')
+ dict_lines[start_index] = f'writeInterval \t{story_load_write_interval};\n'
+
+ # Write patch name to integrate forces on
+ start_index = foam.find_keyword_line(dict_lines, 'patches')
+ dict_lines[start_index] = 'patches \t({});\n'.format('building')
+
+ # Write air density to rhoInf
+ start_index = foam.find_keyword_line(dict_lines, 'rhoInf')
+ dict_lines[start_index] = f'rhoInf \t\t{air_density:.4f};\n'
+
+ # Write center of rotation
+ start_index = foam.find_keyword_line(dict_lines, 'CofR')
+ dict_lines[start_index] = (
+ f'CofR \t\t({center_of_rotation[0]:.4f} {center_of_rotation[1]:.4f} {center_of_rotation[2]:.4f});\n'
+ )
+
+ # Number of stories as nBins
+ start_index = foam.find_keyword_line(dict_lines, 'nBin')
+ dict_lines[start_index] = f' nBin \t{num_stories};\n'
+
+ # Write story direction
+ start_index = foam.find_keyword_line(dict_lines, 'direction')
+ dict_lines[start_index] = f' direction \t({0:.4f} {0:.4f} {1.0:.4f});\n'
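+ # The forces function object bins the building patch loads into num_stories
+ # segments along the vertical (z) direction, i.e. one bin per story.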
+
+ # Write edited dict to file
+ write_file_name = case_path + '/system/storyForces'
+
+ if os.path.exists(write_file_name): # noqa: PTH110
+ os.remove(write_file_name) # noqa: PTH107
+
+ output_file = open(write_file_name, 'w+') # noqa: PLW1514, PTH123, SIM115
for line in dict_lines:
output_file.write(line)
output_file.close()
-
-def write_pressure_probes_file(input_json_path, template_dict_path, case_path):
-
- #Read JSON data
- with open(input_json_path + "/SurroundedBuildingCFD.json") as json_file:
+def write_pressure_probes_file(input_json_path, template_dict_path, case_path): # noqa: D103
+ # Read JSON data
+ with open(input_json_path + '/SurroundedBuildingCFD.json') as json_file: # noqa: PLW1514, PTH123
json_data = json.load(json_file)
# Returns JSON object as a dictionary
- rm_data = json_data["resultMonitoring"]
+ rm_data = json_data['resultMonitoring']
pressure_sampling_points = rm_data['pressureSamplingPoints']
pressure_write_interval = rm_data['pressureWriteInterval']
- #Open the template file (OpenFOAM file) for manipulation
- dict_file = open(template_dict_path + "/probeTemplate", "r")
+ # Open the template file (OpenFOAM file) for manipulation
+ dict_file = open(template_dict_path + '/probeTemplate') # noqa: PLW1514, PTH123, SIM115
dict_lines = dict_file.readlines()
dict_file.close()
-
-
- #Write writeInterval
- start_index = foam.find_keyword_line(dict_lines, "writeInterval")
- dict_lines[start_index] = "writeInterval \t{};\n".format(pressure_write_interval)
-
- #Write fields to be motored
- start_index = foam.find_keyword_line(dict_lines, "fields")
- dict_lines[start_index] = "fields \t\t(p);\n"
-
- start_index = foam.find_keyword_line(dict_lines, "probeLocations") + 2
-
- added_part = ""
-
+
+ # Write writeInterval
+ start_index = foam.find_keyword_line(dict_lines, 'writeInterval')
+ dict_lines[start_index] = f'writeInterval \t{pressure_write_interval};\n'
+
+ # Write fields to be monitored
+ start_index = foam.find_keyword_line(dict_lines, 'fields')
+ dict_lines[start_index] = 'fields \t\t(p);\n'
+
+ start_index = foam.find_keyword_line(dict_lines, 'probeLocations') + 2
+
+ added_part = ''
+
for i in range(len(pressure_sampling_points)):
- added_part += " ({:.6f} {:.6f} {:.6f})\n".format(pressure_sampling_points[i][0], pressure_sampling_points[i][1], pressure_sampling_points[i][2])
-
+ added_part += f' ({pressure_sampling_points[i][0]:.6f} {pressure_sampling_points[i][1]:.6f} {pressure_sampling_points[i][2]:.6f})\n'
+
dict_lines.insert(start_index, added_part)
- #Write edited dict to file
- write_file_name = case_path + "/system/pressureSamplingPoints"
-
- if os.path.exists(write_file_name):
- os.remove(write_file_name)
-
- output_file = open(write_file_name, "w+")
+ # Write edited dict to file
+ write_file_name = case_path + '/system/pressureSamplingPoints'
+
+ if os.path.exists(write_file_name): # noqa: PTH110
+ os.remove(write_file_name) # noqa: PTH107
+
+ output_file = open(write_file_name, 'w+') # noqa: PLW1514, PTH123, SIM115
for line in dict_lines:
output_file.write(line)
output_file.close()
-
-
-
-def write_wind_profiles_file(input_json_path, template_dict_path, case_path):
- #Read JSON data
- with open(input_json_path + "/SurroundedBuildingCFD.json") as json_file:
- json_data = json.load(json_file)
+
+def write_wind_profiles_file(input_json_path, template_dict_path, case_path): # noqa: C901, D103
+ # Read JSON data
+ with open(input_json_path + '/SurroundedBuildingCFD.json') as json_file: # noqa: PLW1514, PTH123
+ json_data = json.load(json_file)
# Returns JSON object as a dictionary
- rm_data = json_data["resultMonitoring"]
+ rm_data = json_data['resultMonitoring']
- ns_data = json_data["numericalSetup"]
+ ns_data = json_data['numericalSetup']
solver_type = ns_data['solverType']
time_step = ns_data['timeStep']
@@ -1671,513 +1654,506 @@ def write_wind_profiles_file(input_json_path, template_dict_path, case_path):
write_interval = rm_data['profileWriteInterval']
start_time = rm_data['profileStartTime']
- if rm_data['monitorWindProfile'] == False:
- return
-
- if len(wind_profiles)==0:
+ if rm_data['monitorWindProfile'] == False: # noqa: E712
+ return
+
+ if len(wind_profiles) == 0:
return
- #Write dict files for wind profiles
+ # Write dict files for wind profiles
for prof in wind_profiles:
- #Open the template file (OpenFOAM file) for manipulation
- dict_file = open(template_dict_path + "/probeTemplate", "r")
+ # Open the template file (OpenFOAM file) for manipulation
+ dict_file = open(template_dict_path + '/probeTemplate') # noqa: PLW1514, PTH123, SIM115
dict_lines = dict_file.readlines()
dict_file.close()
-
- #Write writeControl
- start_index = foam.find_keyword_line(dict_lines, "writeControl")
- if solver_type=="pimpleFoam":
- dict_lines[start_index] = " writeControl \t{};\n".format("adjustableRunTime")
- else:
- dict_lines[start_index] = " writeControl \t{};\n".format("timeStep")
- #Write writeInterval
- start_index = foam.find_keyword_line(dict_lines, "writeInterval")
- if solver_type=="pimpleFoam":
- dict_lines[start_index] = " writeInterval \t{:.6f};\n".format(write_interval*time_step)
+ # Write writeControl
+ start_index = foam.find_keyword_line(dict_lines, 'writeControl')
+ if solver_type == 'pimpleFoam':
+ dict_lines[start_index] = ' writeControl \t{};\n'.format(
+ 'adjustableRunTime'
+ )
+ else:
+ dict_lines[start_index] = ' writeControl \t{};\n'.format('timeStep')
+
+ # Write writeInterval
+ start_index = foam.find_keyword_line(dict_lines, 'writeInterval')
+ if solver_type == 'pimpleFoam':
+ dict_lines[start_index] = (
+ f' writeInterval \t{write_interval * time_step:.6f};\n'
+ )
else:
- dict_lines[start_index] = " writeInterval \t{};\n".format(write_interval)
-
- #Write start time for the probes
- start_index = foam.find_keyword_line(dict_lines, "timeStart")
- dict_lines[start_index] = " timeStart \t\t{:.6f};\n".format(start_time)
-
- #Write name of the profile
- name = prof["name"]
- start_index = foam.find_keyword_line(dict_lines, "profileName")
- dict_lines[start_index] = "{}\n".format(name)
-
- #Write field type
- field_type = prof["field"]
- start_index = foam.find_keyword_line(dict_lines, "fields")
-
- if field_type=="Velocity":
- dict_lines[start_index] = " fields \t\t({});\n".format("U")
- if field_type=="Pressure":
- dict_lines[start_index] = " fields \t\t({});\n".format("p")
-
- #Write point coordinates
- start_x = prof["startX"]
- start_y = prof["startY"]
- start_z = prof["startZ"]
-
- end_x = prof["endX"]
- end_y = prof["endY"]
- end_z = prof["endZ"]
- n_points = prof["nPoints"]
-
- dx = (end_x - start_x)/n_points
- dy = (end_y - start_y)/n_points
- dz = (end_z - start_z)/n_points
-
- #Write locations of the probes
- start_index = foam.find_keyword_line(dict_lines, "probeLocations") + 2
- added_part = ""
-
- for pi in range(n_points):
- added_part += " ({:.6f} {:.6f} {:.6f})\n".format(start_x + pi*dx, start_y + pi*dy, start_z + pi*dz)
-
- dict_lines.insert(start_index, added_part)
-
- #Write edited dict to file
- write_file_name = case_path + "/system/" + name
-
- if os.path.exists(write_file_name):
- os.remove(write_file_name)
-
- output_file = open(write_file_name, "w+")
+ dict_lines[start_index] = f' writeInterval \t{write_interval};\n'
+
+ # Write start time for the probes
+ start_index = foam.find_keyword_line(dict_lines, 'timeStart')
+ dict_lines[start_index] = f' timeStart \t\t{start_time:.6f};\n'
+
+ # Write name of the profile
+ name = prof['name']
+ start_index = foam.find_keyword_line(dict_lines, 'profileName')
+ dict_lines[start_index] = f'{name}\n'
+
+ # Write field type
+ field_type = prof['field']
+ start_index = foam.find_keyword_line(dict_lines, 'fields')
+
+ if field_type == 'Velocity':
+ dict_lines[start_index] = ' fields \t\t({});\n'.format('U')
+ if field_type == 'Pressure':
+ dict_lines[start_index] = ' fields \t\t({});\n'.format('p')
+
+ # Write point coordinates
+ start_x = prof['startX']
+ start_y = prof['startY']
+ start_z = prof['startZ']
+
+ end_x = prof['endX']
+ end_y = prof['endY']
+ end_z = prof['endZ']
+ n_points = prof['nPoints']
+
+ dx = (end_x - start_x) / n_points
+ dy = (end_y - start_y) / n_points
+ dz = (end_z - start_z) / n_points
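+ # Probes are placed at n_points evenly spaced locations along the line
+ # from the start point toward the end point.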
+
+ # Write locations of the probes
+ start_index = foam.find_keyword_line(dict_lines, 'probeLocations') + 2
+ added_part = ''
+
+ for pi in range(n_points):
+ added_part += f' ({start_x + pi * dx:.6f} {start_y + pi * dy:.6f} {start_z + pi * dz:.6f})\n'
+
+ dict_lines.insert(start_index, added_part)
+
+ # Write edited dict to file
+ write_file_name = case_path + '/system/' + name
+
+ if os.path.exists(write_file_name): # noqa: PTH110
+ os.remove(write_file_name) # noqa: PTH107
+
+ output_file = open(write_file_name, 'w+') # noqa: PLW1514, PTH123, SIM115
for line in dict_lines:
output_file.write(line)
output_file.close()
-
-def write_vtk_plane_file(input_json_path, template_dict_path, case_path):
- #Read JSON data
- with open(input_json_path + "/SurroundedBuildingCFD.json") as json_file:
- json_data = json.load(json_file)
+
+def write_vtk_plane_file(input_json_path, template_dict_path, case_path): # noqa: C901, D103
+ # Read JSON data
+ with open(input_json_path + '/SurroundedBuildingCFD.json') as json_file: # noqa: PLW1514, PTH123
+ json_data = json.load(json_file)
# Returns JSON object as a dictionary
- rm_data = json_data["resultMonitoring"]
- ns_data = json_data["numericalSetup"]
+ rm_data = json_data['resultMonitoring']
+ ns_data = json_data['numericalSetup']
solver_type = ns_data['solverType']
time_step = ns_data['timeStep']
-
vtk_planes = rm_data['vtkPlanes']
write_interval = rm_data['vtkWriteInterval']
- if rm_data['monitorVTKPlane'] == False:
- return
-
- if len(vtk_planes)==0:
+ if rm_data['monitorVTKPlane'] == False: # noqa: E712
+ return
+
+ if len(vtk_planes) == 0:
return
- #Write dict files for wind profiles
+ # Write dict files for the VTK sampling planes
for pln in vtk_planes:
- #Open the template file (OpenFOAM file) for manipulation
- dict_file = open(template_dict_path + "/vtkPlaneTemplate", "r")
+ # Open the template file (OpenFOAM file) for manipulation
+ dict_file = open(template_dict_path + '/vtkPlaneTemplate') # noqa: PLW1514, PTH123, SIM115
dict_lines = dict_file.readlines()
dict_file.close()
-
- #Write writeControl
- start_index = foam.find_keyword_line(dict_lines, "writeControl")
- if solver_type=="pimpleFoam":
- dict_lines[start_index] = " writeControl \t{};\n".format("adjustableRunTime")
- else:
- dict_lines[start_index] = " writeControl \t{};\n".format("timeStep")
- #Write writeInterval
- start_index = foam.find_keyword_line(dict_lines, "writeInterval")
- if solver_type=="pimpleFoam":
- dict_lines[start_index] = " writeInterval \t{:.6f};\n".format(write_interval*time_step)
+ # Write writeControl
+ start_index = foam.find_keyword_line(dict_lines, 'writeControl')
+ if solver_type == 'pimpleFoam':
+ dict_lines[start_index] = ' writeControl \t{};\n'.format(
+ 'adjustableRunTime'
+ )
+ else:
+ dict_lines[start_index] = ' writeControl \t{};\n'.format('timeStep')
+
+ # Write writeInterval
+ start_index = foam.find_keyword_line(dict_lines, 'writeInterval')
+ if solver_type == 'pimpleFoam':
+ dict_lines[start_index] = (
+ f' writeInterval \t{write_interval * time_step:.6f};\n'
+ )
else:
- dict_lines[start_index] = " writeInterval \t{};\n".format(write_interval)
+ dict_lines[start_index] = f' writeInterval \t{write_interval};\n'
- #Write start and end time for the section
+ # Write start and end time for the section
start_time = pln['startTime']
end_time = pln['endTime']
- start_index = foam.find_keyword_line(dict_lines, "timeStart")
- dict_lines[start_index] = " timeStart \t\t{:.6f};\n".format(start_time)
-
- start_index = foam.find_keyword_line(dict_lines, "timeEnd")
- dict_lines[start_index] = " timeEnd \t\t{:.6f};\n".format(end_time)
-
- #Write name of the profile
- name = pln["name"]
- start_index = foam.find_keyword_line(dict_lines, "planeName")
- dict_lines[start_index] = "{}\n".format(name)
-
- #Write field type
- field_type = pln["field"]
- start_index = foam.find_keyword_line(dict_lines, "fields")
-
- if field_type=="Velocity":
- dict_lines[start_index] = " fields \t\t({});\n".format("U")
- if field_type=="Pressure":
- dict_lines[start_index] = " fields \t\t({});\n".format("p")
-
- #Write normal and point coordinates
- point_x = pln["pointX"]
- point_y = pln["pointY"]
- point_z = pln["pointZ"]
-
- normal_axis = pln["normalAxis"]
-
- start_index = foam.find_keyword_line(dict_lines, "point")
- dict_lines[start_index] = "\t point\t\t({:.6f} {:.6f} {:.6f});\n".format(point_x, point_y, point_z)
-
- start_index = foam.find_keyword_line(dict_lines, "normal")
- if normal_axis=="X":
- dict_lines[start_index] = "\t normal\t\t({} {} {});\n".format(1, 0, 0)
- if normal_axis=="Y":
- dict_lines[start_index] = "\t normal\t\t({} {} {});\n".format(0, 1, 0)
- if normal_axis=="Z":
- dict_lines[start_index] = "\t normal\t\t({} {} {});\n".format(0, 0, 1)
-
- #Write edited dict to file
- write_file_name = case_path + "/system/" + name
-
- if os.path.exists(write_file_name):
- os.remove(write_file_name)
-
- output_file = open(write_file_name, "w+")
+ start_index = foam.find_keyword_line(dict_lines, 'timeStart')
+ dict_lines[start_index] = f' timeStart \t\t{start_time:.6f};\n'
+
+ start_index = foam.find_keyword_line(dict_lines, 'timeEnd')
+ dict_lines[start_index] = f' timeEnd \t\t{end_time:.6f};\n'
+
+        # Write name of the plane
+ name = pln['name']
+ start_index = foam.find_keyword_line(dict_lines, 'planeName')
+ dict_lines[start_index] = f'{name}\n'
+
+ # Write field type
+ field_type = pln['field']
+ start_index = foam.find_keyword_line(dict_lines, 'fields')
+
+ if field_type == 'Velocity':
+ dict_lines[start_index] = ' fields \t\t({});\n'.format('U')
+ if field_type == 'Pressure':
+ dict_lines[start_index] = ' fields \t\t({});\n'.format('p')
+
+ # Write normal and point coordinates
+ point_x = pln['pointX']
+ point_y = pln['pointY']
+ point_z = pln['pointZ']
+
+ normal_axis = pln['normalAxis']
+
+ start_index = foam.find_keyword_line(dict_lines, 'point')
+ dict_lines[start_index] = (
+ f'\t point\t\t({point_x:.6f} {point_y:.6f} {point_z:.6f});\n'
+ )
+
+ start_index = foam.find_keyword_line(dict_lines, 'normal')
+        if normal_axis == 'X':
+            dict_lines[start_index] = '\t normal\t\t(1 0 0);\n'
+        if normal_axis == 'Y':
+            dict_lines[start_index] = '\t normal\t\t(0 1 0);\n'
+        if normal_axis == 'Z':
+            dict_lines[start_index] = '\t normal\t\t(0 0 1);\n'
+
+ # Write edited dict to file
+ write_file_name = case_path + '/system/' + name
+
+ if os.path.exists(write_file_name): # noqa: PTH110
+ os.remove(write_file_name) # noqa: PTH107
+
+ output_file = open(write_file_name, 'w+') # noqa: PLW1514, PTH123, SIM115
for line in dict_lines:
output_file.write(line)
output_file.close()
-
-
-def write_momentumTransport_file(input_json_path, template_dict_path, case_path):
- #Read JSON data
- with open(input_json_path + "/SurroundedBuildingCFD.json") as json_file:
- json_data = json.load(json_file)
+
+def write_momentumTransport_file(input_json_path, template_dict_path, case_path): # noqa: N802, D103
+ # Read JSON data
+ with open(input_json_path + '/SurroundedBuildingCFD.json') as json_file: # noqa: PLW1514, PTH123
+ json_data = json.load(json_file)
# Returns JSON object as a dictionary
- turb_data = json_data["turbulenceModeling"]
-
+ turb_data = json_data['turbulenceModeling']
+
simulation_type = turb_data['simulationType']
- RANS_type = turb_data['RANSModelType']
- LES_type = turb_data['LESModelType']
- DES_type = turb_data['DESModelType']
+ RANS_type = turb_data['RANSModelType'] # noqa: N806
+ LES_type = turb_data['LESModelType'] # noqa: N806
+ DES_type = turb_data['DESModelType'] # noqa: N806
-
- #Open the template file (OpenFOAM file) for manipulation
- dict_file = open(template_dict_path + "/momentumTransportTemplate", "r")
+ # Open the template file (OpenFOAM file) for manipulation
+ dict_file = open(template_dict_path + '/momentumTransportTemplate') # noqa: PLW1514, PTH123, SIM115
dict_lines = dict_file.readlines()
dict_file.close()
-
- #Write type of the simulation
- start_index = foam.find_keyword_line(dict_lines, "simulationType")
- dict_lines[start_index] = "simulationType \t{};\n".format("RAS" if simulation_type=="RANS" else simulation_type)
-
- if simulation_type=="RANS":
- #Write RANS model type
- start_index = foam.find_keyword_line(dict_lines, "RAS") + 2
- added_part = " model \t{};\n".format(RANS_type)
+
+ # Write type of the simulation
+ start_index = foam.find_keyword_line(dict_lines, 'simulationType')
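+    # OpenFOAM expects the keyword 'RAS' (rather than 'RANS') for Reynolds-averaged simulations, hence the mapping below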
+ dict_lines[start_index] = 'simulationType \t{};\n'.format(
+ 'RAS' if simulation_type == 'RANS' else simulation_type
+ )
+
+ if simulation_type == 'RANS':
+ # Write RANS model type
+ start_index = foam.find_keyword_line(dict_lines, 'RAS') + 2
+ added_part = f' model \t{RANS_type};\n'
dict_lines.insert(start_index, added_part)
-
- elif simulation_type=="LES":
- #Write LES SGS model type
- start_index = foam.find_keyword_line(dict_lines, "LES") + 2
- added_part = " model \t{};\n".format(LES_type)
+
+ elif simulation_type == 'LES':
+ # Write LES SGS model type
+ start_index = foam.find_keyword_line(dict_lines, 'LES') + 2
+ added_part = f' model \t{LES_type};\n'
dict_lines.insert(start_index, added_part)
-
- elif simulation_type=="DES":
- #Write DES model type
- start_index = foam.find_keyword_line(dict_lines, "LES") + 2
- added_part = " model \t{};\n".format(DES_type)
+
+ elif simulation_type == 'DES':
+ # Write DES model type
+ start_index = foam.find_keyword_line(dict_lines, 'LES') + 2
+ added_part = f' model \t{DES_type};\n'
dict_lines.insert(start_index, added_part)
- #Write edited dict to file
- write_file_name = case_path + "/constant/momentumTransport"
-
- if os.path.exists(write_file_name):
- os.remove(write_file_name)
-
- output_file = open(write_file_name, "w+")
+ # Write edited dict to file
+ write_file_name = case_path + '/constant/momentumTransport'
+
+ if os.path.exists(write_file_name): # noqa: PTH110
+ os.remove(write_file_name) # noqa: PTH107
+
+ output_file = open(write_file_name, 'w+') # noqa: PLW1514, PTH123, SIM115
for line in dict_lines:
output_file.write(line)
output_file.close()
-
-def write_physicalProperties_file(input_json_path, template_dict_path, case_path):
- #Read JSON data
- with open(input_json_path + "/SurroundedBuildingCFD.json") as json_file:
- json_data = json.load(json_file)
+
+def write_physicalProperties_file(input_json_path, template_dict_path, case_path): # noqa: N802, D103
+ # Read JSON data
+ with open(input_json_path + '/SurroundedBuildingCFD.json') as json_file: # noqa: PLW1514, PTH123
+ json_data = json.load(json_file)
# Returns JSON object as a dictionary
- wc_data = json_data["windCharacteristics"]
-
-
+ wc_data = json_data['windCharacteristics']
+
kinematic_viscosity = wc_data['kinematicViscosity']
-
- #Open the template file (OpenFOAM file) for manipulation
- dict_file = open(template_dict_path + "/physicalPropertiesTemplate", "r")
+ # Open the template file (OpenFOAM file) for manipulation
+ dict_file = open(template_dict_path + '/physicalPropertiesTemplate') # noqa: PLW1514, PTH123, SIM115
dict_lines = dict_file.readlines()
dict_file.close()
-
- #Write type of the simulation
- start_index = foam.find_keyword_line(dict_lines, "nu")
- dict_lines[start_index] = "nu\t\t[0 2 -1 0 0 0 0] {:.4e};\n".format(kinematic_viscosity)
+    # Write the kinematic viscosity
+ start_index = foam.find_keyword_line(dict_lines, 'nu')
+ dict_lines[start_index] = f'nu\t\t[0 2 -1 0 0 0 0] {kinematic_viscosity:.4e};\n'
+ # Write edited dict to file
+ write_file_name = case_path + '/constant/physicalProperties'
- #Write edited dict to file
- write_file_name = case_path + "/constant/physicalProperties"
-
- if os.path.exists(write_file_name):
- os.remove(write_file_name)
-
- output_file = open(write_file_name, "w+")
+ if os.path.exists(write_file_name): # noqa: PTH110
+ os.remove(write_file_name) # noqa: PTH107
+
+ output_file = open(write_file_name, 'w+') # noqa: PLW1514, PTH123, SIM115
for line in dict_lines:
output_file.write(line)
output_file.close()
-
-def write_transportProperties_file(input_json_path, template_dict_path, case_path):
- #Read JSON data
- with open(input_json_path + "/SurroundedBuildingCFD.json") as json_file:
- json_data = json.load(json_file)
+def write_transportProperties_file(input_json_path, template_dict_path, case_path): # noqa: N802, D103
+ # Read JSON data
+ with open(input_json_path + '/SurroundedBuildingCFD.json') as json_file: # noqa: PLW1514, PTH123
+ json_data = json.load(json_file)
# Returns JSON object as a dictionary
- wc_data = json_data["windCharacteristics"]
-
-
+ wc_data = json_data['windCharacteristics']
+
kinematic_viscosity = wc_data['kinematicViscosity']
-
- #Open the template file (OpenFOAM file) for manipulation
- dict_file = open(template_dict_path + "/transportPropertiesTemplate", "r")
+ # Open the template file (OpenFOAM file) for manipulation
+ dict_file = open(template_dict_path + '/transportPropertiesTemplate') # noqa: PLW1514, PTH123, SIM115
dict_lines = dict_file.readlines()
dict_file.close()
-
- #Write type of the simulation
- start_index = foam.find_keyword_line(dict_lines, "nu")
- dict_lines[start_index] = "nu\t\t[0 2 -1 0 0 0 0] {:.3e};\n".format(kinematic_viscosity)
+    # Write the kinematic viscosity
+ start_index = foam.find_keyword_line(dict_lines, 'nu')
+ dict_lines[start_index] = f'nu\t\t[0 2 -1 0 0 0 0] {kinematic_viscosity:.3e};\n'
+ # Write edited dict to file
+ write_file_name = case_path + '/constant/transportProperties'
- #Write edited dict to file
- write_file_name = case_path + "/constant/transportProperties"
-
- if os.path.exists(write_file_name):
- os.remove(write_file_name)
-
- output_file = open(write_file_name, "w+")
+ if os.path.exists(write_file_name): # noqa: PTH110
+ os.remove(write_file_name) # noqa: PTH107
+
+ output_file = open(write_file_name, 'w+') # noqa: PLW1514, PTH123, SIM115
for line in dict_lines:
output_file.write(line)
output_file.close()
-def write_fvSchemes_file(input_json_path, template_dict_path, case_path):
- #Read JSON data
- with open(input_json_path + "/SurroundedBuildingCFD.json") as json_file:
- json_data = json.load(json_file)
+def write_fvSchemes_file(input_json_path, template_dict_path, case_path): # noqa: N802, D103
+ # Read JSON data
+ with open(input_json_path + '/SurroundedBuildingCFD.json') as json_file: # noqa: PLW1514, PTH123
+ json_data = json.load(json_file)
# Returns JSON object as a dictionary
- turb_data = json_data["turbulenceModeling"]
-
-
+ turb_data = json_data['turbulenceModeling']
+
simulation_type = turb_data['simulationType']
-
- #Open the template file (OpenFOAM file) for manipulation
- dict_file = open(template_dict_path + "/fvSchemesTemplate{}".format(simulation_type), "r")
+ # Open the template file (OpenFOAM file) for manipulation
+ dict_file = open(template_dict_path + f'/fvSchemesTemplate{simulation_type}') # noqa: PLW1514, PTH123, SIM115
dict_lines = dict_file.readlines()
dict_file.close()
-
-
- #Write edited dict to file
- write_file_name = case_path + "/system/fvSchemes"
-
- if os.path.exists(write_file_name):
- os.remove(write_file_name)
-
- output_file = open(write_file_name, "w+")
+
+ # Write edited dict to file
+ write_file_name = case_path + '/system/fvSchemes'
+
+ if os.path.exists(write_file_name): # noqa: PTH110
+ os.remove(write_file_name) # noqa: PTH107
+
+ output_file = open(write_file_name, 'w+') # noqa: PLW1514, PTH123, SIM115
for line in dict_lines:
output_file.write(line)
- output_file.close()
-
-def write_decomposeParDict_file(input_json_path, template_dict_path, case_path):
+ output_file.close()
+
- #Read JSON data
- with open(input_json_path + "/SurroundedBuildingCFD.json") as json_file:
- json_data = json.load(json_file)
+def write_decomposeParDict_file(input_json_path, template_dict_path, case_path): # noqa: N802, D103
+ # Read JSON data
+ with open(input_json_path + '/SurroundedBuildingCFD.json') as json_file: # noqa: PLW1514, PTH123
+ json_data = json.load(json_file)
# Returns JSON object as a dictionary
- ns_data = json_data["numericalSetup"]
-
+ ns_data = json_data['numericalSetup']
+
num_processors = ns_data['numProcessors']
-
- #Open the template file (OpenFOAM file) for manipulation
- dict_file = open(template_dict_path + "/decomposeParDictTemplate", "r")
+ # Open the template file (OpenFOAM file) for manipulation
+ dict_file = open(template_dict_path + '/decomposeParDictTemplate') # noqa: PLW1514, PTH123, SIM115
dict_lines = dict_file.readlines()
dict_file.close()
-
- #Write number of sub-domains
- start_index = foam.find_keyword_line(dict_lines, "numberOfSubdomains")
- dict_lines[start_index] = "numberOfSubdomains\t{};\n".format(num_processors)
-
- #Write method of decomposition
- start_index = foam.find_keyword_line(dict_lines, "decomposer")
- dict_lines[start_index] = "decomposer\t\t{};\n".format("scotch")
-
- #Write method of decomposition for OF-V9 and lower compatability
- start_index = foam.find_keyword_line(dict_lines, "method")
- dict_lines[start_index] = "method\t\t{};\n".format("scotch")
-
-
- #Write edited dict to file
- write_file_name = case_path + "/system/decomposeParDict"
-
- if os.path.exists(write_file_name):
- os.remove(write_file_name)
-
- output_file = open(write_file_name, "w+")
+
+ # Write number of sub-domains
+ start_index = foam.find_keyword_line(dict_lines, 'numberOfSubdomains')
+ dict_lines[start_index] = f'numberOfSubdomains\t{num_processors};\n'
+
+ # Write method of decomposition
+ start_index = foam.find_keyword_line(dict_lines, 'decomposer')
+ dict_lines[start_index] = 'decomposer\t\t{};\n'.format('scotch')
+
+ # Write method of decomposition for OF-V9 and lower compatibility
+ start_index = foam.find_keyword_line(dict_lines, 'method')
+ dict_lines[start_index] = 'method\t\t{};\n'.format('scotch')
+
+ # Write edited dict to file
+ write_file_name = case_path + '/system/decomposeParDict'
+
+ if os.path.exists(write_file_name): # noqa: PTH110
+ os.remove(write_file_name) # noqa: PTH107
+
+ output_file = open(write_file_name, 'w+') # noqa: PLW1514, PTH123, SIM115
for line in dict_lines:
output_file.write(line)
- output_file.close()
-
-def write_DFSRTurbDict_file(input_json_path, template_dict_path, case_path):
-
- #Read JSON data
- with open(input_json_path + "/SurroundedBuildingCFD.json") as json_file:
- json_data = json.load(json_file)
-
+ output_file.close()
+
+
+def write_DFSRTurbDict_file(input_json_path, template_dict_path, case_path): # noqa: N802, D103
+ # Read JSON data
+ with open(input_json_path + '/SurroundedBuildingCFD.json') as json_file: # noqa: PLW1514, PTH123
+ json_data = json.load(json_file)
+
fmax = 200.0
# Returns JSON object as a dictionary
- wc_data = json_data["windCharacteristics"]
- ns_data = json_data["numericalSetup"]
-
+ wc_data = json_data['windCharacteristics']
+ ns_data = json_data['numericalSetup']
+
wind_speed = wc_data['referenceWindSpeed']
duration = ns_data['duration']
-
- #Generate a little longer duration to be safe
- duration = duration*1.010
- #Open the template file (OpenFOAM file) for manipulation
- dict_file = open(template_dict_path + "/DFSRTurbDictTemplate", "r")
+ # Generate a little longer duration to be safe
+ duration = duration * 1.010 # noqa: PLR6104
+
+ # Open the template file (OpenFOAM file) for manipulation
+ dict_file = open(template_dict_path + '/DFSRTurbDictTemplate') # noqa: PLW1514, PTH123, SIM115
dict_lines = dict_file.readlines()
dict_file.close()
-
- #Write the end time
- start_index = foam.find_keyword_line(dict_lines, "endTime")
- dict_lines[start_index] = "endTime\t\t\t{:.4f};\n".format(duration)
-
- #Write patch name
- start_index = foam.find_keyword_line(dict_lines, "patchName")
- dict_lines[start_index] = "patchName\t\t\"{}\";\n".format("inlet")
-
- #Write cohUav
- start_index = foam.find_keyword_line(dict_lines, "cohUav")
- dict_lines[start_index] = "cohUav\t\t\t{:.4f};\n".format(wind_speed)
-
- #Write fmax
- start_index = foam.find_keyword_line(dict_lines, "fMax")
- dict_lines[start_index] = "fMax\t\t\t{:.4f};\n".format(fmax)
-
- #Write time step
- start_index = foam.find_keyword_line(dict_lines, "timeStep")
- dict_lines[start_index] = "timeStep\t\t{:.4f};\n".format(1.0/fmax)
-
- #Write edited dict to file
- write_file_name = case_path + "/constant/DFSRTurbDict"
-
- if os.path.exists(write_file_name):
- os.remove(write_file_name)
-
- output_file = open(write_file_name, "w+")
+
+ # Write the end time
+ start_index = foam.find_keyword_line(dict_lines, 'endTime')
+ dict_lines[start_index] = f'endTime\t\t\t{duration:.4f};\n'
+
+ # Write patch name
+ start_index = foam.find_keyword_line(dict_lines, 'patchName')
+ dict_lines[start_index] = 'patchName\t\t"{}";\n'.format('inlet')
+
+ # Write cohUav
+ start_index = foam.find_keyword_line(dict_lines, 'cohUav')
+ dict_lines[start_index] = f'cohUav\t\t\t{wind_speed:.4f};\n'
+
+ # Write fmax
+ start_index = foam.find_keyword_line(dict_lines, 'fMax')
+ dict_lines[start_index] = f'fMax\t\t\t{fmax:.4f};\n'
+
+ # Write time step
+ start_index = foam.find_keyword_line(dict_lines, 'timeStep')
+ dict_lines[start_index] = f'timeStep\t\t{1.0 / fmax:.4f};\n'
+
+ # Write edited dict to file
+ write_file_name = case_path + '/constant/DFSRTurbDict'
+
+ if os.path.exists(write_file_name): # noqa: PTH110
+ os.remove(write_file_name) # noqa: PTH107
+
+ output_file = open(write_file_name, 'w+') # noqa: PLW1514, PTH123, SIM115
for line in dict_lines:
output_file.write(line)
- output_file.close()
-
+ output_file.close()
+
-if __name__ == '__main__':
-
+if __name__ == '__main__':
input_args = sys.argv
# Set filenames
input_json_path = sys.argv[1]
template_dict_path = sys.argv[2]
- case_path = sys.argv[3]
-
- #Read JSON data
- with open(input_json_path + "/SurroundedBuildingCFD.json") as json_file:
- json_data = json.load(json_file)
+ case_path = sys.argv[3]
+
+ # Read JSON data
+ with open(input_json_path + '/SurroundedBuildingCFD.json') as json_file: # noqa: PLW1514, PTH123
+ json_data = json.load(json_file)
# Returns JSON object as a dictionary
- turb_data = json_data["turbulenceModeling"]
-
+ turb_data = json_data['turbulenceModeling']
+
simulation_type = turb_data['simulationType']
RANS_type = turb_data['RANSModelType']
LES_type = turb_data['LESModelType']
-
- #Write blockMesh
+
+ # Write blockMesh
write_block_mesh_dict(input_json_path, template_dict_path, case_path)
- #Create and write the main building "*.stl" file
+ # Create and write the main building "*.stl" file
write_main_building_stl_file(input_json_path, case_path)
-
- #Write surrounding building STL file
+
+ # Write surrounding building STL file
write_surrounding_buildings_stl_file(input_json_path, case_path)
- #Write surfaceFeaturesDict file
+ # Write surfaceFeaturesDict file
write_surfaceFeaturesDict_file(input_json_path, template_dict_path, case_path)
- #Create and write the SnappyHexMeshDict file
+ # Create and write the SnappyHexMeshDict file
write_snappy_hex_mesh_dict(input_json_path, template_dict_path, case_path)
-
- #Write files in "0" directory
+ # Write files in "0" directory
write_U_file(input_json_path, template_dict_path, case_path)
write_p_file(input_json_path, template_dict_path, case_path)
write_nut_file(input_json_path, template_dict_path, case_path)
write_k_file(input_json_path, template_dict_path, case_path)
-
- if simulation_type == "RANS" and RANS_type=="kEpsilon":
+
+ if simulation_type == 'RANS' and RANS_type == 'kEpsilon':
write_epsilon_file(input_json_path, template_dict_path, case_path)
- #Write control dict
+ # Write control dict
write_controlDict_file(input_json_path, template_dict_path, case_path)
-
- #Write results to be monitored
+
+ # Write results to be monitored
# write_wind_profiles_file(input_json_path, template_dict_path, case_path)
# write_vtk_plane_file(input_json_path, template_dict_path, case_path)
write_base_forces_file(input_json_path, template_dict_path, case_path)
write_story_forces_file(input_json_path, template_dict_path, case_path)
- #Write fvSolution dict
+ # Write fvSolution dict
write_fvSolution_file(input_json_path, template_dict_path, case_path)
- #Write fvSchemes dict
+ # Write fvSchemes dict
write_fvSchemes_file(input_json_path, template_dict_path, case_path)
- #Write momentumTransport dict
+ # Write momentumTransport dict
write_momentumTransport_file(input_json_path, template_dict_path, case_path)
-
- #Write physicalProperties dict
+
+ # Write physicalProperties dict
write_physicalProperties_file(input_json_path, template_dict_path, case_path)
-
- #Write transportProperties (physicalProperties in OF-10) dict for OpenFOAM-9 and below
+
+ # Write transportProperties (physicalProperties in OF-10) dict for OpenFOAM-9 and below
write_transportProperties_file(input_json_path, template_dict_path, case_path)
-
- #Write decomposeParDict
+
+ # Write decomposeParDict
write_decomposeParDict_file(input_json_path, template_dict_path, case_path)
-
- #Write DFSRTurb dict
+
+ # Write DFSRTurb dict
# write_DFSRTurbDict_file(input_json_path, template_dict_path, case_path)
-
- #Write TInf files
+
+ # Write TInf files
write_boundary_data_files(input_json_path, case_path)
diff --git a/modules/createEVENT/SurroundedBuildingCFD/setup_case_multiple_stl_files.py b/modules/createEVENT/SurroundedBuildingCFD/setup_case_multiple_stl_files.py
index 38421c5f1..805c98f42 100644
--- a/modules/createEVENT/SurroundedBuildingCFD/setup_case_multiple_stl_files.py
+++ b/modules/createEVENT/SurroundedBuildingCFD/setup_case_multiple_stl_files.py
@@ -1,85 +1,99 @@
-"""
-This script writes BC and initial condition, and setups the OpenFoam case
+"""This script writes BC and initial condition, and setups the OpenFoam case
directory.
-"""
-import numpy as np
-import sys
-import os
+""" # noqa: CPY001, D205, D404, INP001
+
import json
-import numpy as np
+import os
+import sys
+
import foam_file_processor as foam
+import numpy as np
from stl import mesh
-def create_building_geometry(width, depth, height, center):
- epsilon = 0.01*min(width, depth, height)
-
+def create_building_geometry(width, depth, height, center): # noqa: D103
+ epsilon = 0.01 * min(width, depth, height)
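+    # epsilon sinks the base vertices slightly below z = 0, so the building STL overlaps the ground plane rather than lying exactly on it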
+
# Define the 8 vertices of the building
- vertices = np.array([[-depth/2.0, -width/2.0, -epsilon],
- [+depth/2.0, -width/2.0, -epsilon],
- [+depth/2.0, +width/2.0, -epsilon],
- [-depth/2.0, +width/2.0, -epsilon],
- [-depth/2.0, -width/2.0, height],
- [+depth/2.0, -width/2.0, height],
- [+depth/2.0, +width/2.0, height],
- [-depth/2.0, +width/2.0, height]])
-
+ vertices = np.array(
+ [
+ [-depth / 2.0, -width / 2.0, -epsilon],
+ [+depth / 2.0, -width / 2.0, -epsilon],
+ [+depth / 2.0, +width / 2.0, -epsilon],
+ [-depth / 2.0, +width / 2.0, -epsilon],
+ [-depth / 2.0, -width / 2.0, height],
+ [+depth / 2.0, -width / 2.0, height],
+ [+depth / 2.0, +width / 2.0, height],
+ [-depth / 2.0, +width / 2.0, height],
+ ]
+ )
+
vertices += center
# Define the 12 triangles composing the rectangular building
- faces = np.array([\
- [0,3,1],
- [1,3,2],
- [0,4,7],
- [0,7,3],
- [4,5,6],
- [4,6,7],
- [5,1,2],
- [5,2,6],
- [2,3,6],
- [3,7,6],
- [0,1,5],
- [0,5,4]])
-
+ faces = np.array(
+ [
+ [0, 3, 1],
+ [1, 3, 2],
+ [0, 4, 7],
+ [0, 7, 3],
+ [4, 5, 6],
+ [4, 6, 7],
+ [5, 1, 2],
+ [5, 2, 6],
+ [2, 3, 6],
+ [3, 7, 6],
+ [0, 1, 5],
+ [0, 5, 4],
+ ]
+ )
+
# Create the mesh
bldg = mesh.Mesh(np.zeros(faces.shape[0], dtype=mesh.Mesh.dtype))
for i, f in enumerate(faces):
for j in range(3):
- bldg.vectors[i][j] = vertices[f[j],:]
-
+ bldg.vectors[i][j] = vertices[f[j], :]
+
return bldg
-def create_surroundings_geometry(main_bldg_width, main_bldg_depth, sur_bldg_width, sur_bldg_depth, sur_bldg_height,
- street_width_x, street_width_y, bound_radius, randomness=0.0):
-
+def create_surroundings_geometry( # noqa: D103
+ main_bldg_width,
+ main_bldg_depth,
+ sur_bldg_width,
+ sur_bldg_depth,
+ sur_bldg_height,
+ street_width_x,
+ street_width_y,
+ bound_radius,
+ randomness=0.0,
+):
plan_x = max(main_bldg_depth, sur_bldg_depth)
plan_y = max(main_bldg_width, sur_bldg_width)
-
- n_grid_x = int(2.0*bound_radius/(plan_x + street_width_x)) + 1
- n_grid_y = int(2.0*bound_radius/(plan_y + street_width_y)) + 1
+
+ n_grid_x = int(2.0 * bound_radius / (plan_x + street_width_x)) + 1
+ n_grid_y = int(2.0 * bound_radius / (plan_y + street_width_y)) + 1
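+    # Force an odd number of cells in each direction so the grid has a well-defined centre cell, which is skipped below (the main building is written separately)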
if (n_grid_x % 2) == 0:
- n_grid_x -=1
+ n_grid_x -= 1
if (n_grid_y % 2) == 0:
- n_grid_y -=1
-
+ n_grid_y -= 1
- mid_ix = int(n_grid_x/2)
- mid_iy = int(n_grid_y/2)
+ mid_ix = int(n_grid_x / 2)
+ mid_iy = int(n_grid_y / 2)
buildings = []
- min_h = 1.0 - randomness*0.95
- max_h = 1.0 + randomness*0.95
+ min_h = 1.0 - randomness * 0.95
+ max_h = 1.0 + randomness * 0.95
rand_f = np.random.uniform(min_h, max_h, (n_grid_x, n_grid_y))
- x_max = (street_width_x + plan_x)*n_grid_x - street_width_x
- y_max = (street_width_y + plan_y)*n_grid_y - street_width_y
-
+ x_max = (street_width_x + plan_x) * n_grid_x - street_width_x
+ y_max = (street_width_y + plan_y) * n_grid_y - street_width_y
+
# bound_radius = max(x_max, y_max)/2.0
for ix in range(n_grid_x):
@@ -88,57 +102,61 @@ def create_surroundings_geometry(main_bldg_width, main_bldg_depth, sur_bldg_widt
if ix == mid_ix and iy == mid_iy:
continue
- center_x = -x_max/2.0 + ix*street_width_x + plan_x*(ix + 0.5)
- center_y = -y_max/2.0 + iy*street_width_y + plan_y*(iy + 0.5)
+ center_x = -x_max / 2.0 + ix * street_width_x + plan_x * (ix + 0.5)
+ center_y = -y_max / 2.0 + iy * street_width_y + plan_y * (iy + 0.5)
# bldg_R = np.sqrt((abs(center_x) + sur_bldg_depth)**2.0 + (abs(center_y) + sur_bldg_width)**2.0)
- bldg_R = np.sqrt(center_x**2.0 + center_y**2.0)
+ bldg_R = np.sqrt(center_x**2.0 + center_y**2.0) # noqa: N806
- #Add the building if it's within bounding radius
+ # Add the building if it's within bounding radius
if bldg_R < bound_radius:
- bldg = create_building_geometry(sur_bldg_width, sur_bldg_depth, sur_bldg_height*rand_f[ix, iy],
- np.array([center_x, center_y, 0.0]))
+ bldg = create_building_geometry(
+ sur_bldg_width,
+ sur_bldg_depth,
+ sur_bldg_height * rand_f[ix, iy],
+ np.array([center_x, center_y, 0.0]),
+ )
buildings.append(bldg)
-
# table = create_turn_table(sur_bldg_width, sur_bldg_depth, bound_radius)
# copies.append(table)
return buildings
-# def create_turn_table(sur_bldg_width, sur_bldg_depth, bound_radius):
-
+
+# def create_turn_table(sur_bldg_width, sur_bldg_depth, bound_radius):
+
# table_radius = bound_radius + max(sur_bldg_width, sur_bldg_depth)/2.0
-
+
# epsilon = 0.001*min(sur_bldg_width, sur_bldg_depth)
# table_h = 0.01*bound_radius
# n = 36
# vertices = np.zeros((n*2 + 2, 3))
-
+
# # Define the sector as polygon vertices of around the turn table
-# for i in range(n):
+# for i in range(n):
# x = table_radius*np.cos(i*2.0*np.pi/n)
# y = table_radius*np.sin(i*2.0*np.pi/n)
-# vertices[i, 0] = x
-# vertices[i, 1] = y
+# vertices[i, 0] = x
+# vertices[i, 1] = y
# vertices[i, 2] = -2*epsilon - table_h
-
-# vertices[n + i, 0] = x
-# vertices[n + i, 1] = y
+
+# vertices[n + i, 0] = x
+# vertices[n + i, 1] = y
# vertices[n + i, 2] = 0
-# # bottom and top center
+# # bottom and top center
# vertices[2*n, 2] = -2*epsilon - table_h
# vertices[2*n + 1, 2] = 0
-# # Define triangles composing the turn table
+# # Define triangles composing the turn table
# faces = np.zeros((n*4, 3), dtype=int)
-
-# for i in range(n):
+
+# for i in range(n):
# #triangle 1
# faces[i, 0] = i
# faces[i, 1] = i + 1
@@ -158,399 +176,414 @@ def create_surroundings_geometry(main_bldg_width, main_bldg_depth, sur_bldg_widt
# faces[3*n + i, 0] = n + i
# faces[3*n + i, 1] = n + i + 1
# faces[3*n + i, 2] = 2*n + 1
-
+
# # Create the mesh
# table = mesh.Mesh(np.zeros(faces.shape[0], dtype=mesh.Mesh.dtype))
# for i, f in enumerate(faces):
# for j in range(3):
# table.vectors[i][j] = vertices[f[j],:]
-
+
# return table
+def write_main_building_stl_file(input_json_path, case_path): # noqa: D103
+ # Read JSON data
+ with open(input_json_path + '/SurroundedBuildingCFD.json') as json_file: # noqa: PLW1514, PTH123
+ json_data = json.load(json_file)
-def write_main_building_stl_file(input_json_path, case_path):
-
- #Read JSON data
- with open(input_json_path + "/SurroundedBuildingCFD.json") as json_file:
- json_data = json.load(json_file)
-
geom_data = json_data['GeometricData']
- #Else create the STL file
- scale = geom_data['geometricScale']
- length_unit = json_data['lengthUnit']
+    # Create the STL file
+ scale = geom_data['geometricScale']
+ length_unit = json_data['lengthUnit']
convert_to_meters = 1.0
- if length_unit=='m':
+ if length_unit == 'm':
convert_to_meters = 1.0
- elif length_unit=='cm':
+ elif length_unit == 'cm':
convert_to_meters = 0.01
- elif length_unit=='mm':
+ elif length_unit == 'mm':
convert_to_meters = 0.001
- elif length_unit=='ft':
+ elif length_unit == 'ft':
convert_to_meters = 0.3048
- elif length_unit=='in':
+ elif length_unit == 'in':
convert_to_meters = 0.0254
-
- #Convert from full-scale to model-scale
- B = convert_to_meters*geom_data['buildingWidth']/scale
- D = convert_to_meters*geom_data['buildingDepth']/scale
- H = convert_to_meters*geom_data['buildingHeight']/scale
-
+
+ # Convert from full-scale to model-scale
+ B = convert_to_meters * geom_data['buildingWidth'] / scale # noqa: N806
+ D = convert_to_meters * geom_data['buildingDepth'] / scale # noqa: N806
+ H = convert_to_meters * geom_data['buildingHeight'] / scale # noqa: N806
+
origin = np.array(geom_data['origin'])
- wind_dxn = geom_data['windDirection']
+ wind_dxn = geom_data['windDirection']
wind_dxn_rad = np.deg2rad(wind_dxn)
-
- bldg = create_building_geometry(B, D, H, origin)
-
- #Account for wind direction
+
+ bldg = create_building_geometry(B, D, H, origin)
+
+ # Account for wind direction
bldg.rotate([0.0, 0.0, 1.0], wind_dxn_rad)
-
+
# Write the mesh to file "building.stl"
- fmt = mesh.stl.Mode.ASCII # binary or ASCII format
+ fmt = mesh.stl.Mode.ASCII # binary or ASCII format
bldg.save(case_path + '/constant/geometry/building.stl', mode=fmt)
-def write_surrounding_buildings_stl_file(input_json_path, case_path):
-
- #Read JSON data
- with open(input_json_path + "/SurroundedBuildingCFD.json") as json_file:
- json_data = json.load(json_file)
-
+def write_surrounding_buildings_stl_file(input_json_path, case_path): # noqa: D103
+ # Read JSON data
+ with open(input_json_path + '/SurroundedBuildingCFD.json') as json_file: # noqa: PLW1514, PTH123
+ json_data = json.load(json_file)
+
geom_data = json_data['GeometricData']
sur_data = geom_data['surroundingBuildingsInformation']
- #Else create the STL file
- scale = geom_data['geometricScale']
- length_unit = json_data['lengthUnit']
+    # Create the STL file
+ scale = geom_data['geometricScale']
+ length_unit = json_data['lengthUnit']
convert_to_meters = 1.0
- if length_unit=='m':
+ if length_unit == 'm':
convert_to_meters = 1.0
- elif length_unit=='cm':
+ elif length_unit == 'cm':
convert_to_meters = 0.01
- elif length_unit=='mm':
+ elif length_unit == 'mm':
convert_to_meters = 0.001
- elif length_unit=='ft':
+ elif length_unit == 'ft':
convert_to_meters = 0.3048
- elif length_unit=='in':
+ elif length_unit == 'in':
convert_to_meters = 0.0254
-
- #Convert from full-scale to model-scale
- B = convert_to_meters*geom_data['buildingWidth']/scale
- D = convert_to_meters*geom_data['buildingDepth']/scale
- Sb = convert_to_meters*sur_data['surroundingBuildingsWidth']/scale
- Sd = convert_to_meters*sur_data['surroundingBuildingsDepth']/scale
- Sh = convert_to_meters*sur_data['surroundingBuildingsHeight']/scale
- Swx = convert_to_meters*sur_data['streetWidthX']/scale
- Swy = convert_to_meters*sur_data['streetWidthY']/scale
- Rb = convert_to_meters*sur_data['boundingRadius']/scale
-
- #Normalize 0 to 1
- rand = sur_data['randomness']/100.0
-
- origin = np.array(geom_data['origin'])
- wind_dxn = geom_data['windDirection']
- wind_dxn_rad = np.deg2rad(wind_dxn)
+ # Convert from full-scale to model-scale
+ B = convert_to_meters * geom_data['buildingWidth'] / scale # noqa: N806
+ D = convert_to_meters * geom_data['buildingDepth'] / scale # noqa: N806
+ Sb = convert_to_meters * sur_data['surroundingBuildingsWidth'] / scale # noqa: N806
+ Sd = convert_to_meters * sur_data['surroundingBuildingsDepth'] / scale # noqa: N806
+ Sh = convert_to_meters * sur_data['surroundingBuildingsHeight'] / scale # noqa: N806
+ Swx = convert_to_meters * sur_data['streetWidthX'] / scale # noqa: N806
+ Swy = convert_to_meters * sur_data['streetWidthY'] / scale # noqa: N806
+ Rb = convert_to_meters * sur_data['boundingRadius'] / scale # noqa: N806
+
+ # Normalize 0 to 1
+ rand = sur_data['randomness'] / 100.0
+
+ origin = np.array(geom_data['origin']) # noqa: F841
+ wind_dxn = geom_data['windDirection']
+
+ wind_dxn_rad = np.deg2rad(wind_dxn)
surroundings = create_surroundings_geometry(B, D, Sb, Sd, Sh, Swx, Swy, Rb, rand)
-
- #Merge the buildings together into one geometric data
+
+    # Merge the buildings together into one geometry
combined = mesh.Mesh(np.concatenate([copy.data for copy in surroundings]))
- #Account for wind direction
+ # Account for wind direction
combined.rotate([0.0, 0.0, 1.0], wind_dxn_rad)
-
+
# Write the mesh to file "surroundings.stl"
- fmt = mesh.stl.Mode.ASCII # binary or ASCII format
+ fmt = mesh.stl.Mode.ASCII # binary or ASCII format
combined.save(case_path + '/constant/geometry/surroundings.stl', mode=fmt)
bldg_count = 0
for sur in surroundings:
- sur.save(case_path + '/constant/geometry/surr_bldg{}.stl'.format(bldg_count), mode=fmt)
- bldg_count += 1
+ sur.save(
+ case_path + f'/constant/geometry/surr_bldg{bldg_count}.stl',
+ mode=fmt,
+ )
+ bldg_count += 1 # noqa: SIM113
return len(surroundings)
-def write_block_mesh_dict(input_json_path, template_dict_path, case_path):
- #Read JSON data
- with open(input_json_path + "/SurroundedBuildingCFD.json") as json_file:
- json_data = json.load(json_file)
-
+def write_block_mesh_dict(input_json_path, template_dict_path, case_path): # noqa: D103
+ # Read JSON data
+ with open(input_json_path + '/SurroundedBuildingCFD.json') as json_file: # noqa: PLW1514, PTH123
+ json_data = json.load(json_file)
+
# Returns JSON object as a dictionary
- mesh_data = json_data["blockMeshParameters"]
+ mesh_data = json_data['blockMeshParameters']
geom_data = json_data['GeometricData']
- boundary_data = json_data["boundaryConditions"]
+ boundary_data = json_data['boundaryConditions']
origin = np.array(geom_data['origin'])
- scale = geom_data['geometricScale']
-
- Lx = geom_data['domainLength']
- Ly = geom_data['domainWidth']
- Lz = geom_data['domainHeight']
- Lf = geom_data['fetchLength']
-
+ scale = geom_data['geometricScale'] # noqa: F841
+
+ Lx = geom_data['domainLength'] # noqa: N806
+ Ly = geom_data['domainWidth'] # noqa: N806
+ Lz = geom_data['domainHeight'] # noqa: N806
+ Lf = geom_data['fetchLength'] # noqa: N806
+
x_cells = mesh_data['xNumCells']
y_cells = mesh_data['yNumCells']
z_cells = mesh_data['zNumCells']
-
+
x_grading = mesh_data['xGrading']
y_grading = mesh_data['yGrading']
z_grading = mesh_data['zGrading']
-
- bc_map = {"slip": 'wall', "cyclic": 'cyclic', "noSlip": 'wall',
- "symmetry": 'symmetry', "empty": 'empty', "TInf": 'patch',
- "MeanABL": 'patch', "Uniform": 'patch', "zeroPressureOutlet": 'patch',
- "roughWallFunction": 'wall',"smoothWallFunction": 'wall'}
+ bc_map = {
+ 'slip': 'wall',
+ 'cyclic': 'cyclic',
+ 'noSlip': 'wall',
+ 'symmetry': 'symmetry',
+ 'empty': 'empty',
+ 'TInf': 'patch',
+ 'MeanABL': 'patch',
+ 'Uniform': 'patch',
+ 'zeroPressureOutlet': 'patch',
+ 'roughWallFunction': 'wall',
+ 'smoothWallFunction': 'wall',
+ }
inlet_type = bc_map[boundary_data['inletBoundaryCondition']]
outlet_type = bc_map[boundary_data['outletBoundaryCondition']]
- ground_type = bc_map[boundary_data['groundBoundaryCondition']]
+ ground_type = bc_map[boundary_data['groundBoundaryCondition']]
top_type = bc_map[boundary_data['topBoundaryCondition']]
front_type = bc_map[boundary_data['sidesBoundaryCondition']]
back_type = bc_map[boundary_data['sidesBoundaryCondition']]
length_unit = json_data['lengthUnit']
-
x_min = -Lf - origin[0]
- y_min = -Ly/2.0 - origin[1]
- z_min = 0.0 - origin[2]
+ y_min = -Ly / 2.0 - origin[1]
+ z_min = 0.0 - origin[2]
x_max = x_min + Lx
y_max = y_min + Ly
z_max = z_min + Lz
- #Open the template blockMeshDict (OpenFOAM file) for manipulation
- dict_file = open(template_dict_path + "/blockMeshDictTemplate", "r")
+ # Open the template blockMeshDict (OpenFOAM file) for manipulation
+ dict_file = open(template_dict_path + '/blockMeshDictTemplate') # noqa: PLW1514, PTH123, SIM115
- #Export to OpenFOAM probe format
+    # Read the template file contents
dict_lines = dict_file.readlines()
dict_file.close()
-
-
- dict_lines[17] = "\txMin\t\t{:.4f};\n".format(x_min)
- dict_lines[18] = "\tyMin\t\t{:.4f};\n".format(y_min)
- dict_lines[19] = "\tzMin\t\t{:.4f};\n".format(z_min)
- dict_lines[20] = "\txMax\t\t{:.4f};\n".format(x_max)
- dict_lines[21] = "\tyMax\t\t{:.4f};\n".format(y_max)
- dict_lines[22] = "\tzMax\t\t{:.4f};\n".format(z_max)
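+    # The domain extents are written at fixed line numbers of the blockMeshDict template (rather than by keyword search)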
+ dict_lines[17] = f'\txMin\t\t{x_min:.4f};\n'
+ dict_lines[18] = f'\tyMin\t\t{y_min:.4f};\n'
+ dict_lines[19] = f'\tzMin\t\t{z_min:.4f};\n'
+ dict_lines[20] = f'\txMax\t\t{x_max:.4f};\n'
+ dict_lines[21] = f'\tyMax\t\t{y_max:.4f};\n'
+ dict_lines[22] = f'\tzMax\t\t{z_max:.4f};\n'
- dict_lines[23] = "\txCells\t\t{:d};\n".format(x_cells)
- dict_lines[24] = "\tyCells\t\t{:d};\n".format(y_cells)
- dict_lines[25] = "\tzCells\t\t{:d};\n".format(z_cells)
+ dict_lines[23] = f'\txCells\t\t{x_cells:d};\n'
+ dict_lines[24] = f'\tyCells\t\t{y_cells:d};\n'
+ dict_lines[25] = f'\tzCells\t\t{z_cells:d};\n'
- dict_lines[26] = "\txGrading\t{:.4f};\n".format(x_grading)
- dict_lines[27] = "\tyGrading\t{:.4f};\n".format(y_grading)
- dict_lines[28] = "\tzGrading\t{:.4f};\n".format(z_grading)
+ dict_lines[26] = f'\txGrading\t{x_grading:.4f};\n'
+ dict_lines[27] = f'\tyGrading\t{y_grading:.4f};\n'
+ dict_lines[28] = f'\tzGrading\t{z_grading:.4f};\n'
convert_to_meters = 1.0
- if length_unit=='m':
+ if length_unit == 'm':
convert_to_meters = 1.0
- elif length_unit=='cm':
+ elif length_unit == 'cm':
convert_to_meters = 0.01
- elif length_unit=='mm':
+ elif length_unit == 'mm':
convert_to_meters = 0.001
- elif length_unit=='ft':
+ elif length_unit == 'ft':
convert_to_meters = 0.3048
- elif length_unit=='in':
+ elif length_unit == 'in':
convert_to_meters = 0.0254
- dict_lines[31] = "convertToMeters {:.4f};\n".format(convert_to_meters)
- dict_lines[61] = " type {};\n".format(inlet_type)
- dict_lines[70] = " type {};\n".format(outlet_type)
- dict_lines[79] = " type {};\n".format(ground_type)
- dict_lines[88] = " type {};\n".format(top_type)
- dict_lines[97] = " type {};\n".format(front_type)
- dict_lines[106] = " type {};\n".format(back_type)
-
-
- write_file_name = case_path + "/system/blockMeshDict"
-
- if os.path.exists(write_file_name):
- os.remove(write_file_name)
-
- output_file = open(write_file_name, "w+")
+ dict_lines[31] = f'convertToMeters {convert_to_meters:.4f};\n'
+ dict_lines[61] = f' type {inlet_type};\n'
+ dict_lines[70] = f' type {outlet_type};\n'
+ dict_lines[79] = f' type {ground_type};\n'
+ dict_lines[88] = f' type {top_type};\n'
+ dict_lines[97] = f' type {front_type};\n'
+ dict_lines[106] = f' type {back_type};\n'
+
+ write_file_name = case_path + '/system/blockMeshDict'
+
+ if os.path.exists(write_file_name): # noqa: PTH110
+ os.remove(write_file_name) # noqa: PTH107
+
+ output_file = open(write_file_name, 'w+') # noqa: PLW1514, PTH123, SIM115
for line in dict_lines:
output_file.write(line)
output_file.close()
+def write_snappy_hex_mesh_dict( # noqa: C901, D103
+ input_json_path,
+ template_dict_path,
+ case_path,
+ n_surr_bldgs,
+):
+ # Read JSON data
+ with open(input_json_path + '/SurroundedBuildingCFD.json') as json_file: # noqa: PLW1514, PTH123
+ json_data = json.load(json_file)
-def write_snappy_hex_mesh_dict(input_json_path, template_dict_path, case_path, n_surr_bldgs):
-
- #Read JSON data
- with open(input_json_path + "/SurroundedBuildingCFD.json") as json_file:
- json_data = json.load(json_file)
-
# Returns JSON object as a dictionary
- mesh_data = json_data["snappyHexMeshParameters"]
+ mesh_data = json_data['snappyHexMeshParameters']
add_surface_refinement = mesh_data['addSurfaceRefinements']
building_stl_name = mesh_data['buildingSTLName']
- surrounding_stl_name = mesh_data['surroundingsSTLName']
+ surrounding_stl_name = mesh_data['surroundingsSTLName'] # noqa: F841
add_edge_refinement = mesh_data['addEdgeRefinements']
surface_refinements = mesh_data['surfaceRefinements']
edge_refinements = mesh_data['edgeRefinements']
geom_data = json_data['GeometricData']
-
+
add_prism_layers = mesh_data['addPrismLayers']
number_of_prism_layers = mesh_data['numberOfPrismLayers']
prism_layer_expansion_ratio = mesh_data['prismLayerExpansionRatio']
final_prism_layer_thickness = mesh_data['finalPrismLayerThickness']
prism_layer_surface_name = mesh_data['prismLayerSurfaceName']
- prism_layer_relative_size = "on"
+ prism_layer_relative_size = 'on'
+ Lx = geom_data['domainLength'] # noqa: N806
+ Ly = geom_data['domainWidth'] # noqa: N806
+ Lz = geom_data['domainHeight'] # noqa: N806
+ Lf = geom_data['fetchLength'] # noqa: N806
- Lx = geom_data['domainLength']
- Ly = geom_data['domainWidth']
- Lz = geom_data['domainHeight']
- Lf = geom_data['fetchLength']
-
origin = np.array(geom_data['origin'])
-
+
num_cells_between_levels = mesh_data['numCellsBetweenLevels']
resolve_feature_angle = mesh_data['resolveFeatureAngle']
- num_processors = mesh_data['numProcessors']
-
+ num_processors = mesh_data['numProcessors'] # noqa: F841
+
refinement_boxes = mesh_data['refinementBoxes']
-
+
x_min = -Lf - origin[0]
- y_min = -Ly/2.0 - origin[1]
- z_min = 0.0 - origin[2]
+ y_min = -Ly / 2.0 - origin[1]
+ z_min = 0.0 - origin[2]
- x_max = x_min + Lx
+ x_max = x_min + Lx # noqa: F841
y_max = y_min + Ly
- z_max = z_min + Lz
-
- inside_point = [x_min + Lf/2.0, (y_min + y_max)/2.0, (z_min + z_max)/2.0]
+ z_max = z_min + Lz
+ inside_point = [x_min + Lf / 2.0, (y_min + y_max) / 2.0, (z_min + z_max) / 2.0]
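+    # A point guaranteed to lie inside the fluid region; snappyHexMesh keeps the mesh region containing this point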
- #Open the template blockMeshDict (OpenFOAM file) for manipulation
- dict_file = open(template_dict_path + "/snappyHexMeshDictTemplate", "r")
+ # Open the template blockMeshDict (OpenFOAM file) for manipulation
+ dict_file = open(template_dict_path + '/snappyHexMeshDictTemplate') # noqa: PLW1514, PTH123, SIM115
- #Export to OpenFOAM probe format
+    # Read the template file contents
dict_lines = dict_file.readlines()
dict_file.close()
-
-
- #Write 'addLayers' switch
- start_index = foam.find_keyword_line(dict_lines, "addLayers")
- dict_lines[start_index] = "addLayers\t{};\n".format("off")
-
- ###################### Edit Geometry Section ##############################
-
- #Add refinement box geometry
- start_index = foam.find_keyword_line(dict_lines, "geometry") + 2
- added_part = ""
- n_boxes = len(refinement_boxes)
+
+ # Write 'addLayers' switch
+ start_index = foam.find_keyword_line(dict_lines, 'addLayers')
+ dict_lines[start_index] = 'addLayers\t{};\n'.format('off')
+
+ # Edit Geometry Section ##############################
+
+ # Add refinement box geometry
+ start_index = foam.find_keyword_line(dict_lines, 'geometry') + 2
+ added_part = ''
+ n_boxes = len(refinement_boxes)
for i in range(n_boxes):
- added_part += " {}\n".format(refinement_boxes[i][0])
- added_part += " {\n"
- added_part += " type searchableBox;\n"
- added_part += " min ({:.4f} {:.4f} {:.4f});\n".format(refinement_boxes[i][2], refinement_boxes[i][3], refinement_boxes[i][4])
- added_part += " max ({:.4f} {:.4f} {:.4f});\n".format(refinement_boxes[i][5], refinement_boxes[i][6], refinement_boxes[i][7])
- added_part += " }\n"
+ added_part += f' {refinement_boxes[i][0]}\n'
+ added_part += ' {\n'
+ added_part += ' type searchableBox;\n'
+ added_part += f' min ({refinement_boxes[i][2]:.4f} {refinement_boxes[i][3]:.4f} {refinement_boxes[i][4]:.4f});\n'
+ added_part += f' max ({refinement_boxes[i][5]:.4f} {refinement_boxes[i][6]:.4f} {refinement_boxes[i][7]:.4f});\n'
+ added_part += ' }\n'
dict_lines.insert(start_index, added_part)
- #Add building and surrounding stl geometry
- start_index = foam.find_keyword_line(dict_lines, "geometry") + 2
- added_part = ""
- added_part += " {}\n".format(building_stl_name)
- added_part += " {\n"
- added_part += " type triSurfaceMesh;\n"
- added_part += " file \"{}.stl\";\n".format(building_stl_name)
- added_part += " }\n"
+ # Add building and surrounding stl geometry
+ start_index = foam.find_keyword_line(dict_lines, 'geometry') + 2
+ added_part = ''
+ added_part += f' {building_stl_name}\n'
+ added_part += ' {\n'
+ added_part += ' type triSurfaceMesh;\n'
+ added_part += f' file "{building_stl_name}.stl";\n'
+ added_part += ' }\n'
for i in range(n_surr_bldgs):
- added_part += " {}\n".format("surr_bldg{}".format(i))
- added_part += " {\n"
- added_part += " type triSurfaceMesh;\n"
- added_part += " file \"surr_bldg{}.stl\";\n".format(i)
- added_part += " }\n"
-
- dict_lines.insert(start_index, added_part)
-
- ################# Edit castellatedMeshControls Section ####################
-
- #Write 'nCellsBetweenLevels'
- start_index = foam.find_keyword_line(dict_lines, "nCellsBetweenLevels")
- dict_lines[start_index] = " nCellsBetweenLevels {:d};\n".format(num_cells_between_levels)
-
- #Write 'resolveFeatureAngle'
- start_index = foam.find_keyword_line(dict_lines, "resolveFeatureAngle")
- dict_lines[start_index] = " resolveFeatureAngle {:d};\n".format(resolve_feature_angle)
-
- #Write 'insidePoint'
- start_index = foam.find_keyword_line(dict_lines, "insidePoint")
- dict_lines[start_index] = " insidePoint ({:.4f} {:.4f} {:.4f});\n".format(inside_point[0], inside_point[1], inside_point[2])
-
- #For compatibility with OpenFOAM-9 and older
- start_index = foam.find_keyword_line(dict_lines, "locationInMesh")
- dict_lines[start_index] = " locationInMesh ({:.4f} {:.4f} {:.4f});\n".format(inside_point[0], inside_point[1], inside_point[2])
-
- #Write 'outsidePoint' on Frontera snappyHex will fail without this keyword
- start_index = foam.find_keyword_line(dict_lines, "outsidePoint")
- dict_lines[start_index] = " outsidePoint ({:.4e} {:.4e} {:.4e});\n".format(-1e-20, -1e-20, -1e-20)
-
-
- #Add refinement edge
- if add_edge_refinement:
- start_index = foam.find_keyword_line(dict_lines, "features") + 2
- added_part = ""
-
- for edge in edge_refinements:
- if edge["name"] == "surroundings":
+ added_part += ' {}\n'.format(f'surr_bldg{i}')
+ added_part += ' {\n'
+ added_part += ' type triSurfaceMesh;\n'
+ added_part += f' file "surr_bldg{i}.stl";\n'
+ added_part += ' }\n'
+
+ dict_lines.insert(start_index, added_part)
+
+ # Edit castellatedMeshControls Section ####################
+
+ # Write 'nCellsBetweenLevels'
+ start_index = foam.find_keyword_line(dict_lines, 'nCellsBetweenLevels')
+ dict_lines[start_index] = (
+ f' nCellsBetweenLevels {num_cells_between_levels:d};\n'
+ )
+
+ # Write 'resolveFeatureAngle'
+ start_index = foam.find_keyword_line(dict_lines, 'resolveFeatureAngle')
+ dict_lines[start_index] = f' resolveFeatureAngle {resolve_feature_angle:d};\n'
+
+ # Write 'insidePoint'
+ start_index = foam.find_keyword_line(dict_lines, 'insidePoint')
+ dict_lines[start_index] = (
+ f' insidePoint ({inside_point[0]:.4f} {inside_point[1]:.4f} {inside_point[2]:.4f});\n'
+ )
+
+ # For compatibility with OpenFOAM-9 and older
+ start_index = foam.find_keyword_line(dict_lines, 'locationInMesh')
+ dict_lines[start_index] = (
+ f' locationInMesh ({inside_point[0]:.4f} {inside_point[1]:.4f} {inside_point[2]:.4f});\n'
+ )
+
+    # Write 'outsidePoint'; on Frontera, snappyHexMesh fails without this keyword
+ start_index = foam.find_keyword_line(dict_lines, 'outsidePoint')
+ dict_lines[start_index] = (
+ f' outsidePoint ({-1e-20:.4e} {-1e-20:.4e} {-1e-20:.4e});\n'
+ )
+
+ # Add refinement edge
+ if add_edge_refinement:
+ start_index = foam.find_keyword_line(dict_lines, 'features') + 2
+ added_part = ''
+
+ for edge in edge_refinements:
+ if edge['name'] == 'surroundings':
for i in range(n_surr_bldgs):
- added_part += " {\n"
- added_part += " file \"surr_bldg{}.eMesh\";\n".format(i)
- added_part += " level {};\n".format(edge["level"])
- added_part += " }\n"
+ added_part += ' {\n'
+ added_part += f' file "surr_bldg{i}.eMesh";\n'
+ added_part += ' level {};\n'.format(edge['level'])
+ added_part += ' }\n'
else:
- added_part += " {\n"
- added_part += " file \"{}.eMesh\";\n".format(edge["name"])
- added_part += " level {};\n".format(edge["level"])
- added_part += " }\n"
-
+ added_part += ' {\n'
+ added_part += ' file "{}.eMesh";\n'.format(edge['name'])
+ added_part += ' level {};\n'.format(edge['level'])
+ added_part += ' }\n'
+
dict_lines.insert(start_index, added_part)
-
- #Add refinement surface
- if add_surface_refinement:
- start_index = foam.find_keyword_line(dict_lines, "refinementSurfaces") + 2
- added_part = ""
+
+ # Add refinement surface
+ if add_surface_refinement:
+ start_index = foam.find_keyword_line(dict_lines, 'refinementSurfaces') + 2
+ added_part = ''
for surf in surface_refinements:
- if surf["name"] == "surroundings":
+ if surf['name'] == 'surroundings':
for i in range(n_surr_bldgs):
- added_part += " surr_bldg{}\n".format(i)
- added_part += " {\n"
- added_part += " level ({} {});\n".format(surf["minLevel"], surf["maxLevel"])
- added_part += " patchInfo\n"
- added_part += " {\n"
- added_part += " type wall;\n"
- added_part += " }\n"
- added_part += " }\n"
+ added_part += f' surr_bldg{i}\n'
+ added_part += ' {\n'
+ added_part += ' level ({} {});\n'.format(
+ surf['minLevel'], surf['maxLevel']
+ )
+ added_part += ' patchInfo\n'
+ added_part += ' {\n'
+ added_part += ' type wall;\n'
+ added_part += ' }\n'
+ added_part += ' }\n'
else:
- added_part += " {}\n".format(surf["name"])
- added_part += " {\n"
- added_part += " level ({} {});\n".format(surf["minLevel"], surf["maxLevel"])
- added_part += " patchInfo\n"
- added_part += " {\n"
- added_part += " type wall;\n"
- added_part += " }\n"
- added_part += " }\n"
-
+ added_part += ' {}\n'.format(surf['name'])
+ added_part += ' {\n'
+ added_part += ' level ({} {});\n'.format(
+ surf['minLevel'], surf['maxLevel']
+ )
+ added_part += ' patchInfo\n'
+ added_part += ' {\n'
+ added_part += ' type wall;\n'
+ added_part += ' }\n'
+ added_part += ' }\n'
+
dict_lines.insert(start_index, added_part)
-
+
# #Add surface refinement around the building as a refinement region
# if surface_refinements[-1]["minLevel"] > refinement_boxes[-1][1]:
# added_part = ""
@@ -559,808 +592,784 @@ def write_snappy_hex_mesh_dict(input_json_path, template_dict_path, case_path, n
# added_part += " mode distance;\n"
# added_part += " levels (({:.4f} {}));\n".format(surface_refinement_distance, refinement_boxes[-1][1] + 1)
# added_part += " }\n"
-
- # start_index = foam.find_keyword_line(dict_lines, "refinementRegions") + 2
+
+ # start_index = foam.find_keyword_line(dict_lines, "refinementRegions") + 2
# dict_lines.insert(start_index, added_part)
- #Add box refinements
- added_part = ""
+ # Add box refinements
+ added_part = ''
for i in range(n_boxes):
- added_part += " {}\n".format(refinement_boxes[i][0])
- added_part += " {\n"
- added_part += " mode inside;\n"
- added_part += " level {};\n".format(refinement_boxes[i][1])
- added_part += " }\n"
-
- start_index = foam.find_keyword_line(dict_lines, "refinementRegions") + 2
+ added_part += f' {refinement_boxes[i][0]}\n'
+ added_part += ' {\n'
+ added_part += ' mode inside;\n'
+ added_part += f' level {refinement_boxes[i][1]};\n'
+ added_part += ' }\n'
+
+ start_index = foam.find_keyword_line(dict_lines, 'refinementRegions') + 2
dict_lines.insert(start_index, added_part)
-
- ####################### Edit PrismLayer Section ##########################
+ # Edit PrismLayer Section ##########################
if add_prism_layers:
- #Add surface layers (prism layers)
- added_part = ""
- added_part += " \"{}\"\n".format(prism_layer_surface_name)
- added_part += " {\n"
- added_part += " nSurfaceLayers {};\n".format(number_of_prism_layers)
- added_part += " }\n"
-
- start_index = foam.find_keyword_line(dict_lines, "layers") + 2
+ # Add surface layers (prism layers)
+ added_part = ''
+ added_part += f' "{prism_layer_surface_name}"\n'
+ added_part += ' {\n'
+ added_part += f' nSurfaceLayers {number_of_prism_layers};\n'
+ added_part += ' }\n'
+
+ start_index = foam.find_keyword_line(dict_lines, 'layers') + 2
dict_lines.insert(start_index, added_part)
- #Write 'relativeSizes'
- start_index = foam.find_keyword_line(dict_lines, "relativeSizes")
- dict_lines[start_index] = " relativeSizes {};\n".format(prism_layer_relative_size)
-
- #Write 'expansionRatio'
- start_index = foam.find_keyword_line(dict_lines, "expansionRatio")
- dict_lines[start_index] = " expansionRatio {:.4f};\n".format(prism_layer_expansion_ratio)
-
- #Write 'finalLayerThickness'
- start_index = foam.find_keyword_line(dict_lines, "finalLayerThickness")
- dict_lines[start_index] = " finalLayerThickness {:.4f};\n".format(final_prism_layer_thickness)
-
-
- #Write edited dict to file
- write_file_name = case_path + "/system/snappyHexMeshDict"
-
- if os.path.exists(write_file_name):
- os.remove(write_file_name)
-
- output_file = open(write_file_name, "w+")
+ # Write 'relativeSizes'
+ start_index = foam.find_keyword_line(dict_lines, 'relativeSizes')
+ dict_lines[start_index] = f' relativeSizes {prism_layer_relative_size};\n'
+
+ # Write 'expansionRatio'
+ start_index = foam.find_keyword_line(dict_lines, 'expansionRatio')
+ dict_lines[start_index] = (
+ f' expansionRatio {prism_layer_expansion_ratio:.4f};\n'
+ )
+
+ # Write 'finalLayerThickness'
+ start_index = foam.find_keyword_line(dict_lines, 'finalLayerThickness')
+ dict_lines[start_index] = (
+ f' finalLayerThickness {final_prism_layer_thickness:.4f};\n'
+ )
+
+ # Write edited dict to file
+ write_file_name = case_path + '/system/snappyHexMeshDict'
+
+ if os.path.exists(write_file_name): # noqa: PTH110
+ os.remove(write_file_name) # noqa: PTH107
+
+ output_file = open(write_file_name, 'w+') # noqa: PLW1514, PTH123, SIM115
for line in dict_lines:
output_file.write(line)
output_file.close()
-def write_surfaceFeaturesDict_file(input_json_path, template_dict_path, case_path, n_surr_bldgs):
-
- #Read JSON data
- with open(input_json_path + "/SurroundedBuildingCFD.json") as json_file:
- json_data = json.load(json_file)
-
- # Returns JSON object as a dictionary
- domain_data = json_data["snappyHexMeshParameters"]
- building_stl_name = domain_data['buildingSTLName']
- surroundings_stl_name = domain_data['surroundingsSTLName']
- #Open the template blockMeshDict (OpenFOAM file) for manipulation
- dict_file = open(template_dict_path + "/surfaceFeaturesDictTemplate", "r")
+def write_surfaceFeaturesDict_file( # noqa: N802, D103
+ input_json_path,
+ template_dict_path,
+ case_path,
+ n_surr_bldgs,
+):
+ # Read JSON data
+ with open(input_json_path + '/SurroundedBuildingCFD.json') as json_file: # noqa: PLW1514, PTH123
+ json_data = json.load(json_file)
+
+ # Returns JSON object as a dictionary
+ domain_data = json_data['snappyHexMeshParameters']
+ building_stl_name = domain_data['buildingSTLName']
+ surroundings_stl_name = domain_data['surroundingsSTLName'] # noqa: F841
+
+ # Open the template blockMeshDict (OpenFOAM file) for manipulation
+ dict_file = open(template_dict_path + '/surfaceFeaturesDictTemplate') # noqa: PLW1514, PTH123, SIM115
- #Export to OpenFOAM probe format
- dict_lines = dict_file.readlines()
- dict_file.close()
-
- #Write main building and surrounding buildings surface names
- start_index = foam.find_keyword_line(dict_lines, "surfaces")
+    # Read the template file content
+ dict_lines = dict_file.readlines()
+ dict_file.close()
- added_part = "surfaces (\"{}.stl\"".format(building_stl_name)
+ # Write main building and surrounding buildings surface names
+ start_index = foam.find_keyword_line(dict_lines, 'surfaces')
- for i in range(n_surr_bldgs):
- added_part += " \"surr_bldg{}.stl\"".format(i)
+ added_part = f'surfaces ("{building_stl_name}.stl"'
- added_part += ");\n".format(building_stl_name)
+ for i in range(n_surr_bldgs):
+ added_part += f' "surr_bldg{i}.stl"'
- dict_lines[start_index] = added_part
+    added_part += ');\n'
+ dict_lines[start_index] = added_part
- #Write edited dict to file
- write_file_name = case_path + "/system/surfaceFeaturesDict"
-
- if os.path.exists(write_file_name):
- os.remove(write_file_name)
-
- output_file = open(write_file_name, "w+")
- for line in dict_lines:
- output_file.write(line)
- output_file.close()
+ # Write edited dict to file
+ write_file_name = case_path + '/system/surfaceFeaturesDict'
+
+ if os.path.exists(write_file_name): # noqa: PTH110
+ os.remove(write_file_name) # noqa: PTH107
+
+ output_file = open(write_file_name, 'w+') # noqa: PLW1514, PTH123, SIM115
+ for line in dict_lines:
+ output_file.write(line)
+ output_file.close()
def write_boundary_data_files(input_json_path, case_path):
- """
- This functions writes wind profile files in "constant/boundaryData/inlet"
- if TInf options are used for the simulation.
- """
- #Read JSON data
- with open(input_json_path + "/SurroundedBuildingCFD.json") as json_file:
- json_data = json.load(json_file)
+ """This functions writes wind profile files in "constant/boundaryData/inlet"
+ if TInf options are used for the simulation.
+ """ # noqa: D205, D401, D404
+ # Read JSON data
+ with open(input_json_path + '/SurroundedBuildingCFD.json') as json_file: # noqa: PLW1514, PTH123
+ json_data = json.load(json_file)
# Returns JSON object as a dictionary
- boundary_data = json_data["boundaryConditions"]
+ boundary_data = json_data['boundaryConditions']
geom_data = json_data['GeometricData']
- wind_profiles = np.array(boundary_data["inflowProperties"]['windProfiles'])
+ wind_profiles = np.array(boundary_data['inflowProperties']['windProfiles'])
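+    # Expected column layout (as consumed below): 0 = height z, 1 = mean speed U,
+    # 2-7 = Reynolds-stress components (R), 8-16 = length-scale components (L)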
- bd_path = case_path + "/constant/boundaryData/inlet/"
+ bd_path = case_path + '/constant/boundaryData/inlet/'
- #Write points file
+ # Write points file
n_pts = np.shape(wind_profiles)[0]
- points = np.zeros((n_pts, 3))
-
+ points = np.zeros((n_pts, 3))
origin = np.array(geom_data['origin'])
-
- Ly = geom_data['domainWidth']
- Lf = geom_data['fetchLength']
-
+
+ Ly = geom_data['domainWidth'] # noqa: N806
+ Lf = geom_data['fetchLength'] # noqa: N806
+
x_min = -Lf - origin[0]
- y_min = -Ly/2.0 - origin[1]
+ y_min = -Ly / 2.0 - origin[1]
y_max = y_min + Ly
- points[:,0] = x_min
- points[:,1] = (y_min + y_max)/2.0
- points[:,2] = wind_profiles[:, 0]
+ points[:, 0] = x_min
+ points[:, 1] = (y_min + y_max) / 2.0
+ points[:, 2] = wind_profiles[:, 0]
- #Shift the last element of the y coordinate
- #a bit to make planer interpolation easier
+ # Shift the last element of the y coordinate
+    # a bit to make planar interpolation easier
points[-1:, 1] = y_max
- foam.write_foam_field(points, bd_path + "points")
+ foam.write_foam_field(points, bd_path + 'points')
- #Write wind speed file as a scalar field
- foam.write_scalar_field(wind_profiles[:, 1], bd_path + "U")
+ # Write wind speed file as a scalar field
+ foam.write_scalar_field(wind_profiles[:, 1], bd_path + 'U')
- #Write Reynolds stress profile (6 columns -> it's a symmetric tensor field)
- foam.write_foam_field(wind_profiles[:, 2:8], bd_path + "R")
+ # Write Reynolds stress profile (6 columns -> it's a symmetric tensor field)
+ foam.write_foam_field(wind_profiles[:, 2:8], bd_path + 'R')
- #Write length scale file (8 columns -> it's a tensor field)
- foam.write_foam_field(wind_profiles[:, 8:17], bd_path + "L")
+    # Write length scale file (9 columns -> it's a tensor field)
+ foam.write_foam_field(wind_profiles[:, 8:17], bd_path + 'L')
-def write_U_file(input_json_path, template_dict_path, case_path):
-
- #Read JSON data
- with open(input_json_path + "/SurroundedBuildingCFD.json") as json_file:
- json_data = json.load(json_file)
+def write_U_file(input_json_path, template_dict_path, case_path): # noqa: N802, D103
+ # Read JSON data
+ with open(input_json_path + '/SurroundedBuildingCFD.json') as json_file: # noqa: PLW1514, PTH123
+ json_data = json.load(json_file)
# Returns JSON object as a dictionary
- boundary_data = json_data["boundaryConditions"]
- wind_data = json_data["windCharacteristics"]
-
-
- inlet_BC_type = boundary_data['inletBoundaryCondition']
- top_BC_type = boundary_data['topBoundaryCondition']
- sides_BC_type = boundary_data['sidesBoundaryCondition']
-
+ boundary_data = json_data['boundaryConditions']
+ wind_data = json_data['windCharacteristics']
+
+ inlet_BC_type = boundary_data['inletBoundaryCondition'] # noqa: N806
+ top_BC_type = boundary_data['topBoundaryCondition'] # noqa: N806
+ sides_BC_type = boundary_data['sidesBoundaryCondition'] # noqa: N806
+
wind_speed = wind_data['referenceWindSpeed']
building_height = wind_data['referenceHeight']
roughness_length = wind_data['aerodynamicRoughnessLength']
-
- #Open the template file (OpenFOAM file) for manipulation
- dict_file = open(template_dict_path + "/UFileTemplate", "r")
+ # Open the template file (OpenFOAM file) for manipulation
+ dict_file = open(template_dict_path + '/UFileTemplate') # noqa: PLW1514, PTH123, SIM115
dict_lines = dict_file.readlines()
dict_file.close()
-
- ##################### Internal Field #########################
- #Initialize the internal fields frow a lower velocity to avoid Courant number
- #instability when the solver starts. Now %10 of roof-height wind speed is set
- start_index = foam.find_keyword_line(dict_lines, "internalField")
+
+ # Internal Field #########################
+    # Initialize the internal field with a lower velocity to avoid Courant number
+    # instability when the solver starts; here it is simply set to zero below
+ start_index = foam.find_keyword_line(dict_lines, 'internalField')
# dict_lines[start_index] = "internalField uniform ({:.4f} 0 0);\n".format(1.0*wind_speed)
- #Set the internal field to zero to make it easy for the solver to start
- dict_lines[start_index] = "internalField uniform (0 0 0);\n"
-
-
- ###################### Inlet BC ##############################
- #Write uniform
- start_index = foam.find_keyword_line(dict_lines, "inlet") + 2
-
- if inlet_BC_type == "Uniform":
- added_part = ""
- added_part += "\t type \t fixedValue;\n"
- added_part += "\t value \t uniform ({:.4f} 0 0);\n".format(wind_speed)
-
- if inlet_BC_type == "MeanABL":
- added_part = ""
- added_part += "\t type \t atmBoundaryLayerInletVelocity;\n"
- added_part += "\t Uref \t {:.4f};\n".format(wind_speed)
- added_part += "\t Zref \t {:.4f};\n".format(building_height)
- added_part += "\t zDir \t (0.0 0.0 1.0);\n"
- added_part += "\t flowDir \t (1.0 0.0 0.0);\n"
- added_part += "\t z0 uniform \t {:.4e};\n".format(roughness_length)
- added_part += "\t zGround \t uniform 0.0;\n"
-
- if inlet_BC_type == "TInf":
- added_part = ""
- added_part += "\t type \t turbulentDFMInlet;\n"
- added_part += "\t filterType \t exponential;\n"
- added_part += "\t filterFactor \t {};\n".format(4)
- added_part += "\t value \t uniform ({:.4f} 0 0);\n".format(wind_speed)
- added_part += "\t periodicInY \t {};\n".format("true")
- added_part += "\t periodicInZ \t {};\n".format("false")
- added_part += "\t constMeanU \t {};\n".format("true")
- added_part += "\t Uref \t {:.4f};\n".format(wind_speed)
+ # Set the internal field to zero to make it easy for the solver to start
+ dict_lines[start_index] = 'internalField uniform (0 0 0);\n'
+
+ # Inlet BC ##############################
+ # Write uniform
+ start_index = foam.find_keyword_line(dict_lines, 'inlet') + 2
+
+ if inlet_BC_type == 'Uniform':
+ added_part = ''
+ added_part += '\t type \t fixedValue;\n'
+ added_part += f'\t value \t uniform ({wind_speed:.4f} 0 0);\n'
+
+ if inlet_BC_type == 'MeanABL':
+ added_part = ''
+ added_part += '\t type \t atmBoundaryLayerInletVelocity;\n'
+ added_part += f'\t Uref \t {wind_speed:.4f};\n'
+ added_part += f'\t Zref \t {building_height:.4f};\n'
+ added_part += '\t zDir \t (0.0 0.0 1.0);\n'
+ added_part += '\t flowDir \t (1.0 0.0 0.0);\n'
+ added_part += f'\t z0 uniform \t {roughness_length:.4e};\n'
+ added_part += '\t zGround \t uniform 0.0;\n'
+
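+    # 'TInf' uses the digital filter method synthetic turbulence generator
+    # (turbulentDFMInlet), which is expected to read the profiles written to
+    # constant/boundaryData/inlet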
+ if inlet_BC_type == 'TInf':
+ added_part = ''
+ added_part += '\t type \t turbulentDFMInlet;\n'
+ added_part += '\t filterType \t exponential;\n'
+        added_part += '\t filterFactor \t 4;\n'
+        added_part += f'\t value \t uniform ({wind_speed:.4f} 0 0);\n'
+        added_part += '\t periodicInY \t true;\n'
+        added_part += '\t periodicInZ \t false;\n'
+        added_part += '\t constMeanU \t true;\n'
+ added_part += f'\t Uref \t {wind_speed:.4f};\n'
dict_lines.insert(start_index, added_part)
- ###################### Outlet BC ##############################
-
- start_index = foam.find_keyword_line(dict_lines, "outlet") + 2
- added_part = ""
- added_part += "\t type \t inletOutlet;\n"
- added_part += "\t inletValue \t uniform (0 0 0);\n"
+ # Outlet BC ##############################
+
+ start_index = foam.find_keyword_line(dict_lines, 'outlet') + 2
+ added_part = ''
+ added_part += '\t type \t inletOutlet;\n'
+ added_part += '\t inletValue \t uniform (0 0 0);\n'
# added_part += "\t value \t uniform ({:.4f} 0 0);\n".format(wind_speed)
- added_part += "\t value \t uniform (0 0 0);\n"
-
- # added_part += "\t type zeroGradient;\n"
+ added_part += '\t value \t uniform (0 0 0);\n'
+ # added_part += "\t type zeroGradient;\n"
dict_lines.insert(start_index, added_part)
-
- ###################### Ground BC ##############################
-
- start_index = foam.find_keyword_line(dict_lines, "ground") + 2
- added_part = ""
- added_part += "\t type \t uniformFixedValue;\n"
- added_part += "\t value \t uniform (0 0 0);\n"
- added_part += "\t uniformValue \t constant (0 0 0);\n"
-
+
+ # Ground BC ##############################
+
+ start_index = foam.find_keyword_line(dict_lines, 'ground') + 2
+ added_part = ''
+ added_part += '\t type \t uniformFixedValue;\n'
+ added_part += '\t value \t uniform (0 0 0);\n'
+ added_part += '\t uniformValue \t constant (0 0 0);\n'
+
dict_lines.insert(start_index, added_part)
-
-
- ###################### Top BC ##############################
-
- start_index = foam.find_keyword_line(dict_lines, "top") + 2
- added_part = ""
- added_part += "\t type {};\n".format(top_BC_type)
-
+
+ # Top BC ##############################
+
+ start_index = foam.find_keyword_line(dict_lines, 'top') + 2
+ added_part = ''
+ added_part += f'\t type {top_BC_type};\n'
+
dict_lines.insert(start_index, added_part)
-
- ###################### Front BC ##############################
-
- start_index = foam.find_keyword_line(dict_lines, "front") + 2
- added_part = ""
- added_part += "\t type \t {};\n".format(sides_BC_type)
-
+
+ # Front BC ##############################
+
+ start_index = foam.find_keyword_line(dict_lines, 'front') + 2
+ added_part = ''
+ added_part += f'\t type \t {sides_BC_type};\n'
+
dict_lines.insert(start_index, added_part)
-
- ###################### Back BC ##############################
-
- start_index = foam.find_keyword_line(dict_lines, "back") + 2
- added_part = ""
- added_part += "\t type {};\n".format(sides_BC_type)
-
+
+ # Back BC ##############################
+
+ start_index = foam.find_keyword_line(dict_lines, 'back') + 2
+ added_part = ''
+ added_part += f'\t type {sides_BC_type};\n'
+
dict_lines.insert(start_index, added_part)
-
- #Write edited dict to file
- write_file_name = case_path + "/0/U"
-
- if os.path.exists(write_file_name):
- os.remove(write_file_name)
-
- output_file = open(write_file_name, "w+")
+ # Write edited dict to file
+ write_file_name = case_path + '/0/U'
+
+ if os.path.exists(write_file_name): # noqa: PTH110
+ os.remove(write_file_name) # noqa: PTH107
+
+ output_file = open(write_file_name, 'w+') # noqa: PLW1514, PTH123, SIM115
for line in dict_lines:
output_file.write(line)
output_file.close()
-
-def write_p_file(input_json_path, template_dict_path, case_path):
- #Read JSON data
- with open(input_json_path + "/SurroundedBuildingCFD.json") as json_file:
- json_data = json.load(json_file)
+def write_p_file(input_json_path, template_dict_path, case_path): # noqa: D103
+ # Read JSON data
+ with open(input_json_path + '/SurroundedBuildingCFD.json') as json_file: # noqa: PLW1514, PTH123
+ json_data = json.load(json_file)
# Returns JSON object as a dictionary
- boundary_data = json_data["boundaryConditions"]
-
- sides_BC_type = boundary_data['sidesBoundaryCondition']
- top_BC_type = boundary_data['topBoundaryCondition']
+ boundary_data = json_data['boundaryConditions']
+ sides_BC_type = boundary_data['sidesBoundaryCondition'] # noqa: N806
+ top_BC_type = boundary_data['topBoundaryCondition'] # noqa: N806
- #Open the template file (OpenFOAM file) for manipulation
- dict_file = open(template_dict_path + "/pFileTemplate", "r")
+ # Open the template file (OpenFOAM file) for manipulation
+ dict_file = open(template_dict_path + '/pFileTemplate') # noqa: PLW1514, PTH123, SIM115
dict_lines = dict_file.readlines()
dict_file.close()
-
-
- #BC and initial condition
- p0 = 0.0;
+ # BC and initial condition
+ p0 = 0.0
+ # Internal Field #########################
- ##################### Internal Field #########################
-
- start_index = foam.find_keyword_line(dict_lines, "internalField")
- dict_lines[start_index] = "internalField uniform {:.4f};\n".format(p0)
+ start_index = foam.find_keyword_line(dict_lines, 'internalField')
+ dict_lines[start_index] = f'internalField uniform {p0:.4f};\n'
+ # Inlet BC ##############################
+ # Write uniform
+ start_index = foam.find_keyword_line(dict_lines, 'inlet') + 2
+ added_part = ''
+ added_part += '\t type \t zeroGradient;\n'
- ###################### Inlet BC ##############################
- #Write uniform
- start_index = foam.find_keyword_line(dict_lines, "inlet") + 2
- added_part = ""
- added_part += "\t type \t zeroGradient;\n"
-
dict_lines.insert(start_index, added_part)
- ###################### Outlet BC ##############################
-
- start_index = foam.find_keyword_line(dict_lines, "outlet") + 2
- added_part = ""
- added_part += "\t type \t uniformFixedValue;\n"
- added_part += "\t uniformValue \t constant {:.4f};\n".format(p0)
-
+ # Outlet BC ##############################
+
+ start_index = foam.find_keyword_line(dict_lines, 'outlet') + 2
+ added_part = ''
+ added_part += '\t type \t uniformFixedValue;\n'
+ added_part += f'\t uniformValue \t constant {p0:.4f};\n'
+
dict_lines.insert(start_index, added_part)
-
- ###################### Ground BC ##############################
-
- start_index = foam.find_keyword_line(dict_lines, "ground") + 2
- added_part = ""
- added_part += "\t type \t zeroGradient;\n"
-
+
+ # Ground BC ##############################
+
+ start_index = foam.find_keyword_line(dict_lines, 'ground') + 2
+ added_part = ''
+ added_part += '\t type \t zeroGradient;\n'
+
dict_lines.insert(start_index, added_part)
-
-
- ###################### Top BC ##############################
-
- start_index = foam.find_keyword_line(dict_lines, "top") + 2
- added_part = ""
- added_part += "\t type \t {};\n".format(top_BC_type)
-
+
+ # Top BC ##############################
+
+ start_index = foam.find_keyword_line(dict_lines, 'top') + 2
+ added_part = ''
+ added_part += f'\t type \t {top_BC_type};\n'
+
dict_lines.insert(start_index, added_part)
-
- ###################### Front BC ##############################
-
- start_index = foam.find_keyword_line(dict_lines, "front") + 2
- added_part = ""
- added_part += "\t type \t {};\n".format(sides_BC_type)
-
+
+ # Front BC ##############################
+
+ start_index = foam.find_keyword_line(dict_lines, 'front') + 2
+ added_part = ''
+ added_part += f'\t type \t {sides_BC_type};\n'
+
dict_lines.insert(start_index, added_part)
-
- ###################### Back BC ##############################
-
- start_index = foam.find_keyword_line(dict_lines, "back") + 2
- added_part = ""
- added_part += "\t type \t {};\n".format(sides_BC_type)
-
+
+ # Back BC ##############################
+
+ start_index = foam.find_keyword_line(dict_lines, 'back') + 2
+ added_part = ''
+ added_part += f'\t type \t {sides_BC_type};\n'
+
dict_lines.insert(start_index, added_part)
-
-
- #Write edited dict to file
- write_file_name = case_path + "/0/p"
-
- if os.path.exists(write_file_name):
- os.remove(write_file_name)
-
- output_file = open(write_file_name, "w+")
+
+ # Write edited dict to file
+ write_file_name = case_path + '/0/p'
+
+ if os.path.exists(write_file_name): # noqa: PTH110
+ os.remove(write_file_name) # noqa: PTH107
+
+ output_file = open(write_file_name, 'w+') # noqa: PLW1514, PTH123, SIM115
for line in dict_lines:
output_file.write(line)
output_file.close()
-
-def write_nut_file(input_json_path, template_dict_path, case_path):
- #Read JSON data
- with open(input_json_path + "/SurroundedBuildingCFD.json") as json_file:
- json_data = json.load(json_file)
+
+def write_nut_file(input_json_path, template_dict_path, case_path): # noqa: D103
+ # Read JSON data
+ with open(input_json_path + '/SurroundedBuildingCFD.json') as json_file: # noqa: PLW1514, PTH123
+ json_data = json.load(json_file)
# Returns JSON object as a dictionary
- boundary_data = json_data["boundaryConditions"]
- wind_data = json_data["windCharacteristics"]
-
- sides_BC_type = boundary_data['sidesBoundaryCondition']
- top_BC_type = boundary_data['topBoundaryCondition']
- ground_BC_type = boundary_data['groundBoundaryCondition']
+ boundary_data = json_data['boundaryConditions']
+ wind_data = json_data['windCharacteristics']
+ sides_BC_type = boundary_data['sidesBoundaryCondition'] # noqa: N806
+ top_BC_type = boundary_data['topBoundaryCondition'] # noqa: N806
+ ground_BC_type = boundary_data['groundBoundaryCondition'] # noqa: N806
# wind_speed = wind_data['roofHeightWindSpeed']
# building_height = wind_data['buildingHeight']
roughness_length = wind_data['aerodynamicRoughnessLength']
-
- #Open the template file (OpenFOAM file) for manipulation
- dict_file = open(template_dict_path + "/nutFileTemplate", "r")
+
+ # Open the template file (OpenFOAM file) for manipulation
+ dict_file = open(template_dict_path + '/nutFileTemplate') # noqa: PLW1514, PTH123, SIM115
dict_lines = dict_file.readlines()
dict_file.close()
-
-
- #BC and initial condition
- nut0 = 0.0
-
- ##################### Internal Field #########################
-
- start_index = foam.find_keyword_line(dict_lines, "internalField")
- dict_lines[start_index] = "internalField uniform {:.4f};\n".format(nut0)
-
-
- ###################### Inlet BC ##############################
- #Write uniform
- start_index = foam.find_keyword_line(dict_lines, "inlet") + 2
- added_part = ""
- added_part += "\t type \t zeroGradient;\n"
-
+
+ # BC and initial condition
+ nut0 = 0.0
+
+ # Internal Field #########################
+
+ start_index = foam.find_keyword_line(dict_lines, 'internalField')
+ dict_lines[start_index] = f'internalField uniform {nut0:.4f};\n'
+
+ # Inlet BC ##############################
+ # Write uniform
+ start_index = foam.find_keyword_line(dict_lines, 'inlet') + 2
+ added_part = ''
+ added_part += '\t type \t zeroGradient;\n'
+
dict_lines.insert(start_index, added_part)
- ###################### Outlet BC ##############################
-
- start_index = foam.find_keyword_line(dict_lines, "outlet") + 2
- added_part = ""
- added_part += "\t type \t uniformFixedValue;\n"
- added_part += "\t uniformValue \t constant {:.4f};\n".format(nut0)
-
+ # Outlet BC ##############################
+
+ start_index = foam.find_keyword_line(dict_lines, 'outlet') + 2
+ added_part = ''
+ added_part += '\t type \t uniformFixedValue;\n'
+ added_part += f'\t uniformValue \t constant {nut0:.4f};\n'
+
dict_lines.insert(start_index, added_part)
-
- ###################### Ground BC ##############################
-
- start_index = foam.find_keyword_line(dict_lines, "ground") + 2
-
- if ground_BC_type == "noSlip":
- added_part = ""
- added_part += "\t type \t zeroGradient;\n"
-
- if ground_BC_type == "roughWallFunction":
- added_part = ""
- added_part += "\t type \t nutkAtmRoughWallFunction;\n"
- added_part += "\t z0 \t uniform {:.4e};\n".format(roughness_length)
- added_part += "\t value \t uniform 0.0;\n"
-
- if ground_BC_type == "smoothWallFunction":
- added_part = ""
- added_part += "\t type \t nutUSpaldingWallFunction;\n"
- added_part += "\t value \t uniform 0;\n"
+ # Ground BC ##############################
+
+ start_index = foam.find_keyword_line(dict_lines, 'ground') + 2
+
+ if ground_BC_type == 'noSlip':
+ added_part = ''
+ added_part += '\t type \t zeroGradient;\n'
+
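+    # Rough wall: atmospheric nut wall function parameterised by the
+    # aerodynamic roughness length z0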
+ if ground_BC_type == 'roughWallFunction':
+ added_part = ''
+ added_part += '\t type \t nutkAtmRoughWallFunction;\n'
+ added_part += f'\t z0 \t uniform {roughness_length:.4e};\n'
+ added_part += '\t value \t uniform 0.0;\n'
+
+ if ground_BC_type == 'smoothWallFunction':
+ added_part = ''
+ added_part += '\t type \t nutUSpaldingWallFunction;\n'
+ added_part += '\t value \t uniform 0;\n'
dict_lines.insert(start_index, added_part)
-
-
- ###################### Top BC ##############################
-
- start_index = foam.find_keyword_line(dict_lines, "top") + 2
- added_part = ""
- added_part += "\t type \t {};\n".format(top_BC_type)
-
+
+ # Top BC ##############################
+
+ start_index = foam.find_keyword_line(dict_lines, 'top') + 2
+ added_part = ''
+ added_part += f'\t type \t {top_BC_type};\n'
+
dict_lines.insert(start_index, added_part)
-
- ###################### Front BC ##############################
-
- start_index = foam.find_keyword_line(dict_lines, "front") + 2
- added_part = ""
- added_part += "\t type \t {};\n".format(sides_BC_type)
-
+
+ # Front BC ##############################
+
+ start_index = foam.find_keyword_line(dict_lines, 'front') + 2
+ added_part = ''
+ added_part += f'\t type \t {sides_BC_type};\n'
+
dict_lines.insert(start_index, added_part)
-
- ###################### Back BC ################################
-
- start_index = foam.find_keyword_line(dict_lines, "back") + 2
- added_part = ""
- added_part += "\t type \t {};\n".format(sides_BC_type)
-
+
+ # Back BC ################################
+
+ start_index = foam.find_keyword_line(dict_lines, 'back') + 2
+ added_part = ''
+ added_part += f'\t type \t {sides_BC_type};\n'
+
dict_lines.insert(start_index, added_part)
-
-
- #Write edited dict to file
- write_file_name = case_path + "/0/nut"
-
- if os.path.exists(write_file_name):
- os.remove(write_file_name)
-
- output_file = open(write_file_name, "w+")
+
+ # Write edited dict to file
+ write_file_name = case_path + '/0/nut'
+
+ if os.path.exists(write_file_name): # noqa: PTH110
+ os.remove(write_file_name) # noqa: PTH107
+
+ output_file = open(write_file_name, 'w+') # noqa: PLW1514, PTH123, SIM115
for line in dict_lines:
output_file.write(line)
output_file.close()
-def write_epsilon_file(input_json_path, template_dict_path, case_path):
- #Read JSON data
- with open(input_json_path + "/SurroundedBuildingCFD.json") as json_file:
- json_data = json.load(json_file)
+def write_epsilon_file(input_json_path, template_dict_path, case_path): # noqa: D103
+ # Read JSON data
+ with open(input_json_path + '/SurroundedBuildingCFD.json') as json_file: # noqa: PLW1514, PTH123
+ json_data = json.load(json_file)
# Returns JSON object as a dictionary
- boundary_data = json_data["boundaryConditions"]
- wind_data = json_data["windCharacteristics"]
-
-
- sides_BC_type = boundary_data['sidesBoundaryCondition']
- top_BC_type = boundary_data['topBoundaryCondition']
- ground_BC_type = boundary_data['groundBoundaryCondition']
+ boundary_data = json_data['boundaryConditions']
+ wind_data = json_data['windCharacteristics']
+
+ sides_BC_type = boundary_data['sidesBoundaryCondition'] # noqa: N806
+ top_BC_type = boundary_data['topBoundaryCondition'] # noqa: N806
+ ground_BC_type = boundary_data['groundBoundaryCondition'] # noqa: N806
wind_speed = wind_data['referenceWindSpeed']
building_height = wind_data['referenceHeight']
roughness_length = wind_data['aerodynamicRoughnessLength']
-
- #Open the template file (OpenFOAM file) for manipulation
- dict_file = open(template_dict_path + "/epsilonFileTemplate", "r")
+
+ # Open the template file (OpenFOAM file) for manipulation
+ dict_file = open(template_dict_path + '/epsilonFileTemplate') # noqa: PLW1514, PTH123, SIM115
dict_lines = dict_file.readlines()
dict_file.close()
-
-
- #BC and initial condition
- epsilon0 = 0.01
-
- ##################### Internal Field #########################
-
- start_index = foam.find_keyword_line(dict_lines, "internalField")
- dict_lines[start_index] = "internalField uniform {:.4f};\n".format(epsilon0)
-
-
- ###################### Inlet BC ##############################
- #Write uniform
- start_index = foam.find_keyword_line(dict_lines, "inlet") + 2
- added_part = ""
- added_part += "\t type \t atmBoundaryLayerInletEpsilon;\n"
- added_part += "\t Uref \t {:.4f};\n".format(wind_speed)
- added_part += "\t Zref \t {:.4f};\n".format(building_height)
- added_part += "\t zDir \t (0.0 0.0 1.0);\n"
- added_part += "\t flowDir \t (1.0 0.0 0.0);\n"
- added_part += "\t z0 \t uniform {:.4e};\n".format(roughness_length)
- added_part += "\t zGround \t uniform 0.0;\n"
-
+
+ # BC and initial condition
+ epsilon0 = 0.01
+
+ # Internal Field #########################
+
+ start_index = foam.find_keyword_line(dict_lines, 'internalField')
+ dict_lines[start_index] = f'internalField uniform {epsilon0:.4f};\n'
+
+ # Inlet BC ##############################
+ # Write uniform
+ start_index = foam.find_keyword_line(dict_lines, 'inlet') + 2
+ added_part = ''
+ added_part += '\t type \t atmBoundaryLayerInletEpsilon;\n'
+ added_part += f'\t Uref \t {wind_speed:.4f};\n'
+ added_part += f'\t Zref \t {building_height:.4f};\n'
+ added_part += '\t zDir \t (0.0 0.0 1.0);\n'
+ added_part += '\t flowDir \t (1.0 0.0 0.0);\n'
+ added_part += f'\t z0 \t uniform {roughness_length:.4e};\n'
+ added_part += '\t zGround \t uniform 0.0;\n'
+
dict_lines.insert(start_index, added_part)
- ###################### Outlet BC ##############################
-
- start_index = foam.find_keyword_line(dict_lines, "outlet") + 2
- added_part = ""
- added_part += "\t type \t inletOutlet;\n"
- added_part += "\t inletValue \t uniform {:.4f};\n".format(epsilon0)
- added_part += "\t value \t uniform {:.4f};\n".format(epsilon0)
-
+ # Outlet BC ##############################
+
+ start_index = foam.find_keyword_line(dict_lines, 'outlet') + 2
+ added_part = ''
+ added_part += '\t type \t inletOutlet;\n'
+ added_part += f'\t inletValue \t uniform {epsilon0:.4f};\n'
+ added_part += f'\t value \t uniform {epsilon0:.4f};\n'
+
dict_lines.insert(start_index, added_part)
-
- ###################### Ground BC ##############################
-
- start_index = foam.find_keyword_line(dict_lines, "ground") + 2
-
- if ground_BC_type == "noSlip":
- added_part = ""
- added_part += "\t type \t zeroGradient;\n"
-
- if ground_BC_type == "roughWallFunction":
- added_part = ""
- added_part += "\t type \t epsilonWallFunction;\n"
- added_part += "\t Cmu \t {:.4f};\n".format(0.09)
- added_part += "\t kappa \t {:.4f};\n".format(0.41)
- added_part += "\t E \t {:.4f};\n".format(9.8)
- added_part += "\t value \t uniform {:.4f};\n".format(epsilon0)
-
- #Note: Should be replaced with smooth wall function for epsilon,
+
+ # Ground BC ##############################
+
+ start_index = foam.find_keyword_line(dict_lines, 'ground') + 2
+
+ if ground_BC_type == 'noSlip':
+ added_part = ''
+ added_part += '\t type \t zeroGradient;\n'
+
+ if ground_BC_type == 'roughWallFunction':
+ added_part = ''
+ added_part += '\t type \t epsilonWallFunction;\n'
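+        # Standard wall-function constants (also used in the smooth-wall branch
+        # below): Cmu = 0.09, von Karman kappa = 0.41, E = 9.8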
+ added_part += f'\t Cmu \t {0.09:.4f};\n'
+ added_part += f'\t kappa \t {0.41:.4f};\n'
+ added_part += f'\t E \t {9.8:.4f};\n'
+ added_part += f'\t value \t uniform {epsilon0:.4f};\n'
+
+    # Note: should be replaced with a smooth wall function for epsilon;
    # for now it is the same as the rough wall function.
- if ground_BC_type == "smoothWallFunction":
- added_part = ""
- added_part += "\t type \t epsilonWallFunction;\n"
- added_part += "\t Cmu \t {:.4f};\n".format(0.09)
- added_part += "\t kappa \t {:.4f};\n".format(0.41)
- added_part += "\t E \t {:.4f};\n".format(9.8)
- added_part += "\t value \t uniform {:.4f};\n".format(epsilon0)
+ if ground_BC_type == 'smoothWallFunction':
+ added_part = ''
+ added_part += '\t type \t epsilonWallFunction;\n'
+ added_part += f'\t Cmu \t {0.09:.4f};\n'
+ added_part += f'\t kappa \t {0.41:.4f};\n'
+ added_part += f'\t E \t {9.8:.4f};\n'
+ added_part += f'\t value \t uniform {epsilon0:.4f};\n'
dict_lines.insert(start_index, added_part)
-
-
- ###################### Top BC ##############################
-
- start_index = foam.find_keyword_line(dict_lines, "top") + 2
- added_part = ""
- added_part += "\t type \t {};\n".format(top_BC_type)
-
+
+ # Top BC ##############################
+
+ start_index = foam.find_keyword_line(dict_lines, 'top') + 2
+ added_part = ''
+ added_part += f'\t type \t {top_BC_type};\n'
+
dict_lines.insert(start_index, added_part)
-
- ###################### Front BC ##############################
-
- start_index = foam.find_keyword_line(dict_lines, "front") + 2
- added_part = ""
- added_part += "\t type \t {};\n".format(sides_BC_type)
-
+
+ # Front BC ##############################
+
+ start_index = foam.find_keyword_line(dict_lines, 'front') + 2
+ added_part = ''
+ added_part += f'\t type \t {sides_BC_type};\n'
+
dict_lines.insert(start_index, added_part)
-
- ###################### Back BC ################################
-
- start_index = foam.find_keyword_line(dict_lines, "back") + 2
- added_part = ""
- added_part += "\t type \t {};\n".format(sides_BC_type)
-
+
+ # Back BC ################################
+
+ start_index = foam.find_keyword_line(dict_lines, 'back') + 2
+ added_part = ''
+ added_part += f'\t type \t {sides_BC_type};\n'
+
dict_lines.insert(start_index, added_part)
-
- #Write edited dict to file
- write_file_name = case_path + "/0/epsilon"
-
- if os.path.exists(write_file_name):
- os.remove(write_file_name)
-
- output_file = open(write_file_name, "w+")
+
+ # Write edited dict to file
+ write_file_name = case_path + '/0/epsilon'
+
+ if os.path.exists(write_file_name): # noqa: PTH110
+ os.remove(write_file_name) # noqa: PTH107
+
+ output_file = open(write_file_name, 'w+') # noqa: PLW1514, PTH123, SIM115
for line in dict_lines:
output_file.write(line)
output_file.close()
-def write_k_file(input_json_path, template_dict_path, case_path):
- #Read JSON data
- with open(input_json_path + "/SurroundedBuildingCFD.json") as json_file:
- json_data = json.load(json_file)
+def write_k_file(input_json_path, template_dict_path, case_path): # noqa: D103
+ # Read JSON data
+ with open(input_json_path + '/SurroundedBuildingCFD.json') as json_file: # noqa: PLW1514, PTH123
+ json_data = json.load(json_file)
# Returns JSON object as a dictionary
- boundary_data = json_data["boundaryConditions"]
- wind_data = json_data["windCharacteristics"]
-
-
- sides_BC_type = boundary_data['sidesBoundaryCondition']
- top_BC_type = boundary_data['topBoundaryCondition']
- ground_BC_type = boundary_data['groundBoundaryCondition']
+ boundary_data = json_data['boundaryConditions']
+ wind_data = json_data['windCharacteristics']
+
+ sides_BC_type = boundary_data['sidesBoundaryCondition'] # noqa: N806
+ top_BC_type = boundary_data['topBoundaryCondition'] # noqa: N806
+ ground_BC_type = boundary_data['groundBoundaryCondition'] # noqa: N806
wind_speed = wind_data['referenceWindSpeed']
building_height = wind_data['referenceHeight']
roughness_length = wind_data['aerodynamicRoughnessLength']
-
- #Open the template file (OpenFOAM file) for manipulation
- dict_file = open(template_dict_path + "/kFileTemplate", "r")
+
+ # Open the template file (OpenFOAM file) for manipulation
+ dict_file = open(template_dict_path + '/kFileTemplate') # noqa: PLW1514, PTH123, SIM115
dict_lines = dict_file.readlines()
dict_file.close()
-
-
- #BC and initial condition (you may need to scale to model scale)
+
+ # BC and initial condition (you may need to scale to model scale)
# k0 = 1.3 #not in model scale
-
- I = 0.1
- k0 = 1.5*(I*wind_speed)**2
-
- ##################### Internal Field #########################
-
- start_index = foam.find_keyword_line(dict_lines, "internalField")
- dict_lines[start_index] = "internalField \t uniform {:.4f};\n".format(k0)
-
-
- ###################### Inlet BC ##############################
- #Write uniform
- start_index = foam.find_keyword_line(dict_lines, "inlet") + 2
- added_part = ""
- added_part += "\t type \t atmBoundaryLayerInletK;\n"
- added_part += "\t Uref \t {:.4f};\n".format(wind_speed)
- added_part += "\t Zref \t {:.4f};\n".format(building_height)
- added_part += "\t zDir \t (0.0 0.0 1.0);\n"
- added_part += "\t flowDir \t (1.0 0.0 0.0);\n"
- added_part += "\t z0 \t uniform {:.4e};\n".format(roughness_length)
- added_part += "\t zGround \t uniform 0.0;\n"
-
+
+ I = 0.1 # noqa: N806, E741
+ k0 = 1.5 * (I * wind_speed) ** 2
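+    # Standard isotropic estimate k = 1.5 * (I * Uref)^2 with an assumed
+    # turbulence intensity I of 10%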
+
+ # Internal Field #########################
+
+ start_index = foam.find_keyword_line(dict_lines, 'internalField')
+ dict_lines[start_index] = f'internalField \t uniform {k0:.4f};\n'
+
+ # Inlet BC ##############################
+ # Write uniform
+ start_index = foam.find_keyword_line(dict_lines, 'inlet') + 2
+ added_part = ''
+ added_part += '\t type \t atmBoundaryLayerInletK;\n'
+ added_part += f'\t Uref \t {wind_speed:.4f};\n'
+ added_part += f'\t Zref \t {building_height:.4f};\n'
+ added_part += '\t zDir \t (0.0 0.0 1.0);\n'
+ added_part += '\t flowDir \t (1.0 0.0 0.0);\n'
+ added_part += f'\t z0 \t uniform {roughness_length:.4e};\n'
+ added_part += '\t zGround \t uniform 0.0;\n'
+
dict_lines.insert(start_index, added_part)
- ###################### Outlet BC ##############################
-
- start_index = foam.find_keyword_line(dict_lines, "outlet") + 2
- added_part = ""
- added_part += "\t type \t inletOutlet;\n"
- added_part += "\t inletValue \t uniform {:.4f};\n".format(k0)
- added_part += "\t value \t uniform {:.4f};\n".format(k0)
-
+ # Outlet BC ##############################
+
+ start_index = foam.find_keyword_line(dict_lines, 'outlet') + 2
+ added_part = ''
+ added_part += '\t type \t inletOutlet;\n'
+ added_part += f'\t inletValue \t uniform {k0:.4f};\n'
+ added_part += f'\t value \t uniform {k0:.4f};\n'
+
dict_lines.insert(start_index, added_part)
-
- ###################### Ground BC ##############################
-
- start_index = foam.find_keyword_line(dict_lines, "ground") + 2
-
- if ground_BC_type == "noSlip":
- added_part = ""
- added_part += "\t type \t zeroGradient;\n"
-
- if ground_BC_type == "smoothWallFunction":
- added_part = ""
- added_part += "\t type \t kqRWallFunction;\n"
- added_part += "\t value \t uniform {:.4f};\n".format(0.0)
-
- if ground_BC_type == "roughWallFunction":
- added_part = ""
- added_part += "\t type \t kqRWallFunction;\n"
- added_part += "\t value \t uniform {:.4f};\n".format(0.0)
+
+ # Ground BC ##############################
+
+ start_index = foam.find_keyword_line(dict_lines, 'ground') + 2
+
+ if ground_BC_type == 'noSlip':
+ added_part = ''
+ added_part += '\t type \t zeroGradient;\n'
+
+ if ground_BC_type == 'smoothWallFunction':
+ added_part = ''
+ added_part += '\t type \t kqRWallFunction;\n'
+ added_part += f'\t value \t uniform {0.0:.4f};\n'
+
+ if ground_BC_type == 'roughWallFunction':
+ added_part = ''
+ added_part += '\t type \t kqRWallFunction;\n'
+ added_part += f'\t value \t uniform {0.0:.4f};\n'
dict_lines.insert(start_index, added_part)
-
-
- ###################### Top BC ##############################
-
- start_index = foam.find_keyword_line(dict_lines, "top") + 2
- added_part = ""
- added_part += "\t type \t {};\n".format(top_BC_type)
-
+
+ # Top BC ##############################
+
+ start_index = foam.find_keyword_line(dict_lines, 'top') + 2
+ added_part = ''
+ added_part += f'\t type \t {top_BC_type};\n'
+
dict_lines.insert(start_index, added_part)
-
- ###################### Front BC ##############################
-
- start_index = foam.find_keyword_line(dict_lines, "front") + 2
- added_part = ""
- added_part += "\t type \t {};\n".format(sides_BC_type)
-
+
+ # Front BC ##############################
+
+ start_index = foam.find_keyword_line(dict_lines, 'front') + 2
+ added_part = ''
+ added_part += f'\t type \t {sides_BC_type};\n'
+
dict_lines.insert(start_index, added_part)
-
- ###################### Back BC ################################
-
- start_index = foam.find_keyword_line(dict_lines, "back") + 2
- added_part = ""
- added_part += "\t type \t {};\n".format(sides_BC_type)
-
+
+ # Back BC ################################
+
+ start_index = foam.find_keyword_line(dict_lines, 'back') + 2
+ added_part = ''
+ added_part += f'\t type \t {sides_BC_type};\n'
+
dict_lines.insert(start_index, added_part)
-
- #Write edited dict to file
- write_file_name = case_path + "/0/k"
-
- if os.path.exists(write_file_name):
- os.remove(write_file_name)
-
- output_file = open(write_file_name, "w+")
+
+ # Write edited dict to file
+ write_file_name = case_path + '/0/k'
+
+ if os.path.exists(write_file_name): # noqa: PTH110
+ os.remove(write_file_name) # noqa: PTH107
+
+ output_file = open(write_file_name, 'w+') # noqa: PLW1514, PTH123, SIM115
for line in dict_lines:
output_file.write(line)
output_file.close()
-
-
-def write_controlDict_file(input_json_path, template_dict_path, case_path):
- #Read JSON data
- with open(input_json_path + "/SurroundedBuildingCFD.json") as json_file:
- json_data = json.load(json_file)
+
+def write_controlDict_file(input_json_path, template_dict_path, case_path): # noqa: N802, D103
+ # Read JSON data
+ with open(input_json_path + '/SurroundedBuildingCFD.json') as json_file: # noqa: PLW1514, PTH123
+ json_data = json.load(json_file)
# Returns JSON object as a dictionary
- ns_data = json_data["numericalSetup"]
- rm_data = json_data["resultMonitoring"]
-
+ ns_data = json_data['numericalSetup']
+ rm_data = json_data['resultMonitoring'] # noqa: F841
+
solver_type = ns_data['solverType']
duration = ns_data['duration']
time_step = ns_data['timeStep']
max_courant_number = ns_data['maxCourantNumber']
adjust_time_step = ns_data['adjustTimeStep']
-
+
# monitor_wind_profiles = rm_data['monitorWindProfile']
# monitor_vtk_planes = rm_data['monitorVTKPlane']
# wind_profiles = rm_data['windProfiles']
# vtk_planes = rm_data['vtkPlanes']
-
- # Need to change this for
- max_delta_t = 10*time_step
-
+ # Need to change this for
+ max_delta_t = 10 * time_step
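+    # maxDeltaT caps the adjustable time step at 10x the nominal step when
+    # adjustTimeStep is enabled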
+
write_interval = 1000
- purge_write = 3
-
- #Open the template file (OpenFOAM file) for manipulation
- dict_file = open(template_dict_path + "/controlDictTemplate", "r")
+ purge_write = 3
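+    # purgeWrite keeps only the most recent 3 time directories on disk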
+
+ # Open the template file (OpenFOAM file) for manipulation
+ dict_file = open(template_dict_path + '/controlDictTemplate') # noqa: PLW1514, PTH123, SIM115
dict_lines = dict_file.readlines()
dict_file.close()
-
- #Write application type
- start_index = foam.find_keyword_line(dict_lines, "application")
- dict_lines[start_index] = "application \t{};\n".format(solver_type)
-
- #Write end time
- start_index = foam.find_keyword_line(dict_lines, "endTime")
- dict_lines[start_index] = "endTime \t{:.6f};\n".format(duration)
-
- #Write time step time
- start_index = foam.find_keyword_line(dict_lines, "deltaT")
- dict_lines[start_index] = "deltaT \t\t{:.6f};\n".format(time_step)
-
- #Write writeControl
- start_index = foam.find_keyword_line(dict_lines, "writeControl")
- if solver_type=="pimpleFoam":
- dict_lines[start_index] = "writeControl \t{};\n".format("adjustableRunTime")
+
+ # Write application type
+ start_index = foam.find_keyword_line(dict_lines, 'application')
+ dict_lines[start_index] = f'application \t{solver_type};\n'
+
+ # Write end time
+ start_index = foam.find_keyword_line(dict_lines, 'endTime')
+ dict_lines[start_index] = f'endTime \t{duration:.6f};\n'
+
+ # Write time step time
+ start_index = foam.find_keyword_line(dict_lines, 'deltaT')
+ dict_lines[start_index] = f'deltaT \t\t{time_step:.6f};\n'
+
+ # Write writeControl
+ start_index = foam.find_keyword_line(dict_lines, 'writeControl')
+ if solver_type == 'pimpleFoam':
+        dict_lines[start_index] = 'writeControl \tadjustableRunTime;\n'
else:
- dict_lines[start_index] = "writeControl \t\t{};\n".format("timeStep")
-
- #Write adjustable time step or not
- start_index = foam.find_keyword_line(dict_lines, "adjustTimeStep")
- dict_lines[start_index] = "adjustTimeStep \t\t{};\n".format("yes" if adjust_time_step else "no")
-
- #Write writeInterval
- start_index = foam.find_keyword_line(dict_lines, "writeInterval")
- if solver_type=="pimpleFoam":
- dict_lines[start_index] = "writeInterval \t{:.6f};\n".format(write_interval*time_step)
+        dict_lines[start_index] = 'writeControl \t\ttimeStep;\n'
+
+ # Write adjustable time step or not
+ start_index = foam.find_keyword_line(dict_lines, 'adjustTimeStep')
+ dict_lines[start_index] = 'adjustTimeStep \t\t{};\n'.format(
+ 'yes' if adjust_time_step else 'no'
+ )
+
+ # Write writeInterval
+ start_index = foam.find_keyword_line(dict_lines, 'writeInterval')
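+    # With adjustableRunTime control the interval is given in simulation time
+    # (steps * deltaT); with timeStep control it is a number of steps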
+ if solver_type == 'pimpleFoam':
+ dict_lines[start_index] = (
+ f'writeInterval \t{write_interval * time_step:.6f};\n'
+ )
else:
- dict_lines[start_index] = "writeInterval \t{};\n".format(write_interval)
-
- #Write maxCo
- start_index = foam.find_keyword_line(dict_lines, "maxCo")
- dict_lines[start_index] = "maxCo \t{:.2f};\n".format(max_courant_number)
-
- #Write maximum time step
- start_index = foam.find_keyword_line(dict_lines, "maxDeltaT")
- dict_lines[start_index] = "maxDeltaT \t{:.6f};\n".format(max_delta_t)
-
-
- #Write purge write interval
- start_index = foam.find_keyword_line(dict_lines, "purgeWrite")
- dict_lines[start_index] = "purgeWrite \t{};\n".format(purge_write)
-
- ########################### Function Objects ##############################
-
- #Find function object location
- start_index = foam.find_keyword_line(dict_lines, "functions") + 2
+ dict_lines[start_index] = f'writeInterval \t{write_interval};\n'
+
+ # Write maxCo
+ start_index = foam.find_keyword_line(dict_lines, 'maxCo')
+ dict_lines[start_index] = f'maxCo \t{max_courant_number:.2f};\n'
+
+ # Write maximum time step
+ start_index = foam.find_keyword_line(dict_lines, 'maxDeltaT')
+ dict_lines[start_index] = f'maxDeltaT \t{max_delta_t:.6f};\n'
+
+ # Write purge write interval
+ start_index = foam.find_keyword_line(dict_lines, 'purgeWrite')
+ dict_lines[start_index] = f'purgeWrite \t{purge_write};\n'
+
+ # Function Objects ##############################
+
+ # Find function object location
+ start_index = foam.find_keyword_line(dict_lines, 'functions') + 2
# #Write wind profile monitoring functionObjects
# if monitor_wind_profiles:
@@ -1368,143 +1377,140 @@ def write_controlDict_file(input_json_path, template_dict_path, case_path):
# for prof in wind_profiles:
# added_part += " #includeFunc {}\n".format(prof["name"])
# dict_lines.insert(start_index, added_part)
-
- # #Write VTK sampling sampling points
+
+ # #Write VTK sampling sampling points
# if monitor_vtk_planes:
# added_part = ""
# for pln in vtk_planes:
# added_part += " #includeFunc {}\n".format(pln["name"])
# dict_lines.insert(start_index, added_part)
-
-
- #Write edited dict to file
- write_file_name = case_path + "/system/controlDict"
-
- if os.path.exists(write_file_name):
- os.remove(write_file_name)
-
- output_file = open(write_file_name, "w+")
+
+ # Write edited dict to file
+ write_file_name = case_path + '/system/controlDict'
+
+ if os.path.exists(write_file_name): # noqa: PTH110
+ os.remove(write_file_name) # noqa: PTH107
+
+ output_file = open(write_file_name, 'w+') # noqa: PLW1514, PTH123, SIM115
for line in dict_lines:
output_file.write(line)
output_file.close()
-
-def write_fvSolution_file(input_json_path, template_dict_path, case_path):
- #Read JSON data
- with open(input_json_path + "/SurroundedBuildingCFD.json") as json_file:
- json_data = json.load(json_file)
+
+def write_fvSolution_file(input_json_path, template_dict_path, case_path): # noqa: N802, D103
+ # Read JSON data
+ with open(input_json_path + '/SurroundedBuildingCFD.json') as json_file: # noqa: PLW1514, PTH123
+ json_data = json.load(json_file)
# Returns JSON object as a dictionary
- ns_data = json_data["numericalSetup"]
-
+ ns_data = json_data['numericalSetup']
+
json_file.close()
-
+
num_non_orthogonal_correctors = ns_data['numNonOrthogonalCorrectors']
num_correctors = ns_data['numCorrectors']
num_outer_correctors = ns_data['numOuterCorrectors']
-
- #Open the template file (OpenFOAM file) for manipulation
- dict_file = open(template_dict_path + "/fvSolutionTemplate", "r")
+
+ # Open the template file (OpenFOAM file) for manipulation
+ dict_file = open(template_dict_path + '/fvSolutionTemplate') # noqa: PLW1514, PTH123, SIM115
dict_lines = dict_file.readlines()
dict_file.close()
-
- #Write simpleFoam options
- start_index = foam.find_keyword_line(dict_lines, "SIMPLE") + 2
- added_part = ""
- added_part += " nNonOrthogonalCorrectors \t{};\n".format(num_non_orthogonal_correctors)
+ # Write simpleFoam options
+ start_index = foam.find_keyword_line(dict_lines, 'SIMPLE') + 2
+ added_part = ''
+ added_part += (
+ f' nNonOrthogonalCorrectors \t{num_non_orthogonal_correctors};\n'
+ )
dict_lines.insert(start_index, added_part)
-
- #Write pimpleFoam options
- start_index = foam.find_keyword_line(dict_lines, "PIMPLE") + 2
- added_part = ""
- added_part += " nOuterCorrectors \t{};\n".format(num_outer_correctors)
- added_part += " nCorrectors \t{};\n".format(num_correctors)
- added_part += " nNonOrthogonalCorrectors \t{};\n".format(num_non_orthogonal_correctors)
+ # Write pimpleFoam options
+ start_index = foam.find_keyword_line(dict_lines, 'PIMPLE') + 2
+ added_part = ''
+ added_part += f' nOuterCorrectors \t{num_outer_correctors};\n'
+ added_part += f' nCorrectors \t{num_correctors};\n'
+ added_part += (
+ f' nNonOrthogonalCorrectors \t{num_non_orthogonal_correctors};\n'
+ )
dict_lines.insert(start_index, added_part)
-
- #Write pisoFoam options
- start_index = foam.find_keyword_line(dict_lines, "PISO") + 2
- added_part = ""
- added_part += " nCorrectors \t{};\n".format(num_correctors)
- added_part += " nNonOrthogonalCorrectors \t{};\n".format(num_non_orthogonal_correctors)
+ # Write pisoFoam options
+ start_index = foam.find_keyword_line(dict_lines, 'PISO') + 2
+ added_part = ''
+ added_part += f' nCorrectors \t{num_correctors};\n'
+ added_part += (
+ f' nNonOrthogonalCorrectors \t{num_non_orthogonal_correctors};\n'
+ )
dict_lines.insert(start_index, added_part)
-
-
- #Write edited dict to file
- write_file_name = case_path + "/system/fvSolution"
-
- if os.path.exists(write_file_name):
- os.remove(write_file_name)
-
- output_file = open(write_file_name, "w+")
+
+ # Write edited dict to file
+ write_file_name = case_path + '/system/fvSolution'
+
+ if os.path.exists(write_file_name): # noqa: PTH110
+ os.remove(write_file_name) # noqa: PTH107
+
+ output_file = open(write_file_name, 'w+') # noqa: PLW1514, PTH123, SIM115
for line in dict_lines:
output_file.write(line)
- output_file.close()
-
+ output_file.close()
-def write_pressure_probes_file(input_json_path, template_dict_path, case_path):
- #Read JSON data
- with open(input_json_path + "/SurroundedBuildingCFD.json") as json_file:
+def write_pressure_probes_file(input_json_path, template_dict_path, case_path): # noqa: D103
+ # Read JSON data
+ with open(input_json_path + '/SurroundedBuildingCFD.json') as json_file: # noqa: PLW1514, PTH123
json_data = json.load(json_file)
# Returns JSON object as a dictionary
- rm_data = json_data["resultMonitoring"]
+ rm_data = json_data['resultMonitoring']
pressure_sampling_points = rm_data['pressureSamplingPoints']
pressure_write_interval = rm_data['pressureWriteInterval']
- #Open the template file (OpenFOAM file) for manipulation
- dict_file = open(template_dict_path + "/probeTemplate", "r")
+ # Open the template file (OpenFOAM file) for manipulation
+ dict_file = open(template_dict_path + '/probeTemplate') # noqa: PLW1514, PTH123, SIM115
dict_lines = dict_file.readlines()
dict_file.close()
-
-
- #Write writeInterval
- start_index = foam.find_keyword_line(dict_lines, "writeInterval")
- dict_lines[start_index] = "writeInterval \t{};\n".format(pressure_write_interval)
-
- #Write fields to be motored
- start_index = foam.find_keyword_line(dict_lines, "fields")
- dict_lines[start_index] = "fields \t\t(p);\n"
-
- start_index = foam.find_keyword_line(dict_lines, "probeLocations") + 2
-
- added_part = ""
-
+
+ # Write writeInterval
+ start_index = foam.find_keyword_line(dict_lines, 'writeInterval')
+ dict_lines[start_index] = f'writeInterval \t{pressure_write_interval};\n'
+
+    # Write fields to be monitored
+ start_index = foam.find_keyword_line(dict_lines, 'fields')
+ dict_lines[start_index] = 'fields \t\t(p);\n'
+
+ start_index = foam.find_keyword_line(dict_lines, 'probeLocations') + 2
+
+ added_part = ''
+
for i in range(len(pressure_sampling_points)):
- added_part += " ({:.6f} {:.6f} {:.6f})\n".format(pressure_sampling_points[i][0], pressure_sampling_points[i][1], pressure_sampling_points[i][2])
-
+ added_part += f' ({pressure_sampling_points[i][0]:.6f} {pressure_sampling_points[i][1]:.6f} {pressure_sampling_points[i][2]:.6f})\n'
+
dict_lines.insert(start_index, added_part)
- #Write edited dict to file
- write_file_name = case_path + "/system/pressureSamplingPoints"
-
- if os.path.exists(write_file_name):
- os.remove(write_file_name)
-
- output_file = open(write_file_name, "w+")
+ # Write edited dict to file
+ write_file_name = case_path + '/system/pressureSamplingPoints'
+
+ if os.path.exists(write_file_name): # noqa: PTH110
+ os.remove(write_file_name) # noqa: PTH107
+
+ output_file = open(write_file_name, 'w+') # noqa: PLW1514, PTH123, SIM115
for line in dict_lines:
output_file.write(line)
output_file.close()
-
-
-
-def write_wind_profiles_file(input_json_path, template_dict_path, case_path):
- #Read JSON data
- with open(input_json_path + "/SurroundedBuildingCFD.json") as json_file:
- json_data = json.load(json_file)
+
+def write_wind_profiles_file(input_json_path, template_dict_path, case_path): # noqa: C901, D103
+ # Read JSON data
+ with open(input_json_path + '/SurroundedBuildingCFD.json') as json_file: # noqa: PLW1514, PTH123
+ json_data = json.load(json_file)
# Returns JSON object as a dictionary
- rm_data = json_data["resultMonitoring"]
+ rm_data = json_data['resultMonitoring']
- ns_data = json_data["numericalSetup"]
+ ns_data = json_data['numericalSetup']
solver_type = ns_data['solverType']
time_step = ns_data['timeStep']
@@ -1512,511 +1518,509 @@ def write_wind_profiles_file(input_json_path, template_dict_path, case_path):
write_interval = rm_data['profileWriteInterval']
start_time = rm_data['profileStartTime']
- if rm_data['monitorWindProfile'] == False:
- return
-
- if len(wind_profiles)==0:
+ if rm_data['monitorWindProfile'] == False: # noqa: E712
+ return
+
+ if len(wind_profiles) == 0:
return
- #Write dict files for wind profiles
+ # Write dict files for wind profiles
for prof in wind_profiles:
- #Open the template file (OpenFOAM file) for manipulation
- dict_file = open(template_dict_path + "/probeTemplate", "r")
+ # Open the template file (OpenFOAM file) for manipulation
+ dict_file = open(template_dict_path + '/probeTemplate') # noqa: PLW1514, PTH123, SIM115
dict_lines = dict_file.readlines()
dict_file.close()
-
- #Write writeControl
- start_index = foam.find_keyword_line(dict_lines, "writeControl")
- if solver_type=="pimpleFoam":
- dict_lines[start_index] = " writeControl \t{};\n".format("adjustableRunTime")
- else:
- dict_lines[start_index] = " writeControl \t{};\n".format("timeStep")
- #Write writeInterval
- start_index = foam.find_keyword_line(dict_lines, "writeInterval")
- if solver_type=="pimpleFoam":
- dict_lines[start_index] = " writeInterval \t{:.6f};\n".format(write_interval*time_step)
+ # Write writeControl
+ start_index = foam.find_keyword_line(dict_lines, 'writeControl')
+ if solver_type == 'pimpleFoam':
+            dict_lines[start_index] = ' writeControl \tadjustableRunTime;\n'
+        else:
+            dict_lines[start_index] = ' writeControl \ttimeStep;\n'
+
+ # Write writeInterval
+ start_index = foam.find_keyword_line(dict_lines, 'writeInterval')
+ if solver_type == 'pimpleFoam':
+ dict_lines[start_index] = (
+ f' writeInterval \t{write_interval * time_step:.6f};\n'
+ )
else:
- dict_lines[start_index] = " writeInterval \t{};\n".format(write_interval)
-
- #Write start time for the probes
- start_index = foam.find_keyword_line(dict_lines, "timeStart")
- dict_lines[start_index] = " timeStart \t\t{:.6f};\n".format(start_time)
-
- #Write name of the profile
- name = prof["name"]
- start_index = foam.find_keyword_line(dict_lines, "profileName")
- dict_lines[start_index] = "{}\n".format(name)
-
- #Write field type
- field_type = prof["field"]
- start_index = foam.find_keyword_line(dict_lines, "fields")
-
- if field_type=="Velocity":
- dict_lines[start_index] = " fields \t\t({});\n".format("U")
- if field_type=="Pressure":
- dict_lines[start_index] = " fields \t\t({});\n".format("p")
-
- #Write point coordinates
- start_x = prof["startX"]
- start_y = prof["startY"]
- start_z = prof["startZ"]
-
- end_x = prof["endX"]
- end_y = prof["endY"]
- end_z = prof["endZ"]
- n_points = prof["nPoints"]
-
- dx = (end_x - start_x)/n_points
- dy = (end_y - start_y)/n_points
- dz = (end_z - start_z)/n_points
-
- #Write locations of the probes
- start_index = foam.find_keyword_line(dict_lines, "probeLocations") + 2
- added_part = ""
-
- for pi in range(n_points):
- added_part += " ({:.6f} {:.6f} {:.6f})\n".format(start_x + pi*dx, start_y + pi*dy, start_z + pi*dz)
-
- dict_lines.insert(start_index, added_part)
-
- #Write edited dict to file
- write_file_name = case_path + "/system/" + name
-
- if os.path.exists(write_file_name):
- os.remove(write_file_name)
-
- output_file = open(write_file_name, "w+")
+ dict_lines[start_index] = f' writeInterval \t{write_interval};\n'
+
+ # Write start time for the probes
+ start_index = foam.find_keyword_line(dict_lines, 'timeStart')
+ dict_lines[start_index] = f' timeStart \t\t{start_time:.6f};\n'
+
+ # Write name of the profile
+ name = prof['name']
+ start_index = foam.find_keyword_line(dict_lines, 'profileName')
+ dict_lines[start_index] = f'{name}\n'
+
+ # Write field type
+ field_type = prof['field']
+ start_index = foam.find_keyword_line(dict_lines, 'fields')
+
+ if field_type == 'Velocity':
+            dict_lines[start_index] = ' fields \t\t(U);\n'
+        if field_type == 'Pressure':
+            dict_lines[start_index] = ' fields \t\t(p);\n'
+
+ # Write point coordinates
+ start_x = prof['startX']
+ start_y = prof['startY']
+ start_z = prof['startZ']
+
+ end_x = prof['endX']
+ end_y = prof['endY']
+ end_z = prof['endZ']
+ n_points = prof['nPoints']
+
+ dx = (end_x - start_x) / n_points
+ dy = (end_y - start_y) / n_points
+ dz = (end_z - start_z) / n_points
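+        # The sampling line is split into n_points equal segments; probes sit at
+        # the start of each segment, so the end point itself is not sampled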
+
+ # Write locations of the probes
+ start_index = foam.find_keyword_line(dict_lines, 'probeLocations') + 2
+ added_part = ''
+
+ for pi in range(n_points):
+ added_part += f' ({start_x + pi * dx:.6f} {start_y + pi * dy:.6f} {start_z + pi * dz:.6f})\n'
+
+ dict_lines.insert(start_index, added_part)
+
+ # Write edited dict to file
+ write_file_name = case_path + '/system/' + name
+
+ if os.path.exists(write_file_name): # noqa: PTH110
+ os.remove(write_file_name) # noqa: PTH107
+
+ output_file = open(write_file_name, 'w+') # noqa: PLW1514, PTH123, SIM115
for line in dict_lines:
output_file.write(line)
output_file.close()
-
-def write_vtk_plane_file(input_json_path, template_dict_path, case_path):
- #Read JSON data
- with open(input_json_path + "/SurroundedBuildingCFD.json") as json_file:
- json_data = json.load(json_file)
+
+def write_vtk_plane_file(input_json_path, template_dict_path, case_path): # noqa: C901, D103
+ # Read JSON data
+ with open(input_json_path + '/SurroundedBuildingCFD.json') as json_file: # noqa: PLW1514, PTH123
+ json_data = json.load(json_file)
# Returns JSON object as a dictionary
- rm_data = json_data["resultMonitoring"]
- ns_data = json_data["numericalSetup"]
+ rm_data = json_data['resultMonitoring']
+ ns_data = json_data['numericalSetup']
solver_type = ns_data['solverType']
time_step = ns_data['timeStep']
-
vtk_planes = rm_data['vtkPlanes']
write_interval = rm_data['vtkWriteInterval']
- if rm_data['monitorVTKPlane'] == False:
- return
-
- if len(vtk_planes)==0:
+ if rm_data['monitorVTKPlane'] == False: # noqa: E712
+ return
+
+ if len(vtk_planes) == 0:
return
- #Write dict files for wind profiles
+ # Write dict files for wind profiles
for pln in vtk_planes:
- #Open the template file (OpenFOAM file) for manipulation
- dict_file = open(template_dict_path + "/vtkPlaneTemplate", "r")
+ # Open the template file (OpenFOAM file) for manipulation
+ dict_file = open(template_dict_path + '/vtkPlaneTemplate') # noqa: PLW1514, PTH123, SIM115
dict_lines = dict_file.readlines()
dict_file.close()
-
- #Write writeControl
- start_index = foam.find_keyword_line(dict_lines, "writeControl")
- if solver_type=="pimpleFoam":
- dict_lines[start_index] = " writeControl \t{};\n".format("adjustableRunTime")
- else:
- dict_lines[start_index] = " writeControl \t{};\n".format("timeStep")
- #Write writeInterval
- start_index = foam.find_keyword_line(dict_lines, "writeInterval")
- if solver_type=="pimpleFoam":
- dict_lines[start_index] = " writeInterval \t{:.6f};\n".format(write_interval*time_step)
+ # Write writeControl
+ start_index = foam.find_keyword_line(dict_lines, 'writeControl')
+ if solver_type == 'pimpleFoam':
+ dict_lines[start_index] = ' writeControl \t{};\n'.format(
+ 'adjustableRunTime'
+ )
+ else:
+ dict_lines[start_index] = ' writeControl \t{};\n'.format('timeStep')
+
+ # Write writeInterval
+ start_index = foam.find_keyword_line(dict_lines, 'writeInterval')
+ if solver_type == 'pimpleFoam':
+ dict_lines[start_index] = (
+ f' writeInterval \t{write_interval * time_step:.6f};\n'
+ )
else:
- dict_lines[start_index] = " writeInterval \t{};\n".format(write_interval)
+ dict_lines[start_index] = f' writeInterval \t{write_interval};\n'
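+        # With pimpleFoam the plane is written at fixed physical-time intervals (writeInterval*timeStep seconds); otherwise every writeInterval solver steps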
- #Write start and end time for the section
+ # Write start and end time for the section
start_time = pln['startTime']
end_time = pln['endTime']
- start_index = foam.find_keyword_line(dict_lines, "timeStart")
- dict_lines[start_index] = " timeStart \t\t{:.6f};\n".format(start_time)
-
- start_index = foam.find_keyword_line(dict_lines, "timeEnd")
- dict_lines[start_index] = " timeEnd \t\t{:.6f};\n".format(end_time)
-
- #Write name of the profile
- name = pln["name"]
- start_index = foam.find_keyword_line(dict_lines, "planeName")
- dict_lines[start_index] = "{}\n".format(name)
-
- #Write field type
- field_type = pln["field"]
- start_index = foam.find_keyword_line(dict_lines, "fields")
-
- if field_type=="Velocity":
- dict_lines[start_index] = " fields \t\t({});\n".format("U")
- if field_type=="Pressure":
- dict_lines[start_index] = " fields \t\t({});\n".format("p")
-
- #Write normal and point coordinates
- point_x = pln["pointX"]
- point_y = pln["pointY"]
- point_z = pln["pointZ"]
-
- normal_axis = pln["normalAxis"]
-
- start_index = foam.find_keyword_line(dict_lines, "point")
- dict_lines[start_index] = "\t point\t\t({:.6f} {:.6f} {:.6f});\n".format(point_x, point_y, point_z)
-
- start_index = foam.find_keyword_line(dict_lines, "normal")
- if normal_axis=="X":
- dict_lines[start_index] = "\t normal\t\t({} {} {});\n".format(1, 0, 0)
- if normal_axis=="Y":
- dict_lines[start_index] = "\t normal\t\t({} {} {});\n".format(0, 1, 0)
- if normal_axis=="Z":
- dict_lines[start_index] = "\t normal\t\t({} {} {});\n".format(0, 0, 1)
-
- #Write edited dict to file
- write_file_name = case_path + "/system/" + name
-
- if os.path.exists(write_file_name):
- os.remove(write_file_name)
-
- output_file = open(write_file_name, "w+")
+ start_index = foam.find_keyword_line(dict_lines, 'timeStart')
+ dict_lines[start_index] = f' timeStart \t\t{start_time:.6f};\n'
+
+ start_index = foam.find_keyword_line(dict_lines, 'timeEnd')
+ dict_lines[start_index] = f' timeEnd \t\t{end_time:.6f};\n'
+
+ # Write name of the profile
+ name = pln['name']
+ start_index = foam.find_keyword_line(dict_lines, 'planeName')
+ dict_lines[start_index] = f'{name}\n'
+
+ # Write field type
+ field_type = pln['field']
+ start_index = foam.find_keyword_line(dict_lines, 'fields')
+
+ if field_type == 'Velocity':
+ dict_lines[start_index] = ' fields \t\t({});\n'.format('U')
+ if field_type == 'Pressure':
+ dict_lines[start_index] = ' fields \t\t({});\n'.format('p')
+
+ # Write normal and point coordinates
+ point_x = pln['pointX']
+ point_y = pln['pointY']
+ point_z = pln['pointZ']
+
+ normal_axis = pln['normalAxis']
+
+ start_index = foam.find_keyword_line(dict_lines, 'point')
+ dict_lines[start_index] = (
+ f'\t point\t\t({point_x:.6f} {point_y:.6f} {point_z:.6f});\n'
+ )
+
+ start_index = foam.find_keyword_line(dict_lines, 'normal')
+ if normal_axis == 'X':
+ dict_lines[start_index] = f'\t normal\t\t({1} {0} {0});\n'
+ if normal_axis == 'Y':
+ dict_lines[start_index] = f'\t normal\t\t({0} {1} {0});\n'
+ if normal_axis == 'Z':
+ dict_lines[start_index] = f'\t normal\t\t({0} {0} {1});\n'
+
+ # Write edited dict to file
+ write_file_name = case_path + '/system/' + name
+
+ if os.path.exists(write_file_name): # noqa: PTH110
+ os.remove(write_file_name) # noqa: PTH107
+
+ output_file = open(write_file_name, 'w+') # noqa: PLW1514, PTH123, SIM115
for line in dict_lines:
output_file.write(line)
output_file.close()
-
-
-def write_momentumTransport_file(input_json_path, template_dict_path, case_path):
- #Read JSON data
- with open(input_json_path + "/SurroundedBuildingCFD.json") as json_file:
- json_data = json.load(json_file)
+
+def write_momentumTransport_file(input_json_path, template_dict_path, case_path): # noqa: N802, D103
+ # Read JSON data
+ with open(input_json_path + '/SurroundedBuildingCFD.json') as json_file: # noqa: PLW1514, PTH123
+ json_data = json.load(json_file)
# Returns JSON object as a dictionary
- turb_data = json_data["turbulenceModeling"]
-
+ turb_data = json_data['turbulenceModeling']
+
simulation_type = turb_data['simulationType']
- RANS_type = turb_data['RANSModelType']
- LES_type = turb_data['LESModelType']
- DES_type = turb_data['DESModelType']
+ RANS_type = turb_data['RANSModelType'] # noqa: N806
+ LES_type = turb_data['LESModelType'] # noqa: N806
+ DES_type = turb_data['DESModelType'] # noqa: N806
-
- #Open the template file (OpenFOAM file) for manipulation
- dict_file = open(template_dict_path + "/momentumTransportTemplate", "r")
+ # Open the template file (OpenFOAM file) for manipulation
+ dict_file = open(template_dict_path + '/momentumTransportTemplate') # noqa: PLW1514, PTH123, SIM115
dict_lines = dict_file.readlines()
dict_file.close()
-
- #Write type of the simulation
- start_index = foam.find_keyword_line(dict_lines, "simulationType")
- dict_lines[start_index] = "simulationType \t{};\n".format("RAS" if simulation_type=="RANS" else simulation_type)
-
- if simulation_type=="RANS":
- #Write RANS model type
- start_index = foam.find_keyword_line(dict_lines, "RAS") + 2
- added_part = " model \t{};\n".format(RANS_type)
+
+ # Write type of the simulation
+ start_index = foam.find_keyword_line(dict_lines, 'simulationType')
+ dict_lines[start_index] = 'simulationType \t{};\n'.format(
+ 'RAS' if simulation_type == 'RANS' else simulation_type
+ )
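+    # OpenFOAM names the Reynolds-averaged option 'RAS', hence the substitution for the 'RANS' selection above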
+
+ if simulation_type == 'RANS':
+ # Write RANS model type
+ start_index = foam.find_keyword_line(dict_lines, 'RAS') + 2
+ added_part = f' model \t{RANS_type};\n'
dict_lines.insert(start_index, added_part)
-
- elif simulation_type=="LES":
- #Write LES SGS model type
- start_index = foam.find_keyword_line(dict_lines, "LES") + 2
- added_part = " model \t{};\n".format(LES_type)
+
+ elif simulation_type == 'LES':
+ # Write LES SGS model type
+ start_index = foam.find_keyword_line(dict_lines, 'LES') + 2
+ added_part = f' model \t{LES_type};\n'
dict_lines.insert(start_index, added_part)
-
- elif simulation_type=="DES":
- #Write DES model type
- start_index = foam.find_keyword_line(dict_lines, "LES") + 2
- added_part = " model \t{};\n".format(DES_type)
+
+ elif simulation_type == 'DES':
+ # Write DES model type
+ start_index = foam.find_keyword_line(dict_lines, 'LES') + 2
+ added_part = f' model \t{DES_type};\n'
dict_lines.insert(start_index, added_part)
- #Write edited dict to file
- write_file_name = case_path + "/constant/momentumTransport"
-
- if os.path.exists(write_file_name):
- os.remove(write_file_name)
-
- output_file = open(write_file_name, "w+")
+ # Write edited dict to file
+ write_file_name = case_path + '/constant/momentumTransport'
+
+ if os.path.exists(write_file_name): # noqa: PTH110
+ os.remove(write_file_name) # noqa: PTH107
+
+ output_file = open(write_file_name, 'w+') # noqa: PLW1514, PTH123, SIM115
for line in dict_lines:
output_file.write(line)
output_file.close()
-
-def write_physicalProperties_file(input_json_path, template_dict_path, case_path):
- #Read JSON data
- with open(input_json_path + "/SurroundedBuildingCFD.json") as json_file:
- json_data = json.load(json_file)
+
+def write_physicalProperties_file(input_json_path, template_dict_path, case_path): # noqa: N802, D103
+ # Read JSON data
+ with open(input_json_path + '/SurroundedBuildingCFD.json') as json_file: # noqa: PLW1514, PTH123
+ json_data = json.load(json_file)
# Returns JSON object as a dictionary
- wc_data = json_data["windCharacteristics"]
-
-
+ wc_data = json_data['windCharacteristics']
+
kinematic_viscosity = wc_data['kinematicViscosity']
-
- #Open the template file (OpenFOAM file) for manipulation
- dict_file = open(template_dict_path + "/physicalPropertiesTemplate", "r")
+ # Open the template file (OpenFOAM file) for manipulation
+ dict_file = open(template_dict_path + '/physicalPropertiesTemplate') # noqa: PLW1514, PTH123, SIM115
dict_lines = dict_file.readlines()
dict_file.close()
-
- #Write type of the simulation
- start_index = foam.find_keyword_line(dict_lines, "nu")
- dict_lines[start_index] = "nu\t\t[0 2 -1 0 0 0 0] {:.4e};\n".format(kinematic_viscosity)
+ # Write type of the simulation
+ start_index = foam.find_keyword_line(dict_lines, 'nu')
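+    # The bracketed values are OpenFOAM's SI dimension exponents; [0 2 -1 0 0 0 0] corresponds to m^2/s, i.e. a kinematic viscosity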
+ dict_lines[start_index] = f'nu\t\t[0 2 -1 0 0 0 0] {kinematic_viscosity:.4e};\n'
+
+ # Write edited dict to file
+ write_file_name = case_path + '/constant/physicalProperties'
+ if os.path.exists(write_file_name): # noqa: PTH110
+ os.remove(write_file_name) # noqa: PTH107
- #Write edited dict to file
- write_file_name = case_path + "/constant/physicalProperties"
-
- if os.path.exists(write_file_name):
- os.remove(write_file_name)
-
- output_file = open(write_file_name, "w+")
+ output_file = open(write_file_name, 'w+') # noqa: PLW1514, PTH123, SIM115
for line in dict_lines:
output_file.write(line)
output_file.close()
-
-def write_transportProperties_file(input_json_path, template_dict_path, case_path):
- #Read JSON data
- with open(input_json_path + "/SurroundedBuildingCFD.json") as json_file:
- json_data = json.load(json_file)
+def write_transportProperties_file(input_json_path, template_dict_path, case_path): # noqa: N802, D103
+ # Read JSON data
+ with open(input_json_path + '/SurroundedBuildingCFD.json') as json_file: # noqa: PLW1514, PTH123
+ json_data = json.load(json_file)
# Returns JSON object as a dictionary
- wc_data = json_data["windCharacteristics"]
-
-
+ wc_data = json_data['windCharacteristics']
+
kinematic_viscosity = wc_data['kinematicViscosity']
-
- #Open the template file (OpenFOAM file) for manipulation
- dict_file = open(template_dict_path + "/transportPropertiesTemplate", "r")
+ # Open the template file (OpenFOAM file) for manipulation
+ dict_file = open(template_dict_path + '/transportPropertiesTemplate') # noqa: PLW1514, PTH123, SIM115
dict_lines = dict_file.readlines()
dict_file.close()
-
- #Write type of the simulation
- start_index = foam.find_keyword_line(dict_lines, "nu")
- dict_lines[start_index] = "nu\t\t[0 2 -1 0 0 0 0] {:.3e};\n".format(kinematic_viscosity)
+ # Write type of the simulation
+ start_index = foam.find_keyword_line(dict_lines, 'nu')
+ dict_lines[start_index] = f'nu\t\t[0 2 -1 0 0 0 0] {kinematic_viscosity:.3e};\n'
+
+ # Write edited dict to file
+ write_file_name = case_path + '/constant/transportProperties'
+ if os.path.exists(write_file_name): # noqa: PTH110
+ os.remove(write_file_name) # noqa: PTH107
- #Write edited dict to file
- write_file_name = case_path + "/constant/transportProperties"
-
- if os.path.exists(write_file_name):
- os.remove(write_file_name)
-
- output_file = open(write_file_name, "w+")
+ output_file = open(write_file_name, 'w+') # noqa: PLW1514, PTH123, SIM115
for line in dict_lines:
output_file.write(line)
output_file.close()
-def write_fvSchemes_file(input_json_path, template_dict_path, case_path):
- #Read JSON data
- with open(input_json_path + "/SurroundedBuildingCFD.json") as json_file:
- json_data = json.load(json_file)
+def write_fvSchemes_file(input_json_path, template_dict_path, case_path): # noqa: N802, D103
+ # Read JSON data
+ with open(input_json_path + '/SurroundedBuildingCFD.json') as json_file: # noqa: PLW1514, PTH123
+ json_data = json.load(json_file)
# Returns JSON object as a dictionary
- turb_data = json_data["turbulenceModeling"]
-
-
+ turb_data = json_data['turbulenceModeling']
+
simulation_type = turb_data['simulationType']
-
- #Open the template file (OpenFOAM file) for manipulation
- dict_file = open(template_dict_path + "/fvSchemesTemplate{}".format(simulation_type), "r")
+ # Open the template file (OpenFOAM file) for manipulation
+ dict_file = open(template_dict_path + f'/fvSchemesTemplate{simulation_type}') # noqa: PLW1514, PTH123, SIM115
dict_lines = dict_file.readlines()
dict_file.close()
-
-
- #Write edited dict to file
- write_file_name = case_path + "/system/fvSchemes"
-
- if os.path.exists(write_file_name):
- os.remove(write_file_name)
-
- output_file = open(write_file_name, "w+")
+
+ # Write edited dict to file
+ write_file_name = case_path + '/system/fvSchemes'
+
+ if os.path.exists(write_file_name): # noqa: PTH110
+ os.remove(write_file_name) # noqa: PTH107
+
+ output_file = open(write_file_name, 'w+') # noqa: PLW1514, PTH123, SIM115
for line in dict_lines:
output_file.write(line)
- output_file.close()
-
-def write_decomposeParDict_file(input_json_path, template_dict_path, case_path):
+ output_file.close()
+
- #Read JSON data
- with open(input_json_path + "/SurroundedBuildingCFD.json") as json_file:
- json_data = json.load(json_file)
+def write_decomposeParDict_file(input_json_path, template_dict_path, case_path): # noqa: N802, D103
+ # Read JSON data
+ with open(input_json_path + '/SurroundedBuildingCFD.json') as json_file: # noqa: PLW1514, PTH123
+ json_data = json.load(json_file)
# Returns JSON object as a dictionary
- ns_data = json_data["numericalSetup"]
-
+ ns_data = json_data['numericalSetup']
+
num_processors = ns_data['numProcessors']
-
- #Open the template file (OpenFOAM file) for manipulation
- dict_file = open(template_dict_path + "/decomposeParDictTemplate", "r")
+ # Open the template file (OpenFOAM file) for manipulation
+ dict_file = open(template_dict_path + '/decomposeParDictTemplate') # noqa: PLW1514, PTH123, SIM115
dict_lines = dict_file.readlines()
dict_file.close()
-
- #Write number of sub-domains
- start_index = foam.find_keyword_line(dict_lines, "numberOfSubdomains")
- dict_lines[start_index] = "numberOfSubdomains\t{};\n".format(num_processors)
-
- #Write method of decomposition
- start_index = foam.find_keyword_line(dict_lines, "decomposer")
- dict_lines[start_index] = "decomposer\t\t{};\n".format("scotch")
-
- #Write method of decomposition for OF-V9 and lower compatability
- start_index = foam.find_keyword_line(dict_lines, "method")
- dict_lines[start_index] = "method\t\t{};\n".format("scotch")
-
-
- #Write edited dict to file
- write_file_name = case_path + "/system/decomposeParDict"
-
- if os.path.exists(write_file_name):
- os.remove(write_file_name)
-
- output_file = open(write_file_name, "w+")
+
+ # Write number of sub-domains
+ start_index = foam.find_keyword_line(dict_lines, 'numberOfSubdomains')
+ dict_lines[start_index] = f'numberOfSubdomains\t{num_processors};\n'
+
+ # Write method of decomposition
+ start_index = foam.find_keyword_line(dict_lines, 'decomposer')
+ dict_lines[start_index] = 'decomposer\t\t{};\n'.format('scotch')
+
+ # Write method of decomposition for OF-V9 and lower compatibility
+ start_index = foam.find_keyword_line(dict_lines, 'method')
+ dict_lines[start_index] = 'method\t\t{};\n'.format('scotch')
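+    # Both the OF-10 'decomposer' keyword and the legacy 'method' keyword are set to scotch so the same dict works across versions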
+
+ # Write edited dict to file
+ write_file_name = case_path + '/system/decomposeParDict'
+
+ if os.path.exists(write_file_name): # noqa: PTH110
+ os.remove(write_file_name) # noqa: PTH107
+
+ output_file = open(write_file_name, 'w+') # noqa: PLW1514, PTH123, SIM115
for line in dict_lines:
output_file.write(line)
- output_file.close()
-
-def write_DFSRTurbDict_file(input_json_path, template_dict_path, case_path):
-
- #Read JSON data
- with open(input_json_path + "/SurroundedBuildingCFD.json") as json_file:
- json_data = json.load(json_file)
-
+ output_file.close()
+
+
+def write_DFSRTurbDict_file(input_json_path, template_dict_path, case_path): # noqa: N802, D103
+ # Read JSON data
+ with open(input_json_path + '/SurroundedBuildingCFD.json') as json_file: # noqa: PLW1514, PTH123
+ json_data = json.load(json_file)
+
fmax = 200.0
# Returns JSON object as a dictionary
- wc_data = json_data["windCharacteristics"]
- ns_data = json_data["numericalSetup"]
-
+ wc_data = json_data['windCharacteristics']
+ ns_data = json_data['numericalSetup']
+
wind_speed = wc_data['referenceWindSpeed']
duration = ns_data['duration']
-
- #Generate a little longer duration to be safe
- duration = duration*1.010
- #Open the template file (OpenFOAM file) for manipulation
- dict_file = open(template_dict_path + "/DFSRTurbDictTemplate", "r")
+ # Generate a little longer duration to be safe
+ duration = duration * 1.010 # noqa: PLR6104
+
+ # Open the template file (OpenFOAM file) for manipulation
+ dict_file = open(template_dict_path + '/DFSRTurbDictTemplate') # noqa: PLW1514, PTH123, SIM115
dict_lines = dict_file.readlines()
dict_file.close()
-
- #Write the end time
- start_index = foam.find_keyword_line(dict_lines, "endTime")
- dict_lines[start_index] = "endTime\t\t\t{:.4f};\n".format(duration)
-
- #Write patch name
- start_index = foam.find_keyword_line(dict_lines, "patchName")
- dict_lines[start_index] = "patchName\t\t\"{}\";\n".format("inlet")
-
- #Write cohUav
- start_index = foam.find_keyword_line(dict_lines, "cohUav")
- dict_lines[start_index] = "cohUav\t\t\t{:.4f};\n".format(wind_speed)
-
- #Write fmax
- start_index = foam.find_keyword_line(dict_lines, "fMax")
- dict_lines[start_index] = "fMax\t\t\t{:.4f};\n".format(fmax)
-
- #Write time step
- start_index = foam.find_keyword_line(dict_lines, "timeStep")
- dict_lines[start_index] = "timeStep\t\t{:.4f};\n".format(1.0/fmax)
-
- #Write edited dict to file
- write_file_name = case_path + "/constant/DFSRTurbDict"
-
- if os.path.exists(write_file_name):
- os.remove(write_file_name)
-
- output_file = open(write_file_name, "w+")
+
+ # Write the end time
+ start_index = foam.find_keyword_line(dict_lines, 'endTime')
+ dict_lines[start_index] = f'endTime\t\t\t{duration:.4f};\n'
+
+ # Write patch name
+ start_index = foam.find_keyword_line(dict_lines, 'patchName')
+ dict_lines[start_index] = 'patchName\t\t"{}";\n'.format('inlet')
+
+ # Write cohUav
+ start_index = foam.find_keyword_line(dict_lines, 'cohUav')
+ dict_lines[start_index] = f'cohUav\t\t\t{wind_speed:.4f};\n'
+
+ # Write fmax
+ start_index = foam.find_keyword_line(dict_lines, 'fMax')
+ dict_lines[start_index] = f'fMax\t\t\t{fmax:.4f};\n'
+
+ # Write time step
+ start_index = foam.find_keyword_line(dict_lines, 'timeStep')
+ dict_lines[start_index] = f'timeStep\t\t{1.0 / fmax:.4f};\n'
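+    # The synthetic-inflow time step is tied to the cut-off frequency: timeStep = 1/fMax = 0.005 s for fMax = 200 Hz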
+
+ # Write edited dict to file
+ write_file_name = case_path + '/constant/DFSRTurbDict'
+
+ if os.path.exists(write_file_name): # noqa: PTH110
+ os.remove(write_file_name) # noqa: PTH107
+
+ output_file = open(write_file_name, 'w+') # noqa: PLW1514, PTH123, SIM115
for line in dict_lines:
output_file.write(line)
- output_file.close()
-
+ output_file.close()
+
-if __name__ == '__main__':
-
+if __name__ == '__main__':
input_args = sys.argv
# Set filenames
input_json_path = sys.argv[1]
template_dict_path = sys.argv[2]
- case_path = sys.argv[3]
-
- #Read JSON data
- with open(input_json_path + "/SurroundedBuildingCFD.json") as json_file:
- json_data = json.load(json_file)
+ case_path = sys.argv[3]
+
+ # Read JSON data
+ with open(input_json_path + '/SurroundedBuildingCFD.json') as json_file: # noqa: PLW1514, PTH123
+ json_data = json.load(json_file)
# Returns JSON object as a dictionary
- turb_data = json_data["turbulenceModeling"]
-
+ turb_data = json_data['turbulenceModeling']
+
simulation_type = turb_data['simulationType']
RANS_type = turb_data['RANSModelType']
LES_type = turb_data['LESModelType']
-
- #Write blockMesh
+
+ # Write blockMesh
write_block_mesh_dict(input_json_path, template_dict_path, case_path)
- #Create and write the main building "*.stl" file
+ # Create and write the main building "*.stl" file
write_main_building_stl_file(input_json_path, case_path)
-
- #Write surrounding building STL file
+
+ # Write surrounding building STL file
n_surr_bldgs = write_surrounding_buildings_stl_file(input_json_path, case_path)
- #Write surfaceFeaturesDict file
- write_surfaceFeaturesDict_file(input_json_path, template_dict_path, case_path, n_surr_bldgs)
+ # Write surfaceFeaturesDict file
+ write_surfaceFeaturesDict_file(
+ input_json_path, template_dict_path, case_path, n_surr_bldgs
+ )
+
+ # Create and write the SnappyHexMeshDict file
+ write_snappy_hex_mesh_dict(
+ input_json_path, template_dict_path, case_path, n_surr_bldgs
+ )
- #Create and write the SnappyHexMeshDict file
- write_snappy_hex_mesh_dict(input_json_path, template_dict_path, case_path, n_surr_bldgs)
-
- #Write files in "0" directory
+ # Write files in "0" directory
write_U_file(input_json_path, template_dict_path, case_path)
write_p_file(input_json_path, template_dict_path, case_path)
write_nut_file(input_json_path, template_dict_path, case_path)
write_k_file(input_json_path, template_dict_path, case_path)
-
- if simulation_type == "RANS" and RANS_type=="kEpsilon":
+
+ if simulation_type == 'RANS' and RANS_type == 'kEpsilon':
write_epsilon_file(input_json_path, template_dict_path, case_path)
- #Write control dict
+ # Write control dict
write_controlDict_file(input_json_path, template_dict_path, case_path)
-
- #Write results to be monitored
+
+ # Write results to be monitored
# write_wind_profiles_file(input_json_path, template_dict_path, case_path)
# write_vtk_plane_file(input_json_path, template_dict_path, case_path)
-
- #Write fvSolution dict
+
+ # Write fvSolution dict
write_fvSolution_file(input_json_path, template_dict_path, case_path)
- #Write fvSchemes dict
+ # Write fvSchemes dict
write_fvSchemes_file(input_json_path, template_dict_path, case_path)
- #Write momentumTransport dict
+ # Write momentumTransport dict
write_momentumTransport_file(input_json_path, template_dict_path, case_path)
-
- #Write physicalProperties dict
+
+ # Write physicalProperties dict
write_physicalProperties_file(input_json_path, template_dict_path, case_path)
-
- #Write transportProperties (physicalProperties in OF-10) dict for OpenFOAM-9 and below
+
+ # Write transportProperties (physicalProperties in OF-10) dict for OpenFOAM-9 and below
write_transportProperties_file(input_json_path, template_dict_path, case_path)
-
- #Write decomposeParDict
+
+ # Write decomposeParDict
write_decomposeParDict_file(input_json_path, template_dict_path, case_path)
-
- #Write DFSRTurb dict
+
+ # Write DFSRTurb dict
# write_DFSRTurbDict_file(input_json_path, template_dict_path, case_path)
-
- #Write TInf files
+
+ # Write TInf files
# write_boundary_data_files(input_json_path, case_path)
diff --git a/modules/createEVENT/SurroundedBuildingCFD/templateOF10Dicts/CMakeLists.txt b/modules/createEVENT/SurroundedBuildingCFD/templateOF10Dicts/CMakeLists.txt
index abcb0b541..b066a51ee 100644
--- a/modules/createEVENT/SurroundedBuildingCFD/templateOF10Dicts/CMakeLists.txt
+++ b/modules/createEVENT/SurroundedBuildingCFD/templateOF10Dicts/CMakeLists.txt
@@ -5,7 +5,7 @@ simcenter_add_file(NAME nutFileTemplate)
simcenter_add_file(NAME pFileTemplate)
simcenter_add_file(NAME epsilonFileTemplate)
-#Files in "costant" directory
+#Files in "constant" directory
simcenter_add_file(NAME physicalPropertiesTemplate)
simcenter_add_file(NAME transportPropertiesTemplate)
simcenter_add_file(NAME momentumTransportTemplate)
diff --git a/modules/createEVENT/SurroundedBuildingCFD/templateOF10Dicts/DFSRTurbDictTemplate b/modules/createEVENT/SurroundedBuildingCFD/templateOF10Dicts/DFSRTurbDictTemplate
index c8b14b2c4..00f3fc64e 100644
--- a/modules/createEVENT/SurroundedBuildingCFD/templateOF10Dicts/DFSRTurbDictTemplate
+++ b/modules/createEVENT/SurroundedBuildingCFD/templateOF10Dicts/DFSRTurbDictTemplate
@@ -57,7 +57,7 @@ C (//x y z
windProfile
{
//read scaling factors for I, L
- //that varies with hieght
+ //that varies with height
adjustProfile off;
//Factors to scale turbulence intensities and length scale profiles
diff --git a/modules/createEVENT/SurroundedBuildingCFD/templateOF10Dicts/decomposeParDictTemplate b/modules/createEVENT/SurroundedBuildingCFD/templateOF10Dicts/decomposeParDictTemplate
index c93a2398e..62ed6e269 100644
--- a/modules/createEVENT/SurroundedBuildingCFD/templateOF10Dicts/decomposeParDictTemplate
+++ b/modules/createEVENT/SurroundedBuildingCFD/templateOF10Dicts/decomposeParDictTemplate
@@ -18,7 +18,7 @@ numberOfSubdomains 8;
decomposer hierarchical;
-//Needed for compatability
+//Needed for compatibility
method hierarchical;
distributor ptscotch;
diff --git a/modules/createEVENT/coupledDigitalTwin/CoupledDigitalTwin.py b/modules/createEVENT/coupledDigitalTwin/CoupledDigitalTwin.py
index 7d63cb323..256bf0eff 100644
--- a/modules/createEVENT/coupledDigitalTwin/CoupledDigitalTwin.py
+++ b/modules/createEVENT/coupledDigitalTwin/CoupledDigitalTwin.py
@@ -1,103 +1,88 @@
-from __future__ import print_function
-import os, sys
-import re
+import argparse # noqa: CPY001, D100, INP001
import json
-import argparse
-class FloorForces:
+
+class FloorForces: # noqa: D101
def __init__(self):
self.X = [0]
self.Y = [0]
self.Z = [0]
-def directionToDof(direction):
- """
- Converts direction to degree of freedom
- """
- directioMap = {
- "X": 1,
- "Y": 2,
- "Z": 3
- }
+
+def directionToDof(direction): # noqa: N802
+ """Converts direction to degree of freedom""" # noqa: D400, D401
+ directioMap = {'X': 1, 'Y': 2, 'Z': 3} # noqa: N806
return directioMap[direction]
-def addFloorForceToEvent(patternsArray, force, direction, floor):
- """
- Add force (one component) time series and pattern in the event file
- """
- seriesName = "WindForceSeries_" + str(floor) + direction
- patternName = "WindForcePattern_" + str(floor) + direction
+def addFloorForceToEvent(patternsArray, force, direction, floor): # noqa: ARG001, N802, N803
+ """Add force (one component) time series and pattern in the event file""" # noqa: D400
+ seriesName = 'WindForceSeries_' + str(floor) + direction # noqa: N806
+ patternName = 'WindForcePattern_' + str(floor) + direction # noqa: N806
pattern = {
- "name": patternName,
- "timeSeries": seriesName,
- "type": "WindFloorLoad",
- "floor": str(floor),
- "dof": directionToDof(direction)
+ 'name': patternName,
+ 'timeSeries': seriesName,
+ 'type': 'WindFloorLoad',
+ 'floor': str(floor),
+ 'dof': directionToDof(direction),
}
patternsArray.append(pattern)
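+    # Only the pattern metadata referencing seriesName is recorded here; the force samples themselves are not serialized (the 'force' argument is unused)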
-def writeEVENT(forces, eventFilePath):
- """
- This method writes the EVENT.json file
- """
- patternsArray = []
- windEventJson = {
- "type" : "Hydro",
- "subtype": "CoupledDigitalTwin",
- "pattern": patternsArray,
- "pressure": [],
- "numSteps": len(forces[0].X),
- "units": {
- "force": "Newton",
- "length": "Meter",
- "time": "Sec"
- }
+def writeEVENT(forces, eventFilePath): # noqa: N802, N803
+ """This method writes the EVENT.json file""" # noqa: D400, D401, D404
+ patternsArray = [] # noqa: N806
+ windEventJson = { # noqa: N806
+ 'type': 'Hydro',
+ 'subtype': 'CoupledDigitalTwin',
+ 'pattern': patternsArray,
+ 'pressure': [],
+ 'numSteps': len(forces[0].X),
+ 'units': {'force': 'Newton', 'length': 'Meter', 'time': 'Sec'},
}
- #Creating the event dictionary that will be used to export the EVENT json file
- eventDict = {"randomVariables":[], "Events": [windEventJson]}
+ # Creating the event dictionary that will be used to export the EVENT json file
+ eventDict = {'randomVariables': [], 'Events': [windEventJson]} # noqa: N806
- #Adding floor forces
- for floorForces in forces:
+ # Adding floor forces
+ for floorForces in forces: # noqa: N806
floor = forces.index(floorForces) + 1
- addFloorForceToEvent(patternsArray, floorForces.X, "X", floor)
- addFloorForceToEvent(patternsArray, floorForces.Y, "Y", floor)
+ addFloorForceToEvent(patternsArray, floorForces.X, 'X', floor)
+ addFloorForceToEvent(patternsArray, floorForces.Y, 'Y', floor)
- with open(eventFilePath, "w", encoding='utf-8') as eventsFile:
+ with open(eventFilePath, 'w', encoding='utf-8') as eventsFile: # noqa: PTH123, N806
json.dump(eventDict, eventsFile)
-def GetFloorsCount(BIMFilePath):
- with open(BIMFilePath,'r', encoding='utf-8') as BIMFile:
- bim = json.load(BIMFile)
-
- return int(bim["GeneralInformation"]["stories"])
-
-if __name__ == "__main__":
+def GetFloorsCount(BIMFilePath): # noqa: N802, N803, D103
+ with open(BIMFilePath, encoding='utf-8') as BIMFile: # noqa: PTH123, N806
+ bim = json.load(BIMFile)
+
+ return int(bim['GeneralInformation']['stories'])
+
+
+if __name__ == '__main__':
"""
Entry point to generate event file using CFD
"""
- #CLI parser
- parser = argparse.ArgumentParser(description="Get sample EVENT file produced by CFD")
- parser.add_argument('-b', '--filenameAIM', help="BIM File", required=True)
- parser.add_argument('-e', '--filenameEVENT', help= "Event File", required=True)
- parser.add_argument('--getRV', help= "getRV", required=False, action='store_true')
+ # CLI parser
+ parser = argparse.ArgumentParser(
+ description='Get sample EVENT file produced by CFD'
+ )
+ parser.add_argument('-b', '--filenameAIM', help='BIM File', required=True)
+ parser.add_argument('-e', '--filenameEVENT', help='Event File', required=True)
+ parser.add_argument('--getRV', help='getRV', required=False, action='store_true')
- #parsing arguments
+ # parsing arguments
arguments, unknowns = parser.parse_known_args()
- if arguments.getRV == True:
- #Read the number of floors
- floorsCount = GetFloorsCount(arguments.filenameAIM)
+ if arguments.getRV == True: # noqa: E712
+ # Read the number of floors
+ floorsCount = GetFloorsCount(arguments.filenameAIM) # noqa: N816
forces = []
- for i in range(floorsCount):
- forces.append(FloorForces())
- #write the event file
+ for i in range(floorsCount): # noqa: B007
+ forces.append(FloorForces()) # noqa: PERF401
+ # write the event file
writeEVENT(forces, arguments.filenameEVENT)
-
-
-
diff --git a/modules/createEVENT/experimentalWindForces/convertWindMat.py b/modules/createEVENT/experimentalWindForces/convertWindMat.py
index 209656cde..02f8a17c0 100644
--- a/modules/createEVENT/experimentalWindForces/convertWindMat.py
+++ b/modules/createEVENT/experimentalWindForces/convertWindMat.py
@@ -1,69 +1,72 @@
+# python code to open the .mat file # noqa: CPY001, D100, INP001
+# and put data into a SimCenter JSON file
-# python code to open the .mat file
-# and put data into a SimCenter JSON file
-
-import sys
-import os
-import subprocess
import json
-import stat
-import shutil
+import os
+
import numpy as np
import scipy.io as sio
-from pprint import pprint
-def parseWindMatFile(matFileIn, windFileOutName):
- dataDir = os.getcwd()
- scriptDir = os.path.dirname(os.path.realpath(__file__))
-
+def parseWindMatFile(matFileIn, windFileOutName): # noqa: N802, N803, D103
+ dataDir = os.getcwd() # noqa: PTH109, N806, F841
+ scriptDir = os.path.dirname(os.path.realpath(__file__)) # noqa: PTH120, N806, F841
- mat_contents = sio.loadmat(matFileIn)
+ mat_contents = sio.loadmat(matFileIn)
depth = float(mat_contents['D'][0])
- breadth = float(mat_contents['B'][0])
- height = float(mat_contents['H'][0])
- fs = float(mat_contents['fs'][0])
- vRef=float(mat_contents['Vref'][0])
-
- if "s_target" in mat_contents:
- case = "spectra"
- comp_CFmean = np.squeeze(np.array(mat_contents['comp_CFmean']))
+ breadth = float(mat_contents['B'][0])
+ height = float(mat_contents['H'][0])
+ fs = float(mat_contents['fs'][0])
+ vRef = float(mat_contents['Vref'][0]) # noqa: N806
+
+ if 's_target' in mat_contents:
+ case = 'spectra' # noqa: F841
+ comp_CFmean = np.squeeze(np.array(mat_contents['comp_CFmean'])) # noqa: N806
norm_all = np.squeeze(np.array(mat_contents['norm_all']))
f_target = np.squeeze(np.array(mat_contents['f_target']))
s_target = np.squeeze(np.array(mat_contents['s_target']))
- createSpectraJson(windFileOutName, breadth, depth, height, fs, vRef, f_target, s_target, comp_CFmean, norm_all)
-
- elif "Fx" in mat_contents:
-
-
- Fx = np.squeeze(np.array(mat_contents['Fx']))
- Fy = np.squeeze(np.array(mat_contents['Fy']))
- Tz = np.squeeze(np.array(mat_contents['Tz']))
+ createSpectraJson(
+ windFileOutName,
+ breadth,
+ depth,
+ height,
+ fs,
+ vRef,
+ f_target,
+ s_target,
+ comp_CFmean,
+ norm_all,
+ )
+
+ elif 'Fx' in mat_contents:
+ Fx = np.squeeze(np.array(mat_contents['Fx'])) # noqa: N806
+ Fy = np.squeeze(np.array(mat_contents['Fy'])) # noqa: N806
+ Tz = np.squeeze(np.array(mat_contents['Tz'])) # noqa: N806
t = np.squeeze(np.array(mat_contents['t']))
- myJson = {}
- myJson["D"] = depth
- myJson["H"] = height
- myJson["B"] = breadth
- myJson["fs"] = fs
- myJson["Vref"] = vRef
-
- myJson["Fx"] = np.array(Fx).tolist()
- myJson["Fy"] = np.array(Fy).tolist()
- myJson["Tz"] = np.array(Tz).tolist()
- myJson["t"] = np.array(t).tolist()
- with open(windFileOutName,"w") as f:
+ myJson = {} # noqa: N806
+ myJson['D'] = depth
+ myJson['H'] = height
+ myJson['B'] = breadth
+ myJson['fs'] = fs
+ myJson['Vref'] = vRef
+
+ myJson['Fx'] = np.array(Fx).tolist()
+ myJson['Fy'] = np.array(Fy).tolist()
+ myJson['Tz'] = np.array(Tz).tolist()
+ myJson['t'] = np.array(t).tolist()
+ with open(windFileOutName, 'w') as f: # noqa: PLW1514, PTH123
json.dump(myJson, f)
- # file = open(windFileOutName,"w")
- # file.write("{")
- # file.write("\"D\":%f," % depth)
- # file.write("\"H\":%f," % height)
- # file.write("\"B\":%f," % breadth)
- # file.write("\"fs\":%f," % fs)
- # file.write("\"Vref\":%f," % vRef)
+ # file = open(windFileOutName,"w")
+ # file.write("{")
+ # file.write("\"D\":%f," % depth)
+ # file.write("\"H\":%f," % height)
+ # file.write("\"B\":%f," % breadth)
+ # file.write("\"fs\":%f," % fs)
+ # file.write("\"Vref\":%f," % vRef)
# case = "timeHistory"
# Fx = mat_contents['Fx']
@@ -118,70 +121,74 @@ def parseWindMatFile(matFileIn, windFileOutName):
# else:
# file.write(",")
-
# file.write("}")
# file.close()
- # Check valid JSON file,
+ # Check valid JSON file,
validate = True
if validate:
- with open(windFileOutName, "r") as infile:
+ with open(windFileOutName) as infile: # noqa: FURB101, PLW1514, PTH123
json_data = infile.read()
# Try to parse the JSON data
try:
- json_object = json.loads(json_data)
- print("JSON file is valid")
+ json_object = json.loads(json_data) # noqa: F841
+ print('JSON file is valid') # noqa: T201
except json.decoder.JSONDecodeError:
- print("JSON file is not valid")
-
-
-
-
-
-def createSpectraJson(windFileOutName, breadth, depth, height, fs, vRef, f_target, s_target, comp_CFmean, norm_all):
-
-
- ncomp = comp_CFmean.shape[0]
- nf = f_target.shape[0]
-
- myJson = {}
- myJson["D"] = depth
- myJson["H"] = height
- myJson["B"] = breadth
- myJson["fs"] = fs
- myJson["Vref"] = vRef
- myJson["comp_CFmean"] = comp_CFmean.tolist()
- myJson["norm_all"] = norm_all.tolist()
- myJson["f_target"] = f_target.tolist()
-
-
- myJson["s_target_real"] = np.real(s_target).tolist()
- myJson["s_target_imag"] = np.imag(s_target).tolist()
-
- with open(windFileOutName,"w") as f:
+ print('JSON file is not valid') # noqa: T201
+
+
+def createSpectraJson( # noqa: N802, D103
+ windFileOutName, # noqa: N803
+ breadth,
+ depth,
+ height,
+ fs,
+ vRef, # noqa: N803
+ f_target,
+ s_target,
+ comp_CFmean, # noqa: N803
+ norm_all,
+):
+ ncomp = comp_CFmean.shape[0] # noqa: F841
+ nf = f_target.shape[0] # noqa: F841
+
+ myJson = {} # noqa: N806
+ myJson['D'] = depth
+ myJson['H'] = height
+ myJson['B'] = breadth
+ myJson['fs'] = fs
+ myJson['Vref'] = vRef
+ myJson['comp_CFmean'] = comp_CFmean.tolist()
+ myJson['norm_all'] = norm_all.tolist()
+ myJson['f_target'] = f_target.tolist()
+
+ myJson['s_target_real'] = np.real(s_target).tolist()
+ myJson['s_target_imag'] = np.imag(s_target).tolist()
+
+ with open(windFileOutName, 'w') as f: # noqa: PLW1514, PTH123
json.dump(myJson, f)
- # Check valid JSON file
+ # Check valid JSON file
validate = True
if validate:
- with open(windFileOutName, "r") as infile:
+ with open(windFileOutName) as infile: # noqa: FURB101, PLW1514, PTH123
json_data = infile.read()
# Try to parse the JSON data
try:
- json_object = json.loads(json_data)
- print("JSON file is valid")
+ json_object = json.loads(json_data) # noqa: F841
+ print('JSON file is valid') # noqa: T201
except json.decoder.JSONDecodeError:
- print("JSON file is not valid")
-
- # file = open(windFileOutName,"w")
- # file.write("{")
- # file.write("\"D\":%f," % depth)
- # file.write("\"H\":%f," % height)
- # file.write("\"B\":%f," % breadth)
- # file.write("\"fs\":%f," % fs)
- # file.write("\"Vref\":%f," % vRef)
+ print('JSON file is not valid') # noqa: T201
+
+ # file = open(windFileOutName,"w")
+ # file.write("{")
+ # file.write("\"D\":%f," % depth)
+ # file.write("\"H\":%f," % height)
+ # file.write("\"B\":%f," % breadth)
+ # file.write("\"fs\":%f," % fs)
+ # file.write("\"Vref\":%f," % vRef)
# file.write("\"units\":{\"length\":\"m\",\"time\":\"sec\"},")
# ncomp = comp_CFmean.shape[0]
@@ -244,22 +251,33 @@ def createSpectraJson(windFileOutName, breadth, depth, height, fs, vRef, f_targe
# file.close()
-def createPODJson(filename, V, D1, SpeN, f_target, norm_all, D, H, B, fs, vRef,comp_CFmean):
-
-
- myJson = {}
- myJson["V_imag"] = np.imag(V).tolist()
- myJson["V_real"] = np.real(V).tolist()
- myJson["D1"] = D1.tolist()
- myJson["SpeN"] = SpeN
- myJson["f_target"] = f_target.tolist()
- myJson["norm_all"] = norm_all.tolist()
- myJson["comp_CFmean"] = comp_CFmean.tolist()
- myJson["D"] = D
- myJson["H"] = H
- myJson["B"] = B
- myJson["fs"] = fs
- myJson["Vref"] = vRef
-
- with open(filename,"w") as f:
+def createPODJson( # noqa: N802, D103
+ filename,
+ V, # noqa: N803
+ D1, # noqa: N803
+ SpeN, # noqa: N803
+ f_target,
+ norm_all,
+ D, # noqa: N803
+ H, # noqa: N803
+ B, # noqa: N803
+ fs,
+ vRef, # noqa: N803
+ comp_CFmean, # noqa: N803
+):
+ myJson = {} # noqa: N806
+ myJson['V_imag'] = np.imag(V).tolist()
+ myJson['V_real'] = np.real(V).tolist()
+ myJson['D1'] = D1.tolist()
+ myJson['SpeN'] = SpeN
+ myJson['f_target'] = f_target.tolist()
+ myJson['norm_all'] = norm_all.tolist()
+ myJson['comp_CFmean'] = comp_CFmean.tolist()
+ myJson['D'] = D
+ myJson['H'] = H
+ myJson['B'] = B
+ myJson['fs'] = fs
+ myJson['Vref'] = vRef
+
+ with open(filename, 'w') as f: # noqa: PLW1514, PTH123
json.dump(myJson, f)
diff --git a/modules/createEVENT/experimentalWindForces/experimentalWindForces.py b/modules/createEVENT/experimentalWindForces/experimentalWindForces.py
index 2a611b59b..0f8164baa 100644
--- a/modules/createEVENT/experimentalWindForces/experimentalWindForces.py
+++ b/modules/createEVENT/experimentalWindForces/experimentalWindForces.py
@@ -1,37 +1,40 @@
-
-import json
-import math
-import time
+import json # noqa: CPY001, D100, INP001
import os
+import time
+
try:
- moduleName = "numpy"
+ moduleName = 'numpy' # noqa: N816
import numpy as np
- moduleName = "scipy"
- from scipy.signal import csd, windows
+ moduleName = 'scipy' # noqa: N816
from scipy.interpolate import interp1d
+ from scipy.signal import csd, windows
+
error_tag = False # global variable
-except:
+except: # noqa: E722
error_tag = True
-from convertWindMat import *
-
-def main(aimName,evtName,getRV):
+from convertWindMat import * # noqa: F403
+def main(aimName, evtName, getRV): # noqa: C901, D103, N803, PLR0914, PLR0915
# THIS IS PERFORMED ONLY ONCE with open(aimName, 'r', encoding='utf-8') as f:
- with open(aimName,'r') as f:
+ with open(aimName) as f: # noqa: PLW1514, PTH123
aim_data = json.load(f)
- evt_data = aim_data["Events"][0]
+ evt_data = aim_data['Events'][0]
- filename = evt_data["filename"]
+ filename = evt_data['filename']
# from UI
- V_H = evt_data["windSpeed"] # wind speed at full scale (vel)
- T_full = evt_data["fullScaleDuration"] # Duration of wind load at full scale (time)
- perc_mod = evt_data["modePercent"] # percentage of modes to include in the simulation
- seed = evt_data["seed"]
+ V_H = evt_data['windSpeed'] # wind speed at full scale (vel) # noqa: N806
+ T_full = evt_data[ # noqa: N806
+ 'fullScaleDuration'
+ ] # Duration of wind load at full scale (time)
+ perc_mod = evt_data[
+ 'modePercent'
+ ] # percentage of modes to include in the simulation
+ seed = evt_data['seed']
# ^ Choose percentage of modes to include in the simulation (%). We suggest between 25% and 30% for higher accuracy
#
@@ -40,106 +43,108 @@ def main(aimName,evtName,getRV):
if filename.endswith('.mat'):
mat_filenmae = filename
- base = os.path.splitext(mat_filenmae)[0]
- json_filename = base + ".json"
+ base = os.path.splitext(mat_filenmae)[0] # noqa: PTH122
+ json_filename = base + '.json'
if getRV:
- parseWindMatFile(mat_filenmae, json_filename)
- os.remove(mat_filenmae)
+ parseWindMatFile(mat_filenmae, json_filename) # noqa: F405
+ os.remove(mat_filenmae) # noqa: PTH107
filename = json_filename
- with open(filename,'r', encoding='utf-8') as jsonFile:
+ with open(filename, encoding='utf-8') as jsonFile: # noqa: PTH123, N806
data = json.load(jsonFile)
-
if not getRV:
+ case = 'PODmodes'
- case = "PODmodes"
-
- elif evt_data["type"] == "WindForceSpectrum": # creates {forceSpectra}.json
+ elif evt_data['type'] == 'WindForceSpectrum': # creates {forceSpectra}.json
+ if ('s_target_real' not in data) or ('s_target_imag' not in data):
+ raise Exception( # noqa: TRY002
+ 'Target Spectrum info not found in ' + evt_data['filename'] + '.'
+ )
- if (not ("s_target_real" in data)) or (not ("s_target_imag" in data)):
- raise Exception("Target Spectrum info not found in " + evt_data["filename"] + ".")
+ case = 'spectra'
- case = "spectra"
+ elif (
+ evt_data['type'] == 'ExperimentalWindForces'
+ ): # creates {forceTimehistory}.json here and later overwrites it with {forceSpectra}.json
+ if ('Fx' not in data) or ('Fy' not in data) or ('Tz' not in data):
+ raise Exception( # noqa: TRY002
+ 'Force time histories not found in ' + evt_data['filename'] + '.'
+ )
- elif evt_data["type"] == "ExperimentalWindForces": # creates {forceTimehistory}.json here and later overwrites it with {forceSpectra}.json
+ case = 'timeHistory'
- if (not ("Fx" in data)) or (not ("Fy" in data)) or (not ("Tz" in data)):
- raise Exception("Force time histories not found in " + evt_data["filename"] + ".")
-
- case = "timeHistory"
-
- #elif not getRV:
+ # elif not getRV:
# # read {forceSpectra}.json
# case = "spectra"
else:
+ raise Exception('Event type [' + evt_data['type'] + '] not found.') # noqa: TRY002
- raise Exception("Event type [" + evt_data["type"] + "] not found.")
-
-
- D = data["D"]
- H = data["H"]
- B = data["B"]
- fs = data["fs"]
- vRef = data["Vref"]
+ D = data['D'] # noqa: N806
+ H = data['H'] # noqa: N806
+ B = data['B'] # noqa: N806
+ fs = data['fs']
+ vRef = data['Vref'] # noqa: N806
#
# check if model scale is found in the key
#
- ms = evt_data.get("modelScale",0) # model scale
- if ms==0: # when mat file is imported, model scale is not precalculated
- print("Model scale not found. Calculating the unified model scale..")
- D_full = aim_data["GeneralInformation"]["depth"]
- H_full = aim_data["GeneralInformation"]["height"]
- B_full = aim_data["GeneralInformation"]["width"]
- ms = H_full/H
- print("Model scaling factor of {:.2} is used".format(ms))
- if (((not ms == D_full/D ) or (not ms == B_full/B )) and getRV):
- print("Warning: target-data geometry scaling ratio is inconsistent: H={:.2}, B={:.2}, D={:.2}".format(H_full/H,B_full/B,D_full/D))
-
- if case == "timeHistory":
- #Tw = 4 # duration of window (sec) - user defined - smaller window leads to more smoothing
- #overlap = 0.5 # 50% overlap - user defined
- Tw = evt_data["windowSize"]
- overlap = evt_data["overlapPerc"]/100
-
- Fx = np.array(data["Fx"])
- Fy = np.array(data["Fy"])
- Tz = np.array(data["Tz"])
-
- t = data["t"]
- N = Fx.shape[1]
+ ms = evt_data.get('modelScale', 0) # model scale
+ if ms == 0: # when mat file is imported, model scale is not precalculated
+        print('Model scale not found. Calculating the unified model scale...')  # noqa: T201
+ D_full = aim_data['GeneralInformation']['depth'] # noqa: N806
+ H_full = aim_data['GeneralInformation']['height'] # noqa: N806
+ B_full = aim_data['GeneralInformation']['width'] # noqa: N806
+ ms = H_full / H
+ print(f'Model scaling factor of {ms:.2} is used') # noqa: T201
+ if ((ms != D_full / D) or (ms != B_full / B)) and getRV:
+ print( # noqa: T201
+ f'Warning: target-data geometry scaling ratio is inconsistent: H={H_full / H:.2}, B={B_full / B:.2}, D={D_full / D:.2}'
+ )
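+        # The unified model scale is taken from the height ratio; mismatched breadth/depth ratios only produce a warning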
+
+ if case == 'timeHistory':
+ # Tw = 4 # duration of window (sec) - user defined - smaller window leads to more smoothing
+ # overlap = 0.5 # 50% overlap - user defined
+ Tw = evt_data['windowSize'] # noqa: N806
+ overlap = evt_data['overlapPerc'] / 100
+
+ Fx = np.array(data['Fx']) # noqa: N806
+ Fy = np.array(data['Fy']) # noqa: N806
+ Tz = np.array(data['Tz']) # noqa: N806
+
+ t = data['t'] # noqa: F841
+ N = Fx.shape[1] # noqa: N806
nfloors = Fx.shape[0]
- nfloors_GI = aim_data["GeneralInformation"]["NumberOfStories"]
-
- if not nfloors==nfloors_GI:
- err_exit("Number of floors does not match - input file has {} floors, GI tab defines {} floors".format(nfloors,nfloors_GI))
+ nfloors_GI = aim_data['GeneralInformation']['NumberOfStories'] # noqa: N806
- elif case == "spectra":
+ if nfloors != nfloors_GI:
+ err_exit(
+ f'Number of floors does not match - input file has {nfloors} floors, GI tab defines {nfloors_GI} floors'
+ )
- s_target_real = np.array(data["s_target_real"])
- s_target_imag = np.array(data["s_target_imag"])
+ elif case == 'spectra':
+ s_target_real = np.array(data['s_target_real'])
+ s_target_imag = np.array(data['s_target_imag'])
s_target = s_target_real + 1j * s_target_imag
- f_target = np.array(data["f_target"])
- norm_all = np.array(data["norm_all"])
- comp_CFmean = np.array(data["comp_CFmean"])
-
- elif case == "PODmodes":
-
- V_imag = np.array(data["V_imag"])
- V_real = np.array(data["V_real"])
- V = V_real + 1j * V_imag
- D1 = np.array(data["D1"])
- SpeN = data["SpeN"]
- f_target = np.array(data["f_target"])
- norm_all = np.array(data["norm_all"])
- comp_CFmean = np.array(data["comp_CFmean"])
+ f_target = np.array(data['f_target'])
+ norm_all = np.array(data['norm_all'])
+ comp_CFmean = np.array(data['comp_CFmean']) # noqa: N806
+
+ elif case == 'PODmodes':
+ V_imag = np.array(data['V_imag']) # noqa: N806
+ V_real = np.array(data['V_real']) # noqa: N806
+ V = V_real + 1j * V_imag # noqa: N806
+ D1 = np.array(data['D1']) # noqa: N806
+ SpeN = data['SpeN'] # noqa: N806
+ f_target = np.array(data['f_target'])
+ norm_all = np.array(data['norm_all'])
+ comp_CFmean = np.array(data['comp_CFmean']) # noqa: N806
#
# Below here is fully parameterized
#
@@ -148,131 +153,185 @@ def main(aimName,evtName,getRV):
# Compute the basic quantities
#
- dtm = 1/fs # time step model scale
- fc = fs/2 # Nyquist Frequency (Hz) wind tunnel
- fp = fs/ms # scaled frequency
- fcut = fc/ms # scaled Nyquist frequency
- air_dens = 1.225 # (kg/m3) at 15 oC, sea level
- ndir = 3 # number of coordinate axes (X,Y,Z)
-
- if case == "timeHistory": # Experimental wind forces
+ dtm = 1 / fs # time step model scale # noqa: F841
+ fc = fs / 2 # Nyquist Frequency (Hz) wind tunnel
+ fp = fs / ms # scaled frequency
+ fcut = fc / ms # scaled Nyquist frequency
+    air_dens = 1.225  # (kg/m3) at 15 °C, sea level
+ ndir = 3 # number of coordinate axes (X,Y,Z)
- T = N / fs # duration of simulation in model scale (s)
- ncomp = nfloors*ndir # total number of force components
-
- elif case == "spectra" or case == "PODmodes":
+ if case == 'timeHistory': # Experimental wind forces
+ T = N / fs # duration of simulation in model scale (s) # noqa: N806, F841
+ ncomp = nfloors * ndir # total number of force components
+ elif case == 'spectra' or case == 'PODmodes': # noqa: PLR1714
ncomp = comp_CFmean.shape[0]
- nfloors = int(ncomp/ndir)
-
+ nfloors = int(ncomp / ndir)
#
# Number of modes to be included
#
- l_mo = int(np.round(ncomp * ((perc_mod) / 100)+1.e-10)) # small value added to make .5 round up
- if l_mo>100 or l_mo<0:
- msg = 'Error: Number of modes should be equal or less than the number of components'
+ l_mo = int(
+ np.round(ncomp * ((perc_mod) / 100) + 1.0e-10)
+ ) # small value added to make .5 round up
+ if l_mo > 100 or l_mo < 0: # noqa: PLR2004
+ msg = 'Error: Number of modes should be equal or less than the number of components' # noqa: F841
- print("Number of modes = " + str(l_mo))
+ print('Number of modes = ' + str(l_mo)) # noqa: T201
#
# Scaling building geometry
#
- B_full = B * ms # full scale
- D_full = D * ms # full scale
- H_full = H * ms # full scale
- MaxD_full = max(D_full, B_full) # full scale
+ B_full = B * ms # full scale # noqa: N806
+ D_full = D * ms # full scale # noqa: N806
+ H_full = H * ms # full scale # noqa: N806
+ MaxD_full = max(D_full, B_full) # full scale # noqa: N806
#
# Get CPSD
#
- if case == "timeHistory":
- [s_target, f_target, norm_all, comp_CFmean, Fx_full, Fy_full, Tz_full] = learn_CPSD(Fx, Fy, Tz, ms, air_dens, vRef, H_full, B_full, D_full, MaxD_full, fs, Tw, overlap, fp,V_H, fcut, T_full)
-
-
+ if case == 'timeHistory':
+ [s_target, f_target, norm_all, comp_CFmean, Fx_full, Fy_full, Tz_full] = ( # noqa: F841, N806
+ learn_CPSD(
+ Fx,
+ Fy,
+ Tz,
+ ms,
+ air_dens,
+ vRef,
+ H_full,
+ B_full,
+ D_full,
+ MaxD_full,
+ fs,
+ Tw,
+ overlap,
+ fp,
+ V_H,
+ fcut,
+ T_full,
+ )
+ )
#
# Eigen decomposition
#
- if (case == "timeHistory" ) or (case == "spectra"):
- V, D1, SpeN = perform_POD(s_target, f_target, ncomp, l_mo)
-
+ if (case == 'timeHistory') or (case == 'spectra'): # noqa: PLR1714
+ V, D1, SpeN = perform_POD(s_target, f_target, ncomp, l_mo) # noqa: N806
+
if getRV:
- # # let us overwrite the json file.
- createPODJson(filename, V, D1, SpeN, f_target, norm_all, D, H, B, fs, vRef,comp_CFmean)
+ # # let us overwrite the json file.
+ createPODJson( # noqa: F405
+ filename,
+ V,
+ D1,
+ SpeN,
+ f_target,
+ norm_all,
+ D,
+ H,
+ B,
+ fs,
+ vRef,
+ comp_CFmean,
+ )
#
# Simulation of Gaussian Stochastic wind force coefficients
#
- f_full = f_target[0:] # don't exclude freq = 0 Hz
- f_vH = (V_H / vRef) * f_full # scaledfreq.(Hz)
- V_vH = V # scaled eigenmodes
- D_vH = (V_H / vRef) ** 3 * D1 # scaled eigenvalues
- theta_vH = np.arctan2(np.imag(V_vH), np.real(V_vH)) # scaled theta
+ f_full = f_target[0:] # don't exclude freq = 0 Hz
+    f_vH = (V_H / vRef) * f_full  # scaled freq. (Hz)  # noqa: N806
+ V_vH = V # scaled eigenmodes # noqa: N806
+ D_vH = (V_H / vRef) ** 3 * D1 # scaled eigenvalues # noqa: N806
+ theta_vH = np.arctan2(np.imag(V_vH), np.real(V_vH)) # scaled theta # noqa: N806
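+    # Frequencies are rescaled linearly by the wind-speed ratio V_H/vRef, while the spectral eigenvalues scale with its cube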
fcut_sc = (V_H / vRef) * fcut
- f_inc = 1 / T_full # freq.increment(Hz)
- N_f = round(T_full * fcut_sc) + 1 # number of freq.points considered
- dt = 1 / (2 * fcut_sc) # max.time incremen to avoid aliasing(s)
- N_t = round(T_full / dt) # number of time points
- fvec = np.arange(0, f_inc * (N_f),f_inc) # frequency line
- tvec = np.arange(0, dt * (N_t),dt) # time line
- f = f_vH[0:SpeN] # frequencies from the decomposition upto SpeN points(Hz)
- nf_dir = np.arange(ncomp)# vector number of components
-
-
- #
- #
- #
-
- Nsim = 1 # Number of realizations to be generated
- seeds = np.arange(seed,Nsim+seed) # Set seeds for reproducibility
-
- CF_sim0 = np.zeros((len(seeds),ncomp,N_t))
+    f_inc = 1 / T_full  # freq. increment (Hz)
+    N_f = round(T_full * fcut_sc) + 1  # number of freq. points considered  # noqa: N806
+    dt = 1 / (2 * fcut_sc)  # max. time increment to avoid aliasing (s)
+ N_t = round(T_full / dt) # number of time points # noqa: N806
+ fvec = np.arange(0, f_inc * (N_f), f_inc) # frequency line # noqa: F841
+ tvec = np.arange(0, dt * (N_t), dt) # time line
+    f = f_vH[0:SpeN]  # frequencies from the decomposition up to SpeN points (Hz)
+ nf_dir = np.arange(ncomp) # vector number of components
+
+ Nsim = 1 # Number of realizations to be generated # noqa: N806
+ seeds = np.arange(seed, Nsim + seed) # Set seeds for reproducibility
+
+ CF_sim0 = np.zeros((len(seeds), ncomp, N_t)) # noqa: N806
for seed_num in range(len(seeds)):
- print("Creating Realization # {} among {} ".format(seed_num+1,len(seeds)));
- t_init=time.time()
-
- F_jzm = simulation_gaussian(ncomp, N_t, V_vH, D_vH, theta_vH, nf_dir, N_f, f_inc, f, l_mo, tvec, SpeN, V_H, vRef, seeds, seed_num);
- CF_sim0[seed_num,:,:] = F_jzm # zero-mean force coefficient time series (simulation)
-
- print(" - Elapsed time: {:.3} seconds.\n".format(time.time() - t_init))
+ print(f'Creating Realization # {seed_num + 1} among {len(seeds)} ') # noqa: T201
+ t_init = time.time()
+
+ F_jzm = simulation_gaussian( # noqa: N806
+ ncomp,
+ N_t,
+ V_vH,
+ D_vH,
+ theta_vH,
+ nf_dir,
+ N_f,
+ f_inc,
+ f,
+ l_mo,
+ tvec,
+ SpeN,
+ V_H,
+ vRef,
+ seeds,
+ seed_num,
+ )
+ CF_sim0[seed_num, :, :] = (
+ F_jzm # zero-mean force coefficient time series (simulation)
+ )
+
+ print(f' - Elapsed time: {time.time() - t_init:.3} seconds.\n') # noqa: T201
#
# Destandardize force coefficients
#
#
- CF_sim1 = np.transpose(CF_sim0, (1, 2, 0)) / (V_H/vRef)**3 / np.sqrt(V_H/vRef) # rescale Force Coefficients
-
- CF_sim = CF_sim1*np.transpose(norm_all[np.newaxis,np.newaxis], (2, 1, 0))+np.transpose(comp_CFmean[np.newaxis,np.newaxis], (2, 1, 0)) #force coefficients
- #Transform back the Force Coefficients into Forces (N)
- static_pres=np.vstack((np.ones((nfloors,1,1))*(0.5*air_dens*vRef**2*H_full*B_full),
- np.ones((nfloors,1,1))*(0.5*air_dens*vRef**2*H_full*D_full),
- np.ones((nfloors,1,1))*(0.5*air_dens*vRef**2*H_full*MaxD_full**2/2)))
- F_sim = (V_H/vRef)**2 * CF_sim * static_pres # simulated forces at full scale wind speed
-
-
- #return F_sim
+ CF_sim1 = ( # noqa: N806
+ np.transpose(CF_sim0, (1, 2, 0)) / (V_H / vRef) ** 3 / np.sqrt(V_H / vRef)
+ ) # rescale Force Coefficients
+
+ CF_sim = CF_sim1 * np.transpose( # noqa: N806
+ norm_all[np.newaxis, np.newaxis], (2, 1, 0)
+ ) + np.transpose(
+ comp_CFmean[np.newaxis, np.newaxis], (2, 1, 0)
+ ) # force coefficients
+ # Transform back the Force Coefficients into Forces (N)
+ static_pres = np.vstack(
+ (
+ np.ones((nfloors, 1, 1)) * (0.5 * air_dens * vRef**2 * H_full * B_full),
+ np.ones((nfloors, 1, 1)) * (0.5 * air_dens * vRef**2 * H_full * D_full),
+ np.ones((nfloors, 1, 1))
+ * (0.5 * air_dens * vRef**2 * H_full * MaxD_full**2 / 2),
+ )
+ )
+ F_sim = ( # noqa: N806
+ (V_H / vRef) ** 2 * CF_sim * static_pres
+ ) # simulated forces at full scale wind speed
+
+ # return F_sim
#
# Writing results to an event file
#
if getRV:
- F_sim=np.zeros(F_sim.shape)
-
+ F_sim = np.zeros(F_sim.shape) # noqa: N806
+ evtInfo = {} # noqa: N806
-
- evtInfo = {}
-
- evtInfo["dT"] = tvec[1]-tvec[0]
- evtInfo["numSteps"] = tvec.shape[0]
+ evtInfo['dT'] = tvec[1] - tvec[0]
+ evtInfo['numSteps'] = tvec.shape[0]
patterns = []
id_timeseries = 0
@@ -280,48 +339,45 @@ def main(aimName,evtName,getRV):
ts_floor_info = []
for nd in range(ndir):
for nf in range(nfloors):
- id_timeseries +=1
+ id_timeseries += 1
my_pattern = {}
- if nd==0 or nd==1:
- my_pattern["dof"]=nd+1 # x and y dir
- elif nd==2:
- my_pattern["dof"]=6 # moments
-
- my_pattern["floor"]=str(nf+1)
- my_pattern["name"]=str(id_timeseries)
- my_pattern["staticWindLoad"]=0.0
- my_pattern["timeSeries"]=str(id_timeseries)
- my_pattern["type"]="WindFloorLoad"
+ if nd == 0 or nd == 1: # noqa: PLR1714
+ my_pattern['dof'] = nd + 1 # x and y dir
+ elif nd == 2: # noqa: PLR2004
+ my_pattern['dof'] = 6 # moments
+
+ my_pattern['floor'] = str(nf + 1)
+ my_pattern['name'] = str(id_timeseries)
+ my_pattern['staticWindLoad'] = 0.0
+ my_pattern['timeSeries'] = str(id_timeseries)
+ my_pattern['type'] = 'WindFloorLoad'
patterns += [my_pattern]
ts_dof_info += [nd]
ts_floor_info += [nf]
- evtInfo["pattern"] = patterns
- evtInfo["subtype"] = "ExperimentalWindForces"
- evtInfo["type"] = "Wind"
-
+ evtInfo['pattern'] = patterns
+ evtInfo['subtype'] = 'ExperimentalWindForces'
+ evtInfo['type'] = 'Wind'
- timeSeries = []
- for id in range(id_timeseries):
+ timeSeries = [] # noqa: N806
+ for id in range(id_timeseries): # noqa: A001
my_ts = {}
- my_ts["dT"]=tvec[1]-tvec[0]
- my_ts["name"]=str(id+1)
- my_ts["type"]="Value"
+ my_ts['dT'] = tvec[1] - tvec[0]
+ my_ts['name'] = str(id + 1)
+ my_ts['type'] = 'Value'
cur_dof = ts_dof_info[id]
cur_floor = ts_floor_info[id]
- my_ts["data"] = F_sim[(cur_dof)*nfloors + cur_floor , :,0].tolist()
+ my_ts['data'] = F_sim[(cur_dof) * nfloors + cur_floor, :, 0].tolist()
- timeSeries += [my_ts]
+ timeSeries += [my_ts] # noqa: N806
+ evtInfo['timeSeries'] = timeSeries
- evtInfo["timeSeries"] = timeSeries
+ with open(evtName, 'w', encoding='utf-8') as fp: # noqa: PTH123
+ json.dump({'Events': [evtInfo]}, fp)
- with open(evtName, "w", encoding='utf-8') as fp:
- json.dump({"Events":[evtInfo]} , fp)
-
-
- '''
+ """
# plotting
import matplotlib.pyplot as plt
# Plots of time series at different floors
@@ -375,212 +431,287 @@ def main(aimName,evtName,getRV):
plt.title('Force - Floor = 20 - Full Scale')
plt.show()
- '''
+ """
-def perform_POD(s_target,f_target, ncomp, l_mo):
- S_F = s_target[:,:,0:] # do not exclude freq = 0 Hz
- f_full = f_target[0:] # do not exclude freq = 0 Hz
+def perform_POD(s_target, f_target, ncomp, l_mo): # noqa: N802, D103
+ S_F = s_target[:, :, 0:] # do not exclude freq = 0 Hz # noqa: N806
+ f_full = f_target[0:] # do not exclude freq = 0 Hz
- SpeN = f_full.shape[0] # exclude freq = 0 Hz
+    SpeN = f_full.shape[0]  # includes freq = 0 Hz  # noqa: N806
- Vs = np.zeros((ncomp,ncomp,SpeN), dtype = 'complex_')
- Ds = np.zeros((ncomp,ncomp,SpeN))
+ Vs = np.zeros((ncomp, ncomp, SpeN), dtype='complex_') # noqa: N806
+ Ds = np.zeros((ncomp, ncomp, SpeN)) # noqa: N806
- for ii in range(SpeN): # eigen - decomposition at every frequency of CPSD matrix and sort them
- [D_all, V_all] = np.linalg.eig(S_F[:,:, ii])
+ for ii in range(
+ SpeN
+    ):  # eigen-decomposition at every frequency of the CPSD matrix and sort them
+ [D_all, V_all] = np.linalg.eig(S_F[:, :, ii]) # noqa: N806
ind = np.argsort(D_all)
- Ds[:,:, ii] = np.real(np.diag(D_all[ind]))
- Vs[:,:, ii] = V_all[:, ind]
+ Ds[:, :, ii] = np.real(np.diag(D_all[ind]))
+ Vs[:, :, ii] = V_all[:, ind]
# Truncation
- V = np.zeros((ncomp,l_mo,SpeN), dtype = 'complex_')
- D0 = np.zeros((l_mo,l_mo,SpeN))
+ V = np.zeros((ncomp, l_mo, SpeN), dtype='complex_') # noqa: N806
+ D0 = np.zeros((l_mo, l_mo, SpeN)) # noqa: N806
for tt in range(l_mo):
- V[:, tt,:] = Vs[:, ncomp - 1 - tt,:]
- D0[tt, tt,:] = Ds[ncomp - 1 - tt, ncomp - 1 - tt,:]
+ V[:, tt, :] = Vs[:, ncomp - 1 - tt, :]
+ D0[tt, tt, :] = Ds[ncomp - 1 - tt, ncomp - 1 - tt, :]
- D1 = np.zeros((l_mo,1,SpeN))
- for ii in range(SpeN):
- D1[:,0,ii] = np.diag(D0[:,:,ii])
+ D1 = np.zeros((l_mo, 1, SpeN)) # noqa: N806
+ for ii in range(SpeN):
+ D1[:, 0, ii] = np.diag(D0[:, :, ii])
return V, D1, SpeN
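
For reference, the POD step above is an eigen-decomposition of the CPSD matrix at each frequency followed by truncation to the l_mo largest modes. A minimal self-contained sketch of that idea (the random CPSD stack and the sizes below are illustrative assumptions, not values from this module):

    import numpy as np

    ncomp, SpeN, l_mo = 3, 8, 2
    rng = np.random.default_rng(1)
    # Build a Hermitian, positive semi-definite CPSD stack of shape (ncomp, ncomp, SpeN)
    A = rng.standard_normal((ncomp, ncomp, SpeN)) + 1j * rng.standard_normal((ncomp, ncomp, SpeN))
    S_F = np.einsum('ikf,jkf->ijf', A, A.conj())

    V = np.zeros((ncomp, l_mo, SpeN), dtype=complex)
    D1 = np.zeros((l_mo, 1, SpeN))
    for ii in range(SpeN):
        D_all, V_all = np.linalg.eig(S_F[:, :, ii])
        ind = np.argsort(D_all.real)  # ascending eigenvalues, as in perform_POD
        for tt in range(l_mo):
            V[:, tt, ii] = V_all[:, ind[ncomp - 1 - tt]]  # largest modes first
            D1[tt, 0, ii] = D_all[ind[ncomp - 1 - tt]].real

    print(V.shape, D1.shape, SpeN)  # (3, 2, 8) (2, 1, 8) 8
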
-
-def learn_CPSD(Fx, Fy, Tz, ms, air_dens, vRef, H_full, B_full, D_full, MaxD_full, fs, Tw, overlap, fp, V_H, fcut, T_full):
- Fx_full = ms ** 2 * Fx # full scale Fx(N)
- Fy_full = ms ** 2 * Fy # full scale Fy(N)
- Tz_full = ms ** 3 * Tz # full scale Tz(N.m)
+def learn_CPSD( # noqa: D103, N802, PLR0913, PLR0917
+ Fx, # noqa: N803
+ Fy, # noqa: N803
+ Tz, # noqa: N803
+ ms,
+ air_dens,
+ vRef, # noqa: N803
+ H_full, # noqa: N803
+ B_full, # noqa: N803
+ D_full, # noqa: N803
+ MaxD_full, # noqa: N803
+ fs,
+ Tw, # noqa: N803
+ overlap,
+ fp,
+ V_H, # noqa: N803
+ fcut,
+ T_full, # noqa: N803
+):
+ Fx_full = ms**2 * Fx # full scale Fx(N) # noqa: N806
+ Fy_full = ms**2 * Fy # full scale Fy(N) # noqa: N806
+ Tz_full = ms**3 * Tz # full scale Tz(N.m) # noqa: N806
# Force Coefficients (unitless)
- CFx = Fx_full/(0.5*air_dens*vRef**2*H_full*B_full)
- CFy = Fy_full/(0.5*air_dens*vRef**2*H_full*D_full)
- CTz = Tz_full/(0.5*air_dens*vRef**2*H_full*MaxD_full**2/2)
+ CFx = Fx_full / (0.5 * air_dens * vRef**2 * H_full * B_full) # noqa: N806
+ CFy = Fy_full / (0.5 * air_dens * vRef**2 * H_full * D_full) # noqa: N806
+ CTz = Tz_full / (0.5 * air_dens * vRef**2 * H_full * MaxD_full**2 / 2) # noqa: N806
# Mean Force Coefficients
- CFx_mean = np.mean(CFx,axis=1)
- CFy_mean = np.mean(CFy,axis=1)
- CTz_mean = np.mean(CTz,axis=1)
-
- comp_CFmean = np.concatenate([CFx_mean,CFy_mean,CTz_mean])
+ CFx_mean = np.mean(CFx, axis=1) # noqa: N806
+ CFy_mean = np.mean(CFy, axis=1) # noqa: N806
+ CTz_mean = np.mean(CTz, axis=1) # noqa: N806
- RF = 3.5 # Reduction Factor
+ comp_CFmean = np.concatenate([CFx_mean, CFy_mean, CTz_mean]) # noqa: N806
+ RF = 3.5 # Reduction Factor # noqa: N806
# Normalization factor
- xnorm = np.std(CFx-CFx_mean[np.newaxis].T,axis=1)*RF
- ynorm= np.std(CFy-CFy_mean[np.newaxis].T,axis=1)*RF
- tornorm = np.std(CTz-CTz_mean[np.newaxis].T,axis=1)*RF
- norm_all = np.concatenate([xnorm,ynorm,tornorm])
-
+ xnorm = np.std(CFx - CFx_mean[np.newaxis].T, axis=1) * RF
+ ynorm = np.std(CFy - CFy_mean[np.newaxis].T, axis=1) * RF
+ tornorm = np.std(CTz - CTz_mean[np.newaxis].T, axis=1) * RF
+ norm_all = np.concatenate([xnorm, ynorm, tornorm])
+    # Standardization of Forces (force coefficients have about the same range)
- CFx_norm = (CFx-np.mean(CFx,axis=1)[np.newaxis].T)/xnorm[np.newaxis].T
- CFy_norm = (CFy-np.mean(CFy,axis=1)[np.newaxis].T)/ynorm[np.newaxis].T
- CTz_norm = (CTz-np.mean(CTz,axis=1)[np.newaxis].T)/tornorm[np.newaxis].T
- Components = np.vstack([CFx_norm,CFy_norm,CTz_norm]).T
-
+ CFx_norm = (CFx - np.mean(CFx, axis=1)[np.newaxis].T) / xnorm[np.newaxis].T # noqa: N806
+ CFy_norm = (CFy - np.mean(CFy, axis=1)[np.newaxis].T) / ynorm[np.newaxis].T # noqa: N806
+ CTz_norm = (CTz - np.mean(CTz, axis=1)[np.newaxis].T) / tornorm[np.newaxis].T # noqa: N806
+ Components = np.vstack([CFx_norm, CFy_norm, CTz_norm]).T # noqa: N806
# Smoothed target CPSD
- wind_size = fs*Tw;
- nover = round(overlap*wind_size)
+ wind_size = fs * Tw
+ nover = round(overlap * wind_size)
- #nfft = int(wind_size)
+ # nfft = int(wind_size)
fcut_sc = (V_H / vRef) * fcut
- dt = 1 / (2 * fcut_sc) # max.time incremen to avoid aliasing(s)
- N_t = round(T_full / dt) # number of time points
+    dt = 1 / (2 * fcut_sc)  # max. time increment to avoid aliasing (s)
+ N_t = round(T_full / dt) # number of time points # noqa: N806
nfft = N_t
- t_init=time.time()
- # [s_target,f_target] = cpsd(Components,Components,hanning(wind_size),nover,nfft,fp,'mimo');
- s_target, f_target = cpsd_matlab(Components,Components,wind_size,nover,nfft,fp)
+ t_init = time.time()
+ # [s_target,f_target] = cpsd(Components,Components,hanning(wind_size),nover,nfft,fp,'mimo');
+ s_target, f_target = cpsd_matlab(
+ Components, Components, wind_size, nover, nfft, fp
+ )
- print(" - Elapsed time: {:.3} seconds.\n".format( time.time() - t_init))
+ print(f' - Elapsed time: {time.time() - t_init:.3} seconds.\n') # noqa: T201
return s_target, f_target, norm_all, comp_CFmean, Fx_full, Fy_full, Tz_full
-def cpsd_matlab(Components1,Components2,wind_size,nover,nfft,fp):
+def cpsd_matlab(Components1, Components2, wind_size, nover, nfft, fp): # noqa: N803, D103
window = windows.hann(int(wind_size))
ncombs1 = Components1.shape[1]
ncombs2 = Components2.shape[1]
- nSampPoints = int(nfft/2+1)
- s_target = np.zeros((ncombs1,ncombs2,nSampPoints),dtype = 'complex_')
-
- print("Training cross power spectrum density..");
+ nSampPoints = int(nfft / 2 + 1) # noqa: N806
+ s_target = np.zeros((ncombs1, ncombs2, nSampPoints), dtype='complex_')
+ print('Training cross power spectrum density..') # noqa: T201
for nc2 in range(ncombs2):
for nc1 in range(ncombs1):
- [f_target,s_tmp] = csd(Components1[:,nc1],Components2[:,nc2],window=window,noverlap = nover,nfft = nfft,fs = fp)
- s_target[nc1,nc2,:] = s_tmp #*4/np.pi
+ [f_target, s_tmp] = csd(
+ Components1[:, nc1],
+ Components2[:, nc2],
+ window=window,
+ noverlap=nover,
+ nfft=nfft,
+ fs=fp,
+ )
+ s_target[nc1, nc2, :] = s_tmp # *4/np.pi
return s_target, f_target
-def simulation_gaussian(ncomp, N_t, V_vH, D_vH, theta_vH, nf_dir,N_f,f_inc,f,l_mo,tvec,SpeN,V_H,vRef,seed,seed_num):
+def simulation_gaussian( # noqa: D103, PLR0913, PLR0917
+ ncomp,
+ N_t, # noqa: N803
+ V_vH, # noqa: N803
+ D_vH, # noqa: N803
+ theta_vH, # noqa: N803
+ nf_dir,
+ N_f, # noqa: N803
+ f_inc,
+ f,
+ l_mo,
+ tvec,
+ SpeN, # noqa: ARG001, N803
+ V_H, # noqa: N803
+ vRef, # noqa: N803
+ seed,
+ seed_num,
+):
#
# Set Seed
#
- folderName = os.path.basename(os.getcwd()) # Lets get n from workdir.n and add this to the seed
- sampNum = folderName.split(".")[-1]
+ folderName = os.path.basename( # noqa: PTH119, N806
+ os.getcwd() # noqa: PTH109
+    )  # Let's get n from workdir.n and add it to the seed
+ sampNum = folderName.split('.')[-1] # noqa: N806
- if sampNum == "templatedir":
+ if sampNum == 'templatedir':
np.random.seed(seed[seed_num])
else:
- np.random.seed(seed[seed_num]+int(sampNum))
+ np.random.seed(seed[seed_num] + int(sampNum))
-
-
- F_jzm = np.zeros((ncomp,N_t)) #force coefficients initialize matrix
- f_tmp = np.linspace(0,(N_f-1)*f_inc,N_f)
+    # initialize the force coefficient matrix
+ F_jzm = np.zeros((ncomp, N_t)) # noqa: N806
+ f_tmp = np.linspace(0, (N_f - 1) * f_inc, N_f)
for m in range(l_mo):
- mo = m # current mode #
- Vmo = V_vH[nf_dir, mo,:] # eigenvector for mode mo
- #Dmo = D_vH[mo, 0,:] # eigenvalue for mode mo
- Dmo = D_vH[mo, 0,:] + 1j * 0 # To avoid nan when calculating VDmo
-
- thetmo = theta_vH[nf_dir, mo,:] # theta for mode mo
- VDmo = np.sqrt((V_H / vRef) ** 3) * np.abs(Vmo) * (np.ones((ncomp, 1)) * np.sqrt(Dmo)) # product of eigenvector X
+ mo = m # current mode #
+ Vmo = V_vH[nf_dir, mo, :] # eigenvector for mode mo # noqa: N806
+ # Dmo = D_vH[mo, 0,:] # eigenvalue for mode mo
+ # To avoid nan when calculating VDmo
+ Dmo = D_vH[mo, 0, :] + 1j * 0 # noqa: N806
+
+ thetmo = theta_vH[nf_dir, mo, :] # theta for mode mo
+ VDmo = ( # noqa: N806
+ np.sqrt((V_H / vRef) ** 3)
+ * np.abs(Vmo)
+ * (np.ones((ncomp, 1)) * np.sqrt(Dmo))
+        )  # product of eigenvector and sqrt of eigenvalue
# Generate random phase angle for each frequency SpeN
varth = (2 * np.pi) * np.random.random(size=(1, N_f))
# Loop over floors
# g_jm = np.zeros((N_t, ncomp),dtype = 'complex_')
- F_jm = np.zeros((ncomp, N_t))
+ F_jm = np.zeros((ncomp, N_t)) # noqa: N806
coef = np.sqrt(2) * np.sqrt(f_inc) * np.exp(1j * varth)
- coef2 = np.exp(1j*((mo+1)/l_mo*f_inc)*tvec)
+ coef2 = np.exp(1j * ((mo + 1) / l_mo * f_inc) * tvec)
- fVDmo = interp1d(f, VDmo, kind='linear', fill_value="extrapolate")
- fthetmo = interp1d(f, thetmo, kind='linear', fill_value="extrapolate")
- fV_interp = np.abs(fVDmo(f_tmp))
- fthet_interp = np.exp((1j) * (fthetmo(f_tmp)))
+ fVDmo = interp1d(f, VDmo, kind='linear', fill_value='extrapolate') # noqa: N806
+ fthetmo = interp1d(f, thetmo, kind='linear', fill_value='extrapolate')
+ fV_interp = np.abs(fVDmo(f_tmp)) # noqa: N806
+ fthet_interp = np.exp((1j) * (fthetmo(f_tmp)))
for j in range(ncomp):
# l denotes a particular freq. point
# m denotes a particular mode
# j denotes a particular floor
- fVDmo = interp1d(f,VDmo[j,:], kind='linear',fill_value="extrapolate")
- fthetmo = interp1d(f,thetmo[j,:], kind='linear',fill_value="extrapolate")
+ fVDmo = interp1d(f, VDmo[j, :], kind='linear', fill_value='extrapolate') # noqa: N806
+ fthetmo = interp1d(
+ f, thetmo[j, :], kind='linear', fill_value='extrapolate'
+ )
- B_jm = np.zeros((N_t,),dtype = 'complex_')
- B_jm[0:N_f] = coef * fV_interp[j,:] * fthet_interp[j,:]
+ B_jm = np.zeros((N_t,), dtype='complex_') # noqa: N806
+ B_jm[0:N_f] = coef * fV_interp[j, :] * fthet_interp[j, :]
- g_jm = np.fft.ifft(B_jm)*N_t
- F_jm[j,:] = np.real(g_jm*coef2)
+ g_jm = np.fft.ifft(B_jm) * N_t
+ F_jm[j, :] = np.real(g_jm * coef2)
- F_jzm = F_jzm + F_jm # sum up F from different modes (zero - mean)
+        # sum up F from different modes (zero-mean)
+ F_jzm = F_jzm + F_jm # noqa: N806, PLR6104
return F_jzm
-def err_exit(msg):
- print(msg)
- with open("../workflow.err","w") as f:
+def err_exit(msg): # noqa: D103
+ print(msg) # noqa: T201
+ with open('../workflow.err', 'w') as f: # noqa: FURB103, PLW1514, PTH123
f.write(msg)
- exit(-1)
+ exit(-1) # noqa: PLR1722
+
if __name__ == '__main__':
- #parseWindMatFile("Forces_ANG000_phase1.mat", "Forces_ANG000_phase1.json")
- #parseWindMatFile("TargetSpectra_ANG000_phase1.mat", "TargetSpectra_ANG000_phase1.json")
+ # parseWindMatFile("Forces_ANG000_phase1.mat", "Forces_ANG000_phase1.json")
+ # parseWindMatFile("TargetSpectra_ANG000_phase1.mat", "TargetSpectra_ANG000_phase1.json")
- inputArgs = sys.argv
+ inputArgs = sys.argv # noqa: N816, F405
# set filenames
- aimName = sys.argv[2]
- evtName = sys.argv[4]
-
- getRV = False;
- for myarg in sys.argv:
- if (myarg == "--getRV"):
- getRV = True;
+ aimName = sys.argv[2] # noqa: N816, F405
+ evtName = sys.argv[4] # noqa: N816, F405
+ getRV = False # noqa: N816
+ for myarg in sys.argv: # noqa: F405
+ if myarg == '--getRV':
+ getRV = True # noqa: N816
if error_tag and getRV:
- with open("../workflow.err","w") as f:
- print("Failed to import module " + moduleName)
- f.write("Failed to import module " + moduleName + ". Please check the python path in the preference")
- exit(-1)
+ with open('../workflow.err', 'w') as f: # noqa: FURB103, PLW1514, PTH123
+ print('Failed to import module ' + moduleName) # noqa: T201
+ f.write(
+ 'Failed to import module '
+ + moduleName
+ + '. Please check the python path in the preference'
+ )
+ exit(-1) # noqa: PLR1722
# if getRV:
# aimName = aimName + ".sc"
try:
main(aimName, evtName, getRV)
- except Exception as err:
+ except Exception as err: # noqa: BLE001
import traceback
+
if getRV:
- with open("../workflow.err","w") as f:
- f.write("Failed in wind load generator preprocessor:" + str(err) + "..." + str(traceback.format_exc()))
- print("Failed in wind load generator preprocessor:" + str(err) + "..." + str(traceback.format_exc()))
- exit(-1)
+ with open('../workflow.err', 'w') as f: # noqa: FURB103, PLW1514, PTH123
+ f.write(
+ 'Failed in wind load generator preprocessor:'
+ + str(err)
+ + '...'
+ + str(traceback.format_exc())
+ )
+ print( # noqa: T201
+ 'Failed in wind load generator preprocessor:'
+ + str(err)
+ + '...'
+ + str(traceback.format_exc())
+ )
+ exit(-1) # noqa: PLR1722
else:
- with open("../dakota.err","w") as f:
- f.write("Failed to generate wind load: " + str(err) + "..." + str(traceback.format_exc()))
- print("Failed to generate wind load:" + str(err) + "..." + str(traceback.format_exc()))
- exit(-1)
+ with open('../dakota.err', 'w') as f: # noqa: FURB103, PLW1514, PTH123
+ f.write(
+ 'Failed to generate wind load: '
+ + str(err)
+ + '...'
+ + str(traceback.format_exc())
+ )
+ print( # noqa: T201
+ 'Failed to generate wind load:'
+ + str(err)
+ + '...'
+ + str(traceback.format_exc())
+ )
+ exit(-1) # noqa: PLR1722
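
simulation_gaussian above is, at its core, a one-sided spectral-representation synthesis: complex amplitudes are placed in the first N_f FFT bins and an inverse FFT produces a zero-mean Gaussian sample path. A minimal sketch with made-up numbers (the spectrum values, sizes, and names below are illustrative assumptions):

    import numpy as np

    N_t, N_f, f_inc = 2048, 257, 0.01
    rng = np.random.default_rng(0)
    amp = rng.random(N_f)  # stand-in for the interpolated |V| * sqrt(D) amplitudes
    phase = 2 * np.pi * rng.random(N_f)  # one random phase per frequency line

    B = np.zeros(N_t, dtype=complex)
    B[:N_f] = np.sqrt(2) * np.sqrt(f_inc) * amp * np.exp(1j * phase)
    g = np.fft.ifft(B) * N_t  # same scaling as B_jm -> g_jm in simulation_gaussian
    x = np.real(g)  # approximately zero-mean Gaussian realization

    print(x.shape, float(np.round(x.mean(), 6)))
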
diff --git a/modules/createEVENT/experimentalWindPressures/convertWindMat.py b/modules/createEVENT/experimentalWindPressures/convertWindMat.py
index 209656cde..02f8a17c0 100644
--- a/modules/createEVENT/experimentalWindPressures/convertWindMat.py
+++ b/modules/createEVENT/experimentalWindPressures/convertWindMat.py
@@ -1,69 +1,72 @@
+# python code to open the .mat file # noqa: CPY001, D100, INP001
+# and put data into a SimCenter JSON file
-# python code to open the .mat file
-# and put data into a SimCenter JSON file
-
-import sys
-import os
-import subprocess
import json
-import stat
-import shutil
+import os
+
import numpy as np
import scipy.io as sio
-from pprint import pprint
-def parseWindMatFile(matFileIn, windFileOutName):
- dataDir = os.getcwd()
- scriptDir = os.path.dirname(os.path.realpath(__file__))
-
+def parseWindMatFile(matFileIn, windFileOutName): # noqa: N802, N803, D103
+ dataDir = os.getcwd() # noqa: PTH109, N806, F841
+ scriptDir = os.path.dirname(os.path.realpath(__file__)) # noqa: PTH120, N806, F841
- mat_contents = sio.loadmat(matFileIn)
+ mat_contents = sio.loadmat(matFileIn)
depth = float(mat_contents['D'][0])
- breadth = float(mat_contents['B'][0])
- height = float(mat_contents['H'][0])
- fs = float(mat_contents['fs'][0])
- vRef=float(mat_contents['Vref'][0])
-
- if "s_target" in mat_contents:
- case = "spectra"
- comp_CFmean = np.squeeze(np.array(mat_contents['comp_CFmean']))
+ breadth = float(mat_contents['B'][0])
+ height = float(mat_contents['H'][0])
+ fs = float(mat_contents['fs'][0])
+ vRef = float(mat_contents['Vref'][0]) # noqa: N806
+
+ if 's_target' in mat_contents:
+ case = 'spectra' # noqa: F841
+ comp_CFmean = np.squeeze(np.array(mat_contents['comp_CFmean'])) # noqa: N806
norm_all = np.squeeze(np.array(mat_contents['norm_all']))
f_target = np.squeeze(np.array(mat_contents['f_target']))
s_target = np.squeeze(np.array(mat_contents['s_target']))
- createSpectraJson(windFileOutName, breadth, depth, height, fs, vRef, f_target, s_target, comp_CFmean, norm_all)
-
- elif "Fx" in mat_contents:
-
-
- Fx = np.squeeze(np.array(mat_contents['Fx']))
- Fy = np.squeeze(np.array(mat_contents['Fy']))
- Tz = np.squeeze(np.array(mat_contents['Tz']))
+ createSpectraJson(
+ windFileOutName,
+ breadth,
+ depth,
+ height,
+ fs,
+ vRef,
+ f_target,
+ s_target,
+ comp_CFmean,
+ norm_all,
+ )
+
+ elif 'Fx' in mat_contents:
+ Fx = np.squeeze(np.array(mat_contents['Fx'])) # noqa: N806
+ Fy = np.squeeze(np.array(mat_contents['Fy'])) # noqa: N806
+ Tz = np.squeeze(np.array(mat_contents['Tz'])) # noqa: N806
t = np.squeeze(np.array(mat_contents['t']))
- myJson = {}
- myJson["D"] = depth
- myJson["H"] = height
- myJson["B"] = breadth
- myJson["fs"] = fs
- myJson["Vref"] = vRef
-
- myJson["Fx"] = np.array(Fx).tolist()
- myJson["Fy"] = np.array(Fy).tolist()
- myJson["Tz"] = np.array(Tz).tolist()
- myJson["t"] = np.array(t).tolist()
- with open(windFileOutName,"w") as f:
+ myJson = {} # noqa: N806
+ myJson['D'] = depth
+ myJson['H'] = height
+ myJson['B'] = breadth
+ myJson['fs'] = fs
+ myJson['Vref'] = vRef
+
+ myJson['Fx'] = np.array(Fx).tolist()
+ myJson['Fy'] = np.array(Fy).tolist()
+ myJson['Tz'] = np.array(Tz).tolist()
+ myJson['t'] = np.array(t).tolist()
+ with open(windFileOutName, 'w') as f: # noqa: PLW1514, PTH123
json.dump(myJson, f)
- # file = open(windFileOutName,"w")
- # file.write("{")
- # file.write("\"D\":%f," % depth)
- # file.write("\"H\":%f," % height)
- # file.write("\"B\":%f," % breadth)
- # file.write("\"fs\":%f," % fs)
- # file.write("\"Vref\":%f," % vRef)
+ # file = open(windFileOutName,"w")
+ # file.write("{")
+ # file.write("\"D\":%f," % depth)
+ # file.write("\"H\":%f," % height)
+ # file.write("\"B\":%f," % breadth)
+ # file.write("\"fs\":%f," % fs)
+ # file.write("\"Vref\":%f," % vRef)
# case = "timeHistory"
# Fx = mat_contents['Fx']
@@ -118,70 +121,74 @@ def parseWindMatFile(matFileIn, windFileOutName):
# else:
# file.write(",")
-
# file.write("}")
# file.close()
- # Check valid JSON file,
+ # Check valid JSON file,
validate = True
if validate:
- with open(windFileOutName, "r") as infile:
+ with open(windFileOutName) as infile: # noqa: FURB101, PLW1514, PTH123
json_data = infile.read()
# Try to parse the JSON data
try:
- json_object = json.loads(json_data)
- print("JSON file is valid")
+ json_object = json.loads(json_data) # noqa: F841
+ print('JSON file is valid') # noqa: T201
except json.decoder.JSONDecodeError:
- print("JSON file is not valid")
-
-
-
-
-
-def createSpectraJson(windFileOutName, breadth, depth, height, fs, vRef, f_target, s_target, comp_CFmean, norm_all):
-
-
- ncomp = comp_CFmean.shape[0]
- nf = f_target.shape[0]
-
- myJson = {}
- myJson["D"] = depth
- myJson["H"] = height
- myJson["B"] = breadth
- myJson["fs"] = fs
- myJson["Vref"] = vRef
- myJson["comp_CFmean"] = comp_CFmean.tolist()
- myJson["norm_all"] = norm_all.tolist()
- myJson["f_target"] = f_target.tolist()
-
-
- myJson["s_target_real"] = np.real(s_target).tolist()
- myJson["s_target_imag"] = np.imag(s_target).tolist()
-
- with open(windFileOutName,"w") as f:
+ print('JSON file is not valid') # noqa: T201
+
+
+def createSpectraJson( # noqa: N802, D103
+ windFileOutName, # noqa: N803
+ breadth,
+ depth,
+ height,
+ fs,
+ vRef, # noqa: N803
+ f_target,
+ s_target,
+ comp_CFmean, # noqa: N803
+ norm_all,
+):
+ ncomp = comp_CFmean.shape[0] # noqa: F841
+ nf = f_target.shape[0] # noqa: F841
+
+ myJson = {} # noqa: N806
+ myJson['D'] = depth
+ myJson['H'] = height
+ myJson['B'] = breadth
+ myJson['fs'] = fs
+ myJson['Vref'] = vRef
+ myJson['comp_CFmean'] = comp_CFmean.tolist()
+ myJson['norm_all'] = norm_all.tolist()
+ myJson['f_target'] = f_target.tolist()
+
+ myJson['s_target_real'] = np.real(s_target).tolist()
+ myJson['s_target_imag'] = np.imag(s_target).tolist()
+
+ with open(windFileOutName, 'w') as f: # noqa: PLW1514, PTH123
json.dump(myJson, f)
- # Check valid JSON file
+ # Check valid JSON file
validate = True
if validate:
- with open(windFileOutName, "r") as infile:
+ with open(windFileOutName) as infile: # noqa: FURB101, PLW1514, PTH123
json_data = infile.read()
# Try to parse the JSON data
try:
- json_object = json.loads(json_data)
- print("JSON file is valid")
+ json_object = json.loads(json_data) # noqa: F841
+ print('JSON file is valid') # noqa: T201
except json.decoder.JSONDecodeError:
- print("JSON file is not valid")
-
- # file = open(windFileOutName,"w")
- # file.write("{")
- # file.write("\"D\":%f," % depth)
- # file.write("\"H\":%f," % height)
- # file.write("\"B\":%f," % breadth)
- # file.write("\"fs\":%f," % fs)
- # file.write("\"Vref\":%f," % vRef)
+ print('JSON file is not valid') # noqa: T201
+
+ # file = open(windFileOutName,"w")
+ # file.write("{")
+ # file.write("\"D\":%f," % depth)
+ # file.write("\"H\":%f," % height)
+ # file.write("\"B\":%f," % breadth)
+ # file.write("\"fs\":%f," % fs)
+ # file.write("\"Vref\":%f," % vRef)
# file.write("\"units\":{\"length\":\"m\",\"time\":\"sec\"},")
# ncomp = comp_CFmean.shape[0]
@@ -244,22 +251,33 @@ def createSpectraJson(windFileOutName, breadth, depth, height, fs, vRef, f_targe
# file.close()
-def createPODJson(filename, V, D1, SpeN, f_target, norm_all, D, H, B, fs, vRef,comp_CFmean):
-
-
- myJson = {}
- myJson["V_imag"] = np.imag(V).tolist()
- myJson["V_real"] = np.real(V).tolist()
- myJson["D1"] = D1.tolist()
- myJson["SpeN"] = SpeN
- myJson["f_target"] = f_target.tolist()
- myJson["norm_all"] = norm_all.tolist()
- myJson["comp_CFmean"] = comp_CFmean.tolist()
- myJson["D"] = D
- myJson["H"] = H
- myJson["B"] = B
- myJson["fs"] = fs
- myJson["Vref"] = vRef
-
- with open(filename,"w") as f:
+def createPODJson( # noqa: N802, D103
+ filename,
+ V, # noqa: N803
+ D1, # noqa: N803
+ SpeN, # noqa: N803
+ f_target,
+ norm_all,
+ D, # noqa: N803
+ H, # noqa: N803
+ B, # noqa: N803
+ fs,
+ vRef, # noqa: N803
+ comp_CFmean, # noqa: N803
+):
+ myJson = {} # noqa: N806
+ myJson['V_imag'] = np.imag(V).tolist()
+ myJson['V_real'] = np.real(V).tolist()
+ myJson['D1'] = D1.tolist()
+ myJson['SpeN'] = SpeN
+ myJson['f_target'] = f_target.tolist()
+ myJson['norm_all'] = norm_all.tolist()
+ myJson['comp_CFmean'] = comp_CFmean.tolist()
+ myJson['D'] = D
+ myJson['H'] = H
+ myJson['B'] = B
+ myJson['fs'] = fs
+ myJson['Vref'] = vRef
+
+ with open(filename, 'w') as f: # noqa: PLW1514, PTH123
json.dump(myJson, f)
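
A note on the storage convention used in createPODJson: the complex POD modes are split into separate 'V_real' and 'V_imag' lists because JSON has no complex type, so a reader has to reassemble them. A minimal round-trip sketch (the small matrix below is an illustrative assumption):

    import json

    import numpy as np

    V = np.array([[1 + 2j, 0.5 - 1j], [0.1j, 3.0 + 0j]])
    payload = {'V_real': np.real(V).tolist(), 'V_imag': np.imag(V).tolist()}
    text = json.dumps(payload)  # what createPODJson writes for these two keys

    loaded = json.loads(text)
    V_back = np.array(loaded['V_real']) + 1j * np.array(loaded['V_imag'])
    assert np.allclose(V, V_back)
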
diff --git a/modules/createEVENT/experimentalWindPressures/experimentalWindPressures.py b/modules/createEVENT/experimentalWindPressures/experimentalWindPressures.py
index ff5b59441..524b73c3c 100644
--- a/modules/createEVENT/experimentalWindPressures/experimentalWindPressures.py
+++ b/modules/createEVENT/experimentalWindPressures/experimentalWindPressures.py
@@ -1,130 +1,147 @@
-import json
-import math
-import time
+import json # noqa: CPY001, D100, INP001
import os
+import time
try:
- moduleName = "numpy"
+ moduleName = 'numpy' # noqa: N816
import numpy as np
- moduleName = "scipy"
- from scipy.signal import csd, windows, butter, lfilter
+ moduleName = 'scipy' # noqa: N816
+ import os
+
+ from scipy import interpolate
from scipy.interpolate import interp1d
+ from scipy.signal import butter, csd, lfilter, windows
from scipy.stats import gaussian_kde, genpareto, norm
- from scipy import interpolate
- import os
error_tag = False # global variable
-except:
+except: # noqa: E722
error_tag = True
-from convertWindMat import *
+from convertWindMat import * # noqa: F403
-errPath = "./workflow.err" # error file name
-sys.stderr = open(errPath, 'w') # redirecting stderr (this way we can capture all sorts of python errors)
+errPath = './workflow.err' # error file name # noqa: N816
+sys.stderr = open( # noqa: F405, PLW1514, PTH123, SIM115
+ errPath, 'w'
+) # redirecting stderr (this way we can capture all sorts of python errors)
-def err_exit(msg):
- print("Failed in wind load generator: " + msg) # display in stdout
- print("Failed in wind load generator: " + msg, file=sys.stderr) # display in stderr
- exit(-1) # exit with non-zero exit code
+def err_exit(msg): # noqa: D103
+ print('Failed in wind load generator: ' + msg) # display in stdout # noqa: T201
+ print(
+ 'Failed in wind load generator: ' + msg,
+ file=sys.stderr, # noqa: F405
+ ) # display in stderr
+ exit(-1) # exit with non-zero exit code # noqa: PLR1722
-def main(aimName, evtName, getRV):
- with open(aimName, 'r', encoding='utf-8') as f:
+def main(aimName, evtName, getRV): # noqa: C901, D103, N803, PLR0914, PLR0915
+ with open(aimName, encoding='utf-8') as f: # noqa: PTH123
aim_data = json.load(f)
- evt_data = aim_data["Events"][0]
+ evt_data = aim_data['Events'][0]
- filename = evt_data["filename"]
+ filename = evt_data['filename']
#
# User-defined variables
#
- V_H = evt_data["windSpeed"] # 4*Vref, wind speed at full scale (m/s)
- T_full = evt_data["fullScaleDuration"] # 1600, Duration of wind pressure realization at full scale (s)
- # TODO check if there is recommended modes
- perc_mod = evt_data["modePercent"] / 100 # percentage of modes to include in the simulation
- seed = evt_data["seed"] # Set seeds for reproducibility
- Tw = evt_data["windowSize"] # 4, window size/duration (sec) - smaller window leads to more smoothing - model scale
- overlap = evt_data["overlapPerc"] / 100 # 0.5 , 50% overlap - user defined
+ # 4*Vref, wind speed at full scale (m/s)
+ V_H = evt_data['windSpeed'] # noqa: N806
+ T_full = evt_data[ # noqa: N806
+ 'fullScaleDuration'
+ ] # 1600, Duration of wind pressure realization at full scale (s)
+    # TODO: check if there are recommended modes  # noqa: TD002, TD004
+ perc_mod = (
+ evt_data['modePercent'] / 100
+ ) # percentage of modes to include in the simulation
+ seed = evt_data['seed'] # Set seeds for reproducibility
+ Tw = evt_data[ # noqa: N806
+ 'windowSize'
+ ] # 4, window size/duration (sec) - smaller window leads to more smoothing - model scale
+ overlap = evt_data['overlapPerc'] / 100 # 0.5 , 50% overlap - user defined
gg = evt_data[
- "cpsdGroupSize"] # 5 , User defined: variable that subdivides the CPSD matrix into ggxgg "groups" in order to avoid running out of memory
+ 'cpsdGroupSize'
+ ] # 5 , User defined: variable that subdivides the CPSD matrix into ggxgg "groups" in order to avoid running out of memory
- ms = evt_data.get("modelScale", 0) # model scale
+ ms = evt_data.get('modelScale', 0) # model scale
- selected_taps = np.array(evt_data[
- "selectedTaps"]) # np.arange(91,150+1) - 1 , to start from zero # selected taps for simulation (1:510 if all taps are included)
+ selected_taps = np.array(
+ evt_data['selectedTaps']
+ ) # np.arange(91,150+1) - 1 , to start from zero # selected taps for simulation (1:510 if all taps are included)
tap = len(selected_taps)
- filtHz = 100 # if applied - filtering high-frequency noise - model scale
+ filtHz = 100 # if applied - filtering high-frequency noise - model scale # noqa: N806
# set equal to 0 if not applied
l_mo = int(np.ceil(tap * perc_mod)) # number of modes included in the simulation
- if l_mo > 100 or l_mo < 0:
- err_exit('Number of modes should be equal or less than the number of components')
+ if l_mo > 100 or l_mo < 0: # noqa: PLR2004
+ err_exit(
+ 'Number of modes should be equal or less than the number of components'
+ )
- print('Number of modes = ' + str(l_mo))
+ print('Number of modes = ' + str(l_mo)) # noqa: T201
#
# Parameters
#
- tailTrshd = 5 # Percentage of tail threshold on both tails - Fixed value for all times series
+    tailTrshd = 5  # Percentage of tail threshold on both tails - Fixed value for all time series  # noqa: N806
nl = tailTrshd / 100 # Lower Tail Threshold
nu = 1 - nl # Upper Tail Threshold
if getRV:
- print("Running Get RV")
+ print('Running Get RV') # noqa: T201
do_parallel = True
- runType = aim_data["runType"]
+ runType = aim_data['runType'] # noqa: N806
if do_parallel:
-
#
# Define pool
#
- if runType == "runningLocal":
- from multiprocessing import Pool
+ if runType == 'runningLocal':
+ from multiprocessing import Pool # noqa: PLC0415
+
n_processor = os.cpu_count()
- print("Starting pool")
+ print('Starting pool') # noqa: T201
tmp = time.time()
pool = Pool(n_processor)
- print(" - Elapsed time: {:.3f} seconds.\n".format(time.time() - tmp))
+ print(f' - Elapsed time: {time.time() - tmp:.3f} seconds.\n') # noqa: T201
else:
- from mpi4py import MPI
- from mpi4py.futures import MPIPoolExecutor
+ from mpi4py import MPI # noqa: PLC0415
+ from mpi4py.futures import MPIPoolExecutor # noqa: PLC0415
+
world = MPI.COMM_WORLD
n_processor = world.Get_size()
pool = MPIPoolExecutor()
if filename.endswith('.mat'):
- pressure_data = sio.loadmat(filename)
+ pressure_data = sio.loadmat(filename) # noqa: F405
for key in pressure_data:
# print(key)
- if not key.startswith("__"):
+ if not key.startswith('__'):
pressure_data[key] = pressure_data[key][0]
elif filename.endswith('.json'):
- with open(filename, 'r', encoding='utf-8') as jsonFile:
+ with open(filename, encoding='utf-8') as jsonFile: # noqa: PTH123, N806
pressure_data = json.load(jsonFile)
- fs = np.squeeze(pressure_data["frequency"])
- Vref = np.squeeze(pressure_data["windSpeed"])
- Td = np.squeeze(pressure_data["period"])
- pressure_json = pressure_data["pressureCoefficients"]
+ fs = np.squeeze(pressure_data['frequency'])
+ Vref = np.squeeze(pressure_data['windSpeed']) # noqa: N806
+ Td = np.squeeze(pressure_data['period']) # noqa: N806
+ pressure_json = pressure_data['pressureCoefficients']
dt = 1 / fs
tvec = np.arange(0, Td, dt) + dt
- Cp_pf = np.zeros((len(tvec), len(pressure_json)))
+ Cp_pf = np.zeros((len(tvec), len(pressure_json))) # noqa: N806
id_list = set()
for tap_info in pressure_json:
- id = np.squeeze(tap_info["id"])
- data = np.squeeze(tap_info["data"])
+ id = np.squeeze(tap_info['id']) # noqa: A001
+ data = np.squeeze(tap_info['data'])
Cp_pf[:, id - 1] = data
id_list.add(int(id))
- '''
+ """
import matplotlib.pyplot as plt
myx = np.array([np.squeeze(a).item() for a in np.squeeze(pressure_data["tapLocations"]["xLoc"])])[selected_taps-1]
@@ -135,30 +152,31 @@ def main(aimName, evtName, getRV):
plt.scatter(myx[id],myy[id],20,c=myMean[id])
plt.show()
- '''
+ """
if ms == 0: # when mat file is imported, model scale is not precalculated
- print("Model scale not found. Calculating the unified model scale..")
- D = np.squeeze(pressure_data["depth"])
- H = np.squeeze(pressure_data["height"])
- B = np.squeeze(pressure_data["breadth"])
- D_full = aim_data["GeneralInformation"]["depth"]
- H_full = aim_data["GeneralInformation"]["height"]
- B_full = aim_data["GeneralInformation"]["width"]
+ print('Model scale not found. Calculating the unified model scale..') # noqa: T201
+ D = np.squeeze(pressure_data['depth']) # noqa: N806
+ H = np.squeeze(pressure_data['height']) # noqa: N806
+ B = np.squeeze(pressure_data['breadth']) # noqa: N806
+ D_full = aim_data['GeneralInformation']['depth'] # noqa: N806
+ H_full = aim_data['GeneralInformation']['height'] # noqa: N806
+ B_full = aim_data['GeneralInformation']['width'] # noqa: N806
ms = H_full / H
- print("Model scaling factor of {:.2f} is used".format(ms))
- if (((not ms == D_full / D) or (not ms == B_full / B)) and getRV):
- print("Warning: target-data geometry scaling ratio is inconsistent: H={:.2}, B={:.2}, D={:.2}".format(
- H_full / H, B_full / B, D_full / D))
+ print(f'Model scaling factor of {ms:.2f} is used') # noqa: T201
+ if ((ms != D_full / D) or (ms != B_full / B)) and getRV:
+ print( # noqa: T201
+ f'Warning: target-data geometry scaling ratio is inconsistent: H={H_full / H:.2}, B={B_full / B:.2}, D={D_full / D:.2}'
+ )
if len(set(selected_taps.flatten()).difference(id_list)) > 0:
- msg = "The selected taps are not a subset of your original set: following tabs are not found"
+        msg = 'The selected taps are not a subset of your original set: the following taps are not found: '
        msg += str(set(selected_taps.flatten()).difference(set(id_list)))
err_exit(msg)
# Values for paretotails function
- N = np.size(Cp_pf, 1) # total number of data points
+ N = np.size(Cp_pf, 1) # total number of data points # noqa: N806, F841
fc = fs / 2 # Nyquist Frequency (Hz) wind tunnel
fp = fs / ms # scaled frequency
fcut = fc / ms # scaled Nyquist frequency
@@ -170,13 +188,13 @@ def main(aimName, evtName, getRV):
# filtering added
if filtHz > 0:
n = 2
- Hz = filtHz
- Wn = Hz / (fs / 2)
+ Hz = filtHz # noqa: N806
+ Wn = Hz / (fs / 2) # noqa: N806
[b, a] = butter(n, Wn)
x = Cp_pf - np.mean(Cp_pf, axis=0)
# y = filter(b, a, x)
y = lfilter(b, a, x, axis=0)
- Cp = (y + np.mean(Cp_pf, axis=0))
+ Cp = y + np.mean(Cp_pf, axis=0) # noqa: N806
#######################################################################################################################
# Standardization of wind records
@@ -185,18 +203,23 @@ def main(aimName, evtName, getRV):
    # when standardized, requiring fewer modes in the simulation.
    # Pressure Coefficients Time histories
- Cp_std = np.std(Cp, axis=0) # std of time series for later use
- Cp_mean = np.mean(Cp, axis=0) # mean of time series for later use
- # Cp_norm = np.normalize(Cp) # standardize Cp time series such that mean = 0 and std = 1 for all taps.
- row_sums = Cp.sum(axis=1)
- Cp_norm = (Cp - Cp_mean) / Cp_std
+ Cp_std = np.std(Cp, axis=0) # std of time series for later use # noqa: N806
+ # mean of time series for later use
+ Cp_mean = np.mean(Cp, axis=0) # noqa: N806
+
+ # standardize Cp time series such that mean = 0 and std = 1
+ # for all taps.
+ # Cp_norm = np.normalize(Cp)
+
+ row_sums = Cp.sum(axis=1) # noqa: F841
+ Cp_norm = (Cp - Cp_mean) / Cp_std # noqa: N806
# Smoothed target CPSD
wind_size = fs * Tw
nover = np.round(overlap * wind_size)
fcut_sc = (V_H / Vref) * fcut # scaled cut-off frequency
dt = 1 / (2 * fcut_sc) # max. time increment to avoid aliasing (s)
- N_t = int(np.round(T_full / dt)) # number of time points
+ N_t = int(np.round(T_full / dt)) # number of time points # noqa: N806
nfft = N_t
#
@@ -209,23 +232,27 @@ def main(aimName, evtName, getRV):
if out > 0:
d = np.concatenate([d, np.array([d[-1] + out])])
- # TODO: dealing with gpuArray, gather
- nSampPoints = int(nfft / 2 + 1)
- s_target = np.zeros((len(selected_taps), len(selected_taps), nSampPoints), dtype='complex_')
- startTime = time.time()
- # TODO: works only if the selected taps are is continuous
+ # TODO: dealing with gpuArray, gather # noqa: TD002
+ nSampPoints = int(nfft / 2 + 1) # noqa: N806
+ s_target = np.zeros(
+ (len(selected_taps), len(selected_taps), nSampPoints), dtype='complex_'
+ )
+ startTime = time.time() # noqa: N806, F841
+    # TODO: works only if the selected taps are continuous  # noqa: TD002
selected_taps_tmp = np.concatenate(
- [selected_taps, [selected_taps[-1] + 1]]) # zero is dummy that will not appear in the analysis
+ [selected_taps, [selected_taps[-1] + 1]]
+ ) # zero is dummy that will not appear in the analysis
- print("Training cross power spectrum density..");
+ print('Training cross power spectrum density..') # noqa: T201
t_init = time.time()
nloop = (len(d) - 1) * (len(d) - 1)
for i in range(1, len(d)):
for j in range(1, len(d)):
-
if np.mod((i - 1) * (len(d) - 1) + j, round(nloop / 10)) == 0:
- print("{:.0f} % completed".format(((i - 1) * (len(d) - 1) + j) / nloop * 100))
+ print( # noqa: T201
+ f'{((i - 1) * (len(d) - 1) + j) / nloop * 100:.0f} % completed'
+ )
kk = np.arange(d[i - 1], d[i])
ll = np.arange(d[j - 1], d[j])
@@ -234,40 +261,46 @@ def main(aimName, evtName, getRV):
ii = selected_taps_tmp[kk]
jj = selected_taps_tmp[ll]
- [s, f_target] = cpsd_matlab(Cp_norm[:, ii - 1], Cp_norm[:, jj - 1], wind_size, nover, nfft,
- fp) # -1 becuase tab1 is at column 0
+ [s, f_target] = cpsd_matlab(
+ Cp_norm[:, ii - 1],
+ Cp_norm[:, jj - 1],
+ wind_size,
+ nover,
+ nfft,
+ fp,
+            )  # -1 because tap 1 is at column 0
# cpsd_all[kk,ll] = s
- s_target[d[i - 1]:d[i], d[j - 1]:d[j]] = s
+ s_target[d[i - 1] : d[i], d[j - 1] : d[j]] = s
- print(" - Elapsed time: {:.1f} seconds.\n".format(time.time() - t_init))
+ print(f' - Elapsed time: {time.time() - t_init:.1f} seconds.\n') # noqa: T201
- unitLength = aim_data["GeneralInformation"]["units"]["length"]
- unitTime = aim_data["GeneralInformation"]["units"]["time"]
+ unitLength = aim_data['GeneralInformation']['units']['length'] # noqa: N806
+ unitTime = aim_data['GeneralInformation']['units']['time'] # noqa: N806
- print("Performing POD..");
+ print('Performing POD..') # noqa: T201
t_init = time.time()
# Spectral Proper Orthogonal Decomposition
- V, D1, SpeN = perform_POD(s_target, f_target, tap, l_mo, pool)
- print(" - Elapsed time: {:.1f} seconds.\n".format(time.time() - t_init))
+ V, D1, SpeN = perform_POD(s_target, f_target, tap, l_mo, pool) # noqa: N806
+ print(f' - Elapsed time: {time.time() - t_init:.1f} seconds.\n') # noqa: T201
#
# Computing nonGaussian CDFs
#
if do_parallel:
- print("Computing nonGaussian CDF in parallel")
+ print('Computing nonGaussian CDF in parallel') # noqa: T201
tmp = time.time()
iterables = ((Cp_norm[:, selected_taps[i] - 1],) for i in range(tap))
try:
result_objs = list(pool.starmap(getCDF, iterables))
- print(" - Elapsed time: {:.3f} seconds.\n".format(time.time() - tmp))
+ print(f' - Elapsed time: {time.time() - tmp:.3f} seconds.\n') # noqa: T201
except KeyboardInterrupt:
- print("Ctrl+c received, terminating and joining pool.")
+ print('Ctrl+c received, terminating and joining pool.') # noqa: T201
try:
- self.pool.shutdown()
- except Exception:
- sys.exit()
+ self.pool.shutdown() # noqa: F405
+ except Exception: # noqa: BLE001
+ sys.exit() # noqa: F405
my_cdf_vects = np.zeros((1000, tap))
my_cdf_x_range = np.zeros((2, tap))
@@ -275,14 +308,13 @@ def main(aimName, evtName, getRV):
my_cdf_vects[:, i] = result_objs[i][0]
my_cdf_x_range[:, i] = result_objs[i][1]
-
else:
- print("Computing nonGaussian CDF")
+ print('Computing nonGaussian CDF') # noqa: T201
tmp = time.time()
my_cdf_vects = np.zeros((1000, tap))
my_cdf_x_range = np.zeros((2, tap))
for i in range(tap):
- '''
+ """
Cp_temp = Cp_norm[:, selected_taps[i]]
kernel = gaussian_kde(Cp_temp)
kernel_cdf = np.vectorize(lambda x: kernel.integrate_box_1d(-np.inf, x))
@@ -290,10 +322,12 @@ def main(aimName, evtName, getRV):
my_cdf_vects[:, i] = kernel_cdf(my_cdf_x) # Takes too long to evaluate
my_cdf_x_range[:, i] = [min(Cp_temp), max(Cp_temp)]
- '''
- my_cdf_vects[:, i], my_cdf_x_range[:, i] = getCDF(Cp_norm[:, selected_taps[i] - 1])
+ """
+ my_cdf_vects[:, i], my_cdf_x_range[:, i] = getCDF(
+ Cp_norm[:, selected_taps[i] - 1]
+ )
- print(" - Elapsed time: {:.1f} seconds.\n".format(time.time() - t_init))
+ print(f' - Elapsed time: {time.time() - t_init:.1f} seconds.\n') # noqa: T201
# Simulation of Gaussian Stochastic wind force coefficients
@@ -302,68 +336,68 @@ def main(aimName, evtName, getRV):
# ------------------------------------------------------
iterm_json = {}
- iterm_json["selected_taps"] = selected_taps
- iterm_json["ms"] = ms
- iterm_json["V_H"] = V_H
- iterm_json["T_full"] = T_full
- iterm_json["Cp_norm"] = Cp_norm
+ iterm_json['selected_taps'] = selected_taps
+ iterm_json['ms'] = ms
+ iterm_json['V_H'] = V_H
+ iterm_json['T_full'] = T_full
+ iterm_json['Cp_norm'] = Cp_norm
# iterm_json["Tw"] = Tw
# iterm_json["overlap"] = overlap
# iterm_json["nover"] = nover
- iterm_json["dt"] = dt
+ iterm_json['dt'] = dt
# iterm_json["fs"] = fs
# iterm_json["N_t"] = N_t
- iterm_json["fcut_sc"] = fcut_sc
- iterm_json["Vref"] = Vref
- iterm_json["Cp_std"] = Cp_std
- iterm_json["Cp_mean"] = Cp_mean
+ iterm_json['fcut_sc'] = fcut_sc
+ iterm_json['Vref'] = Vref
+ iterm_json['Cp_std'] = Cp_std
+ iterm_json['Cp_mean'] = Cp_mean
# iterm_json["s_target"] = s_target
- iterm_json["f_target"] = f_target
- iterm_json["pressureData"] = pressure_data
- iterm_json["length"] = unitLength
- iterm_json["time"] = unitTime
- iterm_json["V"] = V
- iterm_json["D1"] = D1
- iterm_json["SpeN"] = SpeN
- iterm_json["my_cdf_vects"] = my_cdf_vects
- iterm_json["my_cdf_x_range"] = my_cdf_x_range
+ iterm_json['f_target'] = f_target
+ iterm_json['pressureData'] = pressure_data
+ iterm_json['length'] = unitLength
+ iterm_json['time'] = unitTime
+ iterm_json['V'] = V
+ iterm_json['D1'] = D1
+ iterm_json['SpeN'] = SpeN
+ iterm_json['my_cdf_vects'] = my_cdf_vects
+ iterm_json['my_cdf_x_range'] = my_cdf_x_range
#
# save into a file
#
- if not os.path.exists("../input_File"):
- os.makedirs("../input_File")
- sio.savemat('../input_File/POD_Cp.mat', iterm_json)
+ if not os.path.exists('../input_File'): # noqa: PTH110
+ os.makedirs('../input_File') # noqa: PTH103
+ sio.savemat('../input_File/POD_Cp.mat', iterm_json) # noqa: F405
file_loaded = False
else:
- iterm_json = sio.loadmat('../input_File/POD_Cp.mat')
- selected_taps = np.squeeze(iterm_json["selected_taps"])
- ms = np.squeeze(iterm_json["ms"])
- V_H = np.squeeze(iterm_json["V_H"])
- T_full = np.squeeze(iterm_json["T_full"])
- Cp_norm = np.squeeze(iterm_json["Cp_norm"])
+ iterm_json = sio.loadmat('../input_File/POD_Cp.mat') # noqa: F405
+ selected_taps = np.squeeze(iterm_json['selected_taps'])
+ ms = np.squeeze(iterm_json['ms'])
+ V_H = np.squeeze(iterm_json['V_H']) # noqa: N806
+ T_full = np.squeeze(iterm_json['T_full']) # noqa: N806
+ Cp_norm = np.squeeze(iterm_json['Cp_norm']) # noqa: N806
# Tw =np.squeeze(iterm_json["Tw"])
# overlap =np.squeeze(iterm_json["overlap"])
# nover =np.squeeze(iterm_json["nover"])
- dt = np.squeeze(iterm_json["dt"])
+ dt = np.squeeze(iterm_json['dt'])
# fs =np.squeeze(iterm_json["fs"])
# N_t =np.squeeze(iterm_json["N_t"])
- fcut_sc = np.squeeze(iterm_json["fcut_sc"])
+ fcut_sc = np.squeeze(iterm_json['fcut_sc'])
# s_target =np.squeeze(iterm_json["s_target"])
- f_target = np.squeeze(iterm_json["f_target"])
- Vref = np.squeeze(iterm_json["Vref"])
- Cp_std = np.squeeze(iterm_json["Cp_std"])
- Cp_mean = np.squeeze(iterm_json["Cp_mean"])
- unitLength = np.squeeze(iterm_json["length"])
- unitTime = np.squeeze(iterm_json["time"])
- V = np.squeeze(iterm_json["V"])
- D1 = iterm_json["D1"]
- SpeN = np.squeeze(iterm_json["SpeN"])
- my_cdf_vects = np.squeeze(iterm_json["my_cdf_vects"])
- my_cdf_x_range = np.squeeze(iterm_json["my_cdf_x_range"])
+ f_target = np.squeeze(iterm_json['f_target'])
+ Vref = np.squeeze(iterm_json['Vref']) # noqa: N806
+ Cp_std = np.squeeze(iterm_json['Cp_std']) # noqa: N806
+ Cp_mean = np.squeeze(iterm_json['Cp_mean']) # noqa: N806
+ unitLength = np.squeeze(iterm_json['length']) # noqa: N806
+ unitTime = np.squeeze(iterm_json['time']) # noqa: N806
+ V = np.squeeze(iterm_json['V']) # noqa: N806
+ D1 = iterm_json['D1'] # noqa: N806
+ SpeN = np.squeeze(iterm_json['SpeN']) # noqa: N806
+ my_cdf_vects = np.squeeze(iterm_json['my_cdf_vects'])
+ my_cdf_x_range = np.squeeze(iterm_json['my_cdf_x_range'])
do_parallel = False
file_loaded = True
@@ -372,82 +406,115 @@ def main(aimName, evtName, getRV):
selected_taps = np.arange(0, Cp_norm.shape[0])
f_full = f_target[0:] # don't exclude freq = 0 Hz
- f_vH = (V_H / Vref) * f_full # scaledfreq.(Hz)
- V_vH = V # scaled eigenmodes
- D_vH = (V_H / Vref) ** 3 * D1 # scaled eigenvalues
- theta_vH = np.arctan2(np.imag(V_vH), np.real(V_vH)) # scaled theta
+ f_vH = (V_H / Vref) * f_full # scaledfreq.(Hz) # noqa: N806
+ V_vH = V # scaled eigenmodes # noqa: N806
+ D_vH = (V_H / Vref) ** 3 * D1 # scaled eigenvalues # noqa: N806
+ theta_vH = np.arctan2(np.imag(V_vH), np.real(V_vH)) # scaled theta # noqa: N806
f_inc = 1 / T_full # freq.increment(Hz)
- N_f = round(T_full * np.squeeze(fcut_sc)) + 1 # number of time points
+ # number of time points
+ N_f = round(T_full * np.squeeze(fcut_sc)) + 1 # noqa: N806
- N_t = round(T_full / dt) # number of time points
- fvec = np.arange(0, f_inc * (N_f), f_inc) # frequency line
+ N_t = round(T_full / dt) # number of time points # noqa: N806
+ fvec = np.arange(0, f_inc * (N_f), f_inc) # frequency line # noqa: F841
t_vec_sc = np.linspace(0, dt * N_t, N_t) # time line
f = f_vH[0:SpeN] # frequencies from the decomposition upto SpeN points(Hz)
nf_dir = np.arange(tap) # vector number of components
- Nsim = 1 # Number of realizations to be generated
+ Nsim = 1 # Number of realizations to be generated # noqa: N806
seeds = np.arange(seed, Nsim + seed) # Set seeds for reproducibility
#
- # Creating Gaussian Relizations
+ # Creating Gaussian Realizations
#
- print("Creating Gaussian Realizations");
+ print('Creating Gaussian Realizations') # noqa: T201
t_init = time.time()
- CP_sim = np.zeros((len(seeds), tap, N_t))
+ CP_sim = np.zeros((len(seeds), tap, N_t)) # noqa: N806
for seed_num in range(len(seeds)):
t_init = time.time()
- F_jzm = simulation_gaussian(tap, N_t, V_vH, D_vH, theta_vH, nf_dir, N_f, f_inc, f, l_mo, t_vec_sc, SpeN, V_H,
- Vref, seeds, seed_num)
- CP_sim[seed_num, :, :] = F_jzm # zero-mean force coefficient time series (simulation)
-
- print(" - Elapsed time: {:.1f} seconds.\n".format(time.time() - t_init))
+ F_jzm = simulation_gaussian( # noqa: N806
+ tap,
+ N_t,
+ V_vH,
+ D_vH,
+ theta_vH,
+ nf_dir,
+ N_f,
+ f_inc,
+ f,
+ l_mo,
+ t_vec_sc,
+ SpeN,
+ V_H,
+ Vref,
+ seeds,
+ seed_num,
+ )
+ CP_sim[seed_num, :, :] = (
+ F_jzm # zero-mean force coefficient time series (simulation)
+ )
+
+ print(f' - Elapsed time: {time.time() - t_init:.1f} seconds.\n') # noqa: T201
#
- # Creating Non-Gaussian Relizations
+ # Creating Non-Gaussian Realizations
#
- print("Creating NonGaussian Realizations");
+ print('Creating NonGaussian Realizations') # noqa: T201
if do_parallel:
- Cp_nongauss_kernel = np.zeros((tap, CP_sim.shape[2], len(seeds)))
- print("Running {} simulations in parallel".format(tap))
+ Cp_nongauss_kernel = np.zeros((tap, CP_sim.shape[2], len(seeds))) # noqa: N806
+ print(f'Running {tap} simulations in parallel') # noqa: T201
tmp = time.time()
iterables = (
- (Cp_norm[:, selected_taps[i] - 1], CP_sim[seed_num, i, :], nl, nu, my_cdf_vects[:, i], my_cdf_x_range[:, i]) for
- i in range(tap))
+ (
+ Cp_norm[:, selected_taps[i] - 1],
+ CP_sim[seed_num, i, :],
+ nl,
+ nu,
+ my_cdf_vects[:, i],
+ my_cdf_x_range[:, i],
+ )
+ for i in range(tap)
+ )
try:
result_objs = list(pool.starmap(genCP, iterables))
- print(" - Elapsed time: {:.3f} seconds.\n".format(time.time() - tmp))
+ print(f' - Elapsed time: {time.time() - tmp:.3f} seconds.\n') # noqa: T201
except KeyboardInterrupt:
- print("Ctrl+c received, terminating and joining pool.")
+ print('Ctrl+c received, terminating and joining pool.') # noqa: T201
try:
- self.pool.shutdown()
- except Exception:
- sys.exit()
+ self.pool.shutdown() # noqa: F405
+ except Exception: # noqa: BLE001
+ sys.exit() # noqa: F405
- Cp_nongauss_kernel = np.zeros((tap, CP_sim.shape[2], len(seeds)))
+ Cp_nongauss_kernel = np.zeros((tap, CP_sim.shape[2], len(seeds))) # noqa: N806
Cp_nongauss_kernel[:, :, 0] = np.array(result_objs)
-
else:
- Cp_nongauss_kernel = np.zeros((tap, CP_sim.shape[2], len(seeds)))
+ Cp_nongauss_kernel = np.zeros((tap, CP_sim.shape[2], len(seeds))) # noqa: N806
- print("Running {} simulations in series".format(tap))
+ print(f'Running {tap} simulations in series') # noqa: T201
tmp = time.time()
for seed_num in range(len(seeds)): # always 1
for i in range(tap):
- Cp_nongauss_kernel[i, :, seed_num] = genCP(Cp_norm[:, selected_taps[i] - 1], CP_sim[seed_num, i, :], nl,
- nu, my_cdf_vects[:, i], my_cdf_x_range[:, i])
-
- print(" - Elapsed time: {:.3f} seconds.\n".format(time.time() - tmp))
-
- Cp_std_tmp = Cp_std[selected_taps - 1][:, np.newaxis, np.newaxis]
- Cp_mean_tmp = Cp_mean[selected_taps - 1][:, np.newaxis, np.newaxis]
- Cp_nongauss = np.transpose(Cp_nongauss_kernel, (0, 2, 1)) * np.tile(Cp_std_tmp, (1, len(seeds), N_t)) + np.tile(
- Cp_mean_tmp, (1, len(seeds), N_t)) # destandardize the time series
+ Cp_nongauss_kernel[i, :, seed_num] = genCP(
+ Cp_norm[:, selected_taps[i] - 1],
+ CP_sim[seed_num, i, :],
+ nl,
+ nu,
+ my_cdf_vects[:, i],
+ my_cdf_x_range[:, i],
+ )
+
+ print(f' - Elapsed time: {time.time() - tmp:.3f} seconds.\n') # noqa: T201
+
+ Cp_std_tmp = Cp_std[selected_taps - 1][:, np.newaxis, np.newaxis] # noqa: N806
+ Cp_mean_tmp = Cp_mean[selected_taps - 1][:, np.newaxis, np.newaxis] # noqa: N806
+ Cp_nongauss = np.transpose(Cp_nongauss_kernel, (0, 2, 1)) * np.tile( # noqa: N806
+ Cp_std_tmp, (1, len(seeds), N_t)
+ ) + np.tile(Cp_mean_tmp, (1, len(seeds), N_t)) # destandardize the time series
# Convert to Full Scale Pressure time series
# P_full=Cp_nongauss*(1/2)*air_dens*V_H**2 # Net Pressure values in full scale (Pa)
@@ -459,100 +526,100 @@ def main(aimName, evtName, getRV):
# Save Results
#
- print("Saving results")
+ print('Saving results') # noqa: T201
- pressure_data = iterm_json["pressureData"]
+ pressure_data = iterm_json['pressureData']
new_json = {}
# new_json["period"] = Td*ms*Vref/V_H
- new_json["period"] = t_vec_sc[-1]
- new_json["frequency"] = 1 / (t_vec_sc[1] - t_vec_sc[0])
+ new_json['period'] = t_vec_sc[-1]
+ new_json['frequency'] = 1 / (t_vec_sc[1] - t_vec_sc[0])
# new_json["windSpeed"] =float(pressure_data["windSpeed"])
- new_json["windSpeed"] = float(evt_data["windSpeed"])
+ new_json['windSpeed'] = float(evt_data['windSpeed'])
-
- new_json["units"] = {}
- new_json["units"]["length"] = str(np.squeeze(unitLength))
- new_json["units"]["time"] = str(np.squeeze(unitTime))
+ new_json['units'] = {}
+ new_json['units']['length'] = str(np.squeeze(unitLength))
+ new_json['units']['time'] = str(np.squeeze(unitTime))
if file_loaded:
- new_json["breadth"] = float(pressure_data["breadth"][0][0][0][0] * ms)
- new_json["depth"] = float(pressure_data["depth"][0][0][0][0] * ms)
- new_json["height"] = float(pressure_data["height"][0][0][0][0] * ms)
+ new_json['breadth'] = float(pressure_data['breadth'][0][0][0][0] * ms)
+ new_json['depth'] = float(pressure_data['depth'][0][0][0][0] * ms)
+ new_json['height'] = float(pressure_data['height'][0][0][0][0] * ms)
new_taps = []
- for taps in pressure_data["tapLocations"][0][0][0]:
- if taps["id"][0][0] in selected_taps:
+ for taps in pressure_data['tapLocations'][0][0][0]:
+ if taps['id'][0][0] in selected_taps:
tmp = {}
- tmp["id"] = int(taps["id"][0][0])
- tmp["xLoc"] = float(taps["xLoc"][0][0]) * ms
- tmp["yLoc"] = float(taps["yLoc"][0][0]) * ms
- tmp["face"] = int(taps["face"][0][0])
+ tmp['id'] = int(taps['id'][0][0])
+ tmp['xLoc'] = float(taps['xLoc'][0][0]) * ms
+ tmp['yLoc'] = float(taps['yLoc'][0][0]) * ms
+ tmp['face'] = int(taps['face'][0][0])
new_taps += [tmp]
+ elif filename.endswith('.mat'):
+ new_json['breadth'] = float(pressure_data['breadth'][0] * ms)
+ new_json['depth'] = float(pressure_data['depth'][0] * ms)
+ new_json['height'] = float(pressure_data['height'][0] * ms)
+ new_taps = []
+ for taps in pressure_data['tapLocations']:
+ if taps['id'] in selected_taps:
+ tmp = {}
+ tmp['id'] = int(taps['id'][0][0])
+ tmp['xLoc'] = float(taps['xLoc'][0][0]) * ms
+ tmp['yLoc'] = float(taps['yLoc'][0][0]) * ms
+ tmp['face'] = int(taps['face'][0][0])
+ new_taps += [tmp]
else:
- if filename.endswith('.mat'):
- new_json["breadth"] = float(pressure_data["breadth"][0] * ms)
- new_json["depth"] = float(pressure_data["depth"][0] * ms)
- new_json["height"] = float(pressure_data["height"][0] * ms)
- new_taps = []
- for taps in pressure_data["tapLocations"]:
- if taps["id"] in selected_taps:
- tmp = {}
- tmp["id"] = int(taps["id"][0][0])
- tmp["xLoc"] = float(taps["xLoc"][0][0]) * ms
- tmp["yLoc"] = float(taps["yLoc"][0][0]) * ms
- tmp["face"] = int(taps["face"][0][0])
- new_taps += [tmp];
- else:
- new_json["breadth"] = float(pressure_data["breadth"] * ms)
- new_json["depth"] = float(pressure_data["depth"] * ms)
- new_json["height"] = float(pressure_data["height"] * ms)
- new_taps = []
- for taps in pressure_data["tapLocations"]:
- if taps["id"] in selected_taps:
- tmp = {}
- tmp["id"] = int(taps["id"])
- tmp["xLoc"] = float(taps["xLoc"]) * ms
- tmp["yLoc"] = float(taps["yLoc"]) * ms
- tmp["face"] = int(taps["face"])
- new_taps += [tmp];
+ new_json['breadth'] = float(pressure_data['breadth'] * ms)
+ new_json['depth'] = float(pressure_data['depth'] * ms)
+ new_json['height'] = float(pressure_data['height'] * ms)
+ new_taps = []
+ for taps in pressure_data['tapLocations']:
+ if taps['id'] in selected_taps:
+ tmp = {}
+ tmp['id'] = int(taps['id'])
+ tmp['xLoc'] = float(taps['xLoc']) * ms
+ tmp['yLoc'] = float(taps['yLoc']) * ms
+ tmp['face'] = int(taps['face'])
+ new_taps += [tmp]
new_pressures = []
for i in range(len(selected_taps)):
tmp = {}
- tmp["id"] = int(selected_taps[i])
- tmp["data"] = Cp_nongauss[i, 0, :].tolist()
+ tmp['id'] = int(selected_taps[i])
+ tmp['data'] = Cp_nongauss[i, 0, :].tolist()
new_pressures += [tmp]
- new_json["pressureCoefficients"] = new_pressures
- new_json["tapLocations"] = new_taps
+ new_json['pressureCoefficients'] = new_pressures
+ new_json['tapLocations'] = new_taps
# some dummy values that will not be used in the analysis
- new_json["pitch"] = 0
- new_json["roofType"] = "flat"
- new_json["incidenceAngle"] = 0
+ new_json['pitch'] = 0
+ new_json['roofType'] = 'flat'
+ new_json['incidenceAngle'] = 0
#
# %% Plots for verification of code
#
- with open('tmpSimCenterLowRiseTPU.json', 'w', encoding='utf-8') as f:
+ with open('tmpSimCenterLowRiseTPU.json', 'w', encoding='utf-8') as f: # noqa: PTH123
json.dump(new_json, f)
# curScriptPath = abspath(getsourcefile(lambda:0))
- curScriptPath = os.path.realpath(__file__)
- creatEVENTDir = os.path.dirname(os.path.dirname(curScriptPath))
+ curScriptPath = os.path.realpath(__file__) # noqa: N806
+ creatEVENTDir = os.path.dirname(os.path.dirname(curScriptPath)) # noqa: PTH120, N806
- siteFile = os.path.join(creatEVENTDir, 'LowRiseTPU', 'LowRiseTPU')
+ siteFile = os.path.join(creatEVENTDir, 'LowRiseTPU', 'LowRiseTPU') # noqa: PTH118, N806
- command_line = f"{siteFile} \"--filenameAIM\" {aimName} \"--filenameEVENT\" {evtName}"
- print("Processing pressure->force:")
- print(command_line)
+ command_line = (
+ f'{siteFile} "--filenameAIM" {aimName} "--filenameEVENT" {evtName}'
+ )
+ print('Processing pressure->force:') # noqa: T201
+ print(command_line) # noqa: T201
# run command
try:
- os.system(command_line)
- except:
+ os.system(command_line) # noqa: S605
+ except: # noqa: E722
err_exit('Failed to convert pressure to force.')
# t_sc = ms*(Vref/V_H); #scale wind tunnel time series to compare
@@ -560,7 +627,7 @@ def main(aimName, evtName, getRV):
# #
# # Pressure coefficients (selected tap 10)
#
- '''
+ """
t_sc = ms*(Vref/V_H);
import matplotlib.pyplot as plt
@@ -589,36 +656,38 @@ def main(aimName, evtName, getRV):
# plt.ylim([-1400,2000])
# plt.xlim([0,1000])
# plt.show()
- '''
+ """ # noqa: W291
-def genCP(Cp_temp, Cp_sim_temp, nl, nu, my_cdf_vect, my_cdf_x_range):
+def genCP(Cp_temp, Cp_sim_temp, nl, nu, my_cdf_vect, my_cdf_x_range): # noqa: N802, N803, D103
#
# combining the loops to directly send temp instead of dist_kde
#
- # TODO; why double?
+ # TODO; why double? # noqa: TD002, TD004
- meanCp = np.mean(Cp_sim_temp)
- stdCp = np.std(Cp_sim_temp)
- F_vvv = (Cp_sim_temp - meanCp) / stdCp
+ meanCp = np.mean(Cp_sim_temp) # noqa: N806
+ stdCp = np.std(Cp_sim_temp) # noqa: N806
+ F_vvv = (Cp_sim_temp - meanCp) / stdCp # noqa: N806
- # cdf points from gaussian distribuition
+ # CDF points from Gaussian distribution
cdf_vvv = norm.cdf(F_vvv, 0, 1)
    # force the data to be bounded, due to numerical errors that can happen in Matlab when CDF ~0 or ~1
- cdf_vvv[cdf_vvv < 0.00001] = 0.00001
- cdf_vvv[cdf_vvv > 0.99999] = 0.99999
+ cdf_vvv[cdf_vvv < 0.00001] = 0.00001 # noqa: PLR2004
+ cdf_vvv[cdf_vvv > 0.99999] = 0.99999 # noqa: PLR2004
# map F_vvv into F_nongauss through inverse cdf of the mix distribution
- # TODO why single precision for cdf_vv?
+ # TODO why single precision for cdf_vv? # noqa: TD002, TD004
return paretotails_icdf(cdf_vvv, nl, nu, Cp_temp, my_cdf_vect, my_cdf_x_range)
-def getCDF(Cp_temp):
+def getCDF(Cp_temp): # noqa: N802, N803, D103
kernel = gaussian_kde(Cp_temp)
kernel_cdf = np.vectorize(lambda x: kernel.integrate_box_1d(-np.inf, x))
- my_cdf_x = np.linspace(min(Cp_temp), max(Cp_temp), 1000) # TODO is 1000 enough?
+ my_cdf_x = np.linspace(
+ min(Cp_temp), max(Cp_temp), 1000
+ ) # TODO is 1000 enough? # noqa: TD002, TD004
my_cdf_vects = kernel_cdf(my_cdf_x) # Takes too long to evaluate
my_cdf_x_range = [min(Cp_temp), max(Cp_temp)]
@@ -626,7 +695,7 @@ def getCDF(Cp_temp):
return my_cdf_vects, my_cdf_x_range
-def paretotails_icdf(pf, nl, nu, temp, my_cdf_vect, my_cdf_x):
+def paretotails_icdf(pf, nl, nu, temp, my_cdf_vect, my_cdf_x): # noqa: D103
#
# Pareto percentile
#
@@ -646,7 +715,7 @@ def paretotails_icdf(pf, nl, nu, temp, my_cdf_vect, my_cdf_x):
# lower pareto
#
idx1 = np.where(pf < nl)
- myX = -lower_temp
+ myX = -lower_temp # noqa: N806
c, loc, scal = genpareto.fit(myX, loc=np.min(myX))
mydist = genpareto(c=c, loc=loc, scale=scal)
@@ -659,9 +728,11 @@ def paretotails_icdf(pf, nl, nu, temp, my_cdf_vect, my_cdf_x):
my_cdf_x = np.linspace(my_cdf_x[0], my_cdf_x[1], my_cdf_vect.shape[0])
idx2 = np.where((pf >= nl) * (pf < nu)) # not to have duplicates in x
- unique_val, unique_id = np.unique(my_cdf_vect, return_index=True)
+ unique_val, unique_id = np.unique(my_cdf_vect, return_index=True) # noqa: F841
- kernel_icdf = interpolate.interp1d(my_cdf_vect[unique_id], my_cdf_x[unique_id], kind='cubic', bounds_error=False)
+ kernel_icdf = interpolate.interp1d(
+ my_cdf_vect[unique_id], my_cdf_x[unique_id], kind='cubic', bounds_error=False
+ )
icdf_vals[idx2] = kernel_icdf(pf[idx2])
@@ -670,7 +741,7 @@ def paretotails_icdf(pf, nl, nu, temp, my_cdf_vect, my_cdf_x):
#
idx3 = np.where(pf > nu)
- myX = upper_temp
+ myX = upper_temp # noqa: N806
c, loc, scal = genpareto.fit(myX, loc=np.min(myX))
mydist = genpareto(c=c, loc=loc, scale=scal)
@@ -678,7 +749,7 @@ def paretotails_icdf(pf, nl, nu, temp, my_cdf_vect, my_cdf_x):
return icdf_vals
- '''
+ """
# for verification
c = 0.1
r = genpareto.rvs(c, size=1000)
@@ -707,41 +778,47 @@ def paretotails_icdf(pf, nl, nu, temp, my_cdf_vect, my_cdf_x):
plt.show()
plt.hist(-myX)
plt.show()
- '''
+ """
- return kernel, gpareto_param_lower, gpareto_param_upper
+ return kernel, gpareto_param_lower, gpareto_param_upper # noqa: F405
-def cpsd_matlab(Components1, Components2, wind_size, nover, nfft, fp):
+def cpsd_matlab(Components1, Components2, wind_size, nover, nfft, fp): # noqa: N803, D103
window = windows.hann(int(wind_size))
ncombs1 = Components1.shape[1]
ncombs2 = Components2.shape[1]
- nSampPoints = int(nfft / 2 + 1)
+ nSampPoints = int(nfft / 2 + 1) # noqa: N806
- if nfft < 2500:
- print("ERROR: time series is too short. Please put a longer duration")
- exit(-1)
+ if nfft < 2500: # noqa: PLR2004
+ print('ERROR: time series is too short. Please provide a longer duration') # noqa: T201
+ exit(-1) # noqa: PLR1722
s_target = np.zeros((ncombs1, ncombs2, nSampPoints), dtype='complex_')
for nc2 in range(ncombs2):
for nc1 in range(ncombs1):
- [f_target, s_tmp] = csd(Components1[:, nc1], Components2[:, nc2], window=window, noverlap=nover, nfft=nfft,
- fs=fp)
+ [f_target, s_tmp] = csd(
+ Components1[:, nc1],
+ Components2[:, nc2],
+ window=window,
+ noverlap=nover,
+ nfft=nfft,
+ fs=fp,
+ )
s_target[nc1, nc2, :] = s_tmp # *4/np.pi
return s_target, f_target
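cpsd_matlab wraps scipy.signal.csd column by column; the underlying call in isolation looks like this (channel data, sampling rate, and nfft are illustrative):

import numpy as np
from scipy.signal import csd, windows

fs, nfft = 100.0, 1024
x, y = np.random.randn(4096), np.random.randn(4096)
f, Pxy = csd(
    x, y, fs=fs, window=windows.hann(nfft), noverlap=nfft // 2, nfft=nfft
)
# Pxy is complex with nfft / 2 + 1 frequency points, matching nSampPoints above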
-def perform_POD(s_target, f_target, ncomp, l_mo, pool):
- S_F = s_target[:, :, 0:] # do not exclude freq = 0 Hz
+def perform_POD(s_target, f_target, ncomp, l_mo, pool): # noqa: N802, D103
+ S_F = s_target[:, :, 0:] # do not exclude freq = 0 Hz # noqa: N806
f_full = f_target[0:] # do not exclude freq = 0 Hz
- SpeN = f_full.shape[0] # exclude freq = 0 Hz
+ SpeN = f_full.shape[0] # exclude freq = 0 Hz # noqa: N806
- Vs = np.zeros((ncomp, ncomp, SpeN), dtype='complex_')
- Ds = np.zeros((ncomp, ncomp, SpeN))
+ Vs = np.zeros((ncomp, ncomp, SpeN), dtype='complex_') # noqa: N806
+ Ds = np.zeros((ncomp, ncomp, SpeN)) # noqa: N806
#
# eigenvalue analysis in parallel
@@ -750,44 +827,62 @@ def perform_POD(s_target, f_target, ncomp, l_mo, pool):
try:
result_objs = list(pool.starmap(np.linalg.eig, iterables))
except MemoryError:
- err_exit("Low memory performing POD")
+ err_exit('Low memory performing POD')
except KeyboardInterrupt:
- print("Ctrl+c received, terminating and joining pool.")
+ print('Ctrl+c received, terminating and joining pool.') # noqa: T201
try:
- self.pool.shutdown()
- except Exception:
- sys.exit()
+ self.pool.shutdown() # noqa: F405
+ except Exception: # noqa: BLE001
+ sys.exit() # noqa: F405
for ii in range(SpeN):
- D_all = result_objs[ii][0]
- V_all = result_objs[ii][1]
+ D_all = result_objs[ii][0] # noqa: N806
+ V_all = result_objs[ii][1] # noqa: N806
ind = np.argsort(D_all)
Ds[:, :, ii] = np.real(np.diag(D_all[ind]))
Vs[:, :, ii] = V_all[:, ind]
# Truncation
- V = np.zeros((ncomp, l_mo, SpeN), dtype='complex_')
- D0 = np.zeros((l_mo, l_mo, SpeN))
+ V = np.zeros((ncomp, l_mo, SpeN), dtype='complex_') # noqa: N806
+ D0 = np.zeros((l_mo, l_mo, SpeN)) # noqa: N806
for tt in range(l_mo):
V[:, tt, :] = Vs[:, ncomp - 1 - tt, :]
D0[tt, tt, :] = Ds[ncomp - 1 - tt, ncomp - 1 - tt, :]
- D1 = np.zeros((l_mo, 1, SpeN))
+ D1 = np.zeros((l_mo, 1, SpeN)) # noqa: N806
for ii in range(SpeN):
D1[:, 0, ii] = np.diag(D0[:, :, ii])
return V, D1, SpeN
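perform_POD diagonalizes the cross-spectral matrix one frequency at a time and keeps the l_mo largest eigenpairs; the per-frequency step in isolation (random Hermitian test data, sizes illustrative):

import numpy as np

ncomp, nfreq, l_mo = 6, 128, 3
S = np.random.randn(ncomp, ncomp, nfreq) + 1j * np.random.randn(ncomp, ncomp, nfreq)
S = S + np.conj(np.transpose(S, (1, 0, 2)))  # make each frequency slice Hermitian

V = np.zeros((ncomp, l_mo, nfreq), dtype=complex)
D = np.zeros((l_mo, nfreq))
for k in range(nfreq):
    d, v = np.linalg.eig(S[:, :, k])  # eig, as above; eigh would also work for Hermitian slices
    keep = np.argsort(d.real)[-l_mo:][::-1]  # l_mo largest eigenvalues, largest first
    V[:, :, k] = v[:, keep]
    D[:, k] = d.real[keep]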
-def simulation_gaussian(ncomp, N_t, V_vH, D_vH, theta_vH, nf_dir, N_f, f_inc, f, l_mo, tvec, SpeN, V_H, vRef, seed,
- seed_num):
+def simulation_gaussian( # noqa: D103, PLR0913, PLR0917
+ ncomp,
+ N_t, # noqa: N803
+ V_vH, # noqa: N803
+ D_vH, # noqa: N803
+ theta_vH, # noqa: N803
+ nf_dir,
+ N_f, # noqa: N803
+ f_inc,
+ f,
+ l_mo,
+ tvec,
+ SpeN, # noqa: ARG001, N803
+ V_H, # noqa: N803
+ vRef, # noqa: N803
+ seed,
+ seed_num,
+):
#
# Set Seed
#
- folderName = os.path.basename(os.getcwd()) # Lets get n from workdir.n and add this to the seed
- sampNum = folderName.split(".")[-1]
+ folderName = os.path.basename( # noqa: PTH119, N806
+ os.getcwd() # noqa: PTH109
+ ) # Let's get n from workdir.n and add this to the seed
+ sampNum = folderName.split('.')[-1] # noqa: N806
if not sampNum.isnumeric():
np.random.seed(seed[seed_num])
@@ -798,49 +893,57 @@ def simulation_gaussian(ncomp, N_t, V_vH, D_vH, theta_vH, nf_dir, N_f, f_inc, f,
# Start the loop
#
- F_jzm = np.zeros((ncomp, N_t)) # force coefficients initialize matrix
+ # initialize the matrix of force coefficients
+ F_jzm = np.zeros((ncomp, N_t)) # noqa: N806
f_tmp = np.linspace(0, (N_f - 1) * f_inc, N_f)
for m in range(l_mo):
mo = m # current mode #
- Vmo = V_vH[nf_dir, mo, :] # eigenvector for mode mo
+ Vmo = V_vH[nf_dir, mo, :] # eigenvector for mode mo # noqa: N806
# Dmo = D_vH[mo, 0,:] # eigenvalue for mode mo
- Dmo = D_vH[mo, 0, :] + 1j * 0 # To avoid nan when calculating VDmo
+ # To avoid nan when calculating VDmo
+ Dmo = D_vH[mo, 0, :] + 1j * 0 # noqa: N806
thetmo = theta_vH[nf_dir, mo, :] # theta for mode mo
- VDmo = np.sqrt((V_H / vRef) ** 3) * np.abs(Vmo) * (
- np.ones((ncomp, 1)) * np.sqrt(Dmo)) # product of eigenvector X
+ VDmo = ( # noqa: N806
+ np.sqrt((V_H / vRef) ** 3)
+ * np.abs(Vmo)
+ * (np.ones((ncomp, 1)) * np.sqrt(Dmo))
+ ) # product of eigenvector X
# Generate random phase angle for each frequency SpeN
varth = (2 * np.pi) * np.random.random(size=(1, N_f))
# Loop over floors
# g_jm = np.zeros((N_t, ncomp),dtype = 'complex_')
- F_jm = np.zeros((ncomp, N_t))
+ F_jm = np.zeros((ncomp, N_t)) # noqa: N806
coef = np.sqrt(2) * np.sqrt(f_inc) * np.exp(1j * varth)
coef2 = np.exp(1j * ((mo + 1) / l_mo * f_inc) * tvec)
- fVDmo = interp1d(f, VDmo, kind='linear', fill_value="extrapolate")
- fthetmo = interp1d(f, thetmo, kind='linear', fill_value="extrapolate")
- fV_interp = np.abs(fVDmo(f_tmp))
+ fVDmo = interp1d(f, VDmo, kind='linear', fill_value='extrapolate') # noqa: N806
+ fthetmo = interp1d(f, thetmo, kind='linear', fill_value='extrapolate')
+ fV_interp = np.abs(fVDmo(f_tmp)) # noqa: N806
fthet_interp = np.exp((1j) * (fthetmo(f_tmp)))
for j in range(ncomp):
# l denotes a particular freq. point
# m denotes a particular mode
# j denotes a particular floor
- fVDmo = interp1d(f, VDmo[j, :], kind='linear', fill_value="extrapolate")
- fthetmo = interp1d(f, thetmo[j, :], kind='linear', fill_value="extrapolate")
+ fVDmo = interp1d(f, VDmo[j, :], kind='linear', fill_value='extrapolate') # noqa: N806
+ fthetmo = interp1d(
+ f, thetmo[j, :], kind='linear', fill_value='extrapolate'
+ )
- B_jm = np.zeros((N_t,), dtype='complex_')
+ B_jm = np.zeros((N_t,), dtype='complex_') # noqa: N806
B_jm[0:N_f] = coef * fV_interp[j, :] * fthet_interp[j, :]
g_jm = np.fft.ifft(B_jm) * N_t
F_jm[j, :] = np.real(g_jm * coef2)
- # TODO it is hard to tell whether they are similar or not
- F_jzm = F_jzm + F_jm # sum up F from different modes (zero - mean)
+ # TODO it is hard to tell whether they are similar or not # noqa: TD002, TD004
+ # sum up F from different modes (zero-mean)
+ F_jzm = F_jzm + F_jm # noqa: N806, PLR6104
return F_jzm
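The heart of simulation_gaussian is the spectral-representation step: modal amplitudes get a random phase per frequency, are padded to the record length, and one inverse FFT produces a zero-mean time series. A single-process sketch of that step (the amplitude array is made up):

import numpy as np

N_f, N_t, f_inc = 512, 2048, 0.05          # frequency points, time points, frequency step
amp = np.exp(-np.linspace(0, 5, N_f))      # stand-in one-sided spectral amplitudes

phase = 2 * np.pi * np.random.random(N_f)  # random phase per frequency
B = np.zeros(N_t, dtype=complex)
B[:N_f] = np.sqrt(2) * np.sqrt(f_inc) * amp * np.exp(1j * phase)

x = np.real(np.fft.ifft(B) * N_t)          # one sample realization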
@@ -849,32 +952,34 @@ def simulation_gaussian(ncomp, N_t, V_vH, D_vH, theta_vH, nf_dir, N_f, f_inc, f,
if __name__ == '__main__':
-
- inputArgs = sys.argv
+ inputArgs = sys.argv # noqa: N816, F405
# set filenames
- aimName = sys.argv[2]
- evtName = sys.argv[4]
+ aimName = sys.argv[2] # noqa: N816, F405
+ evtName = sys.argv[4] # noqa: N816, F405
- getRV = False;
- for myarg in sys.argv:
- if (myarg == "--getRV") or (myarg == "getRV"):
- getRV = True;
+ getRV = False # noqa: N816
+ for myarg in sys.argv: # noqa: F405
+ if (myarg == '--getRV') or (myarg == 'getRV'): # noqa: PLR1714
+ getRV = True # noqa: N816
if error_tag and getRV:
err_exit(
- "Failed to import module " + moduleName + " for wind load generator. Please check the python path in the preference")
+ 'Failed to import module '
+ + moduleName
+ + ' for wind load generator. Please check the Python path in the preferences'
+ )
# if getRV:
# aimName = aimName + ".sc"
try:
main(aimName, evtName, getRV)
- except Exception as err:
+ except Exception as err: # noqa: BLE001
import traceback
if getRV:
- err_exit(str(err) + "..." + str(traceback.format_exc()))
+ err_exit(str(err) + '...' + str(traceback.format_exc()))
else:
- err_exit(str(err) + "..." + str(traceback.format_exc()))
+ err_exit(str(err) + '...' + str(traceback.format_exc()))
diff --git a/modules/createEVENT/groundMotionIM/IntensityMeasureComputer.py b/modules/createEVENT/groundMotionIM/IntensityMeasureComputer.py
index eae589210..df764531c 100644
--- a/modules/createEVENT/groundMotionIM/IntensityMeasureComputer.py
+++ b/modules/createEVENT/groundMotionIM/IntensityMeasureComputer.py
@@ -1,5 +1,4 @@
-# -*- coding: utf-8 -*-
-#
+# # noqa: INP001, D100
# Copyright (c) 2022 Leland Stanford Junior University
# Copyright (c) 2022 The Regents of the University of California
#
@@ -38,28 +37,42 @@
# Kuanshi Zhong
#
-import argparse, json, sys, os, bisect
-import numpy as np
+import argparse
+import bisect
+import json
+import os
+import sys
from pathlib import Path
+
+import numpy as np
+import pandas as pd
from scipy.integrate import cumtrapz
from scipy.interpolate import interp1d
from scipy.stats.mstats import gmean
-import pandas as pd
-this_dir = Path(os.path.dirname(os.path.abspath(__file__))).resolve()
+
+this_dir = Path(os.path.dirname(os.path.abspath(__file__))).resolve() # noqa: PTH100, PTH120
main_dir = this_dir.parents[1]
sys.path.insert(0, str(main_dir / 'common'))
-from simcenter_common import *
-
-IM_TYPES = ['PeakGroundResponse','PseudoSpectrum','AriasIntensity','Duration','SpectralShape']
-IM_MAP = {'PeakGroundResponse': ['PGA', 'PGV', 'PGD'],
- 'PseudoSpectrum': ['PSA', 'PSV', 'PSD'],
- 'AriasIntensity': ['Ia'],
- 'Duration': ['DS575', 'DS595'],
- 'SpectralShape': ['SaRatio']}
-
-class IntensityMeasureComputer:
-
- def __init__(self, time_hist_dict=dict(), units=dict(), ampScaled=False):
+from simcenter_common import * # noqa: E402, F403
+
+IM_TYPES = [
+ 'PeakGroundResponse',
+ 'PseudoSpectrum',
+ 'AriasIntensity',
+ 'Duration',
+ 'SpectralShape',
+]
+IM_MAP = {
+ 'PeakGroundResponse': ['PGA', 'PGV', 'PGD'],
+ 'PseudoSpectrum': ['PSA', 'PSV', 'PSD'],
+ 'AriasIntensity': ['Ia'],
+ 'Duration': ['DS575', 'DS595'],
+ 'SpectralShape': ['SaRatio'],
+}
+
+
+class IntensityMeasureComputer: # noqa: D101
+ def __init__(self, time_hist_dict=dict(), units=dict(), ampScaled=False): # noqa: FBT002, B006, C408, ARG002, N803
self.time_hist_dict = time_hist_dict
self.units = units
self._define_constants()
@@ -69,44 +82,51 @@ def __init__(self, time_hist_dict=dict(), units=dict(), ampScaled=False):
from_acc_unit = units.get('acceleration')
else:
from_acc_unit = '{}/{}^2'.format(units['length'], units['time'])
- for cur_hist_name, cur_hist in self.time_hist_dict.items():
- cur_hist[2] = self.convert_accel_units(cur_hist[2], from_acc_unit).tolist()
+ for cur_hist_name, cur_hist in self.time_hist_dict.items(): # noqa: B007, PERF102
+ cur_hist[2] = self.convert_accel_units(
+ cur_hist[2], from_acc_unit
+ ).tolist()
# initialize intensity measure dict
self._init_intensity_measures()
def _define_constants(self):
-
- self.km_sec_square = ("km/sec/sec", "km/sec**2", "km/sec^2")
- self.m_sec_square = ("m/sec/sec", "m/sec**2", "m/sec^2")
- self.cm_sec_square = ("cm/sec/sec", "cm/sec**2", "cm/sec^2")
- self.mm_sec_square = ("mm/sec/sec", "mm/sec**2", "mm/sec^2")
- self.in_sec_square = ("inch/sec/sec", "inch/sec**2", "inch/sec^2","in/sec/sec", "in/sec**2", "in/sec^2")
- self.ft_sec_square = ("ft/sec/sec", "ft/sec**2", "ft/sec^2")
- self.mile_sec_square = ("mile/sec/sec", "mile/sec**2", "mile/sec^2")
+ self.km_sec_square = ('km/sec/sec', 'km/sec**2', 'km/sec^2')
+ self.m_sec_square = ('m/sec/sec', 'm/sec**2', 'm/sec^2')
+ self.cm_sec_square = ('cm/sec/sec', 'cm/sec**2', 'cm/sec^2')
+ self.mm_sec_square = ('mm/sec/sec', 'mm/sec**2', 'mm/sec^2')
+ self.in_sec_square = (
+ 'inch/sec/sec',
+ 'inch/sec**2',
+ 'inch/sec^2',
+ 'in/sec/sec',
+ 'in/sec**2',
+ 'in/sec^2',
+ )
+ self.ft_sec_square = ('ft/sec/sec', 'ft/sec**2', 'ft/sec^2')
+ self.mile_sec_square = ('mile/sec/sec', 'mile/sec**2', 'mile/sec^2')
self.g = 9.80665
self.inch = 0.0254
def _init_intensity_measures(self):
-
# response spectra
- self.periods = dict()
- self.disp_spectrum = dict()
- self.vel_spectrum = dict()
- self.acc_spectrum = dict()
- self.psv = dict()
- self.psa = dict()
+ self.periods = dict() # noqa: C408
+ self.disp_spectrum = dict() # noqa: C408
+ self.vel_spectrum = dict() # noqa: C408
+ self.acc_spectrum = dict() # noqa: C408
+ self.psv = dict() # noqa: C408
+ self.psa = dict() # noqa: C408
# peak ground responses
- self.pga = dict()
- self.pgv = dict()
- self.pgd = dict()
+ self.pga = dict() # noqa: C408
+ self.pgv = dict() # noqa: C408
+ self.pgd = dict() # noqa: C408
# arias intensity
- self.i_a = dict()
+ self.i_a = dict() # noqa: C408
# significant duration
- self.ds575 = dict()
- self.ds595 = dict()
+ self.ds575 = dict() # noqa: C408
+ self.ds595 = dict() # noqa: C408
# saratio
- self.saratio = dict()
+ self.saratio = dict() # noqa: C408
# all
self.intensity_measures = {
@@ -122,7 +142,7 @@ def _init_intensity_measures(self):
'Ia': self.i_a,
'DS575': self.ds575,
'DS595': self.ds595,
- 'SaRatio': self.saratio
+ 'SaRatio': self.saratio,
}
# internal units
@@ -139,12 +159,11 @@ def _init_intensity_measures(self):
'Ia': 'cmps',
'DS575': 'sec',
'DS595': 'sec',
- 'SaRatio': 'scalar'
+ 'SaRatio': 'scalar',
}
- def convert_accel_units(self, acceleration, from_, to_='cm/sec/sec'):
- """
- Converts acceleration from/to different units
- """
+
+ def convert_accel_units(self, acceleration, from_, to_='cm/sec/sec'): # noqa: C901, PLR0911, PLR0912
+ """Converts acceleration from/to different units""" # noqa: D400, D401
acceleration = np.asarray(acceleration)
if from_ == 'g':
if to_ == 'g':
@@ -181,7 +200,7 @@ def convert_accel_units(self, acceleration, from_, to_='cm/sec/sec'):
return acceleration * 1000.0 / (12.0 * self.inch)
if to_ in self.mile_sec_square:
return acceleration * 1000.0 / (5280.0 * 12.0 * self.inch)
-
+
elif from_ in self.m_sec_square:
if to_ == 'g':
return acceleration / self.g
@@ -199,7 +218,7 @@ def convert_accel_units(self, acceleration, from_, to_='cm/sec/sec'):
return acceleration / (12.0 * self.inch)
if to_ in self.mile_sec_square:
return acceleration / (5280.0 * 12.0 * self.inch)
-
+
elif from_ in self.cm_sec_square:
if to_ == 'g':
return acceleration / 100.0 / self.g
@@ -290,10 +309,9 @@ def convert_accel_units(self, acceleration, from_, to_='cm/sec/sec'):
if to_ in self.mile_sec_square:
return acceleration
- raise ValueError(f"Unrecognized unit {from_}")
-
- def compute_response_spectrum(self, periods=[], damping=0.05, im_units=dict()):
+ raise ValueError(f'Unrecognized unit {from_}') # noqa: DOC501, EM102, TRY003
+ def compute_response_spectrum(self, periods=[], damping=0.05, im_units=dict()): # noqa: B006, C408, D102
if len(im_units) == 0:
unit_factor_vspec = 1.0
unit_factor_aspec = 1.0
@@ -301,17 +319,29 @@ def compute_response_spectrum(self, periods=[], damping=0.05, im_units=dict()):
unit_factor_psv = 1.0
unit_factor_psd = 1.0
else:
- unit_factor_vspec = get_unit_factor(self.im_units.get('VelocitySpectrum','cmps'), im_units.get('VelocitySpectrum','cmps'))
- unit_factor_aspec = get_unit_factor(self.im_units.get('AccelerationSpectrum','g'), im_units.get('AccelerationSpectrum','g'))
- unit_factor_psa = get_unit_factor(self.im_units.get('PSA','g'), im_units.get('PSA','g'))
- unit_factor_psv = get_unit_factor(self.im_units.get('PSV','cmps'), im_units.get('PSV','cmps'))
- unit_factor_psd = get_unit_factor(self.im_units.get('PSD','cm'), im_units.get('PSD','cm'))
-
+ unit_factor_vspec = get_unit_factor(
+ self.im_units.get('VelocitySpectrum', 'cmps'),
+ im_units.get('VelocitySpectrum', 'cmps'),
+ )
+ unit_factor_aspec = get_unit_factor(
+ self.im_units.get('AccelerationSpectrum', 'g'),
+ im_units.get('AccelerationSpectrum', 'g'),
+ )
+ unit_factor_psa = get_unit_factor(
+ self.im_units.get('PSA', 'g'), im_units.get('PSA', 'g')
+ )
+ unit_factor_psv = get_unit_factor(
+ self.im_units.get('PSV', 'cmps'), im_units.get('PSV', 'cmps')
+ )
+ unit_factor_psd = get_unit_factor(
+ self.im_units.get('PSD', 'cm'), im_units.get('PSD', 'cm')
+ )
+
# note this function assumes acceleration in cm/sec/sec
# psa is in g, psv in cm/sec
- if len(periods)==0:
+ if len(periods) == 0:
return
- elif type(periods)==list:
+ elif type(periods) == list: # noqa: RET505, E721
periods = np.array(periods)
num_periods = len(periods)
@@ -321,122 +351,194 @@ def compute_response_spectrum(self, periods=[], damping=0.05, im_units=dict()):
num_steps = len(ground_acc)
        # discretize
dt_disc = 0.005
- num_steps_disc = int(np.floor(num_steps*dt/dt_disc))
- f = interp1d([dt*x for x in range(num_steps)], ground_acc, bounds_error=False, fill_value=(ground_acc[0], ground_acc[-1]))
- tmp_time = [dt_disc*x for x in range(num_steps_disc)]
+ num_steps_disc = int(np.floor(num_steps * dt / dt_disc))
+ f = interp1d(
+ [dt * x for x in range(num_steps)],
+ ground_acc,
+ bounds_error=False,
+ fill_value=(ground_acc[0], ground_acc[-1]),
+ )
+ tmp_time = [dt_disc * x for x in range(num_steps_disc)]
ground_acc = f(tmp_time)
# circular frequency, damping, and stiffness terms
- omega = (2*np.pi)/periods
- cval = damping*2*omega
- kval = ((2*np.pi)/periods)**2
+ omega = (2 * np.pi) / periods
+ cval = damping * 2 * omega
+ kval = ((2 * np.pi) / periods) ** 2
# Newmark-Beta
accel = np.zeros([num_steps_disc, num_periods])
vel = np.zeros([num_steps_disc, num_periods])
disp = np.zeros([num_steps_disc, num_periods])
a_t = np.zeros([num_steps_disc, num_periods])
- accel[0, :] =(-ground_acc[0] - (cval * vel[0, :])) - (kval * disp[0, :])
+ accel[0, :] = (-ground_acc[0] - (cval * vel[0, :])) - (kval * disp[0, :])
for j in range(1, num_steps_disc):
- delta_acc = ground_acc[j]-ground_acc[j-1]
- delta_d2u = (-delta_acc-dt_disc*cval*accel[j-1,:]-dt_disc*kval*(vel[j-1,:]+0.5*dt_disc*accel[j-1,:]))/ \
- (1.0+0.5*dt_disc*cval+0.25*dt_disc**2*kval)
- delta_du = dt_disc*accel[j-1,:]+0.5*dt_disc*delta_d2u
- delta_u = dt_disc*vel[j-1,:]+0.5*dt_disc**2*accel[j-1,:]+0.25*dt_disc**2*delta_d2u
- accel[j,:] = delta_d2u+accel[j-1,:]
- vel[j,:] = delta_du+vel[j-1,:]
- disp[j,:] = delta_u+disp[j-1,:]
+ delta_acc = ground_acc[j] - ground_acc[j - 1]
+ delta_d2u = (
+ -delta_acc
+ - dt_disc * cval * accel[j - 1, :]
+ - dt_disc
+ * kval
+ * (vel[j - 1, :] + 0.5 * dt_disc * accel[j - 1, :])
+ ) / (1.0 + 0.5 * dt_disc * cval + 0.25 * dt_disc**2 * kval)
+ delta_du = dt_disc * accel[j - 1, :] + 0.5 * dt_disc * delta_d2u
+ delta_u = (
+ dt_disc * vel[j - 1, :]
+ + 0.5 * dt_disc**2 * accel[j - 1, :]
+ + 0.25 * dt_disc**2 * delta_d2u
+ )
+ accel[j, :] = delta_d2u + accel[j - 1, :]
+ vel[j, :] = delta_du + vel[j - 1, :]
+ disp[j, :] = delta_u + disp[j - 1, :]
a_t[j, :] = ground_acc[j] + accel[j, :]
# collect data
- self.disp_spectrum.update({cur_hist_name: np.ndarray.tolist(unit_factor_psd*np.max(np.fabs(disp), axis=0))})
- self.vel_spectrum.update({cur_hist_name: np.ndarray.tolist(unit_factor_vspec*np.max(np.fabs(vel), axis=0))})
- self.acc_spectrum.update({cur_hist_name: np.ndarray.tolist(unit_factor_aspec*np.max(np.fabs(a_t), axis=0)/100.0/self.g)})
- self.psv.update({cur_hist_name: np.ndarray.tolist(unit_factor_psv*omega*np.max(np.fabs(disp), axis=0))})
- self.psa.update({cur_hist_name: np.ndarray.tolist(unit_factor_psa*omega**2*np.max(np.fabs(disp), axis=0)/100.0/self.g)})
+ self.disp_spectrum.update(
+ {
+ cur_hist_name: np.ndarray.tolist(
+ unit_factor_psd * np.max(np.fabs(disp), axis=0)
+ )
+ }
+ )
+ self.vel_spectrum.update(
+ {
+ cur_hist_name: np.ndarray.tolist(
+ unit_factor_vspec * np.max(np.fabs(vel), axis=0)
+ )
+ }
+ )
+ self.acc_spectrum.update(
+ {
+ cur_hist_name: np.ndarray.tolist(
+ unit_factor_aspec
+ * np.max(np.fabs(a_t), axis=0)
+ / 100.0
+ / self.g
+ )
+ }
+ )
+ self.psv.update(
+ {
+ cur_hist_name: np.ndarray.tolist(
+ unit_factor_psv * omega * np.max(np.fabs(disp), axis=0)
+ )
+ }
+ )
+ self.psa.update(
+ {
+ cur_hist_name: np.ndarray.tolist(
+ unit_factor_psa
+ * omega**2
+ * np.max(np.fabs(disp), axis=0)
+ / 100.0
+ / self.g
+ )
+ }
+ )
self.periods.update({cur_hist_name: periods.tolist()})
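The time stepping above is the incremental average-acceleration (Newmark, gamma = 1/2, beta = 1/4) recurrence for a unit-mass oscillator, vectorized over periods; the same recurrence for a single period, with an illustrative input signal:

import numpy as np

dt = 0.005
ag = np.sin(2 * np.pi * 2.0 * np.linspace(0, 10, 2001))  # stand-in ground acceleration
T, zeta = 1.0, 0.05
w = 2 * np.pi / T
c, k = 2 * zeta * w, w**2                                # unit-mass damping and stiffness

n = len(ag)
a, v, u = np.zeros(n), np.zeros(n), np.zeros(n)
a[0] = -ag[0] - c * v[0] - k * u[0]
for j in range(1, n):
    da = ag[j] - ag[j - 1]
    dd2u = (-da - dt * c * a[j - 1] - dt * k * (v[j - 1] + 0.5 * dt * a[j - 1])) / (
        1.0 + 0.5 * dt * c + 0.25 * dt**2 * k
    )
    a[j] = a[j - 1] + dd2u
    v[j] = v[j - 1] + dt * a[j - 1] + 0.5 * dt * dd2u
    u[j] = u[j - 1] + dt * v[j - 1] + 0.5 * dt**2 * a[j - 1] + 0.25 * dt**2 * dd2u

Sd = np.max(np.abs(u))   # spectral displacement
PSA = w**2 * Sd          # pseudo-spectral acceleration, same units as ag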
- def compute_peak_ground_responses(self, im_units=dict()):
-
+ def compute_peak_ground_responses(self, im_units=dict()): # noqa: B006, C408, D102
if len(im_units) == 0:
unit_factor_pga = 1.0
unit_factor_pgv = 1.0
unit_factor_pgd = 1.0
else:
- unit_factor_pga = get_unit_factor(self.im_units.get('PGA','g'), im_units.get('PGA','g'))
- unit_factor_pgv = get_unit_factor(self.im_units.get('PGV','cmps'), im_units.get('PGV','cmps'))
- unit_factor_pgd = get_unit_factor(self.im_units.get('PGD','cm'), im_units.get('PGD','cm'))
+ unit_factor_pga = get_unit_factor(
+ self.im_units.get('PGA', 'g'), im_units.get('PGA', 'g')
+ )
+ unit_factor_pgv = get_unit_factor(
+ self.im_units.get('PGV', 'cmps'), im_units.get('PGV', 'cmps')
+ )
+ unit_factor_pgd = get_unit_factor(
+ self.im_units.get('PGD', 'cm'), im_units.get('PGD', 'cm')
+ )
# note this function assumes acceleration in cm/sec/sec
# pga is in g, pgv in cm/sec, pgd in cm
for cur_hist_name, cur_hist in self.time_hist_dict.items():
dt = cur_hist[1]
ground_acc = cur_hist[2]
- num_steps = len(ground_acc)
+ num_steps = len(ground_acc) # noqa: F841
# integral
- velocity = dt * cumtrapz(ground_acc, initial=0.)
- displacement = dt * cumtrapz(velocity, initial=0.)
+ velocity = dt * cumtrapz(ground_acc, initial=0.0)
+ displacement = dt * cumtrapz(velocity, initial=0.0)
# collect data
- self.pga.update({cur_hist_name: np.max(np.fabs(ground_acc))/self.g/100.0*unit_factor_pga})
- self.pgv.update({cur_hist_name: np.max(np.fabs(velocity))*unit_factor_pgv})
- self.pgd.update({cur_hist_name: np.max(np.fabs(displacement))*unit_factor_pgd})
-
- def compute_arias_intensity(self, im_units=dict()):
-
+ self.pga.update(
+ {
+ cur_hist_name: np.max(np.fabs(ground_acc))
+ / self.g
+ / 100.0
+ * unit_factor_pga
+ }
+ )
+ self.pgv.update(
+ {cur_hist_name: np.max(np.fabs(velocity)) * unit_factor_pgv}
+ )
+ self.pgd.update(
+ {cur_hist_name: np.max(np.fabs(displacement)) * unit_factor_pgd}
+ )
+
+ def compute_arias_intensity(self, im_units=dict()): # noqa: B006, C408, D102
if len(im_units) == 0:
unit_factor_ai = 1.0
unit_factor_ds575 = 1.0
unit_factor_ds595 = 1.0
else:
- unit_factor_ai = get_unit_factor(self.im_units.get('Ia'), im_units.get('Ia','cmps'))
- unit_factor_ds575 = get_unit_factor(self.im_units.get('DS575','sec'), im_units.get('DS575','sec'))
- unit_factor_ds595 = get_unit_factor(self.im_units.get('DS595','sec'), im_units.get('DS595','sec'))
-
+ unit_factor_ai = get_unit_factor(
+ self.im_units.get('Ia'), im_units.get('Ia', 'cmps')
+ )
+ unit_factor_ds575 = get_unit_factor(
+ self.im_units.get('DS575', 'sec'), im_units.get('DS575', 'sec')
+ )
+ unit_factor_ds595 = get_unit_factor(
+ self.im_units.get('DS595', 'sec'), im_units.get('DS595', 'sec')
+ )
+
    # note this function assumes acceleration in cm/sec/sec and returns Arias intensity in m/sec
for cur_hist_name, cur_hist in self.time_hist_dict.items():
dt = cur_hist[1]
ground_acc = cur_hist[2]
- num_steps = len(ground_acc)
- tmp = [x**2/100/100 for x in ground_acc]
+ num_steps = len(ground_acc) # noqa: F841
+ tmp = [x**2 / 100 / 100 for x in ground_acc]
# integral
- I_A = np.pi / 2 / self.g * dt * cumtrapz(tmp, initial=0.)
+ I_A = np.pi / 2 / self.g * dt * cumtrapz(tmp, initial=0.0) # noqa: N806
# collect data
- self.i_a.update({cur_hist_name: np.max(np.fabs(I_A))*unit_factor_ai})
+ self.i_a.update({cur_hist_name: np.max(np.fabs(I_A)) * unit_factor_ai})
# compute significant duration
ds575, ds595 = self._compute_significant_duration(I_A, dt)
- self.ds575.update({cur_hist_name: ds575*unit_factor_ds575})
- self.ds595.update({cur_hist_name: ds595*unit_factor_ds595})
-
- def _compute_significant_duration(self, I_A, dt):
+ self.ds575.update({cur_hist_name: ds575 * unit_factor_ds575})
+ self.ds595.update({cur_hist_name: ds595 * unit_factor_ds595})
+ def _compute_significant_duration(self, I_A, dt): # noqa: N803, PLR6301
        # note this function returns durations in sec
ds575 = 0.0
ds595 = 0.0
# normalize
- I_A_n = I_A / np.max(I_A)
+ I_A_n = I_A / np.max(I_A) # noqa: N806
# find 5%, 75%, 95%
- id5 = next(x for x, val in enumerate(I_A_n) if val > 0.05)
- id75 = next(x for x, val in enumerate(I_A_n) if val > 0.75)
- id95 = next(x for x, val in enumerate(I_A_n) if val > 0.95)
+ id5 = next(x for x, val in enumerate(I_A_n) if val > 0.05) # noqa: PLR2004
+ id75 = next(x for x, val in enumerate(I_A_n) if val > 0.75) # noqa: PLR2004
+ id95 = next(x for x, val in enumerate(I_A_n) if val > 0.95) # noqa: PLR2004
# compute ds
- ds575 = dt*(id75-id5)
- ds595 = dt*(id95-id5)
+ ds575 = dt * (id75 - id5)
+ ds595 = dt * (id95 - id5)
# return
return ds575, ds595
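compute_arias_intensity and _compute_significant_duration follow the usual definitions: Ia = pi/(2g) * integral of a(t)^2 dt, with DS5-75 and DS5-95 taken between the 5%, 75%, and 95% points of the normalized Ia build-up. A compact sketch (the record and dt are illustrative):

import numpy as np
from scipy.integrate import cumtrapz

g, dt = 9.80665, 0.01
acc = np.random.randn(2000)                                  # stand-in acceleration in m/s^2

ia = np.pi / (2 * g) * cumtrapz(acc**2, dx=dt, initial=0.0)  # running Arias intensity
ia_n = ia / ia[-1]                                           # normalized build-up (nondecreasing)

id5, id75, id95 = (np.searchsorted(ia_n, p) for p in (0.05, 0.75, 0.95))
ds575 = dt * (id75 - id5)
ds595 = dt * (id95 - id5)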
- def compute_saratio(self, T1 = 1.0, Ta = 0.02, Tb = 3.0, im_units=dict()):
-
+ def compute_saratio(self, T1=1.0, Ta=0.02, Tb=3.0, im_units=dict()): # noqa: B006, C408, N803, D102
if len(self.psa) == 0:
return
if len(im_units) == 0:
unit_factor = 1.0
else:
- unit_factor = get_unit_factor(self.im_units.get('SaRatio'), im_units.get('SaRatio','scalar'))
+ unit_factor = get_unit_factor(
+ self.im_units.get('SaRatio'), im_units.get('SaRatio', 'scalar')
+ )
# period list for SaRatio calculations
- period_list = [0.01*x for x in range(1500)]
+ period_list = [0.01 * x for x in range(1500)]
period_list = [x for x in period_list if x <= Tb and x >= Ta]
- for cur_hist_name, cur_hist in self.time_hist_dict.items():
+ for cur_hist_name, cur_hist in self.time_hist_dict.items(): # noqa: B007, PERF102
cur_psa = self.psa.get(cur_hist_name, None)
cur_periods = self.periods.get(cur_hist_name, None)
if (cur_psa is None) or (cur_periods is None):
@@ -444,123 +546,162 @@ def compute_saratio(self, T1 = 1.0, Ta = 0.02, Tb = 3.0, im_units=dict()):
self.saratio.update({cur_hist_name: 0.0})
else:
f = interp1d(cur_periods, cur_psa)
- self.saratio.update({cur_hist_name: f(T1)/gmean(f(period_list))*unit_factor})
+ self.saratio.update(
+ {cur_hist_name: f(T1) / gmean(f(period_list)) * unit_factor}
+ )
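compute_saratio evaluates SaRatio = Sa(T1) / geomean(Sa(T), T in [Ta, Tb]) on a 0.01 s period grid; the quantity in isolation (the spectrum values below are made up):

import numpy as np
from scipy.interpolate import interp1d
from scipy.stats.mstats import gmean

periods = np.linspace(0.02, 3.0, 300)  # stand-in period grid
psa = np.exp(-periods)                 # stand-in spectral ordinates

T1, Ta, Tb = 1.0, 0.02, 3.0
grid = [0.01 * x for x in range(1500) if Ta <= 0.01 * x <= Tb]
f = interp1d(periods, psa)
sa_ratio = float(f(T1) / gmean(f(grid)))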
-def load_records(event_file, ampScaled):
+def load_records(event_file, ampScaled): # noqa: N803, D103
event_data = event_file.get('Events', None)
if event_data is None:
- raise ValueError(f"IntensityMeasureComputer: 'Events' attribute is not found in EVENT.json")
- else:
+ raise ValueError( # noqa: TRY003
+ "IntensityMeasureComputer: 'Events' attribute is not found in EVENT.json" # noqa: EM101
+ )
+ else: # noqa: RET506
event_data = event_data[0]
-
+
# check type
- if (event_data.get('type', None) != 'Seismic') and (event_data.get('type', None) != 'timeHistory'):
- return dict()
-
+ if (event_data.get('type', None) != 'Seismic') and (
+ event_data.get('type', None) != 'timeHistory'
+ ):
+ return dict() # noqa: C408
+
# get time series attribute
time_series = event_data.get('timeSeries', None)
if time_series is None:
- return dict()
+ return dict() # noqa: C408
ts_names = [x['name'] for x in time_series]
-
+
# collect time series tags
pattern = event_data.get('pattern', None)
if pattern is None:
- raise ValueError(f"IntensityMeasureComputer: 'pattern' is not found in EVENT.json")
- dict_ts = dict()
+ raise ValueError( # noqa: TRY003
+ "IntensityMeasureComputer: 'pattern' is not found in EVENT.json" # noqa: EM101
+ )
+ dict_ts = dict() # noqa: C408
for cur_pat in pattern:
dict_ts.update({cur_pat['timeSeries']: [cur_pat['dof']]})
-
+
# get time series (currently only for horizontal directions)
for cur_ts in list(dict_ts.keys()):
try:
cur_id = ts_names.index(cur_ts)
- except:
- raise ValueError(f"IntensityMeasureComputer: {cur_ts} is not found in 'timeSeries' in EVENT.json")
+ except: # noqa: E722
+ raise ValueError( # noqa: B904, TRY003
+ f"IntensityMeasureComputer: {cur_ts} is not found in 'timeSeries' in EVENT.json" # noqa: EM102
+ )
# get amplitude scaling (if the record is raw, i.e., ampScaled is false)
if not ampScaled:
- scalingFactor = time_series[cur_id].get('factor',1.0)
+ scalingFactor = time_series[cur_id].get('factor', 1.0) # noqa: N806
else:
- scalingFactor = 1.0
+ scalingFactor = 1.0 # noqa: N806
# append that record
dict_ts[cur_ts].append(time_series[cur_id]['dT'])
- dict_ts[cur_ts].append([x*scalingFactor for x in time_series[cur_id]['data']])
+ dict_ts[cur_ts].append(
+ [x * scalingFactor for x in time_series[cur_id]['data']]
+ )
# return
return dict_ts
-def get_unit_factor(unit_in, unit_out):
-
+def get_unit_factor(unit_in, unit_out): # noqa: D103
# this function is geared to the unit names in SimCenterUnitsCombo in R2D.
unit_factor = 1.0
# unit types
unit_types = globals().get('unit_types')
f_out = 1
f_in = 1
- for cur_unit, name_list in unit_types.items():
+ for cur_unit, name_list in unit_types.items(): # noqa: B007, PERF102
if unit_out in name_list:
f_out = globals().get(unit_out)
if unit_in in name_list:
f_in = globals().get(unit_in)
- unit_factor = f_in/f_out
- return unit_factor
+ unit_factor = f_in / f_out
+ return unit_factor # noqa: RET504
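get_unit_factor simply divides two conversion factors that simcenter_common exposes as module globals; the idea in isolation, with a small local table standing in for those globals (names and values here are illustrative SI factors, not the module's own):

# each factor expresses one unit in a common base unit (metres per second here)
velocity_factors = {'mps': 1.0, 'cmps': 0.01, 'ftps': 0.3048}

def unit_factor(unit_in, unit_out, table):
    # multiplying a value given in unit_in by this factor expresses it in unit_out
    return table[unit_in] / table[unit_out]

print(unit_factor('cmps', 'mps', velocity_factors))  # 0.01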
-def main(AIM_file, EVENT_file, IM_file, unitScaled, ampScaled, geoMean):
+def main(AIM_file, EVENT_file, IM_file, unitScaled, ampScaled, geoMean): # noqa: C901, N803, D103
# load AIM file
try:
- with open(AIM_file, 'r', encoding='utf-8') as f:
- AIM_file = json.load(f)
- except:
- raise ValueError(f"IntensityMeasureComputer: cannot load AIM file {AIM_file}")
-
+ with open(AIM_file, encoding='utf-8') as f: # noqa: PTH123
+ AIM_file = json.load(f) # noqa: N806
+ except: # noqa: E722
+ raise ValueError( # noqa: B904, TRY003
+ f'IntensityMeasureComputer: cannot load AIM file {AIM_file}' # noqa: EM102
+ )
+
# load EVENT file
try:
- with open(EVENT_file, 'r', encoding='utf-8') as f:
+ with open(EVENT_file, encoding='utf-8') as f: # noqa: PTH123
event_file = json.load(f)
- except:
- raise ValueError(f"IntensityMeasureComputer: cannot load EVENT file {EVENT_file}")
+ except: # noqa: E722
+ raise ValueError( # noqa: B904, TRY003
+ f'IntensityMeasureComputer: cannot load EVENT file {EVENT_file}' # noqa: EM102
+ )
# get periods
- AIM_event = AIM_file['Events']
- if type(AIM_event)==list:
- AIM_event = AIM_event[0]
- periods = AIM_event.get('SpectrumPeriod',[0.01,0.02,0.03,0.04,0.05,0.75,
- 0.1,0.2,0.3,0.4,0.5,0.75,1.0,
- 2.0,3.0,4.0,5.0,7.5,10.0])
+ AIM_event = AIM_file['Events'] # noqa: N806
+ if type(AIM_event) == list: # noqa: E721
+ AIM_event = AIM_event[0] # noqa: N806
+ periods = AIM_event.get(
+ 'SpectrumPeriod',
+ [
+ 0.01,
+ 0.02,
+ 0.03,
+ 0.04,
+ 0.05,
+ 0.75,
+ 0.1,
+ 0.2,
+ 0.3,
+ 0.4,
+ 0.5,
+ 0.75,
+ 1.0,
+ 2.0,
+ 3.0,
+ 4.0,
+ 5.0,
+ 7.5,
+ 10.0,
+ ],
+ )
# get units
if unitScaled:
# corresponding to records after SimCenterEvent.py
units = AIM_file['GeneralInformation'].get('units', None)
if units is None:
- raise ValueError(f"IntensityMeasureComputer: units is not found in {AIM_file}")
+ raise ValueError( # noqa: TRY003
+ f'IntensityMeasureComputer: units is not found in {AIM_file}' # noqa: EM102
+ )
else:
# corresponding to raw records (e.g., EE-UQ)
- units = {"acceleration": "g"}
+ units = {'acceleration': 'g'}
# get IM list (will be user-defined)
- im_types = [] # IM type
- im_units = dict()
- im_names = ['Periods'] # IM name
- AIM_im = AIM_file.get('IntensityMeasure', None)
+ im_types = [] # IM type
+ im_units = dict() # noqa: C408
+ im_names = ['Periods'] # IM name
+ AIM_im = AIM_file.get('IntensityMeasure', None) # noqa: N806
output_periods = []
process_geomean = False
if AIM_im is None:
# search it again under UQ/surrogateMethodInfo
- AIM_im = AIM_file['UQ']['surrogateMethodInfo'].get('IntensityMeasure',None)
- if geoMean:
- process_geomean = AIM_file['UQ']['surrogateMethodInfo'].get('useGeoMean',False)
-
- else:
+ AIM_im = AIM_file['UQ']['surrogateMethodInfo'].get('IntensityMeasure', None) # noqa: N806
if geoMean:
- process_geomean = AIM_file['IntensityMeasure'].get('useGeoMean', None)
+ process_geomean = AIM_file['UQ']['surrogateMethodInfo'].get(
+ 'useGeoMean', False
+ )
- if AIM_im is None or len(AIM_im)==0:
+ elif geoMean:
+ process_geomean = AIM_file['IntensityMeasure'].get('useGeoMean', None)
+
+ if AIM_im is None or len(AIM_im) == 0:
# no intensity measure calculation requested
return
- else:
+ else: # noqa: RET505
for cur_im in list(AIM_im.keys()):
for ref_type in IM_TYPES:
if cur_im in IM_MAP.get(ref_type):
@@ -569,37 +710,64 @@ def main(AIM_file, EVENT_file, IM_file, unitScaled, ampScaled, geoMean):
if ref_type not in im_types:
im_types.append(ref_type)
if cur_im.startswith('PS'):
- periods = AIM_im[cur_im].get('Periods',[0.01, 0.02, 0.03, 0.04, 0.05, 0.075, 0.1, 0.2, 0.3,
- 0.4, 0.5, 0.75, 1.0, 2.0, 3.0, 4.0, 5.0, 7.5, 10.0])
+ periods = AIM_im[cur_im].get(
+ 'Periods',
+ [
+ 0.01,
+ 0.02,
+ 0.03,
+ 0.04,
+ 0.05,
+ 0.075,
+ 0.1,
+ 0.2,
+ 0.3,
+ 0.4,
+ 0.5,
+ 0.75,
+ 1.0,
+ 2.0,
+ 3.0,
+ 4.0,
+ 5.0,
+ 7.5,
+ 10.0,
+ ],
+ )
output_periods = periods
- if cur_im=='SaRatio':
- tmp = AIM_im[cur_im].get('Periods',[0.02, 1.0, 3.0])
- Ta, Tb = [np.min(tmp), np.max(tmp)]
+ if cur_im == 'SaRatio':
+ tmp = AIM_im[cur_im].get('Periods', [0.02, 1.0, 3.0])
+ Ta, Tb = [np.min(tmp), np.max(tmp)] # noqa: N806
tmp.pop(tmp.index(Ta))
tmp.pop(tmp.index(Tb))
- T1 = tmp[0]
- periods = [Ta+0.01*(x-1) for x in range(int(np.ceil((Tb-Ta)/0.01))+3)]
+ T1 = tmp[0] # noqa: N806
+ periods = [
+ Ta + 0.01 * (x - 1)
+ for x in range(int(np.ceil((Tb - Ta) / 0.01)) + 3)
+ ]
break
- for Ti in output_periods:
+ for Ti in output_periods: # noqa: N806
if Ti not in periods:
- bisect.insort(periods,Ti)
+ bisect.insort(periods, Ti)
for cur_type in im_types:
if cur_type not in IM_TYPES:
# pop the non-supported IMs
- im_types.pop(cur_type)
-
+ im_types.pop(cur_type) # noqa: B909
+
# load records
dict_time_series = load_records(event_file, ampScaled)
# intensity measure computer
- im_computer = IntensityMeasureComputer(time_hist_dict=dict_time_series, units=units, ampScaled=ampScaled)
+ im_computer = IntensityMeasureComputer(
+ time_hist_dict=dict_time_series, units=units, ampScaled=ampScaled
+ )
# compute intensity measures
if 'PeakGroundResponse' in im_types:
im_computer.compute_peak_ground_responses(im_units=im_units)
if 'PseudoSpectrum' in im_types or 'SpectralShape' in im_types:
- im_computer.compute_response_spectrum(periods=periods,im_units=im_units)
+ im_computer.compute_response_spectrum(periods=periods, im_units=im_units)
if 'AriasIntensity' in im_types or 'Duration' in im_types:
im_computer.compute_arias_intensity(im_units=im_units)
if 'SpectralShape' in im_types:
@@ -612,60 +780,79 @@ def main(AIM_file, EVENT_file, IM_file, unitScaled, ampScaled, geoMean):
# save a IM.json
out_data = {'IntensityMeasure': im_computer.intensity_measures}
- with open(IM_file, 'w', encoding='utf-8') as f:
+ with open(IM_file, 'w', encoding='utf-8') as f: # noqa: PTH123
json.dump(out_data, f, indent=2)
# save a csv file
- csv_dict = dict()
+ csv_dict = dict() # noqa: C408
colname = []
for cur_im in im_types:
- colname = colname+IM_MAP.get(cur_im, [])
+ colname = colname + IM_MAP.get(cur_im, []) # noqa: PLR6104
im_dict = im_computer.intensity_measures
- for cur_hist_name, cur_hist in dict_time_series.items():
- cur_colname = []
+ for cur_hist_name, cur_hist in dict_time_series.items(): # noqa: PLR1702
+ cur_colname = [] # noqa: F841
cur_dof = cur_hist[0]
cur_periods = im_dict['Periods'].get(cur_hist_name)
for cur_im in im_names:
if cur_im in IM_MAP.get('PseudoSpectrum'):
if len(output_periods) > 0:
- for Ti in output_periods:
- cur_im_T = '{}({}s)'.format(cur_im, Ti)
- tmp_key = '1-{}-0-{}'.format(cur_im_T,cur_dof)
- if len(cur_periods) > 1:
+ for Ti in output_periods: # noqa: N806
+ cur_im_T = f'{cur_im}({Ti}s)' # noqa: N806
+ tmp_key = f'1-{cur_im_T}-0-{cur_dof}'
+ if len(cur_periods) > 1:
# interp
- f = interp1d(cur_periods, im_dict.get(cur_im).get(cur_hist_name))
- if tmp_key in csv_dict.keys():
+ f = interp1d(
+ cur_periods, im_dict.get(cur_im).get(cur_hist_name)
+ )
+ if tmp_key in csv_dict:
csv_dict[tmp_key].append(f(Ti))
else:
csv_dict.update({tmp_key: [f(Ti)]})
+ elif tmp_key in csv_dict:
+ csv_dict[tmp_key].append(
+ im_dict.get(cur_im).get(cur_hist_name)[
+ cur_periods.index(Ti)
+ ]
+ )
else:
- if tmp_key in csv_dict.keys():
- csv_dict[tmp_key].append(im_dict.get(cur_im).get(cur_hist_name)[cur_periods.index(Ti)])
- else:
- csv_dict.update({tmp_key: [im_dict.get(cur_im).get(cur_hist_name)[cur_periods.index(Ti)]]})
+ csv_dict.update(
+ {
+ tmp_key: [
+ im_dict.get(cur_im).get(cur_hist_name)[
+ cur_periods.index(Ti)
+ ]
+ ]
+ }
+ )
elif cur_im == 'Periods':
pass
else:
- tmp_key = '1-{}-0-{}'.format(cur_im,cur_dof)
- if tmp_key in csv_dict.keys():
+ tmp_key = f'1-{cur_im}-0-{cur_dof}'
+ if tmp_key in csv_dict:
csv_dict[tmp_key].append(im_dict.get(cur_im).get(cur_hist_name))
else:
- csv_dict.update({tmp_key: [im_dict.get(cur_im).get(cur_hist_name)]})
+ csv_dict.update(
+ {tmp_key: [im_dict.get(cur_im).get(cur_hist_name)]}
+ )
if process_geomean:
geo_csv_dict = {}
get_count_dict = {}
for key, val in csv_dict.items():
- new_key_name = key.rpartition('-')[0] + "-1" #before the last dash
+ new_key_name = key.rpartition('-')[0] + '-1' # before the last dash
if new_key_name not in geo_csv_dict:
geo_csv_dict[new_key_name] = val
get_count_dict[new_key_name] = 1
else:
- geo_csv_dict[new_key_name] = [a*b for a,b in zip(geo_csv_dict[new_key_name],val)]
+ geo_csv_dict[new_key_name] = [
+ a * b for a, b in zip(geo_csv_dict[new_key_name], val)
+ ]
get_count_dict[new_key_name] += 1
- for key, val in geo_csv_dict.items():
- geo_csv_dict[key] = [a**(1/get_count_dict[key]) for a in geo_csv_dict[key]]
+ for key, val in geo_csv_dict.items(): # noqa: B007
+ geo_csv_dict[key] = [
+ a ** (1 / get_count_dict[key]) for a in geo_csv_dict[key]
+ ]
csv_dict = geo_csv_dict
@@ -673,35 +860,54 @@ def main(AIM_file, EVENT_file, IM_file, unitScaled, ampScaled, geoMean):
csv_df = pd.DataFrame.from_dict(csv_dict)
tmp_idx = IM_file.index('.')
if tmp_idx:
- filenameCSV = IM_file[:tmp_idx]+'.csv'
+ filenameCSV = IM_file[:tmp_idx] + '.csv' # noqa: N806
else:
- filenameCSV = IM_file+'.csv'
- csv_df.to_csv(filenameCSV,index=False)
+ filenameCSV = IM_file + '.csv' # noqa: N806
+ csv_df.to_csv(filenameCSV, index=False)
-if __name__ == '__main__':
+if __name__ == '__main__':
parser = argparse.ArgumentParser(
- "Read SimCenterEvent EVENT.json files and "
- "compute intensity measures for ground motions time histories and "
- "append intensity measures to the EVENT.json",
- allow_abbrev = False
+ 'Read SimCenterEvent EVENT.json files and '
+ 'compute intensity measures for ground motions time histories and '
+ 'append intensity measures to the EVENT.json',
+ allow_abbrev=False,
)
# AIM file - getting units
- parser.add_argument('--filenameAIM', help = "Name of the AIM file")
+ parser.add_argument('--filenameAIM', help='Name of the AIM file')
# Event file - getting time histories
- parser.add_argument('--filenameEVENT', help = "Name of the EVENT file")
+ parser.add_argument('--filenameEVENT', help='Name of the EVENT file')
# IM file - getting time histories
- parser.add_argument('--filenameIM', help = "Name of the IM file")
+ parser.add_argument('--filenameIM', help='Name of the IM file')
# unit scaled tag
- parser.add_argument('--unitScaled', default=False, help = "Records have been scaled in units")
+ parser.add_argument(
+ '--unitScaled', default=False, help='Records have been scaled in units'
+ )
# amplitude scaled tag
- parser.add_argument('--ampScaled', default=False, help="Records have been scaled in amplitudes")
-
- parser.add_argument('--geoMeanVar',default=False, help="Consider using only geometric mean", nargs='?', const=True)
+ parser.add_argument(
+ '--ampScaled', default=False, help='Records have been scaled in amplitudes'
+ )
+
+ parser.add_argument(
+ '--geoMeanVar',
+ default=False,
+ help='Consider using only geometric mean',
+ nargs='?',
+ const=True,
+ )
# parse arguments
args = parser.parse_args()
# run and return
- sys.exit(main(args.filenameAIM, args.filenameEVENT, args.filenameIM, args.unitScaled, args.ampScaled, args.geoMeanVar))
+ sys.exit(
+ main(
+ args.filenameAIM,
+ args.filenameEVENT,
+ args.filenameIM,
+ args.unitScaled,
+ args.ampScaled,
+ args.geoMeanVar,
+ )
+ )
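For orientation, load_records builds a dictionary mapping each series name to [dof, dT, data]; assuming IntensityMeasureComputer has been imported from this module, a minimal end-to-end use on one synthetic record (the record, units, and periods are illustrative) looks like:

import numpy as np

# hypothetical single-component record: [dof, dT, acceleration data in g]
records = {'accel_X': [1, 0.01, (0.1 * np.sin(np.linspace(0, 20, 2000))).tolist()]}

computer = IntensityMeasureComputer(
    time_hist_dict=records, units={'acceleration': 'g'}, ampScaled=False
)
computer.compute_peak_ground_responses()
computer.compute_response_spectrum(periods=[0.1, 0.2, 0.5, 1.0, 2.0])
computer.compute_arias_intensity()
print(computer.pga['accel_X'], computer.ds575['accel_X'])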
diff --git a/modules/createEVENT/hazardBasedEvent/HazardBasedEvent.py b/modules/createEVENT/hazardBasedEvent/HazardBasedEvent.py
index 6a268760c..1c5fa2199 100644
--- a/modules/createEVENT/hazardBasedEvent/HazardBasedEvent.py
+++ b/modules/createEVENT/hazardBasedEvent/HazardBasedEvent.py
@@ -1,182 +1,215 @@
-#This python script process the input and will use it to run SHA and ground motion selection
-#In addition to providing the event file
-from __future__ import print_function # (at top of module)
+# This Python script processes the input and uses it to run SHA and ground motion selection # noqa: CPY001, D100, INP001
+# In addition to providing the event file
+import glob
import json
import os
-import sys
-import subprocess
-import glob
import re
+import subprocess # noqa: S404
+import sys
-def computeScenario(gmConfig, location):
- scriptDir = os.path.dirname(os.path.realpath(__file__))
- eqHazardPath = "{}/GMU/EQHazard.jar".format(scriptDir)
- simulateIMPath = "{}/GMU/SimulateIM".format(scriptDir)
- selectRecordPath = "{}/GMU/SelectRecord".format(scriptDir)
- recordDatabasePath = "{}/GMU/NGAWest2-1000.csv".format(scriptDir)
-
- #Separate Selection Config
- selectionConfig = gmConfig["RecordSelection"]
- del gmConfig["RecordSelection"]
-
- gmConfig["Site"] = {}
- gmConfig["Site"]["Type"] = "SingleLocation"
- gmConfig["Site"]["Location"] = {}
- gmConfig["Site"]["Location"]["Latitude"] = location[0]
- gmConfig["Site"]["Location"]["Longitude"] = location[1]
-
- #Adding the required output
- gmConfig["IntensityMeasure"]["EnableJsonOutput"] = True
- with open("./HazardWorkDir/Hazard_Scenario.json", 'w', encoding='utf-8') as hazardFile:
- json.dump(gmConfig, hazardFile, indent=4)
-
- #Now we need to run the EQHazard Process
- hazardCommand = ["java", "-jar", eqHazardPath, "./HazardWorkDir/Hazard_Scenario.json", "./HazardWorkDir/Hazard_Output.json"]
- hazardResult = subprocess.call(hazardCommand)
-
- if(hazardResult != 0):
- sys.stderr.write("Hazard analysis failed!")
- return -1
- #Now we need to run the SimulateIM Process
- #First we create a simulation config
- simConfig = {"GroundMotions": {"File": "./HazardWorkDir/Hazard_Output.json"}, "NumSimulations": 1, "SpatialCorrelation": True}
+def computeScenario(gmConfig, location): # noqa: N802, N803, D103
+ scriptDir = os.path.dirname(os.path.realpath(__file__)) # noqa: PTH120, N806
+ eqHazardPath = f'{scriptDir}/GMU/EQHazard.jar' # noqa: N806
+ simulateIMPath = f'{scriptDir}/GMU/SimulateIM' # noqa: N806
+ selectRecordPath = f'{scriptDir}/GMU/SelectRecord' # noqa: N806
+ recordDatabasePath = f'{scriptDir}/GMU/NGAWest2-1000.csv' # noqa: N806
+
+ # Separate Selection Config
+ selectionConfig = gmConfig['RecordSelection'] # noqa: N806
+ del gmConfig['RecordSelection']
+
+ gmConfig['Site'] = {}
+ gmConfig['Site']['Type'] = 'SingleLocation'
+ gmConfig['Site']['Location'] = {}
+ gmConfig['Site']['Location']['Latitude'] = location[0]
+ gmConfig['Site']['Location']['Longitude'] = location[1]
+
+ # Adding the required output
+ gmConfig['IntensityMeasure']['EnableJsonOutput'] = True
+ with open( # noqa: PTH123
+ './HazardWorkDir/Hazard_Scenario.json', 'w', encoding='utf-8'
+ ) as hazardFile: # noqa: N806
+ json.dump(gmConfig, hazardFile, indent=4)
+
+ # Now we need to run the EQHazard Process
+ hazardCommand = [ # noqa: N806
+ 'java',
+ '-jar',
+ eqHazardPath,
+ './HazardWorkDir/Hazard_Scenario.json',
+ './HazardWorkDir/Hazard_Output.json',
+ ]
+ hazardResult = subprocess.call(hazardCommand) # noqa: S603, N806
+
+ if hazardResult != 0:
+ sys.stderr.write('Hazard analysis failed!')
+ return -1
- with open("./HazardWorkDir/Sim_Config.json", 'w', encoding='utf-8') as simConfigFile:
- json.dump(simConfig, simConfigFile, indent=4)
- simulateCommand = [simulateIMPath, "./HazardWorkDir/Sim_Config.json", "./HazardWorkDir/Hazard_Sim.json"]
- simResult = subprocess.call(simulateCommand)
+ # Now we need to run the SimulateIM Process
+ # First we create a simulation config
+ simConfig = { # noqa: N806
+ 'GroundMotions': {'File': './HazardWorkDir/Hazard_Output.json'},
+ 'NumSimulations': 1,
+ 'SpatialCorrelation': True,
+ }
- if(simResult != 0):
- sys.stderr.write("Intensity measure simulation failed!")
+ with open( # noqa: PTH123
+ './HazardWorkDir/Sim_Config.json', 'w', encoding='utf-8'
+ ) as simConfigFile: # noqa: N806
+ json.dump(simConfig, simConfigFile, indent=4)
+ simulateCommand = [ # noqa: N806
+ simulateIMPath,
+ './HazardWorkDir/Sim_Config.json',
+ './HazardWorkDir/Hazard_Sim.json',
+ ]
+ simResult = subprocess.call(simulateCommand) # noqa: S603, N806
+
+ if simResult != 0:
+ sys.stderr.write('Intensity measure simulation failed!')
return -2
-
- #Now we can run record selection
+
+ # Now we can run record selection
#
- selectionConfig["Target"]["File"] = "./HazardWorkDir/Hazard_Sim.json"
- selectionConfig["Database"]["File"] = recordDatabasePath
- with open("./HazardWorkDir/Selection_Config.json", 'w', encoding='utf-8') as selectionConfigFile:
- json.dump(selectionConfig, selectionConfigFile, indent=4)
- selectionCommand = [selectRecordPath, "./HazardWorkDir/Selection_Config.json", "./HazardWorkDir/Records_Selection.json"]
- simResult = subprocess.call(selectionCommand)
-
- if(simResult != 0):
- sys.stderr.write("Intensity measure simulation failed!")
+ selectionConfig['Target']['File'] = './HazardWorkDir/Hazard_Sim.json'
+ selectionConfig['Database']['File'] = recordDatabasePath
+ with open( # noqa: PTH123
+ './HazardWorkDir/Selection_Config.json', 'w', encoding='utf-8'
+ ) as selectionConfigFile: # noqa: N806
+ json.dump(selectionConfig, selectionConfigFile, indent=4)
+ selectionCommand = [ # noqa: N806
+ selectRecordPath,
+ './HazardWorkDir/Selection_Config.json',
+ './HazardWorkDir/Records_Selection.json',
+ ]
+ simResult = subprocess.call(selectionCommand) # noqa: S603, N806
+
+ if simResult != 0: # noqa: RET503
+ sys.stderr.write('Record selection failed!')
return -2
-def readNGAWest2File(ngaW2FilePath, scaleFactor):
+def readNGAWest2File(ngaW2FilePath, scaleFactor): # noqa: N802, N803, D103
series = []
dt = 0.0
- with open(ngaW2FilePath, 'r') as recordFile:
- canRead = False #We need to process the header first
+ with open(ngaW2FilePath) as recordFile: # noqa: N806, PLW1514, PTH123
+ canRead = False # We need to process the header first # noqa: N806
for line in recordFile:
- if(canRead):
- series.extend([float(value) * scaleFactor * 9.81 for value in line.split()])
+ if canRead:
+ series.extend(
+ [float(value) * scaleFactor * 9.81 for value in line.split()]
+ )
- elif("NPTS=" in line):
- dt = float(re.match(r"NPTS=.+, DT=\s+([0-9\.]+)\s+SEC", line).group(1))
- canRead = True
-
+ elif 'NPTS=' in line:
+ dt = float(
+ re.match(r'NPTS=.+, DT=\s+([0-9\.]+)\s+SEC', line).group(1)
+ )
+ canRead = True # noqa: N806
return series, dt
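readNGAWest2File keys off the NPTS/DT header line of a PEER AT2 file; the regex in isolation (the header string below is typical-looking, not taken from a real record):

import re

header = 'NPTS=   7990, DT=   .0050 SEC'
m = re.match(r'NPTS=.+, DT=\s+([0-9\.]+)\s+SEC', header)
dt = float(m.group(1))  # 0.005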
-def createNGAWest2Event(rsn, scaleFactor, recordsFolder, eventFilePath):
- pattern = os.path.join(recordsFolder, "RSN") + str(rsn) +"_*.AT2"
- recordFiles = glob.glob(pattern)
- if(len(recordFiles) != 2):
- print('Error finding NGA West 2 files.\n'\
- 'Please download the files for record {} '\
- 'from NGA West 2 website and place them in the records folder ({})'\
- .format(rsn, recordsFolder))
- exit(-1)
+
+def createNGAWest2Event(rsn, scaleFactor, recordsFolder, eventFilePath): # noqa: N802, N803, D103
+ pattern = os.path.join(recordsFolder, 'RSN') + str(rsn) + '_*.AT2' # noqa: PTH118
+ recordFiles = glob.glob(pattern) # noqa: PTH207, N806
+ if len(recordFiles) != 2: # noqa: PLR2004
+ print( # noqa: T201
+ 'Error finding NGA West 2 files.\n'
+ f'Please download the files for record {rsn} '
+ f'from the NGA West 2 website and place them in the records folder ({recordsFolder})'
+ )
+ exit(-1) # noqa: PLR1722
h1, dt1 = readNGAWest2File(recordFiles[0], scaleFactor)
h2, dt2 = readNGAWest2File(recordFiles[1], scaleFactor)
- patternH1 = {}
- patternH1["type"] = "UniformAcceleration"
- patternH1["timeSeries"] = "accel_X"
- patternH1["dof"] = 1
+ patternH1 = {} # noqa: N806
+ patternH1['type'] = 'UniformAcceleration'
+ patternH1['timeSeries'] = 'accel_X'
+ patternH1['dof'] = 1
- patternH2 = {}
- patternH2["type"] = "UniformAcceleration"
- patternH2["timeSeries"] = "accel_Y"
- patternH2["dof"] = 2
+ patternH2 = {} # noqa: N806
+ patternH2['type'] = 'UniformAcceleration'
+ patternH2['timeSeries'] = 'accel_Y'
+ patternH2['dof'] = 2
- seriesH1 = {}
- seriesH1["name"] = "accel_X"
- seriesH1["type"] = "Value"
- seriesH1["dT"] = dt1
- seriesH1["data"] = h1
+ seriesH1 = {} # noqa: N806
+ seriesH1['name'] = 'accel_X'
+ seriesH1['type'] = 'Value'
+ seriesH1['dT'] = dt1
+ seriesH1['data'] = h1
- seriesH2 = {}
- seriesH2["name"] = "accel_Y"
- seriesH2["type"] = "Value"
- seriesH2["dT"] = dt2
- seriesH2["data"] = h2
+ seriesH2 = {} # noqa: N806
+ seriesH2['name'] = 'accel_Y'
+ seriesH2['type'] = 'Value'
+ seriesH2['dT'] = dt2
+ seriesH2['data'] = h2
event = {}
- event["name"] = "NGAW2_" + str(rsn)
- event["type"] = "Seismic"
- event["description"] = "NGA West 2 record " + str(rsn) + " scaled by a factor of " + str(scaleFactor)
- event["dT"] = dt1
- event["numSteps"] = len(h1)
- event["timeSeries"] = [seriesH1, seriesH2]
- event["pattern"] = [patternH1, patternH2]
- event["units"] = {
- "length": "m",
- "time": "sec"
- }
-
- eventsDict = {}
- eventsDict["Events"] = [event]
- eventsDict["RandomVariables"] = []
-
- with open(eventFilePath, 'w', encoding='utf-8') as eventFile:
- json.dump(eventsDict, eventFile, indent=4)
-
-
-def main():
- inputArgs = sys.argv
-
- #Process only if --getRV is passed
- if not "--getRV" in inputArgs:
+ event['name'] = 'NGAW2_' + str(rsn)
+ event['type'] = 'Seismic'
+ event['description'] = (
+ 'NGA West 2 record '
+ + str(rsn)
+ + ' scaled by a factor of '
+ + str(scaleFactor)
+ )
+ event['dT'] = dt1
+ event['numSteps'] = len(h1)
+ event['timeSeries'] = [seriesH1, seriesH2]
+ event['pattern'] = [patternH1, patternH2]
+ event['units'] = {'length': 'm', 'time': 'sec'}
+
+ eventsDict = {} # noqa: N806
+ eventsDict['Events'] = [event]
+ eventsDict['RandomVariables'] = []
+
+ with open(eventFilePath, 'w', encoding='utf-8') as eventFile: # noqa: PTH123, N806
+ json.dump(eventsDict, eventFile, indent=4)
+
+
+def main(): # noqa: D103
+ inputArgs = sys.argv # noqa: N806
+
+ # Process only if --getRV is passed
+ if '--getRV' not in inputArgs:
sys.exit(0)
- #First let's process the arguments
- argBIM = inputArgs.index("--filenameAIM") + 1
- bimFilePath = inputArgs[argBIM]
- argEVENT = inputArgs.index("--filenameEVENT") + 1
- eventFilePath = inputArgs[argEVENT]
+ # First let's process the arguments
+ argBIM = inputArgs.index('--filenameAIM') + 1 # noqa: N806
+ bimFilePath = inputArgs[argBIM] # noqa: N806
+ argEVENT = inputArgs.index('--filenameEVENT') + 1 # noqa: N806
+ eventFilePath = inputArgs[argEVENT] # noqa: N806
- #Ensure a hazard cache folder exist
- if not os.path.exists("./HazardWorkDir"):
- os.mkdir("./HazardWorkDir")
+ # Ensure a hazard cache folder exists
+ if not os.path.exists('./HazardWorkDir'): # noqa: PTH110
+ os.mkdir('./HazardWorkDir') # noqa: PTH102
- with open(bimFilePath, 'r', encoding='utf-8') as bimFile:
+ with open(bimFilePath, encoding='utf-8') as bimFile: # noqa: PTH123, N806
bim = json.load(bimFile)
- location = [bim["GeneralInformation"]["location"]["latitude"], bim["GeneralInformation"]["location"]["longitude"]]
-
- scriptDir = os.path.dirname(os.path.realpath(__file__))
- recordsFolder = "{}/GMU/NGAWest2Records".format(scriptDir)
+ location = [
+ bim['GeneralInformation']['location']['latitude'],
+ bim['GeneralInformation']['location']['longitude'],
+ ]
+
+ scriptDir = os.path.dirname(os.path.realpath(__file__)) # noqa: PTH120, N806
+ recordsFolder = f'{scriptDir}/GMU/NGAWest2Records' # noqa: N806
- computeScenario(bim["Events"][0]["GroundMotion"], location)
+ computeScenario(bim['Events'][0]['GroundMotion'], location)
-
- #We need to read the building location
-
+ # We need to read the building location
- #Now we can start processing the event
- with open("./HazardWorkDir/Records_Selection.json", 'r') as selectionFile:
- recordSelection = json.load(selectionFile)
+ # Now we can start processing the event
+ with open('./HazardWorkDir/Records_Selection.json') as selectionFile: # noqa: N806, PLW1514, PTH123
+ recordSelection = json.load(selectionFile) # noqa: N806
+
+ selectedRecord = recordSelection['GroundMotions'][0] # noqa: N806
+ rsn = selectedRecord['Record']['Id']
+ scaleFactor = selectedRecord['ScaleFactor'] # noqa: N806
- selectedRecord = recordSelection["GroundMotions"][0]
- rsn = selectedRecord["Record"]["Id"]
- scaleFactor = selectedRecord["ScaleFactor"]
-
createNGAWest2Event(rsn, scaleFactor, recordsFolder, eventFilePath)
-if __name__== "__main__":
- main()
\ No newline at end of file
+
+if __name__ == '__main__':
+ main()
diff --git a/modules/createEVENT/multiplePEER/MultiplePEER_Events.cpp b/modules/createEVENT/multiplePEER/MultiplePEER_Events.cpp
index ed8509b14..fbe69c22a 100644
--- a/modules/createEVENT/multiplePEER/MultiplePEER_Events.cpp
+++ b/modules/createEVENT/multiplePEER/MultiplePEER_Events.cpp
@@ -342,7 +342,7 @@ int createSimCenterEvent(json_t *peerEvent) {
json_t *peerRecord = 0;
int index;
- // loop over ths array of PEER events, creating a timeSeries and pattern for the event
+ // loop over this array of PEER events, creating a timeSeries and pattern for the event
double dT =0.0;
int numPoints =0;
json_array_foreach(recordsArray, index, peerRecord) {
diff --git a/modules/createEVENT/pointWindSpeed/parseHurricaneScenario.py b/modules/createEVENT/pointWindSpeed/parseHurricaneScenario.py
index 7f9287001..e35a1068d 100755
--- a/modules/createEVENT/pointWindSpeed/parseHurricaneScenario.py
+++ b/modules/createEVENT/pointWindSpeed/parseHurricaneScenario.py
@@ -1,58 +1,54 @@
-# python code to open the TPU .mat file
+# python code to open the TPU .mat file # noqa: CPY001, D100, EXE002, INP001
# and put data into a SimCenter JSON file for
# wind tunnel data
-import sys
import os
-import subprocess
-import json
-import stat
-import shutil
-import numpy as np
+import sys
+
import scipy.io as sio
-from pprint import pprint
-inputArgs = sys.argv
+inputArgs = sys.argv # noqa: N816
-print ("Number of arguments: %d" % len(sys.argv))
-print ("The arguments are: %s" %str(sys.argv))
+print('Number of arguments: %d' % len(sys.argv)) # noqa: T201
+print('The arguments are: %s' % str(sys.argv)) # noqa: T201, UP031
# set filenames
-matFileIN = sys.argv[1]
-jsonFileOUT = sys.argv[2]
-
-dataDir = os.getcwd()
-scriptDir = os.path.dirname(os.path.realpath(__file__))
-
-def parseMatFile(matFileIn, windFileOutName):
-
- file = open(windFileOutName,"w");
-
- mat_contents = sio.loadmat(matFileIn);
- print(mat_contents['wind'])
- windData = mat_contents['wind'][0][0];
- f = windData[0];
- lat = windData[1];
- long = windData[2];
- numLocations = lat.shape[0];
- print(lat.shape)
- file.write("{")
- file.write("\"wind\":[")
- for i in range(0, numLocations):
- locSpeed = f[i]
- locLat = lat[i]
- locLong = long[i]
-
- if (i == numLocations-1):
- file.write("{\"lat\":%f,\"long\":%f,\"windSpeed\":%d}]" % (locLat, locLong, locSpeed))
+matFileIN = sys.argv[1] # noqa: N816
+jsonFileOUT = sys.argv[2] # noqa: N816
+
+dataDir = os.getcwd() # noqa: PTH109, N816
+scriptDir = os.path.dirname(os.path.realpath(__file__)) # noqa: PTH120, N816
+
+
+def parseMatFile(matFileIn, windFileOutName): # noqa: N802, N803, D103
+ file = open(windFileOutName, 'w') # noqa: PLW1514, PTH123, SIM115
+ mat_contents = sio.loadmat(matFileIn)
+ print(mat_contents['wind']) # noqa: T201
+ windData = mat_contents['wind'][0][0] # noqa: N806
+ f = windData[0]
+ lat = windData[1]
+ long = windData[2]
+ numLocations = lat.shape[0] # noqa: N806
+ print(lat.shape) # noqa: T201
+ file.write('{')
+ file.write('"wind":[')
+ for i in range(numLocations):
+ locSpeed = f[i] # noqa: N806
+ locLat = lat[i] # noqa: N806
+ locLong = long[i] # noqa: N806
+
+ if i == numLocations - 1:
+ file.write(
+ '{"lat":%f,"long":%f,"windSpeed":%d}]' % (locLat, locLong, locSpeed)
+ )
else:
- file.write("{\"lat\":%f,\"long\":%f,\"windSpeed\":%d}," % (locLat, locLong, locSpeed))
+ file.write(
+ '{"lat":%f,"long":%f,"windSpeed":%d},' % (locLat, locLong, locSpeed)
+ )
-
- file.write("}")
+ file.write('}')
file.close()
-if __name__ == '__main__':
- parseMatFile(matFileIN,jsonFileOUT)
-
+if __name__ == '__main__':
+ parseMatFile(matFileIN, jsonFileOUT)
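Reviewer note: the reformatted parseMatFile still assembles the output JSON by manual string concatenation. A minimal sketch of an equivalent writer that builds the same {"wind": [...]} payload and lets json.dump handle the serialization (illustrative only, not part of this patch; names are hypothetical):

import json

import numpy as np
import scipy.io as sio


def parse_mat_file(mat_file_in, wind_file_out_name):
    # pull the wind struct out of the TPU .mat file, as parseMatFile does above
    wind_data = sio.loadmat(mat_file_in)['wind'][0][0]
    speed = np.ravel(wind_data[0])
    lat = np.ravel(wind_data[1])
    long_ = np.ravel(wind_data[2])
    records = [
        {'lat': float(lat[i]), 'long': float(long_[i]), 'windSpeed': int(speed[i])}
        for i in range(lat.size)
    ]
    # same {"wind": [...]} layout, without manual bracket bookkeeping
    with open(wind_file_out_name, 'w') as fp:
        json.dump({'wind': records}, fp)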
diff --git a/modules/createEVENT/siteResponse/Gauss1D.py b/modules/createEVENT/siteResponse/Gauss1D.py
index 3561c79eb..b4ab531eb 100644
--- a/modules/createEVENT/siteResponse/Gauss1D.py
+++ b/modules/createEVENT/siteResponse/Gauss1D.py
@@ -1,10 +1,10 @@
-from cmath import pi, exp, sqrt
+from cmath import exp, pi, sqrt # noqa: CPY001, D100, INP001
+
import numpy as np
-import sys
-class gauss1D:
- def __init__(self, Ly, Ny, sigma = 1.0, d = 1.0):
+class gauss1D: # noqa: D101
+ def __init__(self, Ly, Ny, sigma=1.0, d=1.0): # noqa: N803
# overall length in x-direction
self.Lx = 1
# overall length in y-direction
@@ -31,7 +31,7 @@ def __init__(self, Ly, Ny, sigma = 1.0, d = 1.0):
self.kxu = self.Nx * self.dkx
self.kyu = self.Ny * self.dky
- def calculate(self):
+ def calculate(self): # noqa: D102
# matrix of random phase angles
phi = 2 * pi * np.random.rand(self.Mx, self.My)
psi = 2 * pi * np.random.rand(self.Mx, self.My)
@@ -40,32 +40,42 @@ def calculate(self):
f2 = f4 = np.zeros(self.My, dtype=complex)
part1 = part2 = np.zeros(self.Mx)
- for pp in range(0, self.Mx):
+ for pp in range(self.Mx):
xp = pp * self.dx
- for qq in range(0, self.My):
+ for qq in range(self.My):
yq = qq * self.dy
- for kk in range(0, self.Mx):
+ for kk in range(self.Mx):
kxk = kk * self.dkx
f1[kk] = exp(1j * kxk * xp)
- for ll in range(0, self.My):
+ for ll in range(self.My):
kyl = ll * self.dky
- kappa = sqrt(kxk ** 2 + kyl ** 2)
- Sgg = self.sigma ** 2 * self.d ** 2 * exp(-self.d ** 2 *
- abs(kappa) ** 2 / 4.0) / 4.0 / pi
- Akl = sqrt(2 * Sgg * self.dkx * self.dky)
+ kappa = sqrt(kxk**2 + kyl**2)
+ Sgg = ( # noqa: N806
+ self.sigma**2
+ * self.d**2
+ * exp(-(self.d**2) * abs(kappa) ** 2 / 4.0)
+ / 4.0
+ / pi
+ )
+ Akl = sqrt(2 * Sgg * self.dkx * self.dky) # noqa: N806
f2[ll] = Akl * exp(1j * phi[kk, ll]) * exp(1j * kyl * yq)
f2sum = np.sum(f2)
part1[kk] = np.real(sqrt(2) * np.sum(f2sum * f1[kk]))
- for kk in range(0, self.Mx):
+ for kk in range(self.Mx):
kxk = kk * self.dkx
f3[kk] = exp(1j * kxk * xp)
- for ll in range(0, self.My):
+ for ll in range(self.My):
kyl = ll * self.dky
- kappa = sqrt(kxk ** 2 + kyl ** 2)
- Sgg = self.sigma ** 2 * self.d ** 2 * exp(-self.d ** 2 *
- abs(kappa) ** 2 / 4.0) / 4.0 / pi
- Akl = sqrt(2 * Sgg * self.dkx * self.dky)
+ kappa = sqrt(kxk**2 + kyl**2)
+ Sgg = ( # noqa: N806
+ self.sigma**2
+ * self.d**2
+ * exp(-(self.d**2) * abs(kappa) ** 2 / 4.0)
+ / 4.0
+ / pi
+ )
+ Akl = sqrt(2 * Sgg * self.dkx * self.dky) # noqa: N806
f4[ll] = Akl * exp(1j * psi[kk, ll]) * exp(-1j * kyl * yq)
f4sum = np.sum(f4)
part2[kk] = np.real(sqrt(2) * np.sum(f4sum * f3[kk]))
@@ -73,19 +83,16 @@ def calculate(self):
self.f[pp, qq] = part1.sum() + part2.sum()
-def printField(self):
- print(self.f)
+def printField(self): # noqa: N802, D103
+ print(self.f) # noqa: T201
+
-if __name__ == "__main__":
+if __name__ == '__main__':
Ly = 6.0
Ny = 6.0
sigma = 1.0
d = 1.0
a = gauss1D(Ly, Ny, sigma, d)
a.calculate()
- F = a.f.reshape((-1,1))
+ F = a.f.reshape((-1, 1))
Y = np.linspace(0, a.Ly, a.My)
-
-
-
-
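Reviewer note: for context, the gauss1D class is consumed further down (calibration.py) by reshaping the generated field and interpolating it onto element centroids. A minimal driving sketch under the same defaults as the __main__ block here (illustrative only):

import numpy as np
from scipy.interpolate import interp1d

from Gauss1D import gauss1D

# one realization of the 1D Gaussian random field over a 6 m column
rd = gauss1D(Ly=6.0, Ny=6.0, sigma=1.0, d=1.0)
rd.calculate()
field = np.squeeze(rd.f.reshape((-1, 1)))
depths = np.linspace(0, rd.Ly, rd.My)

# cubic interpolant, the same mapping calibration.py applies to element centroids
sample_at = interp1d(depths, field, kind='cubic')
print(sample_at(3.0))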
diff --git a/modules/createEVENT/siteResponse/RegionalSiteResponse.py b/modules/createEVENT/siteResponse/RegionalSiteResponse.py
index 789ffa570..5a01d37e0 100644
--- a/modules/createEVENT/siteResponse/RegionalSiteResponse.py
+++ b/modules/createEVENT/siteResponse/RegionalSiteResponse.py
@@ -1,5 +1,4 @@
-# -*- coding: utf-8 -*-
-#
+# # noqa: INP001, D100
# Copyright (c) 2018 Leland Stanford Junior University
# Copyright (c) 2018 The Regents of the University of California
#
@@ -38,78 +37,71 @@
# Long Chen
-import os
-import sys
-import argparse, posixpath
+import argparse
import json
-import subprocess
+import os
+import posixpath
import shutil
-from scipy import integrate
-import numpy as np
+import subprocess # noqa: S404
+import sys
from math import pi
# import the common constants and methods
from pathlib import Path
-this_dir = Path(os.path.dirname(os.path.abspath(__file__))).resolve()
+
+import numpy as np
+from scipy import integrate
+
+this_dir = Path(os.path.dirname(os.path.abspath(__file__))).resolve() # noqa: PTH100, PTH120
main_dir = this_dir.parents[1]
sys.path.insert(0, str(main_dir / 'common'))
-from simcenter_common import *
+from simcenter_common import * # noqa: E402, F403
-convert_EDP = {
- 'max_abs_acceleration': 'PGA'
-}
+convert_EDP = {'max_abs_acceleration': 'PGA'} # noqa: N816
-gravityG = 9.81 # m/s2
+gravityG = 9.81 # m/s2 # noqa: N816
# default element size before wave length check
-elementSize = 0.5 # m
+elementSize = 0.5 # m # noqa: N816
# site class B, m/s
-VsRock = 760
-plotFlag = False
+VsRock = 760
+plotFlag = False # noqa: N816
-def get_scale_factors(input_units, output_units):
- """
- Determine the scale factor to convert input event to internal event data
-
- """
+def get_scale_factors(input_units, output_units): # noqa: C901
+ """Determine the scale factor to convert input event to internal event data""" # noqa: D400
# special case: if the input unit is not specified then do not do any scaling
if input_units is None:
-
scale_factors = {'ALL': 1.0}
else:
-
# parse output units:
# if no length unit is specified, 'inch' is assumed
unit_length = output_units.get('length', 'inch')
f_length = globals().get(unit_length, None)
if f_length is None:
- raise ValueError(
- f"Specified length unit not recognized: {unit_length}")
+ raise ValueError(f'Specified length unit not recognized: {unit_length}') # noqa: DOC501, EM102, TRY003
# if no time unit is specified, 'sec' is assumed
unit_time = output_units.get('time', 'sec')
f_time = globals().get(unit_time, None)
if f_time is None:
- raise ValueError(
- f"Specified time unit not recognized: {unit_time}")
+ raise ValueError(f'Specified time unit not recognized: {unit_time}') # noqa: DOC501, EM102, TRY003
scale_factors = {}
for input_name, input_unit in input_units.items():
-
# exceptions
- if input_name in ['factor', ]:
+ if input_name == 'factor':
f_scale = 1.0
else:
-
# get the scale factor to standard units
f_in = globals().get(input_unit, None)
if f_in is None:
- raise ValueError(
- f"Input unit for event files not recognized: {input_unit}")
+ raise ValueError( # noqa: DOC501, TRY003
+ f'Input unit for event files not recognized: {input_unit}' # noqa: EM102
+ )
unit_type = None
for base_unit_type, unit_set in globals()['unit_types'].items():
@@ -117,11 +109,11 @@ def get_scale_factors(input_units, output_units):
unit_type = base_unit_type
if unit_type is None:
- raise ValueError(f"Failed to identify unit type: {input_unit}")
+ raise ValueError(f'Failed to identify unit type: {input_unit}') # noqa: DOC501, EM102, TRY003
# the output unit depends on the unit type
if unit_type == 'acceleration':
- f_out = f_time ** 2.0 / f_length
+ f_out = f_time**2.0 / f_length
elif unit_type == 'speed':
f_out = f_time / f_length
@@ -130,7 +122,9 @@ def get_scale_factors(input_units, output_units):
f_out = 1.0 / f_length
else:
- raise ValueError(f"Unexpected unit type in workflow: {unit_type}")
+ raise ValueError( # noqa: DOC501, TRY003
+ f'Unexpected unit type in workflow: {unit_type}' # noqa: EM102
+ )
# the scale factor is the product of input and output scaling
f_scale = f_in * f_out
@@ -139,10 +133,10 @@ def get_scale_factors(input_units, output_units):
return scale_factors
-def postProcess(evtName, input_units, f_scale_units):
+def postProcess(evtName, input_units, f_scale_units): # noqa: N802, N803, D103
# if f_scale_units is None
- if None in [input_units, f_scale_units]:
+ if None in [input_units, f_scale_units]: # noqa: PLR6201
f_scale = 1.0
else:
for cur_var in list(f_scale_units.keys()):
@@ -154,45 +148,37 @@ def postProcess(evtName, input_units, f_scale_units):
if unit_type == 'acceleration':
f_scale = f_scale_units.get(cur_var)
- acc = np.loadtxt("acceleration.out")
- #os.remove("acceleration.out") # remove acceleration file to save space
- #acc = np.loadtxt("out_tcl/acceleration.out")
- #shutil.rmtree("out_tcl") # remove output files to save space
+ acc = np.loadtxt('acceleration.out')
+ # os.remove("acceleration.out") # remove acceleration file to save space
+ # acc = np.loadtxt("out_tcl/acceleration.out")
+ # shutil.rmtree("out_tcl") # remove output files to save space
# KZ, 01/17/2022: I corrected the acc_surf from [:,-2] to [:,-3] (horizontal x direction)
- time = acc[:,0]
- #acc_surf = acc[:,-2] / 9.81
+ time = acc[:, 0]
+ # acc_surf = acc[:,-2] / 9.81
# KZ, 03/07/2022: removed the unit conversion here (did in createGM4BIM)
- acc_surf = acc[:,-3]
- dT = time[1] - time[0]
-
- timeSeries = dict(
- name = "accel_X",
- type = "Value",
- dT = dT,
- data = [x*f_scale for x in acc_surf.tolist()]
+ acc_surf = acc[:, -3]
+ dT = time[1] - time[0] # noqa: N806
+
+ timeSeries = dict( # noqa: C408, N806
+ name='accel_X',
+ type='Value',
+ dT=dT,
+ data=[x * f_scale for x in acc_surf.tolist()],
)
- patterns = dict(
- type = "UniformAcceleration",
- timeSeries = "accel_X",
- dof = 1
- )
+ patterns = dict(type='UniformAcceleration', timeSeries='accel_X', dof=1) # noqa: C408
# KZ, 01/17/2022: I added global y direction
# KZ, 03/07/2022: removed the unit conversion here (did in createGM4BIM)
- acc_surf_y = acc[:,-1]
- timeSeries_y = dict(
- name = "accel_Y",
- type = "Value",
- dT = dT,
- data = [y*f_scale for y in acc_surf_y.tolist()]
+ acc_surf_y = acc[:, -1]
+ timeSeries_y = dict( # noqa: C408, N806
+ name='accel_Y',
+ type='Value',
+ dT=dT,
+ data=[y * f_scale for y in acc_surf_y.tolist()],
)
- patterns_y = dict(
- type = "UniformAcceleration",
- timeSeries = "accel_Y",
- dof = 2
- )
+ patterns_y = dict(type='UniformAcceleration', timeSeries='accel_Y', dof=2) # noqa: C408
# KZ, 01/17/2022: I updated this section accordingly
"""
@@ -207,39 +193,46 @@ def postProcess(evtName, input_units, f_scale_units):
pattern = [patterns]
)
"""
- evts = dict(
- RandomVariables = [],
- name = "SiteResponseTool",
- type = "Seismic",
- description = "Surface acceleration",
- dT = dT,
- numSteps = len(acc_surf),
- timeSeries = [timeSeries, timeSeries_y],
- pattern = [patterns, patterns_y]
+ evts = dict( # noqa: C408
+ RandomVariables=[],
+ name='SiteResponseTool',
+ type='Seismic',
+ description='Surface acceleration',
+ dT=dT,
+ numSteps=len(acc_surf),
+ timeSeries=[timeSeries, timeSeries_y],
+ pattern=[patterns, patterns_y],
)
- dataToWrite = dict(Events = [evts])
+ dataToWrite = dict(Events=[evts]) # noqa: C408, N806
- with open(evtName, "w") as outfile:
+ with open(evtName, 'w') as outfile: # noqa: PLW1514, PTH123
json.dump(dataToWrite, outfile, indent=4)
- print("DONE postProcess")
-
+ print('DONE postProcess') # noqa: T201
+
return 0
-def run_opensees(BIM_file, EVENT_file, event_path, model_script, model_script_path, ndm, getRV):
-
- sys.path.insert(0, os.getcwd())
+def run_opensees( # noqa: D103
+ BIM_file, # noqa: N803
+ EVENT_file, # noqa: N803
+ event_path,
+ model_script,
+ model_script_path,
+ ndm,
+ getRV, # noqa: N803
+):
+ sys.path.insert(0, os.getcwd()) # noqa: PTH109
- print("**************** run_opensees ****************")
+ print('**************** run_opensees ****************') # noqa: T201
# load the model builder script
- with open(BIM_file, 'r') as f:
- BIM_in = json.load(f)
+ with open(BIM_file) as f: # noqa: PLW1514, PTH123
+ BIM_in = json.load(f) # noqa: N806
model_params = BIM_in['GeneralInformation']
- model_units = BIM_in['GeneralInformation']['units']
- location = BIM_in['GeneralInformation']['location']
+ model_units = BIM_in['GeneralInformation']['units'] # noqa: F841
+ location = BIM_in['GeneralInformation']['location'] # noqa: F841
# convert units if necessary
# KZ, 01/17/2022: Vs30 and DepthToRock are not subjected to the model_units for now...
@@ -260,17 +253,16 @@ def run_opensees(BIM_file, EVENT_file, event_path, model_script, model_script_pa
else:
get_records(BIM_file, EVENT_file, event_path)
# load the event file
- with open(EVENT_file, 'r') as f:
- EVENT_in_All = json.load(f)
- EVENT_in = EVENT_in_All['Events'][0]
+ with open(EVENT_file) as f: # noqa: PLW1514, PTH123
+ EVENT_in_All = json.load(f) # noqa: N806
+ EVENT_in = EVENT_in_All['Events'][0] # noqa: N806
event_list = EVENT_in['timeSeries']
- pattern_list = EVENT_in['pattern']
+ pattern_list = EVENT_in['pattern'] # noqa: F841
- fileNames = ['xInput', 'yInput']
+ fileNames = ['xInput', 'yInput'] # noqa: N806
# define the time series
for evt_i, event in enumerate(event_list):
-
acc = event['data']
vel = integrate.cumtrapz(acc, dx=event['dT']) * gravityG
vel = np.insert(vel, 0, 0.0)
@@ -283,15 +275,17 @@ def run_opensees(BIM_file, EVENT_file, event_path, model_script, model_script_pa
np.savetxt(fileNames[evt_i] + '.time', time)
# run the analysis
- shutil.copyfile(os.path.join(model_script_path, model_script), os.path.join(
- os.getcwd(), model_script))
+ shutil.copyfile(
+ os.path.join(model_script_path, model_script), # noqa: PTH118
+ os.path.join(os.getcwd(), model_script), # noqa: PTH109, PTH118
+ )
build_model(model_params, int(ndm) - 1)
- subprocess.Popen('OpenSees ' + model_script, shell=True).wait()
+ subprocess.Popen('OpenSees ' + model_script, shell=True).wait() # noqa: S602
# FMK
- # update Event file with acceleration recorded at surface
+ # update Event file with acceleration recorded at surface
# acc = np.loadtxt('accelerationElasAct.out')
# acc_surf_x = acc[:, -3] / gravityG
# EVENT_in_All['Events'][0]['timeSeries'][0]['data'] = acc_surf_x.tolist()
@@ -305,79 +299,78 @@ def run_opensees(BIM_file, EVENT_file, event_path, model_script, model_script_pa
# with open(EVENT_file, 'w') as f:
# json.dump(EVENT_in_All, f, indent=2)
- # KZ, 01/17/2022: get unit scaling factor (this is a temporary patch as we assume to have "g"
+ # KZ, 01/17/2022: get unit scaling factor (this is a temporary patch as we assume to have "g"
# as the output acceleration from the site response analysis- need to discuss with Frank/Pedro/Steve
# about this...)
# scale the input data to the event unit used internally
- input_units = {"AccelerationEvent": "g"}
+ input_units = {'AccelerationEvent': 'g'}
output_units = BIM_in.get('units', None)
f_scale_units = get_scale_factors(input_units, output_units)
-
- postProcess("fmkEVENT", input_units, f_scale_units)
-
-def get_records(BIM_file, EVENT_file, data_dir):
+ postProcess('fmkEVENT', input_units, f_scale_units)
+
- with open(BIM_file, 'r') as f:
+def get_records(BIM_file, EVENT_file, data_dir): # noqa: N803, D103
+ with open(BIM_file) as f: # noqa: PLW1514, PTH123
bim_file = json.load(f)
- with open(EVENT_file, 'r') as f:
+ with open(EVENT_file) as f: # noqa: PLW1514, PTH123
event_file = json.load(f)
event_id = event_file['Events'][0]['event_id']
# get the scale factor if a user specified it (KZ, 01/17/2022)
try:
- event_data = np.array(bim_file["Events"]["Events"]).T
+ event_data = np.array(bim_file['Events']['Events']).T
event_loc = np.where(event_data == event_id)[0][1]
f_scale_user = float(event_data.T[event_loc][1])
- except:
+ except: # noqa: E722
f_scale_user = 1.0
# FMK scale_factor = dict([(evt['fileName'], evt.get('factor',1.0)) for evt in bim_file["Events"]["Events"]])[event_id]
# KZ: multiply the scale_factor by f_scale_user
scale_factor = 1.0 * f_scale_user
- event_file['Events'][0].update(
- load_record(event_id, data_dir, scale_factor))
+ event_file['Events'][0].update(load_record(event_id, data_dir, scale_factor))
- with open(EVENT_file, 'w') as f:
+ with open(EVENT_file, 'w') as f: # noqa: PLW1514, PTH123
json.dump(event_file, f, indent=2)
-def write_RV(BIM_file, EVENT_file, data_dir):
+def write_RV(BIM_file, EVENT_file, data_dir): # noqa: N802, N803, D103
# Copied from SimCenterEvent, write name of motions
- with open(BIM_file, 'r') as f:
+ with open(BIM_file) as f: # noqa: PLW1514, PTH123
bim_data = json.load(f)
- event_file = {
- 'randomVariables': [],
- 'Events': []
- }
+ event_file = {'randomVariables': [], 'Events': []}
- #events = bim_data['Events']['Events']
+ # events = bim_data['Events']['Events']
events = bim_data['Events'][0]['Events']
if len(events) > 1:
- event_file['randomVariables'].append({
- 'distribution': 'discrete_design_set_string',
- 'name': 'eventID',
- 'value': 'RV.eventID',
- 'elements': []
- })
- event_file['Events'].append({
- #'type': 'Seismic',
- #'type': bim_data['Events']['type'],
- 'type': bim_data['Events'][0]['type'],
- 'event_id': 'RV.eventID',
- 'unitScaleFactor': 1.0,
- 'data_dir': data_dir
- })
-
- RV_elements = np.array(events).T[0].tolist()
- #RV_elements = []
- #for event in events:
+ event_file['randomVariables'].append(
+ {
+ 'distribution': 'discrete_design_set_string',
+ 'name': 'eventID',
+ 'value': 'RV.eventID',
+ 'elements': [],
+ }
+ )
+ event_file['Events'].append(
+ {
+ # 'type': 'Seismic',
+ # 'type': bim_data['Events']['type'],
+ 'type': bim_data['Events'][0]['type'],
+ 'event_id': 'RV.eventID',
+ 'unitScaleFactor': 1.0,
+ 'data_dir': data_dir,
+ }
+ )
+
+ RV_elements = np.array(events).T[0].tolist() # noqa: N806
+ # RV_elements = []
+ # for event in events:
# if event['EventClassification'] == 'Earthquake':
# RV_elements.append(event['fileName'])
# elif event['EventClassification'] == 'Hurricane':
@@ -387,92 +380,97 @@ def write_RV(BIM_file, EVENT_file, data_dir):
event_file['randomVariables'][0]['elements'] = RV_elements
else:
- event_file['Events'].append({
- #'type': bim_data['Events']['type'],
- 'type': bim_data['Events'][0]['type'],
- 'event_id': events[0]['fileName'],
- 'unitScaleFactor': 1.0,
- 'data_dir': str(data_dir)
- })
+ event_file['Events'].append(
+ {
+ # 'type': bim_data['Events']['type'],
+ 'type': bim_data['Events'][0]['type'],
+ 'event_id': events[0]['fileName'],
+ 'unitScaleFactor': 1.0,
+ 'data_dir': str(data_dir),
+ }
+ )
# if time histories are used, then load the first event
- #if bim_data['Events']['type'] == 'timeHistory':
+ # if bim_data['Events']['type'] == 'timeHistory':
if bim_data['Events'][0]['type'] == 'timeHistory':
- event_file['Events'][0].update(load_record(events[0][0],
- data_dir,
- empty=len(events) > 1))
+ event_file['Events'][0].update(
+ load_record(events[0][0], data_dir, empty=len(events) > 1)
+ )
- with open(EVENT_file, 'w') as f:
+ with open(EVENT_file, 'w') as f: # noqa: PLW1514, PTH123
json.dump(event_file, f, indent=2)
-
-def load_record(fileName, data_dir, scale_factor=1.0, empty=False):
+def load_record(fileName, data_dir, scale_factor=1.0, empty=False): # noqa: FBT002, N803, D103
# Copied from SimCenterEvent, write data of motions into Event
- fileName = fileName.split('x')[0]
+ fileName = fileName.split('x')[0] # noqa: N806
- with open(posixpath.join(data_dir,'{}.json'.format(fileName)), 'r') as f:
+ with open(posixpath.join(data_dir, f'{fileName}.json')) as f: # noqa: PLW1514, PTH123
event_data = json.load(f)
event_dic = {
'name': fileName,
- 'dT' : event_data['dT'],
+ 'dT': event_data['dT'],
'numSteps': len(event_data['data_x']),
'timeSeries': [],
- 'pattern': []
+ 'pattern': [],
}
if not empty:
- for i, (src_label, tar_label) in enumerate(zip(['data_x', 'data_y'],
- ['accel_X', 'accel_Y'])):
- if src_label in event_data.keys():
-
- event_dic['timeSeries'].append({
- 'name': tar_label,
- 'type': 'Value',
- 'dT': event_data['dT'],
- 'data': list(np.array(event_data[src_label])*scale_factor)
- })
- event_dic['pattern'].append({
- 'type': 'UniformAcceleration',
- 'timeSeries': tar_label,
- 'dof': i+1
- })
+ for i, (src_label, tar_label) in enumerate(
+ zip(['data_x', 'data_y'], ['accel_X', 'accel_Y'])
+ ):
+ if src_label in event_data.keys(): # noqa: SIM118
+ event_dic['timeSeries'].append(
+ {
+ 'name': tar_label,
+ 'type': 'Value',
+ 'dT': event_data['dT'],
+ 'data': list(np.array(event_data[src_label]) * scale_factor),
+ }
+ )
+ event_dic['pattern'].append(
+ {
+ 'type': 'UniformAcceleration',
+ 'timeSeries': tar_label,
+ 'dof': i + 1,
+ }
+ )
return event_dic
-def build_model(model_params, numEvt):
-
+def build_model(model_params, numEvt): # noqa: N803, D103
try:
- depthToRock = model_params['DepthToRock']
- except:
- depthToRock = 0
- Vs30 = model_params['Vs30']
+ depthToRock = model_params['DepthToRock'] # noqa: N806
+ except: # noqa: E722
+ depthToRock = 0 # noqa: N806
+ Vs30 = model_params['Vs30'] # noqa: N806
# Vs30 model
- thickness, Vs = SVM(Vs30, depthToRock, VsRock, elementSize)
+ thickness, Vs = SVM(Vs30, depthToRock, VsRock, elementSize) # noqa: N806
- numElems = len(Vs)
+ numElems = len(Vs) # noqa: N806
# Config model
- f = open('freefield_config.tcl', 'w')
+ f = open('freefield_config.tcl', 'w') # noqa: PLW1514, PTH123, SIM115
f.write('# site response configuration file\n')
- f.write('set soilThick {:.1f}\n'.format(thickness))
- f.write('set numLayers {:d}\n'.format(numElems))
+ f.write(f'set soilThick {thickness:.1f}\n')
+ f.write(f'set numLayers {numElems:d}\n')
f.write('# layer thickness - bottom to top\n')
- eleVsize = thickness/numElems
- travelTime = 0
+ eleVsize = thickness / numElems # noqa: N806
+ travelTime = 0 # noqa: N806
for ii in range(numElems):
- f.write('set layerThick({:d}) {:.2f}\n'.format(ii+1, eleVsize))
- f.write('set nElemY({:d}) 1\n'.format(ii+1))
- f.write('set sElemY({:d}) {:.3f}\n'.format(ii+1, eleVsize))
- travelTime += eleVsize / Vs[ii]
+ f.write(f'set layerThick({ii + 1:d}) {eleVsize:.2f}\n')
+ f.write(f'set nElemY({ii + 1:d}) 1\n')
+ f.write(f'set sElemY({ii + 1:d}) {eleVsize:.3f}\n')
+ travelTime += eleVsize / Vs[ii] # noqa: N806
- averageVs = thickness / travelTime # time averaged shear wave velocity
- naturalFrequency = averageVs / 4 / thickness # Vs/4H
+ # time averaged shear wave velocity
+ averageVs = thickness / travelTime # noqa: N806
+ naturalFrequency = averageVs / 4 / thickness # Vs/4H # noqa: N806
- f.write('set nElemT {:d}\n'.format(numElems))
+ f.write(f'set nElemT {numElems:d}\n')
f.write('# motion file (used if the input arguments do not include motion)\n')
f.write('set accFile xInput.acc\n')
f.write('set dispFile xInput.disp\n')
@@ -487,83 +485,102 @@ def build_model(model_params, numEvt):
else:
f.write('set numEvt 1\n')
- f.write('set rockVs {:.1f}\n'.format(VsRock))
- f.write('set omega1 {:.2f}\n'.format(2.0 * pi * naturalFrequency))
- f.write('set omega2 {:.2f}\n'.format(2.0 * pi * naturalFrequency * 5.0))
+ f.write(f'set rockVs {VsRock:.1f}\n')
+ f.write(f'set omega1 {2.0 * pi * naturalFrequency:.2f}\n')
+ f.write(f'set omega2 {2.0 * pi * naturalFrequency * 5.0:.2f}\n')
f.close()
# Create Material
- f = open('freefield_material.tcl', 'w')
+ f = open('freefield_material.tcl', 'w') # noqa: PLW1514, PTH123, SIM115
if model_params['Model'] in 'BA':
# Borja and Amies 1994 J2 model
- rhoSoil = model_params['Den']
+ rhoSoil = model_params['Den'] # noqa: N806
poisson = 0.3
sig_v = rhoSoil * gravityG * eleVsize * 0.5
for ii in range(numElems):
- f.write('set rho({:d}) {:.1f}\n'.format(ii+1, rhoSoil))
- shearG = rhoSoil * Vs[ii] * Vs[ii]
- bulkK = shearG * 2.0 * (1 + poisson) / 3.0 / (1.0 - 2.0 * poisson)
- f.write('set shearG({:d}) {:.2f}\n'.format(ii+1, shearG))
- f.write('set bulkK({:d}) {:.2f}\n'.format(ii+1, bulkK))
- f.write('set su({:d}) {:.2f}\n'.format(
- ii+1, model_params['Su_rat'] * sig_v))
- sig_v = sig_v + rhoSoil * gravityG * eleVsize
- f.write('set h({:d}) {:.2f}\n'.format(
- ii+1, shearG * model_params['h/G']))
- f.write('set m({:d}) {:.2f}\n'.format(ii+1, model_params['m']))
- f.write('set h0({:d}) {:.2f}\n'.format(ii+1, model_params['h0']))
- f.write('set chi({:d}) {:.2f}\n'.format(ii+1, model_params['chi']))
- f.write('set mat({:d}) "J2CyclicBoundingSurface {:d} $shearG({:d}) $bulkK({:d}) $su({:d}) $rho({:d}) $h({:d}) $m({:d}) $h0({:d}) $chi({:d}) 0.5"\n\n\n'.format(
- ii+1, ii+1, ii+1, ii+1, ii+1, ii+1, ii+1, ii+1, ii+1, ii+1))
+ f.write(f'set rho({ii + 1:d}) {rhoSoil:.1f}\n')
+ shearG = rhoSoil * Vs[ii] * Vs[ii] # noqa: N806
+ bulkK = shearG * 2.0 * (1 + poisson) / 3.0 / (1.0 - 2.0 * poisson) # noqa: N806
+ f.write(f'set shearG({ii + 1:d}) {shearG:.2f}\n')
+ f.write(f'set bulkK({ii + 1:d}) {bulkK:.2f}\n')
+ f.write(
+ 'set su({:d}) {:.2f}\n'.format(
+ ii + 1, model_params['Su_rat'] * sig_v
+ )
+ )
+ sig_v = sig_v + rhoSoil * gravityG * eleVsize # noqa: PLR6104
+ f.write(
+ 'set h({:d}) {:.2f}\n'.format(ii + 1, shearG * model_params['h/G'])
+ )
+ f.write('set m({:d}) {:.2f}\n'.format(ii + 1, model_params['m']))
+ f.write('set h0({:d}) {:.2f}\n'.format(ii + 1, model_params['h0']))
+ f.write('set chi({:d}) {:.2f}\n'.format(ii + 1, model_params['chi']))
+ f.write(
+ f'set mat({ii + 1:d}) "J2CyclicBoundingSurface {ii + 1:d} $shearG({ii + 1:d}) $bulkK({ii + 1:d}) $su({ii + 1:d}) $rho({ii + 1:d}) $h({ii + 1:d}) $m({ii + 1:d}) $h0({ii + 1:d}) $chi({ii + 1:d}) 0.5"\n\n\n'
+ )
elif model_params['Model'] in 'PIMY':
# PIMY model
- rhoSoil = model_params['Den']
+ rhoSoil = model_params['Den'] # noqa: N806
poisson = 0.3
sig_v = rhoSoil * gravityG * eleVsize * 0.5
for ii in range(numElems):
- f.write('set rho({:d}) {:.1f}\n'.format(numElems-ii, rhoSoil))
- shearG = rhoSoil * Vs[ii] * Vs[ii]
- bulkK = shearG * 2.0 * (1 + poisson) / 3.0 / (1.0 - 2.0 * poisson)
- f.write('set Vs({:d}) {:.2f}\n'.format(numElems-ii, Vs[ii]))
- f.write('set shearG({:d}) {:.2f}\n'.format(numElems-ii, shearG))
- f.write('set bulkK({:d}) {:.2f}\n'.format(numElems-ii, bulkK))
- f.write('set su({:d}) {:.2f}\n'.format(numElems-ii, model_params['Su_rat'] * sig_v))
- sig_v = sig_v + rhoSoil * gravityG * eleVsize
- f.write('set h({:d}) {:.2f}\n'.format(numElems-ii, shearG * model_params['h/G']))
- f.write('set m({:d}) {:.2f}\n'.format(numElems-ii, model_params['m']))
- f.write('set h0({:d}) {:.2f}\n'.format(numElems-ii, model_params['h0']))
- f.write('set chi({:d}) {:.2f}\n'.format(numElems-ii, model_params['chi']))
- f.write('set mat({:d}) "PressureIndependMultiYield {:d} 3 $rho({:d}) $shearG({:d}) $bulkK({:d}) $su({:d}) 0.1 0.0 2116.0 0.0 31"\n\n\n'.format(
- numElems-ii, numElems-ii, numElems-ii, numElems-ii, numElems-ii, numElems-ii, numElems-ii, numElems-ii, numElems-ii, numElems-ii))
+ f.write(f'set rho({numElems - ii:d}) {rhoSoil:.1f}\n')
+ shearG = rhoSoil * Vs[ii] * Vs[ii] # noqa: N806
+ bulkK = shearG * 2.0 * (1 + poisson) / 3.0 / (1.0 - 2.0 * poisson) # noqa: N806
+ f.write(f'set Vs({numElems - ii:d}) {Vs[ii]:.2f}\n')
+ f.write(f'set shearG({numElems - ii:d}) {shearG:.2f}\n')
+ f.write(f'set bulkK({numElems - ii:d}) {bulkK:.2f}\n')
+ f.write(
+ 'set su({:d}) {:.2f}\n'.format(
+ numElems - ii, model_params['Su_rat'] * sig_v
+ )
+ )
+ sig_v = sig_v + rhoSoil * gravityG * eleVsize # noqa: PLR6104
+ f.write(
+ 'set h({:d}) {:.2f}\n'.format(
+ numElems - ii, shearG * model_params['h/G']
+ )
+ )
+ f.write('set m({:d}) {:.2f}\n'.format(numElems - ii, model_params['m']))
+ f.write(
+ 'set h0({:d}) {:.2f}\n'.format(numElems - ii, model_params['h0'])
+ )
+ f.write(
+ 'set chi({:d}) {:.2f}\n'.format(numElems - ii, model_params['chi'])
+ )
+ f.write(
+ f'set mat({numElems - ii:d}) "PressureIndependMultiYield {numElems - ii:d} 3 $rho({numElems - ii:d}) $shearG({numElems - ii:d}) $bulkK({numElems - ii:d}) $su({numElems - ii:d}) 0.1 0.0 2116.0 0.0 31"\n\n\n'
+ )
else:
- rhoSoil = model_params['Den']
+ rhoSoil = model_params['Den'] # noqa: N806
poisson = 0.3
for ii in range(numElems):
- f.write('set rho({:d}) {:.1f}\n'.format(ii+1, rhoSoil))
- f.write('set shearG({:d}) {:.2f}\n'.format(
- ii+1, rhoSoil * Vs[ii] * Vs[ii]))
- f.write('set nu({:d}) {:.2f}\n'.format(ii+1, poisson))
- f.write('set E({:d}) {:.2f}\n\n'.format(
- ii+1, 2 * rhoSoil * Vs[ii] * Vs[ii] * (1 + poisson)))
- f.write('set mat({:d}) "ElasticIsotropic {:d} $E({:d}) $nu({:d}) $rho({:d})"\n\n\n'.format(
- ii+1, ii+1, ii+1, ii+1, ii+1))
+ f.write(f'set rho({ii + 1:d}) {rhoSoil:.1f}\n')
+ f.write(f'set shearG({ii + 1:d}) {rhoSoil * Vs[ii] * Vs[ii]:.2f}\n')
+ f.write(f'set nu({ii + 1:d}) {poisson:.2f}\n')
+ f.write(
+ f'set E({ii + 1:d}) {2 * rhoSoil * Vs[ii] * Vs[ii] * (1 + poisson):.2f}\n\n'
+ )
+ f.write(
+ f'set mat({ii + 1:d}) "ElasticIsotropic {ii + 1:d} $E({ii + 1:d}) $nu({ii + 1:d}) $rho({ii + 1:d})"\n\n\n'
+ )
f.close()
-def SVM(Vs30, depthToRock, VsRock, elementSize):
+def SVM(Vs30, depthToRock, VsRock, elementSize): # noqa: N802, N803, D103
# Sediment Velocity Model (SVM)
# Developed by Jian Shi and Domniki Asimaki (2018)
# Generates a shear velocity profile from Vs30 for shallow crust profiles
# Valid for 173.1 m/s < Vs30 < 1000 m/s
# Check Vs30
- if Vs30 < 173.1 or Vs30 > 1000:
- print('Caution: Vs30 {} is not within the valid range of the SVM! \n'.format(Vs30))
+ if Vs30 < 173.1 or Vs30 > 1000: # noqa: PLR2004
+ print(f'Caution: Vs30 {Vs30} is not within the valid range of the SVM! \n') # noqa: T201
# Parameters specific to: California
- z_star = 2.5 # [m] depth considered to have constant Vs
+ z_star = 2.5 # [m] depth considered to have constant Vs
p1 = -2.1688e-4
p2 = 0.5182
p3 = 69.452
@@ -576,13 +593,13 @@ def SVM(Vs30, depthToRock, VsRock, elementSize):
s4 = -7.6187e-3
# SVM Parameters f(Vs30)
- Vs0 = p1 * (Vs30 ** 2) + p2 * Vs30 + p3
- k = np.exp(r1 * (Vs30 ** r2) + r3)
+ Vs0 = p1 * (Vs30**2) + p2 * Vs30 + p3 # noqa: N806
+ k = np.exp(r1 * (Vs30**r2) + r3)
n = s1 * np.exp(s2 * Vs30) + s3 * np.exp(s4 * Vs30)
# Check element size for max. frequency
- maxFrequency = 50 # Hz
- waveLength = Vs0 / maxFrequency
+ maxFrequency = 50 # Hz # noqa: N806
+ waveLength = Vs0 / maxFrequency # noqa: N806
# Need four elements per wavelength
if 4.0 * elementSize <= waveLength:
step_size = elementSize
@@ -590,11 +607,12 @@ def SVM(Vs30, depthToRock, VsRock, elementSize):
step_size = waveLength / 4.0
depth = max(30.0, depthToRock)
- z = np.linspace(0.0 + 0.5 * step_size, depth - 0.5 * step_size,
- int(depth / step_size)) # discretize depth to bedrock
+ z = np.linspace(
+ 0.0 + 0.5 * step_size, depth - 0.5 * step_size, int(depth / step_size)
+ ) # discretize depth to bedrock
# Vs Profile
- Vs = np.zeros(len(z))
+ Vs = np.zeros(len(z)) # noqa: N806
Vs[0] = Vs0
for ii in range(1, len(z)):
if z[ii] <= z_star:
@@ -604,22 +622,23 @@ def SVM(Vs30, depthToRock, VsRock, elementSize):
if depthToRock > 0:
thickness = depthToRock
- Vs_cropped = Vs[np.where(z <= depthToRock)]
+ Vs_cropped = Vs[np.where(z <= depthToRock)] # noqa: N806
else:
- Vs_cropped = Vs[np.where(Vs <= VsRock)]
+ Vs_cropped = Vs[np.where(Vs <= VsRock)] # noqa: N806
thickness = z[len(Vs_cropped) - 1] + 0.5 * step_size
if plotFlag:
- import matplotlib.pyplot as plt
+ import matplotlib.pyplot as plt # noqa: PLC0415
+
fig = plt.figure()
plt.plot(Vs, z, label='Vs profile')
- plt.plot(Vs_cropped, z[0: len(Vs_cropped)], label='Vs profile to bedrock')
- plt.grid(True)
+ plt.plot(Vs_cropped, z[0 : len(Vs_cropped)], label='Vs profile to bedrock')
+ plt.grid(True) # noqa: FBT003
ax = plt.gca()
ax.invert_yaxis()
plt.legend()
- plt.text(100, 12.5, 'Vs30 = {:.1f}m/s'.format(Vs30))
- plt.text(100, 17.5, 'Depth to bedrock = {:.1f}m'.format(depthToRock))
+ plt.text(100, 12.5, f'Vs30 = {Vs30:.1f}m/s')
+ plt.text(100, 17.5, f'Depth to bedrock = {depthToRock:.1f}m')
ax.set_xlabel('Vs (m/s)')
ax.set_ylabel('Depth (m)')
ax.set_xlim(left=0)
@@ -630,7 +649,6 @@ def SVM(Vs30, depthToRock, VsRock, elementSize):
if __name__ == '__main__':
-
# SVM(380, 0, 360, 0.5)
parser = argparse.ArgumentParser()
parser.add_argument('--filenameAIM', default=None)
@@ -642,6 +660,14 @@ def SVM(Vs30, depthToRock, VsRock, elementSize):
parser.add_argument('--getRV', nargs='?', const=True, default=False)
args = parser.parse_args()
- sys.exit(run_opensees(
- args.filenameAIM, args.filenameEVENT, args.pathEventData, args.mainScript,
- args.modelPath, args.ndm, args.getRV))
+ sys.exit(
+ run_opensees(
+ args.filenameAIM,
+ args.filenameEVENT,
+ args.pathEventData,
+ args.mainScript,
+ args.modelPath,
+ args.ndm,
+ args.getRV,
+ )
+ )
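Reviewer note: the unit handling in get_scale_factors reduces to two factors: f_in takes the input unit to the common base and, for accelerations, f_out = f_time**2 / f_length takes the base value to the requested output units. A self-contained sketch of that arithmetic (the unit constants below are illustrative stand-ins, not the values defined in simcenter_common):

# hypothetical unit constants expressed in a metre/second base
UNITS = {'m': 1.0, 'inch': 0.0254, 'sec': 1.0, 'g': 9.81}


def accel_scale(input_unit, out_length='inch', out_time='sec'):
    f_length = UNITS[out_length]
    f_time = UNITS[out_time]
    f_out = f_time**2.0 / f_length  # output factor for acceleration
    f_in = UNITS[input_unit]        # input factor to the common base
    return f_in * f_out             # combined scale, as in the script above


print(accel_scale('g'))  # roughly 386, i.e. g expressed in inch/sec^2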
diff --git a/modules/createEVENT/siteResponse/SiteResponse.py b/modules/createEVENT/siteResponse/SiteResponse.py
index 1ee304e85..d542e3383 100644
--- a/modules/createEVENT/siteResponse/SiteResponse.py
+++ b/modules/createEVENT/siteResponse/SiteResponse.py
@@ -1,36 +1,40 @@
+import json # noqa: CPY001, D100, INP001
+import subprocess # noqa: S404
import sys
-import os
-import subprocess
-import json
+
from calibration import createMaterial
from postProcess import postProcess
-def main(args):
+def main(args): # noqa: D103
# set filenames
- srtName = args[1]
- evtName = args[3]
+ srtName = args[1] # noqa: N806
+ evtName = args[3] # noqa: N806
- RFflag = False
+ RFflag = False # noqa: N806
- with open(srtName, 'r', encoding='utf-8') as json_file:
+ with open(srtName, encoding='utf-8') as json_file: # noqa: PTH123
data = json.load(json_file)
- for material in data["Events"][0]["materials"]:
- if material["type"] == "PM4Sand_Random" or material["type"] == "PDMY03_Random" or material["type"] == "Elastic_Random":
- RFflag = True
+ for material in data['Events'][0]['materials']:
+ if (
+ material['type'] == 'PM4Sand_Random'
+ or material['type'] == 'PDMY03_Random'
+ or material['type'] == 'Elastic_Random'
+ ):
+ RFflag = True # noqa: N806
break
if RFflag:
- #create material file based on 1D Gaussian field
- soilData = data["Events"][0]
+ # create material file based on 1D Gaussian field
+ soilData = data['Events'][0] # noqa: N806
createMaterial(soilData)
- #Run OpenSees
- subprocess.Popen("OpenSees model.tcl", shell=True).wait()
+ # Run OpenSees
+ subprocess.Popen('OpenSees model.tcl', shell=True).wait() # noqa: S602, S607
- #Run postprocessor to create EVENT.json
+ # Run postprocessor to create EVENT.json
postProcess(evtName)
-if __name__ == '__main__':
+if __name__ == '__main__':
main(sys.argv[1:])
diff --git a/modules/createEVENT/siteResponse/calibration.py b/modules/createEVENT/siteResponse/calibration.py
index af9251ba2..1779b3adb 100644
--- a/modules/createEVENT/siteResponse/calibration.py
+++ b/modules/createEVENT/siteResponse/calibration.py
@@ -1,176 +1,373 @@
-from Gauss1D import gauss1D
-from scipy.interpolate import interp1d
+import json # noqa: CPY001, D100, INP001
+import sys
+
import numpy as np
import pandas as pd
-import json
-import sys
+from Gauss1D import gauss1D
+from scipy.interpolate import interp1d
+
+
+def materialPM4(baseInputs, matTag, fn): # noqa: N802, N803, D103
+ fn.write(
+ 'nDMaterial PM4Sand {} {:.3f} {:.2f} {:.3f} {:.2f} {:.2f} {:.2f} {:.2f} {:.2f} {:.2f} {:.2f} {:.2f} {:.2f} {:.2f} {:.2f} {:.2f} {:.2f} {:.2f} {:.2f} {:.2f} {:.2f} {:.2f} {:.2f} {:.2f} {:.2f} \n'.format(
+ matTag,
+ baseInputs['Dr'],
+ baseInputs['Go'],
+ baseInputs['hpo'],
+ baseInputs['rho'],
+ baseInputs['P_atm'],
+ baseInputs['h0'],
+ baseInputs['emax'],
+ baseInputs['emin'],
+ baseInputs['nb'],
+ baseInputs['nd'],
+ baseInputs['Ado'],
+ baseInputs['z_max'],
+ baseInputs['cz'],
+ baseInputs['ce'],
+ baseInputs['phic'],
+ baseInputs['nu'],
+ baseInputs['cgd'],
+ baseInputs['cdr'],
+ baseInputs['ckaf'],
+ baseInputs['Q'],
+ baseInputs['R'],
+ baseInputs['m'],
+ baseInputs['Fsed_min'],
+ baseInputs['p_sedo'],
+ )
+ )
-def materialPM4(baseInputs, matTag, fn):
- fn.write("nDMaterial PM4Sand {} {:.3f} {:.2f} {:.3f} {:.2f} {:.2f} {:.2f} {:.2f} {:.2f} {:.2f} {:.2f} {:.2f} {:.2f} {:.2f} {:.2f} \
- {:.2f} {:.2f} {:.2f} {:.2f} {:.2f} {:.2f} {:.2f} {:.2f} {:.2f} {:.2f} \n".format(matTag, baseInputs["Dr"], baseInputs["Go"], baseInputs["hpo"], baseInputs["rho"], baseInputs["P_atm"], baseInputs["h0"], baseInputs["emax"],
- baseInputs["emin" ], baseInputs["nb"], baseInputs["nd"], baseInputs["Ado"], baseInputs["z_max"], baseInputs["cz"], baseInputs["ce"], baseInputs["phic"],
- baseInputs["nu"], baseInputs["cgd"], baseInputs["cdr"], baseInputs["ckaf"], baseInputs["Q"], baseInputs["R"], baseInputs["m"], baseInputs["Fsed_min"], baseInputs["p_sedo"]))
+def materialPDMY03(baseInputs, matTag, fn): # noqa: N802, N803, D103
+ fn.write(
+ 'nDMaterial PressureDependMultiYield03 {} {} {:.2f} {:.3e} {:.3e} {:.2f} {:.2f} {:.2f} {:.2f} {:.2f} {} {:.3f} {:.3f} {:.3f} {:.3f} {:.3f} {:.3f} {:.3f} {:.3f} {} {:.3f} {:.3f} {:.3f} {:.3f} \n'.format(
+ matTag,
+ baseInputs['nd'],
+ baseInputs['rho'],
+ baseInputs['refShearModul'],
+ baseInputs['refBulkModul'],
+ baseInputs['frictionAng'],
+ baseInputs['peakShearStra'],
+ baseInputs['refPress'],
+ baseInputs['pressDependCoe'],
+ baseInputs['PTAng'],
+ baseInputs['mType'],
+ baseInputs['ca'],
+ baseInputs['cb'],
+ baseInputs['cc'],
+ baseInputs['cd'],
+ baseInputs['ce'],
+ baseInputs['da'],
+ baseInputs['db'],
+ baseInputs['dc'],
+ baseInputs['noYieldSurf'],
+ baseInputs['liquefac1'],
+ baseInputs['liquefac2'],
+ baseInputs['pa'],
+ baseInputs['s0'],
+ )
+ )
-def materialPDMY03(baseInputs, matTag, fn):
- fn.write("nDMaterial PressureDependMultiYield03 {} {} {:.2f} {:.3e} {:.3e} {:.2f} {:.2f} {:.2f} {:.2f} {:.2f} \
- {} {:.3f} {:.3f} {:.3f} {:.3f} {:.3f} {:.3f} {:.3f} {:.3f} {} {:.3f} {:.3f} {:.3f} {:.3f} \n" \
- .format(matTag, baseInputs["nd"], baseInputs["rho"], baseInputs["refShearModul"], baseInputs["refBulkModul"], baseInputs["frictionAng"],
- baseInputs["peakShearStra"], baseInputs["refPress"], baseInputs["pressDependCoe"], baseInputs["PTAng"], baseInputs["mType"],
- baseInputs["ca"], baseInputs["cb"], baseInputs["cc"], baseInputs["cd"], baseInputs["ce"], baseInputs["da"], baseInputs["db"], baseInputs["dc"],
- baseInputs["noYieldSurf"], baseInputs["liquefac1"], baseInputs["liquefac2"], baseInputs["pa"], baseInputs["s0"]))
+def materialElastic(baseInputs, matTag, fn): # noqa: N802, N803, D103
+ fn.write(
+ 'nDMaterial ElasticIsotropic {} {:.3e} {:.3f} {:.2f} \n'.format(
+ matTag, baseInputs['E'], baseInputs['poisson'], baseInputs['density']
+ )
+ )
-def materialElastic(baseInputs, matTag, fn):
- fn.write("nDMaterial ElasticIsotropic {} {:.3e} {:.3f} {:.2f} \n" \
- .format(matTag, baseInputs["E"], baseInputs["poisson"], baseInputs["density"]))
-def calibration(variables, inputParameters, fn):
+def calibration(variables, inputParameters, fn): # noqa: C901, N803, D103
# This function contains two parts: call gauss1D to generate 1D random field; generate material based on random field
# Currently only relative density is supported
# Calibration of PM4Sand is based on a parametric study that produces hpo = f(Dr, Go, CRR)
# Calibration of PDMY03 is based on interpolation of pre calibrated parameters for a range of Dr
- if variables["materialType"] == "PM4Sand_Random":
+ if variables['materialType'] == 'PM4Sand_Random':
# PM4Sand
- baseInputs = {"Dr":0.65, "Go":600.0, "hpo":0.08, "rho":2.0, "P_atm":101.3, "h0":-1.0, "emax":0.800,
- "emin" :0.500, "nb":0.50, "nd":0.10, "Ado": -1.0, "z_max":-1.0, "cz": 250.0, "ce": -1.0, "phic":33.0,
- "nu":0.30, "cgd": 2.0, "cdr": -1.0, "ckaf": -1.0, "Q": 10.0, "R":1.5, "m": 0.01, "Fsed_min": -1.0,
- "p_sedo":-1.0}
- elif variables["materialType"] == "PDMY03_Random":
+ baseInputs = { # noqa: N806
+ 'Dr': 0.65,
+ 'Go': 600.0,
+ 'hpo': 0.08,
+ 'rho': 2.0,
+ 'P_atm': 101.3,
+ 'h0': -1.0,
+ 'emax': 0.800,
+ 'emin': 0.500,
+ 'nb': 0.50,
+ 'nd': 0.10,
+ 'Ado': -1.0,
+ 'z_max': -1.0,
+ 'cz': 250.0,
+ 'ce': -1.0,
+ 'phic': 33.0,
+ 'nu': 0.30,
+ 'cgd': 2.0,
+ 'cdr': -1.0,
+ 'ckaf': -1.0,
+ 'Q': 10.0,
+ 'R': 1.5,
+ 'm': 0.01,
+ 'Fsed_min': -1.0,
+ 'p_sedo': -1.0,
+ }
+ elif variables['materialType'] == 'PDMY03_Random':
# PDMY03
- baseInputs = {"nd": 2, "rho" : 1.5, "refShearModul" :4.69e4, "refBulkModul" :1.251e5, "frictionAng": 30.0, "peakShearStra" :0.1, "refPress" : 101.3,
- "pressDependCoe" : 0.5, "PTAng" : 20.4, "mType": 0, "ca" : 0.03, "cb" : 5, "cc" : 0.2, "cd" : 16.0, "ce" : 2.000000, "da" : 0.150,
- "db" : 3.0000, "dc" : -0.2, "noYieldSurf" : 20, "liquefac1" : 1.0, "liquefac2" : 0.0, "pa" : 101.3, "s0" : 1.73}
- elif variables["materialType"] == "Elastic_Random":
+ baseInputs = { # noqa: N806
+ 'nd': 2,
+ 'rho': 1.5,
+ 'refShearModul': 4.69e4,
+ 'refBulkModul': 1.251e5,
+ 'frictionAng': 30.0,
+ 'peakShearStra': 0.1,
+ 'refPress': 101.3,
+ 'pressDependCoe': 0.5,
+ 'PTAng': 20.4,
+ 'mType': 0,
+ 'ca': 0.03,
+ 'cb': 5,
+ 'cc': 0.2,
+ 'cd': 16.0,
+ 'ce': 2.000000,
+ 'da': 0.150,
+ 'db': 3.0000,
+ 'dc': -0.2,
+ 'noYieldSurf': 20,
+ 'liquefac1': 1.0,
+ 'liquefac2': 0.0,
+ 'pa': 101.3,
+ 's0': 1.73,
+ }
+ elif variables['materialType'] == 'Elastic_Random':
# Elastic
- baseInputs = {"E": 168480, "poisson" : 0.3, "density" : 2.0}
+ baseInputs = {'E': 168480, 'poisson': 0.3, 'density': 2.0} # noqa: N806
for keys in baseInputs:
baseInputs[keys] = inputParameters[keys]
- # calcualte random field
+ # calculate random field
# size of mesh
- thickness = variables["thickness"]
- waveLength = variables["Ly"]
+ thickness = variables['thickness']
+ waveLength = variables['Ly'] # noqa: N806
# Number of wave number increments in y-direction
- Ny = thickness / waveLength
+ Ny = thickness / waveLength # noqa: N806
rd = gauss1D(thickness, Ny)
rd.calculate()
- F = np.squeeze(rd.f.reshape((-1,1)))
- Y = np.linspace(0, rd.Ly, rd.My)
- f = interp1d(Y, F, kind="cubic")
+ F = np.squeeze(rd.f.reshape((-1, 1))) # noqa: N806
+ Y = np.linspace(0, rd.Ly, rd.My) # noqa: N806
+ f = interp1d(Y, F, kind='cubic')
# mapping from random field to mesh
- elemID = np.arange(variables["eleStart"], variables["eleEnd"] + 1, 1)
- elementY = np.linspace(variables["elevationStart"], variables["elevationEnd"], len(elemID))
+ elemID = np.arange(variables['eleStart'], variables['eleEnd'] + 1, 1) # noqa: N806
+ elementY = np.linspace( # noqa: N806
+ variables['elevationStart'], variables['elevationEnd'], len(elemID)
+ )
- for matTag in elemID:
- residual = variables["mean"] * f(elementY[matTag - variables["eleStart"]]) * variables["COV"]
- print()
- if variables["name"] == "Dr":
+ for matTag in elemID: # noqa: N806
+ residual = (
+ variables['mean']
+ * f(elementY[matTag - variables['eleStart']])
+ * variables['COV']
+ )
+ print() # noqa: T201
+ if variables['name'] == 'Dr':
# bound Dr between 0.2 and 0.95
- Dr = min(max(0.2, variables["mean"] + residual), 0.95)
- if Dr != Dr:
- Dr = 0.2
- if variables["materialType"] == "PM4Sand_Random":
- baseInputs["Dr"] = Dr
- Go = baseInputs["Go"]
+ Dr = min(max(0.2, variables['mean'] + residual), 0.95) # noqa: N806
+ if Dr != Dr: # noqa: PLR0124
+ Dr = 0.2 # noqa: N806
+ if variables['materialType'] == 'PM4Sand_Random':
+ baseInputs['Dr'] = Dr
+ Go = baseInputs['Go'] # noqa: N806
# CPT and SPT Based Liquefaction Triggering Procedures (Boulanger and Idriss 2014)
- Cd = 46.0
- N160 = Dr ** 2 * Cd
- CRR_IB = np.exp(N160 / 14.1 + (N160 / 126) ** 2 - (N160 / 23.6) ** 3 + (N160 / 25.4) ** 4 - 2.8)
- # Implementaion, Verification, and Validation of PM4Sand in OpenSees, Long Chen and Pedro Arduino, PEER Report, 2020
+ Cd = 46.0 # noqa: N806
+ N160 = Dr**2 * Cd # noqa: N806
+ CRR_IB = np.exp( # noqa: N806
+ N160 / 14.1
+ + (N160 / 126) ** 2
+ - (N160 / 23.6) ** 3
+ + (N160 / 25.4) ** 4
+ - 2.8
+ )
+ # Implementation, Verification, and Validation of PM4Sand in OpenSees, Long Chen and Pedro Arduino, PEER Report, 2020
# Based on a parametric study using quoFEM
a = -0.06438
b = 0.079598 + 0.12406 * Dr
- c = 0.12194 - 0.47627 * Dr - 0.000047009 * Go - CRR_IB + 0.00014048 * Dr * Go + 0.71347 * Dr ** 2
- hpo = (-b + np.sqrt(b ** 2 - 4 * a * c)) / (2 * a)
- if hpo != hpo:
+ c = (
+ 0.12194
+ - 0.47627 * Dr
+ - 0.000047009 * Go
+ - CRR_IB
+ + 0.00014048 * Dr * Go
+ + 0.71347 * Dr**2
+ )
+ hpo = (-b + np.sqrt(b**2 - 4 * a * c)) / (2 * a)
+ if hpo != hpo: # noqa: PLR0124
hpo = 0.4
- CRR_prediction = 0.114 - 0.44844 * Dr - (4.2648e-5) * Go + 0.079849 * hpo + (1.2811e-4) * Dr * Go \
- + 0.12136 * Dr * hpo + 0.69676 * Dr ** 2 - 0.06381 * hpo ** 2
+ CRR_prediction = ( # noqa: N806
+ 0.114
+ - 0.44844 * Dr
+ - (4.2648e-5) * Go
+ + 0.079849 * hpo
+ + (1.2811e-4) * Dr * Go
+ + 0.12136 * Dr * hpo
+ + 0.69676 * Dr**2
+ - 0.06381 * hpo**2
+ )
# bound hpo between 0.05 and 1.0
if CRR_prediction > CRR_IB:
hpo = 0.05
else:
hpo = 1.0
- baseInputs["hpo"] = hpo
+ baseInputs['hpo'] = hpo
materialPM4(baseInputs, matTag, fn)
- elif variables["materialType"] == "PDMY03_Random":
- Dr = max(min(Dr, 0.87), 0.33)
- baseInputs["Dr"] = Dr
+ elif variables['materialType'] == 'PDMY03_Random':
+ Dr = max(min(Dr, 0.87), 0.33) # noqa: N806
+ baseInputs['Dr'] = Dr
# interpolation using Khosravifar, A., Elgamal, A., Lu, J., and Li, J. [2018].
# "A 3D model for earthquake-induced liquefaction triggering and post-liquefaction response."
# Soil Dynamics and Earthquake Engineering, 110, 43-52
drs = np.array([0.33, 0.57, 0.74, 0.87])
- df = pd.DataFrame([(46900,125100,25.4,20.4,0.03,5,0.2,16,2,0.15,3,-0.2),
- (7.37e4, 1.968e5, 30.3, 25.3, 0.012, 3.0, 0.4, 9.0, 0.0, 0.3, 3.0, -0.3),
- (9.46e4, 2.526e5, 35.8, 30.8, 0.005, 1.0, 0.6, 4.6, -1.0, 0.45, 3.0, -0.4),
- (1.119e5, 2.983e5, 42.2, 37.2, 0.001, 0.0, 0.8, 2.2, 0.0, 0.6, 3.0, -0.5)],
- columns=("refShearModul", "refBulkModul", "frictionAng", "PTAng", "ca", "cb", "cc", \
- "cd", "ce", "da", "db", "dc"))
- for (columnName, columnData) in df.iteritems():
- f_Dr = interp1d(drs, df[columnName], kind="cubic")
+ df = pd.DataFrame( # noqa: PD901
+ [
+ (
+ 46900,
+ 125100,
+ 25.4,
+ 20.4,
+ 0.03,
+ 5,
+ 0.2,
+ 16,
+ 2,
+ 0.15,
+ 3,
+ -0.2,
+ ),
+ (
+ 7.37e4,
+ 1.968e5,
+ 30.3,
+ 25.3,
+ 0.012,
+ 3.0,
+ 0.4,
+ 9.0,
+ 0.0,
+ 0.3,
+ 3.0,
+ -0.3,
+ ),
+ (
+ 9.46e4,
+ 2.526e5,
+ 35.8,
+ 30.8,
+ 0.005,
+ 1.0,
+ 0.6,
+ 4.6,
+ -1.0,
+ 0.45,
+ 3.0,
+ -0.4,
+ ),
+ (
+ 1.119e5,
+ 2.983e5,
+ 42.2,
+ 37.2,
+ 0.001,
+ 0.0,
+ 0.8,
+ 2.2,
+ 0.0,
+ 0.6,
+ 3.0,
+ -0.5,
+ ),
+ ],
+ columns=(
+ 'refShearModul',
+ 'refBulkModul',
+ 'frictionAng',
+ 'PTAng',
+ 'ca',
+ 'cb',
+ 'cc',
+ 'cd',
+ 'ce',
+ 'da',
+ 'db',
+ 'dc',
+ ),
+ )
+ for columnName, columnData in df.iteritems(): # noqa: B007, N806
+ f_Dr = interp1d(drs, df[columnName], kind='cubic') # noqa: N806
baseInputs[columnName] = f_Dr(Dr)
materialPDMY03(baseInputs, matTag, fn)
- elif variables["name"] == "Vs":
- if variables["materialType"] == "Elastic_Random":
+ elif variables['name'] == 'Vs':
+ if variables['materialType'] == 'Elastic_Random':
# bound Dr between 50 and 1500
- Vs = min(max(50, variables["mean"] + residual), 1500)
- baseInputs["E"] = 2.0 * baseInputs["density"] * Vs * Vs * (1.0 + baseInputs["poisson"])
- fn.write("#Vs = {:.2f}\n".format(Vs))
+ Vs = min(max(50, variables['mean'] + residual), 1500) # noqa: N806
+ baseInputs['E'] = (
+ 2.0
+ * baseInputs['density']
+ * Vs
+ * Vs
+ * (1.0 + baseInputs['poisson'])
+ )
+ fn.write(f'#Vs = {Vs:.2f}\n')
materialElastic(baseInputs, matTag, fn)
-def createMaterial(data):
-
- eleStart = 0
- eleEnd = 0
- elevationStart = 0
- elevationEnd = 0
- numElems = 0
- totalHeight = 0
- randomMaterialList = ["PM4Sand_Random", "PDMY03_Random", "Elastic_Random"]
- fn = open("material.tcl", "w")
-
- for layer in reversed(data["soilProfile"]["soilLayers"]):
- if layer["eSize"] != 0:
- eleStart = numElems + 1
- numElemsLayer = round(layer["thickness"] / layer["eSize"])
- numElems += numElemsLayer
- eleSize = layer["thickness"] / numElemsLayer
- elevationStart = eleSize / 2.0
- totalHeight += layer["thickness"]
- eleEnd = numElems
- elevationEnd = layer["thickness"] - eleSize / 2.0
- if data["materials"][layer["material"] - 1]["type"] in randomMaterialList:
- variables = dict(
- materialType = data["materials"][layer["material"] - 1]["type"],
- name = data["materials"][layer["material"] - 1]["Variable"],
- mean = data["materials"][layer["material"] - 1]["mean"],
- COV = data["materials"][layer["material"] - 1]["COV"],
- Ly = data["materials"][layer["material"] - 1]["Ly"],
- thickness = layer["thickness"],
- eleStart = eleStart,
- eleEnd = eleEnd,
- elevationStart = elevationStart, # location of first Gauss Point respect to layer base
- elevationEnd = elevationEnd # location of last Gauss Point respect to layer base
+
+def createMaterial(data): # noqa: N802, D103
+ eleStart = 0 # noqa: N806
+ eleEnd = 0 # noqa: N806
+ elevationStart = 0 # noqa: N806
+ elevationEnd = 0 # noqa: N806
+ numElems = 0 # noqa: N806
+ totalHeight = 0 # noqa: N806
+ randomMaterialList = ['PM4Sand_Random', 'PDMY03_Random', 'Elastic_Random'] # noqa: N806
+ fn = open('material.tcl', 'w') # noqa: PLW1514, PTH123, SIM115
+
+ for layer in reversed(data['soilProfile']['soilLayers']):
+ if layer['eSize'] != 0:
+ eleStart = numElems + 1 # noqa: N806
+ numElemsLayer = round(layer['thickness'] / layer['eSize']) # noqa: N806
+ numElems += numElemsLayer # noqa: N806
+ eleSize = layer['thickness'] / numElemsLayer # noqa: N806
+ elevationStart = eleSize / 2.0 # noqa: N806
+ totalHeight += layer['thickness'] # noqa: N806
+ eleEnd = numElems # noqa: N806
+ elevationEnd = layer['thickness'] - eleSize / 2.0 # noqa: N806
+ if data['materials'][layer['material'] - 1]['type'] in randomMaterialList:
+ variables = dict( # noqa: C408
+ materialType=data['materials'][layer['material'] - 1]['type'],
+ name=data['materials'][layer['material'] - 1]['Variable'],
+ mean=data['materials'][layer['material'] - 1]['mean'],
+ COV=data['materials'][layer['material'] - 1]['COV'],
+ Ly=data['materials'][layer['material'] - 1]['Ly'],
+ thickness=layer['thickness'],
+ eleStart=eleStart,
+ eleEnd=eleEnd,
+ elevationStart=elevationStart, # location of first Gauss Point with respect to the layer base
+ elevationEnd=elevationEnd, # location of last Gauss Point with respect to the layer base
)
- inputParameters = data["materials"][layer["material"] - 1]
+ inputParameters = data['materials'][layer['material'] - 1] # noqa: N806
calibration(variables, inputParameters, fn)
fn.close()
-if __name__ == "__main__":
-
- srtName = sys.argv[0]
+if __name__ == '__main__':
+ srtName = sys.argv[0] # noqa: N816
- ## data obtained from user input
+ # data obtained from user input
# define the random field
- with open(srtName) as json_file:
+ with open(srtName) as json_file: # noqa: PLW1514, PTH123
data = json.load(json_file)
- eventData = data["Events"][0]
+ eventData = data['Events'][0] # noqa: N816
createMaterial(eventData)
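Reviewer note: the PM4Sand branch of calibration() maps relative density to an SPT blow count and a triggering resistance (Boulanger and Idriss 2014), then inverts the quadratic fit from the quoFEM parametric study to obtain hpo. A stand-alone sketch of just that arithmetic, using the baseInputs defaults Dr = 0.65 and Go = 600 and omitting the bounding and NaN fallbacks of the real function:

import numpy as np


def pm4sand_hpo(dr, go):
    # SPT blow count and CRR per Boulanger and Idriss (2014)
    n160 = dr**2 * 46.0
    crr = np.exp(
        n160 / 14.1 + (n160 / 126) ** 2 - (n160 / 23.6) ** 3 + (n160 / 25.4) ** 4 - 2.8
    )
    # quadratic fit a*hpo**2 + b*hpo + c = 0 from the parametric study
    a = -0.06438
    b = 0.079598 + 0.12406 * dr
    c = (
        0.12194 - 0.47627 * dr - 0.000047009 * go - crr
        + 0.00014048 * dr * go + 0.71347 * dr**2
    )
    return crr, (-b + np.sqrt(b**2 - 4 * a * c)) / (2 * a)


print(pm4sand_hpo(0.65, 600.0))  # roughly CRR ~ 0.20 and hpo ~ 0.45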
diff --git a/modules/createEVENT/siteResponse/createGM4BIM.py b/modules/createEVENT/siteResponse/createGM4BIM.py
index 7d18aacf0..9af983415 100644
--- a/modules/createEVENT/siteResponse/createGM4BIM.py
+++ b/modules/createEVENT/siteResponse/createGM4BIM.py
@@ -1,5 +1,4 @@
-# -*- coding: utf-8 -*-
-#
+# # noqa: INP001, D100
# Copyright (c) 2019 The Regents of the University of California
#
# This file is part of the RDT Application.
@@ -37,100 +36,100 @@
# Chaofeng Wang
# fmk
-import numpy as np
+import argparse
import json
import os
import shutil
from glob import glob
-import argparse
+
import pandas as pd
-def createFilesForEventGrid(inputDir, outputDir, removeInputDir):
- if not os.path.isdir(inputDir):
- print(f"input dir: {inputDir} does not exist")
+def createFilesForEventGrid(inputDir, outputDir, removeInputDir): # noqa: N802, N803, D103
+ if not os.path.isdir(inputDir): # noqa: PTH112
+ print(f'input dir: {inputDir} does not exist') # noqa: T201
return 0
-
- if not os.path.exists(outputDir):
- os.mkdir(outputDir)
-
- siteFiles = glob(f"{inputDir}/*BIM.json")
+ if not os.path.exists(outputDir): # noqa: PTH110
+ os.mkdir(outputDir) # noqa: PTH102
- GP_file = []
- Longitude = []
- Latitude = []
- id = []
+ siteFiles = glob(f'{inputDir}/*BIM.json') # noqa: PTH207, N806
+
+ GP_file = [] # noqa: N806, F841
+ Longitude = [] # noqa: N806
+ Latitude = [] # noqa: N806
+ id = [] # noqa: A001
sites = []
for site in siteFiles:
-
- with open(site, 'r') as f:
-
- All_json = json.load(f)
- generalInfo = All_json['GeneralInformation']
+ with open(site) as f: # noqa: PLW1514, PTH123
+ All_json = json.load(f) # noqa: N806
+ generalInfo = All_json['GeneralInformation'] # noqa: N806
Longitude.append(generalInfo['Longitude'])
Latitude.append(generalInfo['Latitude'])
- siteID = generalInfo['BIM_id']
+ siteID = generalInfo['BIM_id'] # noqa: N806
id.append(siteID)
-
- siteFileName = f"Site_{siteID}.csv"
+
+ siteFileName = f'Site_{siteID}.csv' # noqa: N806
sites.append(siteFileName)
-
- workdirs = glob(f"{inputDir}/{siteID}/workdir.*")
- siteEventFiles = []
- siteEventFactors = []
-
- for workdir in workdirs:
- head, sep, sampleID = workdir.partition('workdir.')
- print(sampleID)
+ workdirs = glob(f'{inputDir}/{siteID}/workdir.*') # noqa: PTH207
+ siteEventFiles = [] # noqa: N806
+ siteEventFactors = [] # noqa: N806
+
+ for workdir in workdirs:
+ head, sep, sampleID = workdir.partition('workdir.') # noqa: F841, N806
+ print(sampleID) # noqa: T201
- eventName = f"Event_{siteID}_{sampleID}.json"
- print(eventName)
- shutil.copy(f"{workdir}/fmkEVENT", f"{outputDir}/{eventName}")
+ eventName = f'Event_{siteID}_{sampleID}.json' # noqa: N806
+ print(eventName) # noqa: T201
+ shutil.copy(f'{workdir}/fmkEVENT', f'{outputDir}/{eventName}')
siteEventFiles.append(eventName)
siteEventFactors.append(1)
- siteDF = pd.DataFrame(list(zip(siteEventFiles, siteEventFactors)), columns =['TH_file', 'factor'])
- siteDF.to_csv(f"{outputDir}/{siteFileName}", index=False)
-
+ siteDF = pd.DataFrame( # noqa: N806
+ list(zip(siteEventFiles, siteEventFactors)),
+ columns=['TH_file', 'factor'],
+ )
+ siteDF.to_csv(f'{outputDir}/{siteFileName}', index=False)
# create the EventFile
- gridDF = pd.DataFrame(list(zip(sites, Longitude, Latitude)), columns =['GP_file', 'Longitude', 'Latitude'])
+ gridDF = pd.DataFrame( # noqa: N806
+ list(zip(sites, Longitude, Latitude)),
+ columns=['GP_file', 'Longitude', 'Latitude'],
+ )
- gridDF.to_csv(f"{outputDir}/EventGrid.csv", index=False)
-
+ gridDF.to_csv(f'{outputDir}/EventGrid.csv', index=False)
# remove original files
- if removeInputDir:
+ if removeInputDir:
shutil.rmtree(inputDir)
-
+
return 0
-if __name__ == "__main__":
- #Defining the command line arguments
+if __name__ == '__main__':
+ # Defining the command line arguments
- workflowArgParser = argparse.ArgumentParser(
- "Create ground motions for BIM.",
- allow_abbrev=False)
+ workflowArgParser = argparse.ArgumentParser( # noqa: N816
+ 'Create ground motions for BIM.', allow_abbrev=False
+ )
- workflowArgParser.add_argument("-i", "--inputDir",
- help="Dir containing results of siteResponseWhale.")
+ workflowArgParser.add_argument(
+ '-i', '--inputDir', help='Dir containing results of siteResponseWhale.'
+ )
- workflowArgParser.add_argument("-o", "--outputDir",
- help="Dir where results to be stored.")
+ workflowArgParser.add_argument(
+ '-o', '--outputDir', help='Dir where results to be stored.'
+ )
- workflowArgParser.add_argument("--removeInput", action='store_true')
+ workflowArgParser.add_argument('--removeInput', action='store_true')
- #Parsing the command line arguments
- wfArgs = workflowArgParser.parse_args()
+ # Parsing the command line arguments
+ wfArgs = workflowArgParser.parse_args() # noqa: N816
- print(wfArgs)
- #Calling the main function
+ print(wfArgs) # noqa: T201
+ # Calling the main function
createFilesForEventGrid(wfArgs.inputDir, wfArgs.outputDir, wfArgs.removeInput)
-
-
diff --git a/modules/createEVENT/siteResponse/postProcess.py b/modules/createEVENT/siteResponse/postProcess.py
index 0dcae4c9d..abd1d58ba 100644
--- a/modules/createEVENT/siteResponse/postProcess.py
+++ b/modules/createEVENT/siteResponse/postProcess.py
@@ -1,49 +1,41 @@
-# This script create evt.j for workflow
-import numpy as np
+# This script creates the EVENT JSON file for the workflow # noqa: CPY001, D100, INP001
import json
-import os
import shutil
-def postProcess(evtName):
- #acc = np.loadtxt("acceleration.out")
- #os.remove("acceleration.out") # remove acceleration file to save space
- acc = np.loadtxt("out_tcl/acceleration.out")
- shutil.rmtree("out_tcl") # remove output files to save space
- time = acc[:,0]
- acc_surf = acc[:,-2] / 9.81
- dT = time[1] - time[0]
-
- timeSeries = dict(
- name = "accel_X",
- type = "Value",
- dT = dT,
- data = acc_surf.tolist()
- )
+import numpy as np
- patterns = dict(
- type = "UniformAcceleration",
- timeSeries = "accel_X",
- dof = 1
- )
- evts = dict(
- RandomVariables = [],
- name = "SiteResponseTool",
- type = "Seismic",
- description = "Surface acceleration",
- dT = dT,
- numSteps = len(acc_surf),
- timeSeries = [timeSeries],
- pattern = [patterns]
+def postProcess(evtName): # noqa: N802, N803, D103
+ # acc = np.loadtxt("acceleration.out")
+ # os.remove("acceleration.out") # remove acceleration file to save space
+ acc = np.loadtxt('out_tcl/acceleration.out')
+ shutil.rmtree('out_tcl') # remove output files to save space
+ time = acc[:, 0]
+ acc_surf = acc[:, -2] / 9.81
+ dT = time[1] - time[0] # noqa: N806
+
+ timeSeries = dict(name='accel_X', type='Value', dT=dT, data=acc_surf.tolist()) # noqa: C408, N806
+
+ patterns = dict(type='UniformAcceleration', timeSeries='accel_X', dof=1) # noqa: C408
+
+ evts = dict( # noqa: C408
+ RandomVariables=[],
+ name='SiteResponseTool',
+ type='Seismic',
+ description='Surface acceleration',
+ dT=dT,
+ numSteps=len(acc_surf),
+ timeSeries=[timeSeries],
+ pattern=[patterns],
)
- dataToWrite = dict(Events = [evts])
+ dataToWrite = dict(Events=[evts]) # noqa: C408, N806
- with open(evtName, "w") as outfile:
+ with open(evtName, 'w') as outfile: # noqa: PLW1514, PTH123
json.dump(dataToWrite, outfile, indent=4)
return 0
-if __name__ == "__main__":
- postProcess("EVENT.json")
+if __name__ == '__main__':
+ postProcess('EVENT.json')
diff --git a/modules/createEVENT/siteResponse/postProcessRegional.py b/modules/createEVENT/siteResponse/postProcessRegional.py
index 85dcccee5..592f532ad 100644
--- a/modules/createEVENT/siteResponse/postProcessRegional.py
+++ b/modules/createEVENT/siteResponse/postProcessRegional.py
@@ -1,49 +1,42 @@
-# This script create evt.j for workflow
-import numpy as np
+# This script creates evt.j for workflow # noqa: CPY001, D100, INP001
import json
import os
-import shutil
-
-def postProcess(evtName):
- acc = np.loadtxt("acceleration.out")
- os.remove("acceleration.out") # remove acceleration file to save space
- #acc = np.loadtxt("out_tcl/acceleration.out")
- #shutil.rmtree("out_tcl") # remove output files to save space
- time = acc[:,0]
- acc_surf = acc[:,-2] / 9.81
- dT = time[1] - time[0]
-
- timeSeries = dict(
- name = "accel_X",
- type = "Value",
- dT = dT,
- data = acc_surf.tolist()
- )
- patterns = dict(
- type = "UniformAcceleration",
- timeSeries = "accel_X",
- dof = 1
- )
+import numpy as np
+
- evts = dict(
- RandomVariables = [],
- name = "SiteResponseTool",
- type = "Seismic",
- description = "Surface acceleration",
- dT = dT,
- numSteps = len(acc_surf),
- timeSeries = [timeSeries],
- pattern = [patterns]
+def postProcess(evtName): # noqa: N802, N803, D103
+ acc = np.loadtxt('acceleration.out')
+ # remove acceleration file to save space
+ os.remove('acceleration.out') # noqa: PTH107
+ # acc = np.loadtxt("out_tcl/acceleration.out")
+ # shutil.rmtree("out_tcl") # remove output files to save space
+ time = acc[:, 0]
+ acc_surf = acc[:, -2] / 9.81
+ dT = time[1] - time[0] # noqa: N806
+
+ timeSeries = dict(name='accel_X', type='Value', dT=dT, data=acc_surf.tolist()) # noqa: C408, N806
+
+ patterns = dict(type='UniformAcceleration', timeSeries='accel_X', dof=1) # noqa: C408
+
+ evts = dict( # noqa: C408
+ RandomVariables=[],
+ name='SiteResponseTool',
+ type='Seismic',
+ description='Surface acceleration',
+ dT=dT,
+ numSteps=len(acc_surf),
+ timeSeries=[timeSeries],
+ pattern=[patterns],
)
- dataToWrite = dict(Events = [evts])
+ dataToWrite = dict(Events=[evts]) # noqa: C408, N806
- with open(evtName, "w") as outfile:
+ with open(evtName, 'w') as outfile: # noqa: PLW1514, PTH123
json.dump(dataToWrite, outfile, indent=4)
return 0
-if __name__ == "__main__":
- postProcess("EVENT.json")
+if __name__ == '__main__':
+ postProcess('EVENT.json')
diff --git a/modules/createEVENT/stochasticGroundMotion/command_parser.cpp b/modules/createEVENT/stochasticGroundMotion/command_parser.cpp
index b6d2dcabe..fd0838b5b 100644
--- a/modules/createEVENT/stochasticGroundMotion/command_parser.cpp
+++ b/modules/createEVENT/stochasticGroundMotion/command_parser.cpp
@@ -35,13 +35,13 @@ CommandParser::CommandParser(int& number_of_arguments, char* arguments[]) {
auto result = command_parser_.parse(clara::detail::Args(number_of_arguments, arguments));
- // Check whether command line was succussfully parsed
+ // Check whether command line was successfully parsed
if (!result) {
std::cerr << "ERROR: In command line inputs: " << result.errorMessage() << std::endl;
throw std::invalid_argument("ERROR: In CommandParser::CommandParser: Command line inputs error");
}
- // If help flag passed, pring usage and exit
+ // If help flag passed, print usage and exit
if (configuration_.help) {
std::cout << command_parser_ << std::endl;
}
diff --git a/modules/createEVENT/stochasticGroundMotion/command_parser.h b/modules/createEVENT/stochasticGroundMotion/command_parser.h
index e997072db..6b85c08ff 100644
--- a/modules/createEVENT/stochasticGroundMotion/command_parser.h
+++ b/modules/createEVENT/stochasticGroundMotion/command_parser.h
@@ -12,7 +12,7 @@
class CommandParser {
public:
/**
- * @constructor Default contstructor
+ * @constructor Default constructor
*/
CommandParser() = default;
diff --git a/modules/createEVENT/stochasticGroundMotion/main.cpp b/modules/createEVENT/stochasticGroundMotion/main.cpp
index b124d0723..4c064f9e1 100644
--- a/modules/createEVENT/stochasticGroundMotion/main.cpp
+++ b/modules/createEVENT/stochasticGroundMotion/main.cpp
@@ -114,7 +114,7 @@ int main(int argc, char** argv) {
//
// Sang-ri - checking if key seed exists in AIM.json. We are not anymore getting the seed
- // from the commend line.
+ // from the command line.
//
@@ -163,7 +163,7 @@ int main(int argc, char** argv) {
// it->at("ruptureDist"), it->at("vs30"), inputs.get_seed());
eq_generator = std::make_shared(
inputs.get_model_name(), it->at("momentMagnitude"),
- it->at("ruptureDist"), it->at("vs30"), mySeed); // May need to update smelt not to cut of seed nubmers
+ it->at("ruptureDist"), it->at("vs30"), mySeed); // May need to update smelt not to cut off seed numbers
} else if (model_name == "DabaghiDerKiureghianNFGM") {
//eq_generator = std::make_shared(
// inputs.get_model_name(), it->at("faultType"),
@@ -299,4 +299,4 @@ void throwError(std::string msg){
exit(-1);
return;
-}
\ No newline at end of file
+}
diff --git a/modules/createEVENT/stochasticWave/Ex1_WaveKinematics.py b/modules/createEVENT/stochasticWave/Ex1_WaveKinematics.py
index 68b4d8f82..2dac13aee 100644
--- a/modules/createEVENT/stochasticWave/Ex1_WaveKinematics.py
+++ b/modules/createEVENT/stochasticWave/Ex1_WaveKinematics.py
@@ -1,53 +1,48 @@
-#!/usr/bin/env python3
+#!/usr/bin/env python3 # noqa: CPY001, EXE001
-"""
-Plot the wave kinematics (elevation, velocity, acceleration) for linear waves
+"""Plot the wave kinematics (elevation, velocity, acceleration) for linear waves
Different locations, times and superposition of frequencies can be used.
-"""
-from __future__ import print_function
-import numpy as np
-import pandas as pd
+""" # noqa: D205
+
import matplotlib.pyplot as plt
-from fractions import Fraction
-import matplotlib as mpl
-import os, sys
-import re
-import json
-import argparse
-
-# Local
-from welib.tools.figure import defaultRC; defaultRC();
-from welib.tools.colors import python_colors
-from welib.hydro.wavekin import elevation2d, wavenumber, kinematics2d
-from welib.hydro.wavekin import *
-from welib.hydro.morison import *
-
-
-fig,axes = plt.subplots(2, 2, sharey=False, figsize=(6.4,4.8)) # (6.4,4.8)
-fig.subplots_adjust(left=0.10, right=0.95, top=0.91, bottom=0.09, hspace=0.29, wspace=0.46)
+import numpy as np
+
+# Local
+from welib.tools.figure import defaultRC
+
+defaultRC()
+from welib.hydro.morison import * # noqa: E402, F403
+from welib.hydro.wavekin import * # noqa: E402, F403
+from welib.hydro.wavekin import elevation2d, kinematics2d, wavenumber # noqa: E402
+from welib.tools.colors import python_colors # noqa: E402
+
+fig, axes = plt.subplots(2, 2, sharey=False, figsize=(6.4, 4.8)) # (6.4,4.8)
+fig.subplots_adjust(
+ left=0.10, right=0.95, top=0.91, bottom=0.09, hspace=0.29, wspace=0.46
+)
plt.suptitle('Hydro - Wave kinematics')
-g = 9.81 # gravity [m/s^2]
-h = 30. # water depth [m]
+g = 9.81 # gravity [m/s^2]
+h = 30.0 # water depth [m]
# --- (Top Left) Example for one frequency, one point, multiple times
-a = 8.1 # wave peak amplitude [m]
-x, z = 0, 0 # position where kinematics are evaluated [m]
-T = 12.7 # period [s]
-eps = 0 # phase shift [rad]
-f = 1./T
-k = wavenumber(f, h, g)
-time = np.arange(0,2*T,T/101)
+a = 8.1 # wave peak amplitude [m]
+x, z = 0, 0 # position where kinematics are evaluated [m]
+T = 12.7 # period [s]
+eps = 0 # phase shift [rad]
+f = 1.0 / T
+k = wavenumber(f, h, g)
+time = np.arange(0, 2 * T, T / 101)
# Wave kinematics
-vel,acc = kinematics2d(a, f, k, eps, h, time, z, x)
-eta = elevation2d(a, f, k, eps, time, x)
+vel, acc = kinematics2d(a, f, k, eps, h, time, z, x)
+eta = elevation2d(a, f, k, eps, time, x)
# Plot
-ax=axes[0,0]
-ax.plot(time, eta , label=r'Elevation [m]')
-ax.plot(time, vel , label=r'Velocity [m/s]')
-ax.plot(time, acc , label=r'Acceleration [m$^2$/s]')
+ax = axes[0, 0]
+ax.plot(time, eta, label=r'Elevation [m]')
+ax.plot(time, vel, label=r'Velocity [m/s]')
+ax.plot(time, acc, label=r'Acceleration [m$^2$/s]')
ax.set_xlabel('Time [s]')
ax.set_ylabel('Kinematics')
ax.legend(fontsize=8, loc='lower center')
@@ -55,73 +50,95 @@
# --- (Bottom Left) Example for one frequencies, multiple points(1d array), multiple times
-a = np.array([3, ]) # wave peak amplitude [m]
-T = np.array([12.]) # period [s]
-eps = np.array([0 ]) # phase shift [rad]
-z = np.linspace(-h, 0, 10) # position where kinematics are evaluated
-x = z*0
-f = 1./T
-k = wavenumber(f, h, g)
-time = np.linspace(0,2*T[0]/2,5)
-vel,acc = kinematics2d(a, f, k, eps, h, time, z, x)
-#eta = elevation2d(a, f, k, eps, time, x)
-ax=axes[1,0]
-sT = ['0','T/4','T/2','3T/4']
-for it,t in enumerate(time[:-1]):
- ax.plot(vel[:,it], z, ['-','-','-','--'][it], c=python_colors(it), label='vel, t={:}'.format(sT[it]))
-for it,t in enumerate(time[:-1]):
- ax.plot(acc[:,it], z, ['o','.','.','.'][it], c=python_colors(it), label='acc, t={:}'.format(sT[it]))
+a = np.array(
+ [
+ 3,
+ ]
+) # wave peak amplitude [m]
+T = np.array([12.0]) # period [s]
+eps = np.array([0]) # phase shift [rad]
+z = np.linspace(-h, 0, 10) # position where kinematics are evaluated
+x = z * 0
+f = 1.0 / T
+k = wavenumber(f, h, g)
+time = np.linspace(0, 2 * T[0] / 2, 5)
+vel, acc = kinematics2d(a, f, k, eps, h, time, z, x)
+# eta = elevation2d(a, f, k, eps, time, x)
+ax = axes[1, 0]
+sT = ['0', 'T/4', 'T/2', '3T/4'] # noqa: N816
+for it, t in enumerate(time[:-1]): # noqa: B007
+ ax.plot(
+ vel[:, it],
+ z,
+ ['-', '-', '-', '--'][it],
+ c=python_colors(it),
+ label=f'vel, t={sT[it]}',
+ )
+for it, t in enumerate(time[:-1]): # noqa: B007
+ ax.plot(
+ acc[:, it],
+ z,
+ ['o', '.', '.', '.'][it],
+ c=python_colors(it),
+ label=f'acc, t={sT[it]}',
+ )
ax.set_ylabel('Water depth [m]')
ax.set_xlabel('Velocity and acceleration')
ax.legend(fontsize=8, ncol=2, loc='lower center')
# --- (Top Right) Example for multiple frequencies, one point, multiple times
-a = np.array([1., 3., 5., 0.5]) # wave peak amplitude [m]
-T = np.array([20., 12.,9., 3.]) # period [s]
-eps = np.array([np.pi/4, 0, np.pi/2, 0]) # phase shift [rad]
-x, z = 0, 0 # position where kinematics are evaluated
-f = 1./T
-k = wavenumber(f, h, g)
-time = np.arange(0,2*T[0],T[0]/101)
-vel,acc = kinematics2d(a, f, k, eps, h, time, z, x)
+a = np.array([1.0, 3.0, 5.0, 0.5]) # wave peak amplitude [m]
+T = np.array([20.0, 12.0, 9.0, 3.0]) # period [s]
+eps = np.array([np.pi / 4, 0, np.pi / 2, 0]) # phase shift [rad]
+x, z = 0, 0 # position where kinematics are evaluated
+f = 1.0 / T
+k = wavenumber(f, h, g)
+time = np.arange(0, 2 * T[0], T[0] / 101)
+vel, acc = kinematics2d(a, f, k, eps, h, time, z, x)
eta = elevation2d(a, f, k, eps, time, x)
# Plot
-ax=axes[0,1]
-ax.plot(time, eta , label=r'Elevation [m]')
-ax.plot(time, vel , label=r'Velocity [m/s]')
-ax.plot(time, acc , label=r'Acceleration [m$^2$/s]')
+ax = axes[0, 1]
+ax.plot(time, eta, label=r'Elevation [m]')
+ax.plot(time, vel, label=r'Velocity [m/s]')
+ax.plot(time, acc, label=r'Acceleration [m$^2$/s]')
ax.set_xlabel('Time [s]')
ax.set_ylabel('Kinematics')
ax.legend(fontsize=8, loc='lower center')
ax.tick_params(direction='in')
-ax.set_ylim([-8,8])
+ax.set_ylim([-8, 8])
ax.set_title('Multiple frequencies')
# --- (Bottom Left) multiple frequencies, multiple points (2d array), multiple times
-a = np.array([1., 3., 5., 0.5]) # wave peak amplitude [m]
-T = np.array([20., 12.,9., 3.]) # period [s]
-eps = np.array([np.pi/4, 0, np.pi/2, 0]) # phase shift [rad]
-vz = np.linspace(-h, 0, 2) # position where kinematics are evaluated
-vx = np.linspace(-10,10,3)
-X,Z = np.meshgrid(vx,vz)
-f = 1./T
-k = wavenumber(f, h, g)
-time = np.arange(0,2*T[0],T[0]/101)
-vel,acc = kinematics2d(a, f, k, eps, h, time, Z, X)
-#eta = elevation2d(a, f, k, eps, time, x)
+a = np.array([1.0, 3.0, 5.0, 0.5]) # wave peak amplitude [m]
+T = np.array([20.0, 12.0, 9.0, 3.0]) # period [s]
+eps = np.array([np.pi / 4, 0, np.pi / 2, 0]) # phase shift [rad]
+vz = np.linspace(-h, 0, 2) # position where kinematics are evaluated
+vx = np.linspace(-10, 10, 3)
+X, Z = np.meshgrid(vx, vz)
+f = 1.0 / T
+k = wavenumber(f, h, g)
+time = np.arange(0, 2 * T[0], T[0] / 101)
+vel, acc = kinematics2d(a, f, k, eps, h, time, Z, X)
+# eta = elevation2d(a, f, k, eps, time, x)
# --- Plot
-ax=axes[1,1]
-for i,z in enumerate(vz):
- for j,x in enumerate(vx):
- ax.plot(time, vel[i,j,:], ['--','-',':'][j], c=python_colors(i), label='z={:.0f} x={:.0f}'.format(z,x))
+ax = axes[1, 1]
+for i, z in enumerate(vz):
+ for j, x in enumerate(vx):
+ ax.plot(
+ time,
+ vel[i, j, :],
+ ['--', '-', ':'][j],
+ c=python_colors(i),
+ label=f'z={z:.0f} x={x:.0f}',
+ )
ax.set_ylabel('Velocity [m/s]')
ax.set_xlabel('Time [s]')
ax.legend(fontsize=8, loc='lower center', ncol=2)
-ax.set_ylim([-8,8])
+ax.set_ylim([-8, 8])
ax.tick_params(direction='in')
@@ -132,12 +149,13 @@
# plt.show()
-if __name__=="__main__":
+if __name__ == '__main__':
pass
-
-if __name__=="__test__":
+
+if __name__ == '__test__':
pass
-if __name__=="__export__":
+if __name__ == '__export__':
# fig.close()
from welib.tools.repo import export_figs_callback
+
export_figs_callback(__file__)
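
Note on the wavenumber(f, h, g) calls in this file: welib solves the linear dispersion relation (2*pi*f)^2 = g*k*tanh(k*h) for the wavenumber k. A minimal stand-in sketch of that step, assuming plain NumPy and a simple fixed-point iteration (welib's actual solver may differ):

    import numpy as np

    def wavenumber_sketch(f, h, g=9.81, iters=200):
        # iterate k = w^2 / (g*tanh(k*h)), starting from the deep-water guess k = w^2/g
        w2 = (2.0 * np.pi * np.asarray(f, dtype=float)) ** 2
        k = w2 / g
        for _ in range(iters):
            k = w2 / (g * np.tanh(k * h))
        return k
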
diff --git a/modules/createEVENT/stochasticWave/Ex2_Jonswap_spectrum.py b/modules/createEVENT/stochasticWave/Ex2_Jonswap_spectrum.py
index e5b4070df..53752131d 100644
--- a/modules/createEVENT/stochasticWave/Ex2_Jonswap_spectrum.py
+++ b/modules/createEVENT/stochasticWave/Ex2_Jonswap_spectrum.py
@@ -1,50 +1,41 @@
-#!/usr/bin/env python3
+#!/usr/bin/env python3 # noqa: CPY001, EXE001
-"""
-Plot the JONSWAP spectrum for a given sea state
-"""
+"""Plot the JONSWAP spectrum for a given sea state""" # noqa: D400
-from __future__ import print_function
-import numpy as np
-import pandas as pd
import matplotlib.pyplot as plt
-from fractions import Fraction
-import matplotlib as mpl
-import os, sys
-import re
-import json
-import argparse
-
-# Local
-from welib.tools.figure import defaultRC; defaultRC();
-from welib.tools.colors import python_colors
-from welib.hydro.wavekin import *
-from welib.hydro.morison import *
-from welib.hydro.spectra import jonswap
+import numpy as np
+# Local
+from welib.tools.figure import defaultRC
+defaultRC()
+from welib.hydro.morison import * # noqa: E402, F403
+from welib.hydro.spectra import jonswap # noqa: E402
+from welib.hydro.wavekin import * # noqa: E402, F403
# --- Parameters
-t = np.arange(0,3600.1,1) # time vector [s]
-dt = t[1]-t[0] # timestep [s]
-Hs = 8.1 # Significant wave height [m]
-Tp = 12.7 # Peak period [s]
+t = np.arange(0, 3600.1, 1) # time vector [s]
+dt = t[1] - t[0] # timestep [s]
+Hs = 8.1 # Significant wave height [m]
+Tp = 12.7 # Peak period [s]
# --- Derived parameters
-df = 1./np.max(t) # Step size for frequency
-fMax = (1./dt)/2 # Highest frequency
-freq = np.arange(df, fMax+df/2, df)
+df = 1.0 / np.max(t) # Step size for frequency # noqa: PD901
+fMax = (1.0 / dt) / 2 # Highest frequency # noqa: N816
+freq = np.arange(df, fMax + df / 2, df)
# --- Spectrum and amplitude
-S = jonswap(freq, Hs, Tp) # Spectral density [m^2.s]
-ap = np.sqrt(2*S*df) # Wave amplitude [m]
+S = jonswap(freq, Hs, Tp) # Spectral density [m^2.s]
+ap = np.sqrt(2 * S * df) # Wave amplitude [m]
# Find location of maximum energy
-iMax = np.argmax(S)
+iMax = np.argmax(S) # noqa: N816
# --- Plots
-fig,ax = plt.subplots(1, 1, sharey=False, figsize=(6.4,4.8)) # (6.4,4.8)
-fig.subplots_adjust(left=0.12, right=0.95, top=0.95, bottom=0.11, hspace=0.20, wspace=0.20)
+fig, ax = plt.subplots(1, 1, sharey=False, figsize=(6.4, 4.8)) # (6.4,4.8)
+fig.subplots_adjust(
+ left=0.12, right=0.95, top=0.95, bottom=0.11, hspace=0.20, wspace=0.20
+)
ax.plot(freq, S)
ax.plot(freq[iMax], S[iMax], 'ko')
ax.set_xlabel('Frequency [Hz]')
@@ -56,12 +47,11 @@
# plt.show()
-if __name__=="__main__":
+if __name__ == '__main__':
pass
-if __name__=="__test__":
+if __name__ == '__test__':
np.testing.assert_almost_equal(S[iMax], 113.8770176)
-if __name__=="__export__":
+if __name__ == '__export__':
from welib.tools.repo import export_figs_callback
- export_figs_callback(__file__)
-
+ export_figs_callback(__file__)
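
Note on the reformatted line ap = np.sqrt(2 * S * df): for a one-sided spectral density S(f) [m^2 s] sampled with frequency step df [Hz], the component wave amplitude is a_i = sqrt(2*S(f_i)*df). A self-contained sketch of just that conversion, assuming only NumPy (jonswap itself stays in welib.hydro.spectra):

    import numpy as np

    def component_amplitudes(S, df):
        # one-sided PSD [m^2 s] and frequency step [Hz] -> component amplitudes [m]
        return np.sqrt(2.0 * np.asarray(S) * df)
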
diff --git a/modules/createEVENT/stochasticWave/Ex3_WaveTimeSeries.py b/modules/createEVENT/stochasticWave/Ex3_WaveTimeSeries.py
index 5cc71b9a3..df113a67c 100644
--- a/modules/createEVENT/stochasticWave/Ex3_WaveTimeSeries.py
+++ b/modules/createEVENT/stochasticWave/Ex3_WaveTimeSeries.py
@@ -1,68 +1,60 @@
-#!/usr/bin/env python3
-
-"""
-Generate wave time series based on the Jonswap spectrum
-"""
+#!/usr/bin/env python3 # noqa: CPY001, EXE001
-from __future__ import print_function
-import numpy as np
-from numpy.random import uniform, seed
-import matplotlib.pyplot as plt
-import matplotlib as mpl
-import pandas as pd
-from fractions import Fraction
-import os, sys
-import re
-import json
-import argparse
+"""Generate wave time series based on the Jonswap spectrum""" # noqa: D400
-# Local
-from welib.tools.figure import defaultRC; defaultRC();
-from welib.tools.colors import python_colors
-from welib.hydro.spectra import jonswap
-from welib.hydro.wavekin import elevation2d, wavenumber
-from welib.tools.spectral import fft_wrap
-from welib.hydro.wavekin import *
-from welib.hydro.morison import *
+import matplotlib.pyplot as plt
+import numpy as np
+from numpy.random import seed, uniform
+# Local
+from welib.tools.figure import defaultRC
+defaultRC()
+from welib.hydro.morison import * # noqa: E402, F403
+from welib.hydro.spectra import jonswap # noqa: E402
+from welib.hydro.wavekin import * # noqa: E402, F403
+from welib.hydro.wavekin import elevation2d, wavenumber # noqa: E402
+from welib.tools.spectral import fft_wrap # noqa: E402
# --- Random seed
seed(None)
# --- Parameters
# t = np.linspace(0,600,601) # time vector [s]
-t = np.linspace(0,60.0, 6001) # time vector [s]
-Hs = 8.1 # Significant wave height [m]
-Tp = 12.7 # Peak period [s]
-h = 30. # Water depth [m]
-g = 9.80665 # Gravity[m/s2]
+t = np.linspace(0, 60.0, 6001) # time vector [s]
+Hs = 8.1 # Significant wave height [m]
+Tp = 12.7 # Peak period [s]
+h = 30.0 # Water depth [m]
+g = 9.80665 # Gravity[m/s2]
# --- Jonswap spectrum
-dt = t[1]-t[0] # timestep [s]
-df = 1/np.max(t) # step size for frequency
-fHighCut = 1/(dt)/2. # Highest frequency in calculations
-freq = np.arange(df, fHighCut, df)
+dt = t[1] - t[0] # timestep [s]
+df = 1 / np.max(t) # step size for frequency # noqa: PD901
+fHighCut = 1 / (dt) / 2.0 # Highest frequency in calculations # noqa: N816
+freq = np.arange(df, fHighCut, df)
S = jonswap(freq, Hs, Tp=Tp, g=9.80665)
# --- Solve dispersion relation
k = wavenumber(freq, h, g)
# --- Compute wave elevation based on amplitudes and random phases
-eps = uniform(0,2*np.pi,len(freq)) # random phases between 0 and 2pi
-a = np.sqrt(2*S*df) # wave amplitudes based on spectrum
-x = 0 # longitudinal distance where wave is evaluated [m]
-eta = elevation2d(a, freq, k, eps, t, x)
+# random phases between 0 and 2pi
+eps = uniform(0, 2 * np.pi, len(freq))
+a = np.sqrt(2 * S * df) # wave amplitudes based on spectrum
+x = 0 # longitudinal distance where wave is evaluated [m]
+eta = elevation2d(a, freq, k, eps, t, x)
# --- Compute FFT of wave elevation
f_fft, S_fft, Info = fft_wrap(t, eta, output_type='PSD', averaging='none')
-
+
# --- Plots
-fig,axes = plt.subplots(2, 1, sharey=False, figsize=(6.4,4.8)) # (6.4,4.8)
-fig.subplots_adjust(left=0.12, right=0.95, top=0.95, bottom=0.11, hspace=0.30, wspace=0.20)
+fig, axes = plt.subplots(2, 1, sharey=False, figsize=(6.4, 4.8)) # (6.4,4.8)
+fig.subplots_adjust(
+ left=0.12, right=0.95, top=0.95, bottom=0.11, hspace=0.30, wspace=0.20
+)
-ax=axes[0]
+ax = axes[0]
ax.plot(t, eta)
ax.tick_params(direction='in')
ax.autoscale(enable=True, axis='both', tight=True)
@@ -70,9 +62,9 @@
ax.set_ylabel(r'Wave elevation [m]')
ax.set_title('Wave Generation')
-ax=axes[1]
+ax = axes[1]
ax.plot(f_fft, S_fft, '-', label='Generated')
-ax.plot(freq, S , 'k', label='Jonswap')
+ax.plot(freq, S, 'k', label='Jonswap')
ax.legend()
ax.set_xlabel('Frequency [Hz]')
ax.set_ylabel(r'Spectral density [m$^2$ s]')
@@ -82,14 +74,11 @@
# fig.savefig('WaveTimeSeries.webp')
# plt.show()
-if __name__=="__main__":
+if __name__ == '__main__':
pass
-if __name__=="__test__":
+if __name__ == '__test__':
pass
-if __name__=="__export__":
+if __name__ == '__export__':
from welib.tools.repo import export_figs_callback
- export_figs_callback(__file__)
-
-
-
+ export_figs_callback(__file__)
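
Note on the elevation step in this file: elevation2d superposes regular wave components with random phases, eta(t) = sum_i a_i*cos(2*pi*f_i*t - k_i*x + eps_i). A small sketch of that superposition at x = 0, assuming only NumPy (welib's argument order and sign conventions may differ):

    import numpy as np

    def elevation_sketch(a, freq, eps, t):
        # sum of cosine components with amplitudes a, frequencies freq, phases eps
        a, freq, eps, t = map(np.atleast_1d, (a, freq, eps, t))
        phase = 2.0 * np.pi * freq[:, None] * t[None, :] + eps[:, None]
        return np.sum(a[:, None] * np.cos(phase), axis=0)
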
diff --git a/modules/createEVENT/stochasticWave/Ex4_WaveLoads.py b/modules/createEVENT/stochasticWave/Ex4_WaveLoads.py
index 87298e389..23e6e4a5f 100644
--- a/modules/createEVENT/stochasticWave/Ex4_WaveLoads.py
+++ b/modules/createEVENT/stochasticWave/Ex4_WaveLoads.py
@@ -1,125 +1,127 @@
-#!/usr/bin/env python3
+#!/usr/bin/env python3 # noqa: CPY001, EXE001
-"""
-Compute inline/total hydrodynamic force and moments on a monopile using Morison's equation
-"""
-from __future__ import print_function
+"""Compute inline/total hydrodynamic force and moments on a monopile using Morison's equation""" # noqa: D400
+
+import argparse
+from fractions import Fraction
+
+import matplotlib.pyplot as plt
import numpy as np
import pandas as pd
-import matplotlib.pyplot as plt
-from fractions import Fraction
-import matplotlib as mpl
-import os, sys
-import re
-import json
-import argparse
-# Local
-from welib.tools.figure import defaultRC; defaultRC();
-from welib.tools.colors import python_colors
-from welib.hydro.wavekin import *
-from welib.hydro.morison import *
+# Local
+from welib.tools.figure import defaultRC
+defaultRC()
+from welib.hydro.morison import * # noqa: E402, F403
+from welib.hydro.wavekin import * # noqa: E402, F403
+from welib.tools.colors import python_colors # noqa: E402
# --- Parameters
-g = 9.81 # gravity [m/s^2]
-h = 30. # water depth [m]
-rho = 1000 # water density
-D = 6 # monopile diameter [m]
-CD = 1 # given
-CM = 2 #
-a = 3 # wave peak amplitude [m]
-T = 12. # period [s]
-eps = 0 # phase shift [rad]
-f = 1./T
-k = wavenumber(f, h, g)
-
-nz = 30 # number of points used in the z direction to compute loads
-
-z_ref = -h # reference point for moment calculation
+g = 9.81 # gravity [m/s^2]
+h = 30.0 # water depth [m]
+rho = 1000 # water density
+D = 6 # monopile diameter [m]
+CD = 1 # given
+CM = 2
+a = 3 # wave peak amplitude [m]
+T = 12.0 # period [s]
+eps = 0 # phase shift [rad]
+f = 1.0 / T
+k = wavenumber(f, h, g) # noqa: F405
+
+nz = 30 # number of points used in the z direction to compute loads
+
+z_ref = -h # reference point for moment calculation
# --------------------------------------------------------------------------------}
# --- Inline force and moments as function of time, with or without Wheeler stretching
# --------------------------------------------------------------------------------{
-time = np.linspace(0,T,9)
+time = np.linspace(0, T, 9)
-fig1,axes1 = plt.subplots(2, 4, sharex=True, sharey=True, figsize=(12.8,4.8)) # (6.4,4.8)
-fig1.subplots_adjust(left=0.05, right=0.99, top=0.95, bottom=0.09, hspace=0.26, wspace=0.11)
+fig1, axes1 = plt.subplots(
+ 2, 4, sharex=True, sharey=True, figsize=(12.8, 4.8)
+) # (6.4,4.8)
+fig1.subplots_adjust(
+ left=0.05, right=0.99, top=0.95, bottom=0.09, hspace=0.26, wspace=0.11
+)
-fig2,axes2 = plt.subplots(2, 4, sharex=True, sharey=True, figsize=(12.8,4.8)) # (6.4,4.8)
-fig2.subplots_adjust(left=0.05, right=0.99, top=0.95, bottom=0.09, hspace=0.26, wspace=0.11)
+fig2, axes2 = plt.subplots(
+ 2, 4, sharex=True, sharey=True, figsize=(12.8, 4.8)
+) # (6.4,4.8)
+fig2.subplots_adjust(
+ left=0.05, right=0.99, top=0.95, bottom=0.09, hspace=0.26, wspace=0.11
+)
-XLIM =[-75,75] # For inline force
-XLIMM=[-2500,2500] # For inline moment
+XLIM = [-75, 75] # For inline force
+XLIMM = [-2500, 2500] # For inline moment
for it, t in enumerate(time[:-1]):
# Wave kinematics
- eta = elevation2d(a, f, k, eps, t, x=0)
- z = np.linspace(-h, eta, nz)
- u, du = kinematics2d(a, f, k, eps, h, t, z, Wheeler=True, eta=eta)
- u0, du0 = kinematics2d(a, f, k, eps, h, t, z)
+ eta = elevation2d(a, f, k, eps, t, x=0) # noqa: F405
+ z = np.linspace(-h, eta, nz)
+ u, du = kinematics2d(a, f, k, eps, h, t, z, Wheeler=True, eta=eta) # noqa: F405
+ u0, du0 = kinematics2d(a, f, k, eps, h, t, z) # noqa: F405
# Wave loads with wheeler
- p_tot = inline_load(u, du, D, CD , CM , rho)
- p_inertia = inline_load(u, du, D, CD*0, CM , rho)
- p_drag = inline_load(u, du, D, CD , CM*0, rho)
- dM = p_tot * (z-z_ref) # [Nm/m]
+ p_tot = inline_load(u, du, D, CD, CM, rho) # noqa: F405
+ p_inertia = inline_load(u, du, D, CD * 0, CM, rho) # noqa: F405
+ p_drag = inline_load(u, du, D, CD, CM * 0, rho) # noqa: F405
+ dM = p_tot * (z - z_ref) # [Nm/m] # noqa: N816
# Wave loads without Wheeler
- p_tot0 = inline_load(u0, du0, D, CD , CM , rho)
- p_inertia0= inline_load(u0, du0, D, CD*0, CM , rho)
- p_drag0 = inline_load(u0, du0, D, CD , CM*0, rho)
- dM0 = p_tot0* (z-z_ref) # [Nm/m]
-
+ p_tot0 = inline_load(u0, du0, D, CD, CM, rho) # noqa: F405
+ p_inertia0 = inline_load(u0, du0, D, CD * 0, CM, rho) # noqa: F405
+ p_drag0 = inline_load(u0, du0, D, CD, CM * 0, rho) # noqa: F405
+ dM0 = p_tot0 * (z - z_ref) # [Nm/m] # noqa: N816
# Plot inline force
- ax=axes1[int(it/4),np.mod(it,4)]
- ax.plot(p_inertia/1000,z, '-', c=python_colors(0), label = r'$f_{inertia}$')
- ax.plot(p_drag/1000 ,z, '-', c=python_colors(3), label = r'$f_{drag}$')
- ax.plot(p_tot/1000 ,z, 'k-' , label = r'$f_{tot}$')
- ax.plot(p_inertia0/1000,z, '+', c=python_colors(0))
- ax.plot(p_drag0/1000 ,z, '+', c=python_colors(3))
- ax.plot(p_tot0/1000 ,z, 'k+' )
- ax.set_title('t/T={}'.format(Fraction(t/T)))
- if it==0:
+ ax = axes1[int(it / 4), np.mod(it, 4)]
+ ax.plot(p_inertia / 1000, z, '-', c=python_colors(0), label=r'$f_{inertia}$')
+ ax.plot(p_drag / 1000, z, '-', c=python_colors(3), label=r'$f_{drag}$')
+ ax.plot(p_tot / 1000, z, 'k-', label=r'$f_{tot}$')
+ ax.plot(p_inertia0 / 1000, z, '+', c=python_colors(0))
+ ax.plot(p_drag0 / 1000, z, '+', c=python_colors(3))
+ ax.plot(p_tot0 / 1000, z, 'k+')
+ ax.set_title(f't/T={Fraction(t / T)}')
+ if it == 0:
ax.legend()
- ax.plot(XLIM,[0,0],'k')
- ax.plot(XLIM,[a,a],'k--')
- ax.plot(XLIM,[-a,-a],'k--')
-
+ ax.plot(XLIM, [0, 0], 'k')
+ ax.plot(XLIM, [a, a], 'k--')
+ ax.plot(XLIM, [-a, -a], 'k--')
# Plot inline moment
- ax=axes2[int(it/4),np.mod(it,4)]
- ax.plot(dM/1000 ,z, 'k-', label = r'$dM_{tot}$ with Wheeler')
- ax.plot(dM0/1000 ,z, 'k+', label = r'$dM_{tot}$ no-correction')
- ax.set_title('t/T={}'.format(Fraction(t/T)))
- if it==0:
+ ax = axes2[int(it / 4), np.mod(it, 4)]
+ ax.plot(dM / 1000, z, 'k-', label=r'$dM_{tot}$ with Wheeler')
+ ax.plot(dM0 / 1000, z, 'k+', label=r'$dM_{tot}$ no-correction')
+ ax.set_title(f't/T={Fraction(t / T)}')
+ if it == 0:
ax.legend()
- ax.plot(XLIMM,[0,0],'k')
- ax.plot(XLIMM,[a,a],'k--')
- ax.plot(XLIMM,[-a,-a],'k--')
-
-
-axes1[0,0].set_xlim(XLIM)
-axes1[0,0].set_ylim([-h,a+1])
-axes1[0,0].set_ylabel('Depth z [m]')
-axes1[1,0].set_ylabel('Depth z [m]')
-axes1[1,0].set_xlabel('Inline force [kN/m]')
-axes1[1,1].set_xlabel('Inline force [kN/m]')
-axes1[1,2].set_xlabel('Inline force [kN/m]')
-axes1[1,3].set_xlabel('Inline force [kN/m]')
+ ax.plot(XLIMM, [0, 0], 'k')
+ ax.plot(XLIMM, [a, a], 'k--')
+ ax.plot(XLIMM, [-a, -a], 'k--')
+
+
+axes1[0, 0].set_xlim(XLIM)
+axes1[0, 0].set_ylim([-h, a + 1])
+axes1[0, 0].set_ylabel('Depth z [m]')
+axes1[1, 0].set_ylabel('Depth z [m]')
+axes1[1, 0].set_xlabel('Inline force [kN/m]')
+axes1[1, 1].set_xlabel('Inline force [kN/m]')
+axes1[1, 2].set_xlabel('Inline force [kN/m]')
+axes1[1, 3].set_xlabel('Inline force [kN/m]')
fig1.savefig('forces.png')
# fig1.savefig('forces.webp')
# fig1.show()
-axes2[0,0].set_xlim(XLIMM)
-axes2[0,0].set_ylim([-h,a+1])
-axes2[0,0].set_ylabel('Depth z [m]')
-axes2[1,0].set_ylabel('Depth z [m]')
-axes2[1,0].set_xlabel('Inline moment [kNm/m]')
-axes2[1,1].set_xlabel('Inline moment [kNm/m]')
-axes2[1,2].set_xlabel('Inline moment [kNm/m]')
-axes2[1,3].set_xlabel('Inline moment [kNm/m]')
+axes2[0, 0].set_xlim(XLIMM)
+axes2[0, 0].set_ylim([-h, a + 1])
+axes2[0, 0].set_ylabel('Depth z [m]')
+axes2[1, 0].set_ylabel('Depth z [m]')
+axes2[1, 0].set_xlabel('Inline moment [kNm/m]')
+axes2[1, 1].set_xlabel('Inline moment [kNm/m]')
+axes2[1, 2].set_xlabel('Inline moment [kNm/m]')
+axes2[1, 3].set_xlabel('Inline moment [kNm/m]')
fig2.savefig('moments.png')
# fig2.savefig('moments.webp')
@@ -127,68 +129,70 @@
# --------------------------------------------------------------------------------}
# --- Integrated force and sea bed moment over a period
# --------------------------------------------------------------------------------{
-time = np.linspace(0,60.0,6001)
+time = np.linspace(0, 60.0, 6001)
veta = np.zeros(time.shape)
-vF = np.zeros(time.shape)
-vM = np.zeros(time.shape)
-vF0 = np.zeros(time.shape)
-vM0 = np.zeros(time.shape)
+vF = np.zeros(time.shape) # noqa: N816
+vM = np.zeros(time.shape) # noqa: N816
+vF0 = np.zeros(time.shape) # noqa: N816
+vM0 = np.zeros(time.shape) # noqa: N816
-XLIM =[-75,75] # For inline force
-XLIMM=[-2500,2500] # For inline moment
+XLIM = [-75, 75] # For inline force
+XLIMM = [-2500, 2500] # For inline moment
# a=6 # NOTE: increased amplitude here to see Effect of Wheeler
-elevation = np.zeros((len(time),nz))
-velocity = np.zeros((len(time),nz))
-accel = np.zeros((len(time),nz))
-force = np.zeros((len(time),nz))
+elevation = np.zeros((len(time), nz))
+velocity = np.zeros((len(time), nz))
+accel = np.zeros((len(time), nz))
+force = np.zeros((len(time), nz))
for it, t in enumerate(time):
# Wave kinematics
- veta[it] = elevation2d(a, f, k, eps, t, x = 0)
- z = np.linspace(-h, veta[it], nz)
- u, du = kinematics2d(a, f, k, eps, h, t, z, Wheeler = True, eta = veta[it])
- u0, du0 = kinematics2d(a, f, k, eps, h, t, z)
+ veta[it] = elevation2d(a, f, k, eps, t, x=0) # noqa: F405
+ z = np.linspace(-h, veta[it], nz)
+ u, du = kinematics2d(a, f, k, eps, h, t, z, Wheeler=True, eta=veta[it]) # noqa: F405
+ u0, du0 = kinematics2d(a, f, k, eps, h, t, z) # noqa: F405
# Wave loads with Wheeler
- p_tot = inline_load(u, du, D, CD , CM , rho)
- vF[it] = np.trapz(p_tot , z) # [N]
- vM[it] = np.trapz(p_tot * (z-z_ref), z) # [Nm]
+ p_tot = inline_load(u, du, D, CD, CM, rho) # noqa: F405
+ vF[it] = np.trapz(p_tot, z) # [N] # noqa: NPY201
+ vM[it] = np.trapz(p_tot * (z - z_ref), z) # [Nm] # noqa: NPY201
# Wave loads without Wheeler
- p_tot0 = inline_load(u0, du0, D, CD, CM, rho)
- vF0[it] = np.trapz(p_tot0 , z) # [N]
- vM0[it] = np.trapz(p_tot0 * (z-z_ref), z) # [Nm]
-
- elevation[it,:] = z.copy()
- velocity[it,:] = u.copy()
- accel[it,:] = du.copy()
- force[it,:] = p_tot.copy()
+ p_tot0 = inline_load(u0, du0, D, CD, CM, rho) # noqa: F405
+ vF0[it] = np.trapz(p_tot0, z) # [N] # noqa: NPY201
+ vM0[it] = np.trapz(p_tot0 * (z - z_ref), z) # [Nm] # noqa: NPY201
+
+ elevation[it, :] = z.copy()
+ velocity[it, :] = u.copy()
+ accel[it, :] = du.copy()
+ force[it, :] = p_tot.copy()
# Plot
-fig, axes = plt.subplots(3, 1, sharex=True, figsize=(6.4,4.8)) # (6.4,4.8)
-fig.subplots_adjust(left=0.12, right=0.95, top=0.95, bottom=0.11, hspace=0.14, wspace=0.20)
+fig, axes = plt.subplots(3, 1, sharex=True, figsize=(6.4, 4.8)) # (6.4,4.8)
+fig.subplots_adjust(
+ left=0.12, right=0.95, top=0.95, bottom=0.11, hspace=0.14, wspace=0.20
+)
# axes[0] = axes[0]
-axes[0].plot(time/T, veta,'k-')
+axes[0].plot(time / T, veta, 'k-')
axes[0].set_ylabel('Elevation [m]')
-axes[0].grid(True)
+axes[0].grid(True) # noqa: FBT003
# axes[1] = axes[1]
-axes[1].plot(time/T, vF0/1e6 , label='Standard')
-axes[1].plot(time/T, vF /1e6,'k-', label='Wheeler Correction')
+axes[1].plot(time / T, vF0 / 1e6, label='Standard')
+axes[1].plot(time / T, vF / 1e6, 'k-', label='Wheeler Correction')
axes[1].set_ylabel('Streamwise Load, Cumulative [MN]')
axes[1].legend()
-axes[1].grid(True)
+axes[1].grid(True) # noqa: FBT003
# axes[2] = axes[2]
-axes[2].plot(time/T, vM0/1e6 , label='Standard')
-axes[2].plot(time/T, vM /1e6,'k-', label='Wheeler Correction')
+axes[2].plot(time / T, vM0 / 1e6, label='Standard')
+axes[2].plot(time / T, vM / 1e6, 'k-', label='Wheeler Correction')
axes[2].set_ylabel('Sea-Bed Moment [MNm]')
axes[2].set_xlabel('Dimensionless Time, t/T [-]')
axes[2].legend()
-axes[2].grid(True)
+axes[2].grid(True) # noqa: FBT003
# fig.savefig('IntegratedPileLoads.png')
# fig.savefig('IntegratedPileLoads.webp')
fig.savefig('IntegratedPileLoads.png')
-#fig.show()
+# fig.show()
# now save csv of the velocity, acceleration, force, and moment
veta_df = pd.DataFrame()
@@ -196,70 +200,103 @@
du_df = pd.DataFrame()
for i in range(nz):
dof = 1
- name = 'Disp_' + str(i+1) + '_' + str(dof)
- veta_df[name] = elevation[:,i]
- name = 'Vel_' + str(i+1) + '_' + str(dof)
- u_df[name] = velocity[:,i]
- name = 'RMSA_' + str(i+1) + '_' + str(dof)
- du_df[name] = accel[:,i]
-
+ name = 'Disp_' + str(i + 1) + '_' + str(dof)
+ veta_df[name] = elevation[:, i]
+ name = 'Vel_' + str(i + 1) + '_' + str(dof)
+ u_df[name] = velocity[:, i]
+ name = 'RMSA_' + str(i + 1) + '_' + str(dof)
+ du_df[name] = accel[:, i]
+
# transpose the dataframe so one recorder occupies a row, not a column (which are timesteps)
# veta_df = veta_df.T
# u_df = u_df.T
# du_df = du_df.T
-
+
# add column per each force recorder
result_df = pd.DataFrame()
for i in range(nz):
dof = 1
# name = 'Node_' + str(i+1) + '_' + str(dof)
- name = 'Force_' + str(i+1) + '_' + str(dof)
- result_df[name] = force[:,i]
+ name = 'Force_' + str(i + 1) + '_' + str(dof)
+ result_df[name] = force[:, i]
# transpose the dataframe
# result_df = result_df.T
-
+
# make sure there are no headers or indices
-
+
# write columns to columns in csv files
(veta_df.T).to_csv('disp.evt', sep=' ', encoding='utf-8', index=False, header=False)
(u_df.T).to_csv('vel.evt', sep=' ', encoding='utf-8', index=False, header=False)
(du_df.T).to_csv('accel.evt', sep=' ', encoding='utf-8', index=False, header=False)
-(result_df.T).to_csv('forces.evt', sep=' ', encoding='utf-8', index=False, header=False)
+(result_df.T).to_csv(
+ 'forces.evt', sep=' ', encoding='utf-8', index=False, header=False
+)
# write columns to columns in csv files
(veta_df.T).to_csv('disp.out', sep=' ', encoding='utf-8', index=False, header=False)
(u_df.T).to_csv('vel.out', sep=' ', encoding='utf-8', index=False, header=False)
(du_df.T).to_csv('accel.out', sep=' ', encoding='utf-8', index=False, header=False)
-(result_df.T).to_csv('forces.out', sep=' ', encoding='utf-8', index=False, header=False)
-(result_df.T).to_csv('node.out', sep=' ', encoding='utf-8', index=False, header=False)
+(result_df.T).to_csv(
+ 'forces.out', sep=' ', encoding='utf-8', index=False, header=False
+)
+(result_df.T).to_csv(
+ 'node.out', sep=' ', encoding='utf-8', index=False, header=False
+)
# make results.out dataframe with 3 columns and one row, no header. Each element is separated by a space
# results_df = pd.DataFrame({'total_impulse':vF[-1], 'max_force':vM[-1], 'total_disp':vF0[-1]}, index=[0])
# results_df.to_csv('results.out', sep=' ', encoding='utf-8', header=False, index=False)
-
-def main(df=None):
+def main(df=None): # noqa: D103
return df
-if __name__ == '__main__':
-
- parser = argparse.ArgumentParser(description='Compute inline/total hydrodynamic force and moments on a monopile using Morisons equation')
- parser.add_argument('-hw', '--water_depth', type=float, default=30.0, help='Water depth [m]')
- parser.add_argument('-Tp', '--peak_period', type=float, default=12.7, help='Wave period [s]')
- parser.add_argument('-Hs', '--significant_wave_height', type=float, default=5.0, help='Significant wave height [m]')
- parser.add_argument('-Dp', '--pile_diameter', type=float, default=1.0, help='Monopile diameter [m]')
- parser.add_argument('-Cd', '--drag_coefficient', type=float, default=2.1, help='Drag coefficient')
- parser.add_argument('-Cm', '--mass_coefficient', type=float, default=2.0, help='Mass coefficient')
- parser.add_argument('-nz', '--number_of_recorders_z', type=int, default=4, help='Number of points used in the z direction to compute loads')
+if __name__ == '__main__':
+ parser = argparse.ArgumentParser(
+ description='Compute inline/total hydrodynamic force and moments on a monopile using Morisons equation'
+ )
+
+ parser.add_argument(
+ '-hw', '--water_depth', type=float, default=30.0, help='Water depth [m]'
+ )
+ parser.add_argument(
+ '-Tp', '--peak_period', type=float, default=12.7, help='Wave period [s]'
+ )
+ parser.add_argument(
+ '-Hs',
+ '--significant_wave_height',
+ type=float,
+ default=5.0,
+ help='Significant wave height [m]',
+ )
+ parser.add_argument(
+ '-Dp',
+ '--pile_diameter',
+ type=float,
+ default=1.0,
+ help='Monopile diameter [m]',
+ )
+ parser.add_argument(
+ '-Cd', '--drag_coefficient', type=float, default=2.1, help='Drag coefficient'
+ )
+ parser.add_argument(
+ '-Cm', '--mass_coefficient', type=float, default=2.0, help='Mass coefficient'
+ )
+ parser.add_argument(
+ '-nz',
+ '--number_of_recorders_z',
+ type=int,
+ default=4,
+ help='Number of points used in the z direction to compute loads',
+ )
parser.add_argument('-t', '--time', type=float, default=1.0, help='Time [s]')
-
+
arguments, unknowns = parser.parse_known_args()
-
+
# hw = arguments.water_depth
# Tp = arguments.peak_period
# Hs = arguments.significant_wave_height
@@ -268,7 +305,7 @@ def main(df=None):
# # CM = arguments.mass_coefficient
# nz = arguments.number_of_recorders_z
# t = arguments.time
-
+
# # --- Derived parameters
# h = hw
# # T = Tp
@@ -279,27 +316,23 @@ def main(df=None):
# z_ref = -h # reference point for moment calculation
# eps = 0 # phase shift [rad]
# rho = 1000 # water density
-
+
# --- Wave kinematics
-
-
+
# fig.show()
-
+
# --------------------------------------------------------------------------------}
-
-
# plt.suptitle('Hydro - Morison loads on monopile')
# plt.savefig('MorisonLoads.png')
# plt.show()
- print('End of __main__ in Ex4_WaveLoads.py')
+ print('End of __main__ in Ex4_WaveLoads.py') # noqa: T201
main()
-
-if __name__=="__test__":
+if __name__ == '__test__':
pass
if __name__ == '__export__':
@@ -307,5 +340,5 @@ def main(df=None):
# plt.close(fig2)
# plt.close(fig)
from welib.tools.repo import export_figs_callback
- export_figs_callback(__file__)
+ export_figs_callback(__file__)
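
Note on inline_load(u, du, D, CD, CM, rho) used throughout this file: Morison's equation gives the inline force per unit length as f = rho*CM*(pi*D^2/4)*du + 0.5*rho*CD*D*u*|u|, which the script then integrates over depth with np.trapz to obtain the total force and sea-bed moment. A stand-in sketch of that formula, assuming plain NumPy (the project's implementation remains in welib.hydro.morison):

    import numpy as np

    def inline_load_sketch(u, du, D, CD, CM, rho=1000.0):
        # inertia term + drag term, force per unit length [N/m]
        inertia = rho * CM * np.pi * D**2 / 4.0 * du
        drag = 0.5 * rho * CD * D * u * np.abs(u)
        return inertia + drag
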
diff --git a/modules/createEVENT/stochasticWave/StochasticWave.py b/modules/createEVENT/stochasticWave/StochasticWave.py
index cde9264b9..4c22a4d03 100644
--- a/modules/createEVENT/stochasticWave/StochasticWave.py
+++ b/modules/createEVENT/stochasticWave/StochasticWave.py
@@ -1,32 +1,22 @@
-#!/usr/bin/env python3
+#!/usr/bin/env python3 # noqa: CPY001, D100, EXE001
-from __future__ import print_function
-import os, sys
-import re
-import json
import argparse
+import json
+import re
-from fractions import Fraction
-import numpy as np
-import pandas as pd
-import matplotlib.pyplot as plt
-import matplotlib as mpl
-
-from welib.tools.figure import defaultRC; defaultRC();
-from welib.tools.colors import python_colors
-from welib.hydro.wavekin import *
-from welib.hydro.morison import *
+from welib.tools.figure import defaultRC
+defaultRC()
+from welib.hydro.morison import * # noqa: E402, F403
+from welib.hydro.wavekin import * # noqa: E402, F403
-import Ex1_WaveKinematics
-import Ex2_Jonswap_spectrum
-import Ex3_WaveTimeSeries
-import Ex4_WaveLoads
-class FloorForces:
- def __init__(self, recorderID=-1):
+class FloorForces: # noqa: D101
+ def __init__(self, recorderID=-1): # noqa: N803
if recorderID < 0:
- print("No recorder ID, or a negative ID, provided, defaulting to 0 for all forces.")
+ print( # noqa: T201
+ 'No recorder ID, or a negative ID, provided, defaulting to 0 for all forces.'
+ )
self.X = [0.0]
self.Y = [0.0]
self.Z = [0.0]
@@ -35,28 +25,28 @@ def __init__(self, recorderID=-1):
self.Y = []
self.Z = []
# prepend zeros to the list to account for the timeSeries transient analysis req in OpenSees
- prependZero = False
+ prependZero = False # noqa: N806
if prependZero:
- self.X.append(float(0.0))
- self.Y.append(float(0.0))
- self.Z.append(float(0.0))
-
+ self.X.append(0.0)
+ self.Y.append(0.0)
+ self.Z.append(0.0)
+
# Read in forces.[out or evt] file and add to EVENT.json
- # now using intermediary forces.evt for output of preceeding Python calcs,
+ # now using intermediary forces.evt for output of preceding Python calcs,
# prevents confusion with forces.out made by FEM tab
- with open("forces.evt", "r") as file:
- print("Reading forces from forces.evt to EVENT.json")
+ with open('forces.evt') as file: # noqa: PLW1514, PTH123
+ print('Reading forces from forces.evt to EVENT.json') # noqa: T201
lines = file.readlines()
j = 0
for line in lines:
# Ensure not empty line
strip_line = line.strip()
if not strip_line:
- print('Empty line found in forces.evt... skip')
+ print('Empty line found in forces.evt... skip') # noqa: T201
continue
# Assume there is no header in the file
# Assume recorder IDs are sequential, starting from 1
- if ((j+1) == recorderID):
+ if (j + 1) == recorderID:
# Strip away leading / trailing white-space,
# Delimit by regex to capture " ", \s, " ", tabs, etc.
# Each value should be a number, rep. the force on recorder j at a time-step i
@@ -67,142 +57,145 @@ def __init__(self, recorderID=-1):
for k in range(len(clean_line)):
self.X.append(float(clean_line[k]))
- self.Y.append(float(0.0))
- self.Z.append(float(0.0))
- j = j + 1
-
+ self.Y.append(0.0)
+ self.Z.append(0.0)
+ j = j + 1 # noqa: PLR6104
+
# must not have empty lists for max and min
if len(self.X) == 0:
- print("No forces found in the file for recorder ", recorderID, ", defaulting to 0.0 for all forces.")
+ print( # noqa: T201
+ 'No forces found in the file for recorder ',
+ recorderID,
+ ', defaulting to 0.0 for all forces.',
+ )
self.X = [0.0]
self.Y = [0.0]
self.Z = [0.0]
else:
# for a timeSeries with N elements, we append an element at N+1 to represent the max force of the series
- self.X.append( max(self.X) )
- self.Y.append( max(self.Y) )
- self.Z.append( max(self.Z) )
-
- print("Length: " , len(self.X), ", Max force: ", max(self.X), max(self.Y), max(self.Z), ", Min force: ", min(self.X), min(self.Y), min(self.Z), ", Last force: ", self.X[-1], self.Y[-1], self.Z[-1])
+ self.X.append(max(self.X))
+ self.Y.append(max(self.Y))
+ self.Z.append(max(self.Z))
+
+ print( # noqa: T201
+ 'Length: ',
+ len(self.X),
+ ', Max force: ',
+ max(self.X),
+ max(self.Y),
+ max(self.Z),
+ ', Min force: ',
+ min(self.X),
+ min(self.Y),
+ min(self.Z),
+ ', Last force: ',
+ self.X[-1],
+ self.Y[-1],
+ self.Z[-1],
+ )
file.close()
-def directionToDof(direction):
- """
- Converts direction to degree of freedom
- """
- directioMap = {
- "X": 1,
- "Y": 2,
- "Z": 3
- }
+
+def directionToDof(direction): # noqa: N802
+ """Converts direction to degree of freedom""" # noqa: D400, D401
+ directioMap = {'X': 1, 'Y': 2, 'Z': 3} # noqa: N806
return directioMap[direction]
-def addFloorForceToEvent(patternsList, timeSeriesList, force, direction, floor):
- """
- Add force (one component) time series and pattern in the event file
+def addFloorForceToEvent(patternsList, timeSeriesList, force, direction, floor): # noqa: N802, N803
+ """Add force (one component) time series and pattern in the event file
Use of Wind is just a placeholder for now, since it's more developed than Hydro
- """
-
- seriesName = "1"
- patternName = "1"
- seriesName = "WindForceSeries_" + str(floor) + direction
- patternName = "WindForcePattern_" + str(floor) + direction
-
+ """ # noqa: D205, D400
+ seriesName = '1' # noqa: N806
+ patternName = '1' # noqa: N806
+ seriesName = 'WindForceSeries_' + str(floor) + direction # noqa: N806
+ patternName = 'WindForcePattern_' + str(floor) + direction # noqa: N806
+
pattern = {
- "name": patternName,
- "timeSeries": seriesName,
- "numSteps": len(force.X),
- "dT": 0.01,
- "type": "WindFloorLoad",
- "floor": str(floor),
- "story": str(floor),
- "dof": directionToDof(direction),
- "units": {
- "force": "Newton",
- "length": "Meter",
- "time": "Sec"
- }
+ 'name': patternName,
+ 'timeSeries': seriesName,
+ 'numSteps': len(force.X),
+ 'dT': 0.01,
+ 'type': 'WindFloorLoad',
+ 'floor': str(floor),
+ 'story': str(floor),
+ 'dof': directionToDof(direction),
+ 'units': {'force': 'Newton', 'length': 'Meter', 'time': 'Sec'},
}
- sensorData = {
- "name": seriesName,
- "pattern": patternName,
- "type": "Value",
- "dof": directionToDof(direction),
- "floor": str(floor),
- "story": str(floor),
- "dT": 0.01,
- "dt": 0.01,
- "numSteps": len(force.X),
- "data": force.X
+ sensorData = { # noqa: N806
+ 'name': seriesName,
+ 'pattern': patternName,
+ 'type': 'Value',
+ 'dof': directionToDof(direction),
+ 'floor': str(floor),
+ 'story': str(floor),
+ 'dT': 0.01,
+ 'dt': 0.01,
+ 'numSteps': len(force.X),
+ 'data': force.X,
}
-
+
patternsList.append(pattern)
timeSeriesList.append(sensorData)
-
-def writeEVENT(forces, eventFilePath="EVENT.json", floorsCount=1):
- """
- This method writes the EVENT.json file
- """
- #Adding floor forces
- patternsArray = []
- timeSeriesArray = []
+def writeEVENT(forces, eventFilePath='EVENT.json', floorsCount=1): # noqa: N802, N803
+ """This method writes the EVENT.json file""" # noqa: D400, D401, D404
+ # Adding floor forces
+ patternsArray = [] # noqa: N806
+ timeSeriesArray = [] # noqa: N806
# timeSeriesType = "Value" # ? saw in old evt files
-
+
# pressure = [{"pressure": [0.0, 0.0], "story": 1}]
pressure = []
for it in range(floorsCount):
- floorForces = forces[it]
- addFloorForceToEvent(patternsArray, timeSeriesArray, floorForces, "X", it+1)
+ floorForces = forces[it] # noqa: N806
+ addFloorForceToEvent(
+ patternsArray, timeSeriesArray, floorForces, 'X', it + 1
+ )
# subtype = "StochasticWindModel-KwonKareem2006"
- eventClassification = "Hydro"
- eventType = "StochasticWave"
- eventSubtype = "StochasticWaveJonswap"
+ eventClassification = 'Hydro' # noqa: N806
+ eventType = 'StochasticWave' # noqa: N806
+ eventSubtype = 'StochasticWaveJonswap' # noqa: N806, F841
# subtype = "StochasticWaveJonswap" # ?
# timeSeriesName = "HydroForceSeries_1X"
# patternName = "HydroForcePattern_1X"
-
- hydroEventJson = {
- "type" : eventClassification,
- "subtype": eventType,
- "eventClassification": eventClassification,
- "pattern": patternsArray,
- "timeSeries": timeSeriesArray,
- "pressure": pressure,
- "numSteps": len(forces[0].X),
- "dT": 0.01,
- "dt": 0.01,
- "units": {
- "force": "Newton",
- "length": "Meter",
- "time": "Sec"
- }
- }
- #Creating the event dictionary that will be used to export the EVENT json file
- eventDict = {"randomVariables":[], "Events": [hydroEventJson]}
+ hydroEventJson = { # noqa: N806
+ 'type': eventClassification,
+ 'subtype': eventType,
+ 'eventClassification': eventClassification,
+ 'pattern': patternsArray,
+ 'timeSeries': timeSeriesArray,
+ 'pressure': pressure,
+ 'numSteps': len(forces[0].X),
+ 'dT': 0.01,
+ 'dt': 0.01,
+ 'units': {'force': 'Newton', 'length': 'Meter', 'time': 'Sec'},
+ }
+ # Creating the event dictionary that will be used to export the EVENT json file
+ eventDict = {'randomVariables': [], 'Events': [hydroEventJson]} # noqa: N806
- filePath = eventFilePath
- with open(filePath, "w", encoding='utf-8') as file:
+ filePath = eventFilePath # noqa: N806
+ with open(filePath, 'w', encoding='utf-8') as file: # noqa: PTH123
json.dump(eventDict, file)
file.close()
-
-
-def GetFloorsCount(BIMFilePath):
- filePath = BIMFilePath
- with open(filePath,'r', encoding='utf-8') as file:
- bim = json.load(file)
- file.close
-
- return int(bim["GeneralInformation"]["stories"])
-
-def main():
+
+
+def GetFloorsCount(BIMFilePath): # noqa: N802, N803, D103
+ filePath = BIMFilePath # noqa: N806
+ with open(filePath, encoding='utf-8') as file: # noqa: PTH123
+ bim = json.load(file)
+ file.close # noqa: B018
+
+ return int(bim['GeneralInformation']['stories'])
+
+
+def main(): # noqa: D103
return 0
# """
# Entry point to generate event file using Stochastic Waves
@@ -221,12 +214,10 @@ def main():
# exec(open("Ex3_WaveTimeSeries.py").read())
# # exec(open("Ex4_WaveLoads.py").read())
-
# # Run Ex4_WaveLoads.py with the given parameters
# # result = Ex4_WaveLoads.main(arguments.water_depth, arguments.peak_period, arguments.significant_wave_height, arguments.pile_diameter, arguments.drag_coefficient, arguments.mass_coefficient, arguments.number_of_recorders_z, arguments.time)
# import subprocess
- # result = subprocess.run(["python", "Ex4_WaveLoads.py", "-hw", 30.0, "-Tp", 12.7, "-Hs", 5.0, "-Dp", 1.0, "-Cd", 2.1, "-Cm", 2.1, "-nz", GetFloorsCount(arguments.filenameAIM), "-t", 10.0], stdout=subprocess.PIPE)
-
+ # result = subprocess.run(["python", "Ex4_WaveLoads.py", "-hw", 30.0, "-Tp", 12.7, "-Hs", 5.0, "-Dp", 1.0, "-Cd", 2.1, "-Cm", 2.1, "-nz", GetFloorsCount(arguments.filenameAIM), "-t", 10.0], stdout=subprocess.PIPE)
# if arguments.getRV == True:
# #Read the number of floors
@@ -237,63 +228,69 @@ def main():
# #write the event file
# writeEVENT(forces, arguments.filenameEVENT)
-
-
-if __name__ == "__main__":
+
+
+if __name__ == '__main__':
"""
Entry point to generate event file using Stochastic Waves
"""
- #CLI parser
- parser = argparse.ArgumentParser(description="Get sample EVENT file produced by StochasticWave")
- parser.add_argument('-b', '--filenameAIM', help="BIM File", required=True, default="AIM.json")
- parser.add_argument('-e', '--filenameEVENT', help= "Event File", required=True, default="EVENT.json")
- parser.add_argument('--getRV', help= "getRV", required=False, action='store_true')
+ # CLI parser
+ parser = argparse.ArgumentParser(
+ description='Get sample EVENT file produced by StochasticWave'
+ )
+ parser.add_argument(
+ '-b', '--filenameAIM', help='BIM File', required=True, default='AIM.json'
+ )
+ parser.add_argument(
+ '-e',
+ '--filenameEVENT',
+ help='Event File',
+ required=True,
+ default='EVENT.json',
+ )
+ parser.add_argument('--getRV', help='getRV', required=False, action='store_true')
# parser.add_argument('--filenameSAM', default=None)
- #parsing arguments
+ # parsing arguments
arguments, unknowns = parser.parse_known_args()
# Run Ex4_WaveLoads.py with the given parameters
# result = Ex4_WaveLoads.main(arguments.water_depth, arguments.peak_period, arguments.significant_wave_height, arguments.pile_diameter, arguments.drag_coefficient, arguments.mass_coefficient, arguments.number_of_recorders_z, arguments.time)
# import subprocess
- # result = subprocess.run(["python", f"{os.path.realpath(os.path.dirname(__file__))}"+"/Ex4_WaveLoads.py", "-hw", 30.0, "-Tp", 12.7, "-Hs", 5.0, "-Dp", 1.0, "-Cd", 2.1, "-Cm", 2.1, "-nz", floorsCount, "-t", 10.0], stdout=subprocess.PIPE)
+ # result = subprocess.run(["python", f"{os.path.realpath(os.path.dirname(__file__))}"+"/Ex4_WaveLoads.py", "-hw", 30.0, "-Tp", 12.7, "-Hs", 5.0, "-Dp", 1.0, "-Cd", 2.1, "-Cm", 2.1, "-nz", floorsCount, "-t", 10.0], stdout=subprocess.PIPE)
+ if arguments.getRV == True: # noqa: E712
+ print('RVs requested in StochasticWave.py') # noqa: T201
+ # Read the number of floors
+ floorsCount = GetFloorsCount(arguments.filenameAIM) # noqa: N816
+ filenameEVENT = arguments.filenameEVENT # noqa: N816
- if arguments.getRV == True:
- print("RVs requested in StochasticWave.py")
- #Read the number of floors
- floorsCount = GetFloorsCount(arguments.filenameAIM)
- filenameEVENT = arguments.filenameEVENT
-
# exec(open(f"{os.path.realpath(os.path.dirname(__file__))}"+"/Ex1_WaveKinematics.py").read())
# exec(open(f"{os.path.realpath(os.path.dirname(__file__))}"+"/Ex2_Jonswap_spectrum.py").read())
# exec(open(f"{os.path.realpath(os.path.dirname(__file__))}"+"/Ex3_WaveTimeSeries.py").read())
# exec(open(f"{os.path.realpath(os.path.dirname(__file__))}"+"/Ex4_WaveLoads.py").read())
-
+
forces = []
for i in range(floorsCount):
- forces.append(FloorForces(recorderID=(i+1)))
+ forces.append(FloorForces(recorderID=(i + 1))) # noqa: PERF401
- #write the event file
+ # write the event file
writeEVENT(forces, filenameEVENT, floorsCount)
-
+
else:
- print("No RVs requested in StochasticWave.py")
- filenameEVENT = arguments.filenameEVENT
+ print('No RVs requested in StochasticWave.py') # noqa: T201
+ filenameEVENT = arguments.filenameEVENT # noqa: N816
# exec(open(f"{os.path.realpath(os.path.dirname(__file__))}"+"/Ex1_WaveKinematics.py").read())
# exec(open(f"{os.path.realpath(os.path.dirname(__file__))}"+"/Ex2_Jonswap_spectrum.py").read())
# exec(open(f"{os.path.realpath(os.path.dirname(__file__))}"+"/Ex3_WaveTimeSeries.py").read())
# exec(open(f"{os.path.realpath(os.path.dirname(__file__))}"+"/Ex4_WaveLoads.py").read())
-
+
forces = []
- floorsCount = 1
+ floorsCount = 1 # noqa: N816
for i in range(floorsCount):
- forces.append(FloorForces(recorderID=(i+1)))
-
- #write the event file
+ forces.append(FloorForces(recorderID=(i + 1)))
+
+ # write the event file
writeEVENT(forces, filenameEVENT, floorsCount=floorsCount)
# writeEVENT(forces, arguments.filenameEVENT)
-
-
-
diff --git a/modules/createEVENT/stochasticWind/command_parser.cpp b/modules/createEVENT/stochasticWind/command_parser.cpp
index 1a2da3a09..53eaac02e 100644
--- a/modules/createEVENT/stochasticWind/command_parser.cpp
+++ b/modules/createEVENT/stochasticWind/command_parser.cpp
@@ -28,13 +28,13 @@ CommandParser::CommandParser(int& number_of_arguments, char* arguments[]) {
auto result = command_parser_.parse(clara::detail::Args(number_of_arguments, arguments));
- // Check whether command line was succussfully parsed
+ // Check whether command line was successfully parsed
if (!result) {
std::cerr << "ERROR: In command line inputs: " << result.errorMessage() << std::endl;
throw std::invalid_argument("ERROR: In CommandParser::CommandParser: Command line inputs error");
}
- // If help flag passed, pring usage and exit
+ // If help flag passed, print usage and exit
if (configuration_.help) {
std::cout << command_parser_ << std::endl;
}
diff --git a/modules/createEVENT/stochasticWind/command_parser.h b/modules/createEVENT/stochasticWind/command_parser.h
index 36acd85d0..54f9ef84c 100644
--- a/modules/createEVENT/stochasticWind/command_parser.h
+++ b/modules/createEVENT/stochasticWind/command_parser.h
@@ -12,7 +12,7 @@
class CommandParser {
public:
/**
- * @constructor Default contstructor
+ * @constructor Default constructor
*/
CommandParser() = default;
diff --git a/modules/createEVENT/uniformPEER/gridGroundMoion.py b/modules/createEVENT/uniformPEER/gridGroundMoion.py
index d3257dab4..df6e5e497 100644
--- a/modules/createEVENT/uniformPEER/gridGroundMoion.py
+++ b/modules/createEVENT/uniformPEER/gridGroundMoion.py
@@ -1,5 +1,4 @@
-# -*- coding: utf-8 -*-
-#
+# # noqa: INP001, D100
# Copyright (c) 2021 Leland Stanford Junior University
# Copyright (c) 2021 The Regents of the University of California
#
@@ -44,40 +43,37 @@
#
-#TODO recommended ranges???
+# TODO recommended ranges??? # noqa: TD002, TD004
+# import matplotlib.pyplot as plt
+# import matplotlib.ticker as mticker
+# from matplotlib.colors import LinearSegmentedColormap
import json
-import sys
-import numpy as np
-from scipy.stats import qmc
-from scipy.stats import gmean
-from scipy.spatial import distance_matrix
import os
+import sys
-#import matplotlib.pyplot as plt
-#import matplotlib.ticker as mticker
-#from matplotlib.colors import LinearSegmentedColormap
-import itertools
-import pandas as pd # later clear packages
+import numpy as np
import plotly.express as px
+from scipy.spatial import distance_matrix
+from scipy.stats import gmean, qmc
-def main(inputArgs,err):
- gms = gmCluster(inputArgs,err)
+def main(inputArgs, err): # noqa: N803, D103
+ gms = gmCluster(inputArgs, err) # noqa: F841
-class gmCluster():
- def __init__(self, inputArgs,err):
+class gmCluster: # noqa: D101
+ def __init__(self, inputArgs, err): # noqa: ARG002, C901, N803, PLR0912, PLR0914, PLR0915
np.random.seed(seed=42)
- curDir = os.path.dirname(__file__)
- gmDataBaseDir = os.path.join(curDir,"gmdata.json")
- inputJsonPath = inputArgs[1]
+ curDir = os.path.dirname(__file__) # noqa: PTH120, N806
+ gmDataBaseDir = os.path.join(curDir, 'gmdata.json') # noqa: PTH118, N806
+ inputJsonPath = inputArgs[1] # noqa: N806
- with open(inputJsonPath,'r') as fj:
- inputJson = json.load(fj)
+ with open(inputJsonPath) as fj: # noqa: PLW1514, PTH123
+ inputJson = json.load(fj) # noqa: N806
- nim = len(inputJson["IM"])
+ nim = len(inputJson['IM'])
im_ub = np.zeros((nim,))
im_lb = np.zeros((nim,))
@@ -85,149 +81,191 @@ def __init__(self, inputArgs,err):
im_names = []
im_periods = []
i = 0
- for imName, value in inputJson["IM"].items():
+ for imName, value in inputJson['IM'].items(): # noqa: N806
im_names += [imName]
- im_ub[i] = float(value["upperBound"])
- im_lb[i] = float(value["lowerBound"])
- im_nbins[i] = int(value["numBins"])
- im_periods += [value["Periods"]]
-
- if not (im_ub[i]>im_lb[i]):
- msg = "error parsing IMs: lowerbound of " + imName + " should be smaller than upperbound"
- print(msg)
- print(im_ub[i])
- print(im_lb[i])
+ im_ub[i] = float(value['upperBound'])
+ im_lb[i] = float(value['lowerBound'])
+ im_nbins[i] = int(value['numBins'])
+ im_periods += [value['Periods']]
+
+ if not (im_ub[i] > im_lb[i]):
+ msg = (
+ 'error parsing IMs: lowerbound of '
+ + imName
+ + ' should be smaller than upperbound'
+ )
+ print(msg) # noqa: T201
+ print(im_ub[i]) # noqa: T201
+ print(im_lb[i]) # noqa: T201
errf.write(msg)
errf.close()
- exit(-1)
-
- i +=1
+ exit(-1) # noqa: PLR1722
+
+ i += 1 # noqa: SIM113
- npergrid = int(inputJson["numSampPerBin"])
+ npergrid = int(inputJson['numSampPerBin'])
- # TODO: Convert the units... Or fix the units......
+ # TODO: Convert the units... Or fix the units...... # noqa: TD002
- #nim = len(im_names)
+ # nim = len(im_names)
ngrid = np.prod(im_nbins)
#
# Clustring parameters
#
- numEQmax = int(max(1,round(ngrid/10))) # Maximum number of records from the single earthquake
- #numEQmax = 1
+ numEQmax = int( # noqa: N806
+ max(1, round(ngrid / 10))
+ ) # Maximum number of records from the single earthquake
+ # numEQmax = 1
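
A tiny sketch of the bookkeeping this cap feeds into, keeping any single earthquake from dominating the selection (the grid size, IDs, and the comparison against the running count are illustrative, not lifted from the patch):

import numpy as np

ngrid = 36                                      # illustrative grid size
numEQmax = int(max(1, round(ngrid / 10)))       # cap on records per earthquake
selected_gm_eqID = [3, 3, 7]                    # earthquakes already selected
candidate_eqID = 3
still_allowed = np.sum(candidate_eqID == np.array(selected_gm_eqID)) < numEQmax
print(numEQmax, still_allowed)                  # 4 True -> record may still be added
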
#
# Get grid of IMs - change everything to log-space
#
log_im_ub = np.log(im_ub)
log_im_lb = np.log(im_lb)
- log_im_range = log_im_ub-log_im_lb
+ log_im_range = log_im_ub - log_im_lb
# X is log-IM
id_im_scaling_ancher = -1
found_scaling_anchor = False
nim_eff = nim
- ## For the scaling anchor, we prioritize PSA and PGA
+ # For the scaling anchor, we prioritize PSA and PGA
for ni in range(len(im_names)):
- if (im_names[ni].startswith("PSA") or im_names[ni].startswith("PGA")):
+ if im_names[ni].startswith('PSA') or im_names[ni].startswith('PGA'):
# scaling anchor
if not found_scaling_anchor:
- id_im_scaling_ancher = ni # TODO
+ id_im_scaling_ancher = ni # TODO # noqa: TD002, TD004
found_scaling_anchor = True
- nim_eff = nim-1
+ nim_eff = nim - 1
- ## Only if we didn't find PSA or PGA, we consider PGV, PGD, Ia as scaling anchor
+ # Only if we didn't find PSA or PGA, we consider PGV, PGD, Ia as scaling anchor
if not found_scaling_anchor:
for ni in range(len(im_names)):
- if (im_names[ni].startswith("PG") or im_names[ni].startswith("Ia")):
+ if im_names[ni].startswith('PG') or im_names[ni].startswith('Ia'):
if not found_scaling_anchor:
- id_im_scaling_ancher = ni # TODO
+ id_im_scaling_ancher = ni # TODO # noqa: TD002, TD004
found_scaling_anchor = True
nim_eff = nim - 1
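
The two passes above amount to a simple priority scan for the scaling anchor; a compact sketch of the same idea, with made-up IM names:

im_names = ['SaRatio', 'PGV', 'PSA1.0']          # illustrative IM list
anchor = next(
    (i for i, name in enumerate(im_names) if name.startswith(('PSA', 'PGA'))),
    None,
)
if anchor is None:                               # fall back to PGV/PGD/Ia
    anchor = next(
        (i for i, name in enumerate(im_names) if name.startswith(('PG', 'Ia'))),
        None,
    )
print(anchor)                                    # 2 -> the PSA entry is the anchor
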
- if nim<=0:
+ if nim <= 0:
# ERROR
- msg = "number of IMs should be greater than 1"
- print(msg)
+ msg = 'number of IMs should be greater than 1'
+ print(msg) # noqa: T201
errf.write(msg)
errf.close()
- exit(-1)
+ exit(-1) # noqa: PLR1722
- elif nim_eff ==0:
+ elif nim_eff == 0:
# One variable we have is the scaling anchor
- myID = [1]
- Scaling_ref = np.linspace(log_im_lb[0], log_im_ub[0], int(im_nbins[0]))
- IM_log_ref = np.zeros(0); # dummy
- isGrid = True
+ myID = [1] # noqa: N806
+ Scaling_ref = np.linspace(log_im_lb[0], log_im_ub[0], int(im_nbins[0])) # noqa: N806
+ IM_log_ref = np.zeros(0) # dummy # noqa: N806
+ isGrid = True # noqa: N806
- elif nim_eff ==1:
+ elif nim_eff == 1:
if found_scaling_anchor:
if found_scaling_anchor:
- myID = np.delete([0, 1], id_im_scaling_ancher)
- Scaling_ref = np.linspace(log_im_lb[id_im_scaling_ancher], log_im_ub[id_im_scaling_ancher],
- int(im_nbins[id_im_scaling_ancher]))
+ myID = np.delete([0, 1], id_im_scaling_ancher) # noqa: N806
+ Scaling_ref = np.linspace( # noqa: N806
+ log_im_lb[id_im_scaling_ancher],
+ log_im_ub[id_im_scaling_ancher],
+ int(im_nbins[id_im_scaling_ancher]),
+ )
else:
- myID = [0]
- X = np.linspace(log_im_lb[myID[0]], log_im_ub[myID[0]], int(im_nbins[myID[0]]))
- IM_log_ref = X[np.newaxis].T
- isGrid = True
-
- elif nim_eff ==2:
+ myID = [0] # noqa: N806
+ X = np.linspace( # noqa: N806
+ log_im_lb[myID[0]], log_im_ub[myID[0]], int(im_nbins[myID[0]])
+ )
+ IM_log_ref = X[np.newaxis].T # noqa: N806
+ isGrid = True # noqa: N806
+
+ elif nim_eff == 2: # noqa: PLR2004
if found_scaling_anchor:
- myID = np.delete([0,1,2],id_im_scaling_ancher)
- Scaling_ref = np.linspace(log_im_lb[id_im_scaling_ancher], log_im_ub[id_im_scaling_ancher], int(im_nbins[id_im_scaling_ancher]))
+ myID = np.delete([0, 1, 2], id_im_scaling_ancher) # noqa: N806
+ Scaling_ref = np.linspace( # noqa: N806
+ log_im_lb[id_im_scaling_ancher],
+ log_im_ub[id_im_scaling_ancher],
+ int(im_nbins[id_im_scaling_ancher]),
+ )
else:
- myID = [0,1]
+ myID = [0, 1] # noqa: N806
- X,Y = np.meshgrid(np.linspace(log_im_lb[myID[0]], log_im_ub[myID[0]], int(im_nbins[myID[0]])), np.linspace(log_im_lb[myID[1]], log_im_ub[myID[1]], int(im_nbins[myID[1]])))
- IM_log_ref = np.vstack([X.reshape(-1), Y.reshape(-1)]).T
- isGrid = True
- elif nim_eff ==3:
+ X, Y = np.meshgrid( # noqa: N806
+ np.linspace(
+ log_im_lb[myID[0]], log_im_ub[myID[0]], int(im_nbins[myID[0]])
+ ),
+ np.linspace(
+ log_im_lb[myID[1]], log_im_ub[myID[1]], int(im_nbins[myID[1]])
+ ),
+ )
+ IM_log_ref = np.vstack([X.reshape(-1), Y.reshape(-1)]).T # noqa: N806
+ isGrid = True # noqa: N806
+ elif nim_eff == 3: # noqa: PLR2004
if found_scaling_anchor:
- myID = np.delete([0,1,2,3],id_im_scaling_ancher)
- Scaling_ref = np.linspace(log_im_lb[id_im_scaling_ancher], log_im_ub[id_im_scaling_ancher], int(im_nbins[id_im_scaling_ancher]))
+ myID = np.delete([0, 1, 2, 3], id_im_scaling_ancher) # noqa: N806
+ Scaling_ref = np.linspace( # noqa: N806
+ log_im_lb[id_im_scaling_ancher],
+ log_im_ub[id_im_scaling_ancher],
+ int(im_nbins[id_im_scaling_ancher]),
+ )
else:
- myID = [0,1,2]
+ myID = [0, 1, 2] # noqa: N806
- X,Y,Z = np.meshgrid(np.linspace(log_im_lb[myID[0]], log_im_ub[myID[0]], int(im_nbins[myID[0]])), np.linspace(log_im_lb[myID[1]], log_im_ub[myID[1]], int(im_nbins[myID[1]])), np.linspace(log_im_lb[myID[2]], log_im_ub[myID[2]], int(im_nbins[myID[2]])))
- IM_log_ref = np.vstack([X.reshape(-1), Y.reshape(-1), Z.reshape(-1)]).T
- isGrid = True
+ X, Y, Z = np.meshgrid( # noqa: N806
+ np.linspace(
+ log_im_lb[myID[0]], log_im_ub[myID[0]], int(im_nbins[myID[0]])
+ ),
+ np.linspace(
+ log_im_lb[myID[1]], log_im_ub[myID[1]], int(im_nbins[myID[1]])
+ ),
+ np.linspace(
+ log_im_lb[myID[2]], log_im_ub[myID[2]], int(im_nbins[myID[2]])
+ ),
+ )
+ IM_log_ref = np.vstack([X.reshape(-1), Y.reshape(-1), Z.reshape(-1)]).T # noqa: N806
+ isGrid = True # noqa: N806
else:
if found_scaling_anchor:
- myID = np.delete(range(nim_eff+1),id_im_scaling_ancher)
- Scaling_ref = np.linspace(log_im_lb[id_im_scaling_ancher], log_im_ub[id_im_scaling_ancher], int(im_nbins[id_im_scaling_ancher]))
+ myID = np.delete(range(nim_eff + 1), id_im_scaling_ancher) # noqa: N806
+ Scaling_ref = np.linspace( # noqa: N806
+ log_im_lb[id_im_scaling_ancher],
+ log_im_ub[id_im_scaling_ancher],
+ int(im_nbins[id_im_scaling_ancher]),
+ )
else:
- myID = range(nim_eff)
+ myID = range(nim_eff) # noqa: N806
# Let us do LHS sampling
- sampler= qmc.LatinHypercube(d=nim)
- U = sampler.random(n=ngrid)
- X= np.zeros((ngrid,nim_eff))
+ sampler = qmc.LatinHypercube(d=nim)
+ U = sampler.random(n=ngrid) # noqa: N806
+ X = np.zeros((ngrid, nim_eff)) # noqa: N806
for i in range(nim_eff):
- X[:,i] = U[:,i]*(log_im_ub[myID[i]]-log_im_lb[myID[i]]) + log_im_lb[myID[i]]
- IM_log_ref = X
- isGrid = False
-
+ X[:, i] = (
+ U[:, i] * (log_im_ub[myID[i]] - log_im_lb[myID[i]])
+ + log_im_lb[myID[i]]
+ )
+ IM_log_ref = X # noqa: N806
+ isGrid = False # noqa: N806
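
A minimal standalone sketch of the Latin-Hypercube mapping performed in this else-branch (bounds and sample count are illustrative):

import numpy as np
from scipy.stats import qmc

log_lb = np.log(np.array([0.1, 0.2]))            # illustrative log-IM lower bounds
log_ub = np.log(np.array([1.0, 2.0]))            # illustrative log-IM upper bounds
ngrid = 8

sampler = qmc.LatinHypercube(d=2)
U = sampler.random(n=ngrid)                      # stratified samples in [0, 1]^2
X = log_lb + U * (log_ub - log_lb)               # rescaled into the log-IM box
print(np.exp(X))                                 # reference IM values (linear scale)
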
#
# Read Database
#
- with open(gmDataBaseDir,'r') as fd:
- gmData = json.load(fd)
-
- RSN = gmData["RSN"]
- geomPSA = gmData["geomPSA"]
- geomPGA = gmData["geomPGA"]
- geomPGV = gmData["geomPGV"]
- geomPGD = gmData["geomPGD"]
- geomDS575 = gmData["geomDS575"]
- geomDS595 = gmData["geomDS595"]
- geomIa = gmData["geomIa"]
-
- periods = gmData["period"]
- numgm = gmData["numgm"]
- eqnameID = gmData["eqnameID"]
- units = gmData["unit"]
+ with open(gmDataBaseDir) as fd: # noqa: PLW1514, PTH123
+ gmData = json.load(fd) # noqa: N806
+
+ RSN = gmData['RSN'] # noqa: N806
+ geomPSA = gmData['geomPSA'] # noqa: N806
+ geomPGA = gmData['geomPGA'] # noqa: N806
+ geomPGV = gmData['geomPGV'] # noqa: N806
+ geomPGD = gmData['geomPGD'] # noqa: N806
+ geomDS575 = gmData['geomDS575'] # noqa: N806
+ geomDS595 = gmData['geomDS595'] # noqa: N806
+ geomIa = gmData['geomIa'] # noqa: N806
+
+ periods = gmData['period']
+ numgm = gmData['numgm']
+ eqnameID = gmData['eqnameID'] # noqa: N806
+ units = gmData['unit']
#
# Define Sa(T_cond)
@@ -239,113 +277,124 @@ def __init__(self, inputArgs,err):
# Compute SaRatio(T_lowbound,T_cond,T_highbound) and Ds575
#
- IM_log_data_pool = np.zeros((numgm,0))
+ IM_log_data_pool = np.zeros((numgm, 0)) # noqa: N806
scaling_exponent = np.zeros((nim,))
myunits = []
for ni in range(nim):
- if im_names[ni].startswith("PSA"):
- Sa_T1 = np.zeros((numgm,))
- T_cond = float(im_periods[ni][0]) # central (<= 5.0)
+ if im_names[ni].startswith('PSA'):
+ Sa_T1 = np.zeros((numgm,)) # noqa: N806
+ T_cond = float(im_periods[ni][0]) # central (<= 5.0) # noqa: N806
for ng in range(numgm):
Sa_T1[ng] = np.interp(T_cond, periods, geomPSA[ng])
- Sa1_pool = Sa_T1[np.newaxis].T
- IM_log_data_pool = np.hstack([IM_log_data_pool, np.log(Sa1_pool)])
+ Sa1_pool = Sa_T1[np.newaxis].T # noqa: N806
+ IM_log_data_pool = np.hstack([IM_log_data_pool, np.log(Sa1_pool)]) # noqa: N806
scaling_exponent[ni] = 1
- myunits += ['('+units["PSA"]+')']
- elif im_names[ni]=="SaRatio":
- Sa_T1 = np.zeros((numgm,))
- Sa_T_geomean = np.zeros((numgm,))
+ myunits += ['(' + units['PSA'] + ')']
+ elif im_names[ni] == 'SaRatio':
+ Sa_T1 = np.zeros((numgm,)) # noqa: N806
+ Sa_T_geomean = np.zeros((numgm,)) # noqa: N806
- T_lowbound = float(im_periods[ni][0]) # low-bound
- T_cond = float(im_periods[ni][1]) # central (<= 5.0)
- T_highbound = float(im_periods[ni][2]) # high-bound
+ T_lowbound = float(im_periods[ni][0]) # low-bound # noqa: N806
+ T_cond = float(im_periods[ni][1]) # central (<= 5.0) # noqa: N806
+ T_highbound = float(im_periods[ni][2]) # high-bound # noqa: N806
- idx_T_range = np.where((np.array(periods) > T_lowbound) * (np.array(periods) < T_highbound))[0]
+ idx_T_range = np.where( # noqa: N806
+ (np.array(periods) > T_lowbound)
+ * (np.array(periods) < T_highbound)
+ )[0]
for ng in range(numgm):
Sa_T1[ng] = np.interp(T_cond, periods, geomPSA[ng])
- Sa_T_geomean[ng] = gmean(np.array(geomPSA[ng])[idx_T_range.astype(int)])
-
- SaRatio_pool = (Sa_T1 / Sa_T_geomean)[np.newaxis].T
- IM_log_data_pool = np.hstack([IM_log_data_pool,np.log(SaRatio_pool)])
+ Sa_T_geomean[ng] = gmean(
+ np.array(geomPSA[ng])[idx_T_range.astype(int)]
+ )
+
+ SaRatio_pool = (Sa_T1 / Sa_T_geomean)[np.newaxis].T # noqa: N806
+ IM_log_data_pool = np.hstack( # noqa: N806
+ [IM_log_data_pool, np.log(SaRatio_pool)]
+ )
scaling_exponent[ni] = 0
- myunits += [""]
- elif im_names[ni]=="DS575":
+ myunits += ['']
+ elif im_names[ni] == 'DS575':
ds_pool = (np.array(geomDS575))[np.newaxis].T
- IM_log_data_pool = np.hstack([IM_log_data_pool, np.log(ds_pool)])
+ IM_log_data_pool = np.hstack([IM_log_data_pool, np.log(ds_pool)]) # noqa: N806
scaling_exponent[ni] = 0
- myunits += ['('+units["DS575"]+')']
+ myunits += ['(' + units['DS575'] + ')']
- elif im_names[ni]=="DS595":
+ elif im_names[ni] == 'DS595':
ds_pool = (np.array(geomDS595))[np.newaxis].T
- IM_log_data_pool = np.hstack([IM_log_data_pool, np.log(ds_pool)])
+ IM_log_data_pool = np.hstack([IM_log_data_pool, np.log(ds_pool)]) # noqa: N806
scaling_exponent[ni] = 0
- myunits += ['('+units["DS595"]+')']
+ myunits += ['(' + units['DS595'] + ')']
- elif im_names[ni]=="PGA":
+ elif im_names[ni] == 'PGA':
pg_pool = (np.array(geomPGA))[np.newaxis].T
- IM_log_data_pool = np.hstack([IM_log_data_pool, np.log(pg_pool)])
+ IM_log_data_pool = np.hstack([IM_log_data_pool, np.log(pg_pool)]) # noqa: N806
scaling_exponent[ni] = 1
- myunits += ['('+units["PGA"]+')']
+ myunits += ['(' + units['PGA'] + ')']
- elif im_names[ni]=="PGV":
+ elif im_names[ni] == 'PGV':
pg_pool = (np.array(geomPGV))[np.newaxis].T
- IM_log_data_pool = np.hstack([IM_log_data_pool, np.log(pg_pool)])
+ IM_log_data_pool = np.hstack([IM_log_data_pool, np.log(pg_pool)]) # noqa: N806
scaling_exponent[ni] = 1
- myunits += ['('+units["PGV"]+')']
+ myunits += ['(' + units['PGV'] + ')']
- elif im_names[ni]=="PGD":
+ elif im_names[ni] == 'PGD':
pg_pool = (np.array(geomPGD))[np.newaxis].T
- IM_log_data_pool = np.hstack([IM_log_data_pool, np.log(pg_pool)])
+ IM_log_data_pool = np.hstack([IM_log_data_pool, np.log(pg_pool)]) # noqa: N806
scaling_exponent[ni] = 1
- myunits += ['('+ units["PGD"]+')']
+ myunits += ['(' + units['PGD'] + ')']
- elif im_names[ni]=="Ia":
+ elif im_names[ni] == 'Ia':
ai_pool = (np.array(geomIa))[np.newaxis].T
- IM_log_data_pool = np.hstack([IM_log_data_pool, np.log(ai_pool)])
+ IM_log_data_pool = np.hstack([IM_log_data_pool, np.log(ai_pool)]) # noqa: N806
scaling_exponent[ni] = 2
- myunits += ['('+units["Ia"]+')']
+ myunits += ['(' + units['Ia'] + ')']
else:
- msg = "unrecognized IM name "+im_names[ni]
- print(msg)
+ msg = 'unrecognized IM name ' + im_names[ni]
+ print(msg) # noqa: T201
errf.write(msg)
errf.close()
- exit(-1)
+ exit(-1) # noqa: PLR1722
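
Among the branches above, SaRatio is the least self-explanatory: Sa at the conditioning period divided by the geometric mean of Sa over (T_low, T_high). A small sketch with an illustrative spectrum:

import numpy as np
from scipy.stats import gmean

periods = np.array([0.1, 0.5, 1.0, 2.0, 3.0])    # illustrative spectral periods (s)
psa     = np.array([0.9, 0.7, 0.5, 0.2, 0.1])    # illustrative Sa values (g)

T_low, T_cond, T_high = 0.2, 1.0, 2.5
sa_cond = np.interp(T_cond, periods, psa)        # Sa at the conditioning period
in_band = (periods > T_low) & (periods < T_high)
sa_ratio = sa_cond / gmean(psa[in_band])         # SaRatio(T_low, T_cond, T_high)
print(sa_ratio)
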
if found_scaling_anchor:
- IM_log_data_scaling_anchor = IM_log_data_pool[:,id_im_scaling_ancher]
- #IM_log_ref_scaling_anchor = IM_log_ref[:,id_im_scaling_ancher]
- IM_log_ref_scaling_anchor = Scaling_ref
+ IM_log_data_scaling_anchor = IM_log_data_pool[:, id_im_scaling_ancher] # noqa: N806
+ # IM_log_ref_scaling_anchor = IM_log_ref[:,id_im_scaling_ancher]
+ IM_log_ref_scaling_anchor = Scaling_ref # noqa: N806
- IM_log_data_pool2 = np.delete(IM_log_data_pool.copy(), id_im_scaling_ancher, 1)
- IM_log_ref2 = IM_log_ref.copy()
+ IM_log_data_pool2 = np.delete( # noqa: N806
+ IM_log_data_pool.copy(), id_im_scaling_ancher, 1
+ )
+ IM_log_ref2 = IM_log_ref.copy() # noqa: N806
- scaling_exponent = scaling_exponent/scaling_exponent[id_im_scaling_ancher]
- scaling_exponent2 = np.delete(scaling_exponent.copy(), id_im_scaling_ancher)
+ scaling_exponent = ( # noqa: PLR6104
+ scaling_exponent / scaling_exponent[id_im_scaling_ancher]
+ )
+ scaling_exponent2 = np.delete(
+ scaling_exponent.copy(), id_im_scaling_ancher
+ )
log_im_range2 = np.delete(log_im_range.copy(), id_im_scaling_ancher)
- lenRef2 = np.mean(1 / np.delete(im_nbins.copy(), id_im_scaling_ancher))
+ lenRef2 = np.mean(1 / np.delete(im_nbins.copy(), id_im_scaling_ancher)) # noqa: N806
else:
- IM_log_data_pool2 = IM_log_data_pool
- IM_log_ref2 = IM_log_ref
+ IM_log_data_pool2 = IM_log_data_pool # noqa: N806
+ IM_log_ref2 = IM_log_ref # noqa: N806
scaling_exponent2 = scaling_exponent
log_im_range2 = log_im_range
- lenRef2 = np.linalg.norm(1 /im_nbins)
+ lenRef2 = np.linalg.norm(1 / im_nbins) # noqa: N806
- if id_im_scaling_ancher>=0:
+ if id_im_scaling_ancher >= 0:
if isGrid:
- nScalingGrid = im_nbins[id_im_scaling_ancher]
- nGridPerIM = ngrid/im_nbins[id_im_scaling_ancher]
+ nScalingGrid = im_nbins[id_im_scaling_ancher] # noqa: N806
+ nGridPerIM = ngrid / im_nbins[id_im_scaling_ancher] # noqa: N806
else:
- nScalingGrid = ngrid
- nGridPerIM = ngrid/im_nbins[id_im_scaling_ancher]
+ nScalingGrid = ngrid # noqa: N806
+ nGridPerIM = ngrid / im_nbins[id_im_scaling_ancher] # noqa: N806
else:
- nScalingGrid = 1
- nGridPerIM = ngrid
-
-
+ nScalingGrid = 1 # noqa: N806
+ nGridPerIM = ngrid # noqa: N806
sf_min = 0.5 # minimum of no-panalty scaling
sf_max = 10.0 # maximum of no-pad nalty scaling
@@ -355,23 +404,22 @@ def __init__(self, inputArgs,err):
# selected_gm_err_list =[]
# selected_gm_eqID_list =[]
# selected_gm_scale_list =[]
- selected_gm_ID = []
+ selected_gm_ID = [] # noqa: N806
selected_gm_err = []
- selected_gm_eqID = []
+ selected_gm_eqID = [] # noqa: N806
selected_gm_scale = []
- err_sum = np.zeros((int(nScalingGrid),int(nGridPerIM)))
-
+ err_sum = np.zeros((int(nScalingGrid), int(nGridPerIM)))
-
- nsa_tmp,ngr_tmp = np.meshgrid(range(int(nScalingGrid)), range(int(nGridPerIM)))
- nsas=list(nsa_tmp.reshape(-1))*npergrid
- ngrs=list(ngr_tmp.reshape(-1))*npergrid
+ nsa_tmp, ngr_tmp = np.meshgrid(
+ range(int(nScalingGrid)), range(int(nGridPerIM))
+ )
+ nsas = list(nsa_tmp.reshape(-1)) * npergrid
+ ngrs = list(ngr_tmp.reshape(-1)) * npergrid
randid = np.random.permutation(range(len(nsas)))
for nc in range(len(nsas)):
-
nsa = nsas[randid[nc]]
ngr = ngrs[randid[nc]]
@@ -386,83 +434,84 @@ def __init__(self, inputArgs,err):
penalty_pool = np.zeros((numgm,))
else:
- SaT_ref = np.exp(IM_log_ref_scaling_anchor[nsa])
- Sa_T1 = np.exp(IM_log_data_scaling_anchor)
+ SaT_ref = np.exp(IM_log_ref_scaling_anchor[nsa]) # noqa: N806
+ Sa_T1 = np.exp(IM_log_data_scaling_anchor) # noqa: N806
- # penalty for scaling factor
+ # penalty for scaling factor
sf_pool = SaT_ref / Sa_T1 # scaling factors
penalty_pool = np.zeros((numgm,))
temptag1 = np.where(sf_pool < sf_min)
penalty_pool[temptag1] = (sf_min - sf_pool[temptag1]) ** 2
temptag2 = np.where(sf_pool > sf_max)
- penalty_pool[temptag2] = (sf_max - sf_pool[temptag2]) ** 2;
-
-
-
- if IM_log_data_pool2.shape[1]>0:
- IM_log_data_pool3 = IM_log_data_pool2 + np.log(sf_pool[np.newaxis]).T * scaling_exponent2[np.newaxis]
- normData = IM_log_data_pool3/log_im_range2
- normRefGrid =IM_log_ref2/log_im_range2
- err_mat = distance_matrix(normData, normRefGrid, p=2) ** 2 / lenRef2**2 + np.tile(penalty_pool,(int(nGridPerIM), 1)).T * sf_penalty
- err_pure = distance_matrix(normData, normRefGrid, p=2) ** 2 / lenRef2**2
+ penalty_pool[temptag2] = (sf_max - sf_pool[temptag2]) ** 2
+
+ if IM_log_data_pool2.shape[1] > 0:
+ IM_log_data_pool3 = ( # noqa: N806
+ IM_log_data_pool2
+ + np.log(sf_pool[np.newaxis]).T * scaling_exponent2[np.newaxis]
+ )
+ normData = IM_log_data_pool3 / log_im_range2 # noqa: N806
+ normRefGrid = IM_log_ref2 / log_im_range2 # noqa: N806
+ err_mat = (
+ distance_matrix(normData, normRefGrid, p=2) ** 2 / lenRef2**2
+ + np.tile(penalty_pool, (int(nGridPerIM), 1)).T * sf_penalty
+ )
+ err_pure = (
+ distance_matrix(normData, normRefGrid, p=2) ** 2 / lenRef2**2
+ )
else:
- err_mat = np.tile(penalty_pool,(int(nGridPerIM), 1)).T * sf_penalty
- err_pure = np.tile(penalty_pool,(int(nGridPerIM), 1)).T
+ err_mat = np.tile(penalty_pool, (int(nGridPerIM), 1)).T * sf_penalty
+ err_pure = np.tile(penalty_pool, (int(nGridPerIM), 1)).T
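
A back-of-the-envelope sketch of the error measure assembled here: squared distance between normalized log-IMs and the reference grid, plus a quadratic penalty when the required scaling factor leaves [sf_min, sf_max]. All numbers are illustrative and the lenRef2 normalization is omitted for brevity:

import numpy as np
from scipy.spatial import distance_matrix

sf_min, sf_max, sf_penalty = 0.5, 10.0, 1.0      # illustrative penalty settings
sf_pool = np.array([0.3, 1.0, 12.0])             # candidate scaling factors
penalty = np.zeros_like(sf_pool)
low, high = sf_pool < sf_min, sf_pool > sf_max
penalty[low] = (sf_min - sf_pool[low]) ** 2
penalty[high] = (sf_max - sf_pool[high]) ** 2

norm_data = np.array([[0.1, 0.2], [0.4, 0.1], [0.9, 0.8]])   # 3 records, 2 IMs
norm_grid = np.array([[0.0, 0.0], [0.5, 0.5]])               # 2 reference grid points
err_mat = (
    distance_matrix(norm_data, norm_grid) ** 2
    + np.tile(penalty, (norm_grid.shape[0], 1)).T * sf_penalty
)
print(err_mat)                                   # one error per (record, grid point)
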
- minerr = np.sort(err_mat,axis=0)
- minerr_tag = np.argsort(err_mat,axis=0)
+ minerr = np.sort(err_mat, axis=0)
+ minerr_tag = np.argsort(err_mat, axis=0)
count = 0
- for ng in minerr_tag[:,ngr]:
-
- cureqID = eqnameID[ng]
- cureqID_existnum = np.sum(cureqID == np.array(selected_gm_eqID))
+ for ng in minerr_tag[:, ngr]:
+ cureqID = eqnameID[ng] # noqa: N806
+ cureqID_existnum = np.sum(cureqID == np.array(selected_gm_eqID)) # noqa: N806
-                    if (selected_gm_ID.count(ng)==0) and(cureqID_existnum
+            fig.layout.coloraxis.colorbar.title = (
+                'Ground<br>motion<br>coverage<br>(error level)'
+            )
+ fig.update_coloraxes(
+ cmin=0,
+ cmax=1,
+ )
fig.add_scatter3d(
- x= np.exp(theLogIM[idx1]),
- y= np.exp(theLogIM[idx2]),
- z=np.exp(theLogIM[idx3]),
- mode='markers',
- marker=dict(
- size=4,
- line=dict(width=1,color='black'),
- color='orange',
- ),
- name ="selected ground motion"
- )
+ x=np.exp(theLogIM[idx1]),
+ y=np.exp(theLogIM[idx2]),
+ z=np.exp(theLogIM[idx3]),
+ mode='markers',
+ marker=dict( # noqa: C408
+ size=4,
+ line=dict(width=1, color='black'), # noqa: C408
+ color='orange',
+ ),
+ name='selected ground motion',
+ )
fig.update_layout(
- scene = dict(
- xaxis = dict(
+ scene=dict( # noqa: C408
+ xaxis=dict( # noqa: C408
tickmode='array',
- #tickvals=[im_lb[idx1],im_ub[idx1],0.001,0.01,0.1,1,10,100],),
- tickvals=[im_lb[idx1],im_ub[idx1],0.001,0.005,0.01,0.05,0.1,0.5,1,5,10,50,100],
- title = im_names[idx1] + myunits[idx1]),
- yaxis = dict(
+ # tickvals=[im_lb[idx1],im_ub[idx1],0.001,0.01,0.1,1,10,100],),
+ tickvals=[
+ im_lb[idx1],
+ im_ub[idx1],
+ 0.001,
+ 0.005,
+ 0.01,
+ 0.05,
+ 0.1,
+ 0.5,
+ 1,
+ 5,
+ 10,
+ 50,
+ 100,
+ ],
+ title=im_names[idx1] + myunits[idx1],
+ ),
+ yaxis=dict( # noqa: C408
tickmode='array',
- #tickvals=[im_lb[idx2],im_ub[idx2],0.001,0.01,0.1,1,10,100],),
- tickvals=[im_lb[idx2],im_ub[idx2],0.001,0.005,0.01,0.05,0.1,0.5,1,5,10,50,100],
- title = im_names[idx2] + myunits[idx2]),
- zaxis = dict(
+ # tickvals=[im_lb[idx2],im_ub[idx2],0.001,0.01,0.1,1,10,100],),
+ tickvals=[
+ im_lb[idx2],
+ im_ub[idx2],
+ 0.001,
+ 0.005,
+ 0.01,
+ 0.05,
+ 0.1,
+ 0.5,
+ 1,
+ 5,
+ 10,
+ 50,
+ 100,
+ ],
+ title=im_names[idx2] + myunits[idx2],
+ ),
+ zaxis=dict( # noqa: C408
tickmode='array',
- #tickvals=[im_lb[idx3],im_ub[idx3],0.001,0.01,0.1,1,10,100],),
- tickvals=[im_lb[idx3],im_ub[idx3],0.001,0.005,0.01,0.05,0.1,0.5,1,5,10,50,100],
- title = im_names[idx3] + myunits[idx3],
- ),
- aspectmode= 'cube',),
- legend=dict(
+ # tickvals=[im_lb[idx3],im_ub[idx3],0.001,0.01,0.1,1,10,100],),
+ tickvals=[
+ im_lb[idx3],
+ im_ub[idx3],
+ 0.001,
+ 0.005,
+ 0.01,
+ 0.05,
+ 0.1,
+ 0.5,
+ 1,
+ 5,
+ 10,
+ 50,
+ 100,
+ ],
+ title=im_names[idx3] + myunits[idx3],
+ ),
+ aspectmode='cube',
+ ),
+ legend=dict( # noqa: C408
x=0,
y=0,
- xanchor="left",
- yanchor="top",
- ),
- #paper_bgcolor='rgba(0,0,0,0)',
- autosize=False, height=500, width=550,legend_orientation="h",
- scene_camera= dict(
- eye=dict(x=2, y=2, z=0.6)
+ xanchor='left',
+ yanchor='top',
),
- margin=dict(l=20, r=20, t=20, b=20),
+ # paper_bgcolor='rgba(0,0,0,0)',
+ autosize=False,
+ height=500,
+ width=550,
+ legend_orientation='h',
+ scene_camera=dict(eye=dict(x=2, y=2, z=0.6)), # noqa: C408
+ margin=dict(l=20, r=20, t=20, b=20), # noqa: C408
)
- '''
+ """
fig = plt.figure();
ax = fig.add_subplot(projection='3d')
@@ -610,94 +714,146 @@ def __init__(self, inputArgs,err):
fig.colorbar(sc,label= "coverage (error level)", cax=cax)
ax.view_init(10, 30)
- '''
- if nim==2:
-
+ """
+ if nim == 2: # noqa: PLR2004
flat_grid_error = err_sum.flatten() / npergrid
- idx1=0
- idx2=1
+ idx1 = 0
+ idx2 = 1
#
# data points
#
- X, Y = np.meshgrid(LogIMref[idx1], LogIMref[idx2])
-
+ X, Y = np.meshgrid(LogIMref[idx1], LogIMref[idx2]) # noqa: N806
#
# interpolated area
#
- lowerboundX = np.min(( np.log(im_lb[0])-log_im_range[0]*0.05 ))
- upperboundX = np.max(( np.log(im_ub[0])+log_im_range[0]*0.05))
- lowerboundY = np.min(( np.log(im_lb[1])-log_im_range[1]*0.05 ))
- upperboundY = np.max(( np.log(im_ub[1])+log_im_range[1]*0.05))
-
- xx = np.linspace(lowerboundX, upperboundX, 20)
- yy = np.linspace(lowerboundY, upperboundY, 20)
- xxx, yyy = np.meshgrid(xx, yy)
- f = interpolate.interp2d((X.reshape(-1)), (Y.reshape(-1)) , flat_grid_error)
- zzz = f(xx,yy)
-
+ lowerboundX = np.min(np.log(im_lb[0]) - log_im_range[0] * 0.05) # noqa: N806
+ upperboundX = np.max(np.log(im_ub[0]) + log_im_range[0] * 0.05) # noqa: N806
+ lowerboundY = np.min(np.log(im_lb[1]) - log_im_range[1] * 0.05) # noqa: N806
+ upperboundY = np.max(np.log(im_ub[1]) + log_im_range[1] * 0.05) # noqa: N806
+
+ xx = np.linspace(lowerboundX, upperboundX, 20)
+ yy = np.linspace(lowerboundY, upperboundY, 20)
+ xxx, yyy = np.meshgrid(xx, yy) # noqa: F841
+ f = interpolate.interp2d(
+ (X.reshape(-1)), (Y.reshape(-1)), flat_grid_error
+ )
+ zzz = f(xx, yy)
#
# Figure
#
- fig = px.scatter(x=np.exp(X.reshape(-1)),
- y=np.exp(Y.reshape(-1)),
- color=flat_grid_error,
- log_x=True,
- log_y=True,
- color_continuous_scale=colorscale,
- )
- fig.update_traces(marker=dict(size= 15,
- line=dict(width=2,color='black'),))
+ fig = px.scatter(
+ x=np.exp(X.reshape(-1)),
+ y=np.exp(Y.reshape(-1)),
+ color=flat_grid_error,
+ log_x=True,
+ log_y=True,
+ color_continuous_scale=colorscale,
+ )
+ fig.update_traces(
+ marker=dict( # noqa: C408
+ size=15,
+ line=dict(width=2, color='black'), # noqa: C408
+ )
+ )
fig['data'][0]['showlegend'] = True
- fig['data'][0]['name'] = "anchor point"
+ fig['data'][0]['name'] = 'anchor point'
fig.add_scatter(
- x= np.exp(theLogIM[idx1]),
- y= np.exp(theLogIM[idx2]),
- mode='markers',
- marker=dict(
- size=5,
- line=dict(width=1,color='black'),
- color='orange',
- ),
- name ="selected ground motion"
- )
-
- #fig = px.scatter(x=[None],y=[None],log_x=True,log_y=True,)
- #fig.update(layout_coloraxis_showscale=False)
-            fig.layout.coloraxis.colorbar.title = 'Ground<br>motion<br>coverage<br>(error level)'
- fig.add_heatmap(x=np.exp(xx) ,y= np.exp(yy),z = zzz, zmin=0,zmax=1, colorscale =colorscale,coloraxis='coloraxis',opacity=0.5,hoverinfo='skip')
+ x=np.exp(theLogIM[idx1]),
+ y=np.exp(theLogIM[idx2]),
+ mode='markers',
+ marker=dict( # noqa: C408
+ size=5,
+ line=dict(width=1, color='black'), # noqa: C408
+ color='orange',
+ ),
+ name='selected ground motion',
+ )
+
+ # fig = px.scatter(x=[None],y=[None],log_x=True,log_y=True,)
+ # fig.update(layout_coloraxis_showscale=False)
+ fig.layout.coloraxis.colorbar.title = (
+                'Ground<br>motion<br>coverage<br>(error level)'
+ )
+ fig.add_heatmap(
+ x=np.exp(xx),
+ y=np.exp(yy),
+ z=zzz,
+ zmin=0,
+ zmax=1,
+ colorscale=colorscale,
+ coloraxis='coloraxis',
+ opacity=0.5,
+ hoverinfo='skip',
+ )
fig.update_layout(
- xaxis = dict(
- tickmode='array',
- #tickvals=[im_lb[idx1],im_ub[idx1],0.001,0.01,0.1,1,10,100],),
- tickvals=[im_lb[idx1],im_ub[idx1],0.001,0.005,0.01,0.05,0.1,0.5,1,5,10,50,100],
- title = im_names[idx1] + myunits[idx1]),
- yaxis = dict(
- tickmode='array',
- #tickvals=[im_lb[idx2],im_ub[idx2],0.001,0.01,0.1,1,10,100],),
- tickvals=[im_lb[idx2],im_ub[idx2],0.001,0.005,0.01,0.05,0.1,0.5,1,5,10,50,100],
- title = im_names[idx2] + myunits[idx2]),
- legend=dict(
+ xaxis=dict( # noqa: C408
+ tickmode='array',
+ # tickvals=[im_lb[idx1],im_ub[idx1],0.001,0.01,0.1,1,10,100],),
+ tickvals=[
+ im_lb[idx1],
+ im_ub[idx1],
+ 0.001,
+ 0.005,
+ 0.01,
+ 0.05,
+ 0.1,
+ 0.5,
+ 1,
+ 5,
+ 10,
+ 50,
+ 100,
+ ],
+ title=im_names[idx1] + myunits[idx1],
+ ),
+ yaxis=dict( # noqa: C408
+ tickmode='array',
+ # tickvals=[im_lb[idx2],im_ub[idx2],0.001,0.01,0.1,1,10,100],),
+ tickvals=[
+ im_lb[idx2],
+ im_ub[idx2],
+ 0.001,
+ 0.005,
+ 0.01,
+ 0.05,
+ 0.1,
+ 0.5,
+ 1,
+ 5,
+ 10,
+ 50,
+ 100,
+ ],
+ title=im_names[idx2] + myunits[idx2],
+ ),
+ legend=dict( # noqa: C408
x=0,
y=-0.1,
- xanchor="left",
- yanchor="top",
+ xanchor='left',
+ yanchor='top',
),
- #paper_bgcolor='rgba(0,0,0,0)',
- autosize=False, height=500, width=550,legend_orientation="h",
- margin=dict(l=20, r=20, t=20, b=20),
+ # paper_bgcolor='rgba(0,0,0,0)',
+ autosize=False,
+ height=500,
+ width=550,
+ legend_orientation='h',
+ margin=dict(l=20, r=20, t=20, b=20), # noqa: C408
+ )
+ fig.update_coloraxes(
+ cmin=0,
+ cmax=1,
)
- fig.update_coloraxes(cmin=0,cmax=1,)
- '''
+ """
fig = plt.figure();
ax = fig.add_subplot()
@@ -762,12 +918,11 @@ def __init__(self, inputArgs,err):
plt.legend(["anchor point", "selected ground motion"], ncol=2, bbox_to_anchor=(0,0.02,1,-0.15), loc="upper left")
plt.title("Ground motion coverage", x=0.5, y=1.05)
fig.colorbar(sc,label= "coverage (error level)")
- '''
- if nim==1:
+ """
+ if nim == 1:
pass
# flat_grid_error = err_sum.flatten() / npergrid
-
# import matplotlib.pyplot as plt
# ngrid_1axis = int(im_nbins[0])
@@ -792,16 +947,15 @@ def __init__(self, inputArgs,err):
# ax.scatter(IM_log_ref[:, 0], 0*IM_log_ref[:, 0],s=5)
# plt.xlabel(im_names[idx1]);
- #plt.savefig('gridIM_coverage.png',bbox_inches='tight')
- if nim==2 or nim==3:
- with open(r"gridIM_coverage.html", 'w') as f:
+ # plt.savefig('gridIM_coverage.png',bbox_inches='tight')
+ if nim == 2 or nim == 3: # noqa: PLR1714, PLR2004
+ with open(r'gridIM_coverage.html', 'w') as f: # noqa: FURB103, PLW1514, PTH123
f.write(fig.to_html(full_html=False, include_plotlyjs='cdn'))
f.close()
-if __name__ == "__main__":
-
- errf = open("gridIM_log.err","w")
+if __name__ == '__main__':
+ errf = open('gridIM_log.err', 'w') # noqa: PLW1514, PTH123, SIM115
main(sys.argv, errf)
# try:
# main(sys.argv,errf)
@@ -812,4 +966,3 @@ def __init__(self, inputArgs,err):
# errf.write("Exception occurred while code Execution: " + str(repr(e)))
# errf.close()
# exit(-1)
-
diff --git a/modules/createEVENT/windTunnelExperiment/WindTunnelExperiment.cpp b/modules/createEVENT/windTunnelExperiment/WindTunnelExperiment.cpp
index 22f6d2679..ae6da62e2 100644
--- a/modules/createEVENT/windTunnelExperiment/WindTunnelExperiment.cpp
+++ b/modules/createEVENT/windTunnelExperiment/WindTunnelExperiment.cpp
@@ -87,7 +87,7 @@ main(int argc, char **argv) {
json_t *generalInformation = json_object_get(input, "GeneralInformation");
json_t *inputEventsArray = json_object_get(input, "Events");
if (generalInformation == NULL || inputEventsArray == NULL) {
- std::cerr << "FATAL ERROR - input file conatins no Events key-pair\n";
+ std::cerr << "FATAL ERROR - input file contains no Events key-pair\n";
exit(-1);
}
@@ -121,7 +121,7 @@ main(int argc, char **argv) {
json_object_set(units,"time",json_string("sec"));
json_object_set(outputEvent,"units",units);
- // call function to fill in event details .. depends on getRV flag what is acually done
+ // call function to fill in event details .. depends on getRV flag what is actually done
addEvent(generalInformation, inputEvent, outputEvent, doRV);
json_array_append(outputEventsArray, outputEvent);
@@ -173,7 +173,7 @@ int addEvent(json_t *generalInfo, json_t *currentEvent, json_t *outputEvent, boo
widthJO == NULL ||
depthJO == NULL ||
storiesJO == NULL ) {
- std::cerr << "ERROR missing Information from GeneralInformation (height, width, stories all neeed)\n";
+ std::cerr << "ERROR missing Information from GeneralInformation (height, width, stories all needed)\n";
return -2;
}
@@ -359,7 +359,7 @@ int addEvent(json_t *generalInfo, json_t *currentEvent, json_t *outputEvent, boo
}
//
- // for each tap determine factors fr moments and forces for the buiding asuming a mesh discretization
+ // for each tap determine factors for moments and forces for the building assuming a mesh discretization
//
int numDivisionX = 10;
@@ -517,7 +517,7 @@ int addEvent(json_t *generalInfo, json_t *currentEvent, json_t *outputEvent, boo
json_t *storiesJO = json_object_get(generalInfo,"stories");
if (storiesJO == NULL ) {
- std::cerr << "ERROR missing Information from GeneralInformation (height, width, stories all neeed)\n";
+ std::cerr << "ERROR missing Information from GeneralInformation (height, width, stories all needed)\n";
return -2;
}
@@ -620,10 +620,10 @@ int addEvent(json_t *generalInfo, json_t *currentEvent, json_t *outputEvent, boo
//
// function to add factors for forces and moment contribution coefficients for taps to building floor
-// determine coeffiecients for each tap for a building face. This is done by going over each story of
+// determine coefficients for each tap for a building face. This is done by going over each story of
// For each story break into numDiv X numDiv segments. For each segment assume point load at center
// segment and equal in mag to area of segment and using simply supported beam formula determine force
-// at floor below and floor above. based on distance from center line of story determine actibg moments
+// at floor below and floor above. based on distance from center line of story determine acting moments
// on floors.
//
// inputs: height: height of building
@@ -699,9 +699,9 @@ int addForcesFace(TAP *theTaps, int numTaps,
// function to fnd nearest tap
// inputs: theTAPS: array of Taps,
// numTaps: number of taps in array
-// xLoc, yLoc: is location of inut point
+// xLoc, yLoc: is location of input point
// face: if of face
-// output: pinter to nearest TAp in the array, NULL if no taps with face
+// output: pointer to nearest TAP in the array, NULL if no taps with face
//
TAP *findNearestTAP(TAP *theTAPS, int numTaps, double locX, double locY, int face) {
diff --git a/modules/createSAM/AutoSDA/BaselineTclFiles/ElasticAnalysis/PerformLoadsAnalysis.tcl b/modules/createSAM/AutoSDA/BaselineTclFiles/ElasticAnalysis/PerformLoadsAnalysis.tcl
index 388b10705..b17e84d1c 100644
--- a/modules/createSAM/AutoSDA/BaselineTclFiles/ElasticAnalysis/PerformLoadsAnalysis.tcl
+++ b/modules/createSAM/AutoSDA/BaselineTclFiles/ElasticAnalysis/PerformLoadsAnalysis.tcl
@@ -10,7 +10,7 @@
##############################################################################################################################
# Gravity-analysis parameters -- load-controlled static analysis
-set Tol 1.0e-8; # Covergence tolerance for test
+set Tol 1.0e-8; # Convergence tolerance for test
variable constraintsTypeGravity Plain; # Default
if {[info exists RigidDiaphragm] == 1} {
if {$RigidDiaphragm=="ON"} {
@@ -37,4 +37,4 @@ analyze $NstepGravity; # Apply gravity
# ------------------------------------------------- maintain constant gravity loads and reset time to zero
loadConst -time 0.0
set Tol 1.0e-6; # reduce tolerance after gravity loads
-puts "$LoadType Performed Successfully"
\ No newline at end of file
+puts "$LoadType Performed Successfully"
diff --git a/modules/createSAM/AutoSDA/BaselineTclFiles/NonlinearAnalysis/DynamicAnalysis/DefineFunctionsAndProcedures.tcl b/modules/createSAM/AutoSDA/BaselineTclFiles/NonlinearAnalysis/DynamicAnalysis/DefineFunctionsAndProcedures.tcl
index 157503c38..b8b10c027 100644
--- a/modules/createSAM/AutoSDA/BaselineTclFiles/NonlinearAnalysis/DynamicAnalysis/DefineFunctionsAndProcedures.tcl
+++ b/modules/createSAM/AutoSDA/BaselineTclFiles/NonlinearAnalysis/DynamicAnalysis/DefineFunctionsAndProcedures.tcl
@@ -69,7 +69,7 @@ proc NodesAroundPanelZone {ColPier Level XCoordinate YCoordinate PanelSize Maxim
# Input argument explanation:
# Level: the floor level for frame, ground floor is 1.
# ColPier: the column ID, starting from 1 to the number of columns in each frame
-# XCoordinate: X coodinate of the column centerline
+# XCoordinate: X coordinate of the column centerline
# YCoordinate: Y coordinate of the beam centerline
# PanelSize: a list with two elements: {a b}:
# a: the depth of column
@@ -190,7 +190,7 @@ proc elemPanelZone2D {eleID nodeR E VerTransfTag HorTransfTag} {
set x8 [expr $x1 + 7]; # top element on left side of panel zone
set A_PZ 1.0e12; # area of panel zone element (make much larger than A of frame elements)
- set Ipz 1.0e12; # moment of intertia of panel zone element (make much larger than I of frame elements)
+ set Ipz 1.0e12; # moment of inertia of panel zone element (make much larger than I of frame elements)
# create panel zone elements
# tag ndI ndJ A_PZ E I_PZ transfTag
@@ -382,4 +382,4 @@ element zeroLength $eleID $nodeR $nodeC -mat $stiffMatID $stiffMatID $eleID -dir
# equalDOF $nodeR $nodeC 1 2
}
-puts "All Functions and Procedures Have Been Sourced"
\ No newline at end of file
+puts "All Functions and Procedures Have Been Sourced"
diff --git a/modules/createSAM/AutoSDA/BaselineTclFiles/NonlinearAnalysis/DynamicAnalysis/DynamicAnalysisCollapseSolver.tcl b/modules/createSAM/AutoSDA/BaselineTclFiles/NonlinearAnalysis/DynamicAnalysis/DynamicAnalysisCollapseSolver.tcl
index e02513ba0..7d966b003 100644
--- a/modules/createSAM/AutoSDA/BaselineTclFiles/NonlinearAnalysis/DynamicAnalysis/DynamicAnalysisCollapseSolver.tcl
+++ b/modules/createSAM/AutoSDA/BaselineTclFiles/NonlinearAnalysis/DynamicAnalysis/DynamicAnalysisCollapseSolver.tcl
@@ -17,7 +17,7 @@
#
# Subroutines called:
# MaxDriftTester: Checks after loss of convergence the drifts
-# and garantees convergence for collapse
+# and guarantees convergence for collapse
# Sequence of Algorithms used:
# 1. Newton with dt
# 2. Newton with initial tangent and original dt
diff --git a/modules/createSAM/AutoSDA/BaselineTclFiles/NonlinearAnalysis/DynamicAnalysis/PerformGravityAnalysis.tcl b/modules/createSAM/AutoSDA/BaselineTclFiles/NonlinearAnalysis/DynamicAnalysis/PerformGravityAnalysis.tcl
index 8d3c8be31..48d43beed 100644
--- a/modules/createSAM/AutoSDA/BaselineTclFiles/NonlinearAnalysis/DynamicAnalysis/PerformGravityAnalysis.tcl
+++ b/modules/createSAM/AutoSDA/BaselineTclFiles/NonlinearAnalysis/DynamicAnalysis/PerformGravityAnalysis.tcl
@@ -10,7 +10,7 @@
##############################################################################################################################
# Gravity-analysis parameters -- load-controlled static analysis
-set Tol 1.0e-8; # Covergence tolerance for test
+set Tol 1.0e-8; # Convergence tolerance for test
variable constraintsTypeGravity Plain; # Default
if {[info exists RigidDiaphragm] == 1} {
if {$RigidDiaphragm=="ON"} {
@@ -37,4 +37,4 @@ analyze $NstepGravity; # Apply gravity
# ------------------------------------------------- maintain constant gravity loads and reset time to zero
loadConst -time 0.0
set Tol 1.0e-6; # reduce tolerance after gravity loads
-puts "Gravity Performed Successfully"
\ No newline at end of file
+puts "Gravity Performed Successfully"
diff --git a/modules/createSAM/AutoSDA/BaselineTclFiles/NonlinearAnalysis/EigenValueAnalysis/DefineFunctionsAndProcedures.tcl b/modules/createSAM/AutoSDA/BaselineTclFiles/NonlinearAnalysis/EigenValueAnalysis/DefineFunctionsAndProcedures.tcl
index 157503c38..b8b10c027 100644
--- a/modules/createSAM/AutoSDA/BaselineTclFiles/NonlinearAnalysis/EigenValueAnalysis/DefineFunctionsAndProcedures.tcl
+++ b/modules/createSAM/AutoSDA/BaselineTclFiles/NonlinearAnalysis/EigenValueAnalysis/DefineFunctionsAndProcedures.tcl
@@ -69,7 +69,7 @@ proc NodesAroundPanelZone {ColPier Level XCoordinate YCoordinate PanelSize Maxim
# Input argument explanation:
# Level: the floor level for frame, ground floor is 1.
# ColPier: the column ID, starting from 1 to the number of columns in each frame
-# XCoordinate: X coodinate of the column centerline
+# XCoordinate: X coordinate of the column centerline
# YCoordinate: Y coordinate of the beam centerline
# PanelSize: a list with two elements: {a b}:
# a: the depth of column
@@ -190,7 +190,7 @@ proc elemPanelZone2D {eleID nodeR E VerTransfTag HorTransfTag} {
set x8 [expr $x1 + 7]; # top element on left side of panel zone
set A_PZ 1.0e12; # area of panel zone element (make much larger than A of frame elements)
- set Ipz 1.0e12; # moment of intertia of panel zone element (make much larger than I of frame elements)
+ set Ipz 1.0e12; # moment of inertia of panel zone element (make much larger than I of frame elements)
# create panel zone elements
# tag ndI ndJ A_PZ E I_PZ transfTag
@@ -382,4 +382,4 @@ element zeroLength $eleID $nodeR $nodeC -mat $stiffMatID $stiffMatID $eleID -dir
# equalDOF $nodeR $nodeC 1 2
}
-puts "All Functions and Procedures Have Been Sourced"
\ No newline at end of file
+puts "All Functions and Procedures Have Been Sourced"
diff --git a/modules/createSAM/AutoSDA/BaselineTclFiles/NonlinearAnalysis/EigenValueAnalysis/PerformGravityAnalysis.tcl b/modules/createSAM/AutoSDA/BaselineTclFiles/NonlinearAnalysis/EigenValueAnalysis/PerformGravityAnalysis.tcl
index 8d3c8be31..48d43beed 100644
--- a/modules/createSAM/AutoSDA/BaselineTclFiles/NonlinearAnalysis/EigenValueAnalysis/PerformGravityAnalysis.tcl
+++ b/modules/createSAM/AutoSDA/BaselineTclFiles/NonlinearAnalysis/EigenValueAnalysis/PerformGravityAnalysis.tcl
@@ -10,7 +10,7 @@
##############################################################################################################################
# Gravity-analysis parameters -- load-controlled static analysis
-set Tol 1.0e-8; # Covergence tolerance for test
+set Tol 1.0e-8; # Convergence tolerance for test
variable constraintsTypeGravity Plain; # Default
if {[info exists RigidDiaphragm] == 1} {
if {$RigidDiaphragm=="ON"} {
@@ -37,4 +37,4 @@ analyze $NstepGravity; # Apply gravity
# ------------------------------------------------- maintain constant gravity loads and reset time to zero
loadConst -time 0.0
set Tol 1.0e-6; # reduce tolerance after gravity loads
-puts "Gravity Performed Successfully"
\ No newline at end of file
+puts "Gravity Performed Successfully"
diff --git a/modules/createSAM/AutoSDA/BaselineTclFiles/NonlinearAnalysis/PushoverAnalysis/DefineFunctionsAndProcedures.tcl b/modules/createSAM/AutoSDA/BaselineTclFiles/NonlinearAnalysis/PushoverAnalysis/DefineFunctionsAndProcedures.tcl
index 157503c38..b8b10c027 100644
--- a/modules/createSAM/AutoSDA/BaselineTclFiles/NonlinearAnalysis/PushoverAnalysis/DefineFunctionsAndProcedures.tcl
+++ b/modules/createSAM/AutoSDA/BaselineTclFiles/NonlinearAnalysis/PushoverAnalysis/DefineFunctionsAndProcedures.tcl
@@ -69,7 +69,7 @@ proc NodesAroundPanelZone {ColPier Level XCoordinate YCoordinate PanelSize Maxim
# Input argument explanation:
# Level: the floor level for frame, ground floor is 1.
# ColPier: the column ID, starting from 1 to the number of columns in each frame
-# XCoordinate: X coodinate of the column centerline
+# XCoordinate: X coordinate of the column centerline
# YCoordinate: Y coordinate of the beam centerline
# PanelSize: a list with two elements: {a b}:
# a: the depth of column
@@ -190,7 +190,7 @@ proc elemPanelZone2D {eleID nodeR E VerTransfTag HorTransfTag} {
set x8 [expr $x1 + 7]; # top element on left side of panel zone
set A_PZ 1.0e12; # area of panel zone element (make much larger than A of frame elements)
- set Ipz 1.0e12; # moment of intertia of panel zone element (make much larger than I of frame elements)
+ set Ipz 1.0e12; # moment of inertia of panel zone element (make much larger than I of frame elements)
# create panel zone elements
# tag ndI ndJ A_PZ E I_PZ transfTag
@@ -382,4 +382,4 @@ element zeroLength $eleID $nodeR $nodeC -mat $stiffMatID $stiffMatID $eleID -dir
# equalDOF $nodeR $nodeC 1 2
}
-puts "All Functions and Procedures Have Been Sourced"
\ No newline at end of file
+puts "All Functions and Procedures Have Been Sourced"
diff --git a/modules/createSAM/AutoSDA/BaselineTclFiles/NonlinearAnalysis/PushoverAnalysis/PerformGravityAnalysis.tcl b/modules/createSAM/AutoSDA/BaselineTclFiles/NonlinearAnalysis/PushoverAnalysis/PerformGravityAnalysis.tcl
index 8d3c8be31..48d43beed 100644
--- a/modules/createSAM/AutoSDA/BaselineTclFiles/NonlinearAnalysis/PushoverAnalysis/PerformGravityAnalysis.tcl
+++ b/modules/createSAM/AutoSDA/BaselineTclFiles/NonlinearAnalysis/PushoverAnalysis/PerformGravityAnalysis.tcl
@@ -10,7 +10,7 @@
##############################################################################################################################
# Gravity-analysis parameters -- load-controlled static analysis
-set Tol 1.0e-8; # Covergence tolerance for test
+set Tol 1.0e-8; # Convergence tolerance for test
variable constraintsTypeGravity Plain; # Default
if {[info exists RigidDiaphragm] == 1} {
if {$RigidDiaphragm=="ON"} {
@@ -37,4 +37,4 @@ analyze $NstepGravity; # Apply gravity
# ------------------------------------------------- maintain constant gravity loads and reset time to zero
loadConst -time 0.0
set Tol 1.0e-6; # reduce tolerance after gravity loads
-puts "Gravity Performed Successfully"
\ No newline at end of file
+puts "Gravity Performed Successfully"
diff --git a/modules/createSAM/AutoSDA/beam_component.py b/modules/createSAM/AutoSDA/beam_component.py
index 3919aa60f..041bb51cf 100644
--- a/modules/createSAM/AutoSDA/beam_component.py
+++ b/modules/createSAM/AutoSDA/beam_component.py
@@ -1,9 +1,8 @@
-# This file is used to define the class of beam, which includes the axial, shear, and flexural strengths of column
+# This file is used to define the class of beam, which includes the axial, shear, and flexural strengths of column # noqa: CPY001, D100, INP001
# Developed by GUAN, XINGQUAN @ UCLA in Apr. 2018
# Updated in Oct. 2018
import numpy as np
-
from global_variables import SECTION_DATABASE
from help_functions import search_section_property
@@ -11,32 +10,45 @@
# Define a class of beam #
# #########################################################################
-class Beam(object):
- """
- This class is used to define a beam member, which has the following attributes:
+
+class Beam:
+ """This class is used to define a beam member, which has the following attributes:
(1) Beam section, a dictionary including size and associated properties.
(2) Beam demand, a dictionary including shear and flexural demands.
(3) Beam strength, a dictionary including shear and flexural strengths.
(4) Beam flag, a boolean variable with True or False. If it is True, the beam is feasible.
- """
+ """ # noqa: D205, D404
- def __init__(self, section_size, length, shear_demand, moment_demand_left, moment_demand_right, steel):
- """
- This function initializes the attributes of the beam class.
+ def __init__(
+ self,
+ section_size,
+ length,
+ shear_demand,
+ moment_demand_left,
+ moment_demand_right,
+ steel,
+ ):
+ """This function initializes the attributes of the beam class.
:param section_size: a string specifying the section size for the beam.
:param length: a float number denoting the beam length.
:param shear_demand: a float number denoting the shear demand.
:param moment_demand_left: a float number denoting the moment demand at right end.
:param moment_demand_right: a float number denoting the moment demand at left end.
- """
+ """ # noqa: D205, D401, D404
# Assign the necessary information for column class
self.section = search_section_property(section_size, SECTION_DATABASE)
- self.demand = {'shear': shear_demand, 'moment left': moment_demand_left, 'moment right': moment_demand_right}
+ self.demand = {
+ 'shear': shear_demand,
+ 'moment left': moment_demand_left,
+ 'moment right': moment_demand_right,
+ }
self.length = length
# Initialize the following variables
self.RBS_dimension = {} # a dictionary used to store the dimensions for reduced beam section
- self.spacing = None # a scalar indicating the spacing between two lateral supports
+ self.spacing = (
+ None # a scalar indicating the spacing between two lateral supports
+ )
self.strength = {} # a dictionary used to store the strength of beam component
self.demand_capacity_ratio = {} # a dictionary to store the demand to capacity ratio
self.is_feasible = {} # a dictionary used to store the failure mode of beam (if any)
@@ -57,29 +69,33 @@ def __init__(self, section_size, length, shear_demand, moment_demand_left, momen
self.calculate_hinge_parameters(steel)
def initialize_reduced_beam_section(self):
- """
- This method is used to initialize RBS dimensions.
+ """This method is used to initialize RBS dimensions.
:return: a dictionary including a, b, and c values describing RBS dimensions.
- """
+ """ # noqa: D205, D401, D404
# Use the lower bound as the initial value for a and b
self.RBS_dimension['a'] = 0.5 * self.section['bf']
self.RBS_dimension['b'] = 0.65 * self.section['d']
- #self.RBS_dimension['c'] = 0.1 * self.section['bf']
+ # self.RBS_dimension['c'] = 0.1 * self.section['bf']
self.RBS_dimension['c'] = 0.25 * self.section['bf']
def check_flange(self, steel):
- """
- This method is used to check whether the flange is satisfied with highly ductile requirement.
+ """This method is used to check whether the flange is satisfied with highly ductile requirement.
: steel: a class defined in "steel_material.py" file
: return: a flag (integer) which denotes the flange check result.
- """
+ """ # noqa: D205, D401, D404
# Calculate equivalent flange width at reduced beam section
- R = (4*self.RBS_dimension['c']**2 + self.RBS_dimension['b']**2)/(8*self.RBS_dimension['c'])
- bf_RBS = 2*(R-self.RBS_dimension['c']) + self.section['bf'] - 2*np.sqrt(R**2-(self.RBS_dimension['b']/3)**2)
+ R = (4 * self.RBS_dimension['c'] ** 2 + self.RBS_dimension['b'] ** 2) / ( # noqa: N806
+ 8 * self.RBS_dimension['c']
+ )
+ bf_RBS = ( # noqa: N806
+ 2 * (R - self.RBS_dimension['c'])
+ + self.section['bf']
+ - 2 * np.sqrt(R**2 - (self.RBS_dimension['b'] / 3) ** 2)
+ )
# Compute flange width-to-thickness ratio
- lambda_f = bf_RBS / (2*self.section['tf'])
+ lambda_f = bf_RBS / (2 * self.section['tf'])
# Calculate limit for flange width-to-thickness ratio
- flange_limit = 0.30 * np.sqrt(steel.E/steel.Fy)
+ flange_limit = 0.30 * np.sqrt(steel.E / steel.Fy)
# Check whether the flange satisfies the limit
if lambda_f <= flange_limit:
self.is_feasible['flange limit'] = True
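
A rough standalone version of the flange-compactness check above, using made-up RBS and section numbers rather than a real catalogue shape:

import numpy as np

b, c = 15.0, 2.0                    # illustrative RBS cut dimensions (in.)
bf, tf = 10.0, 0.75                 # flange width and thickness (in.)
E, Fy = 29000.0, 50.0               # steel modulus and yield stress (ksi)

R = (4 * c**2 + b**2) / (8 * c)                               # radius of the RBS cut
bf_RBS = 2 * (R - c) + bf - 2 * np.sqrt(R**2 - (b / 3) ** 2)  # equivalent flange width
lambda_f = bf_RBS / (2 * tf)                                  # width-to-thickness ratio
flange_limit = 0.30 * np.sqrt(E / Fy)                         # highly ductile limit
print(lambda_f <= flange_limit)                               # True -> flange passes
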
@@ -87,13 +103,12 @@ def check_flange(self, steel):
self.is_feasible['flange limit'] = False
def check_web(self, steel):
- """
- This method is used to check whether the web is satisfied with highly ductile requirement.
+ """This method is used to check whether the web is satisfied with highly ductile requirement.
:param steel: a class defined in "steel_material.py" file.
:return: a flag (integer) which denotes the web check result.
- """
+ """ # noqa: D205, D401, D404
# Compute limit for web depth-to-width ratio
- web_limit = 2.45 * np.sqrt(steel.E/steel.Fy)
+ web_limit = 2.45 * np.sqrt(steel.E / steel.Fy)
# Check whether web is satisfied with the requirement or not
if self.section['h to tw ratio'] <= web_limit:
self.is_feasible['web limit'] = True
@@ -101,36 +116,34 @@ def check_web(self, steel):
self.is_feasible['web limit'] = False
def determine_spacing_between_lateral_support(self, steel):
- """
- This method is used to compute the spacing between two lateral supports.
+ """This method is used to compute the spacing between two lateral supports.
:param steel: a class defined in "steel_material.py" file.
:return: a float number indicating the spacing.
- """
+ """ # noqa: D205, D401, D404
# Compute limit for spacing (Remember to convert from inches to feet)
- spacing_limit = 0.086 * self.section['ry'] * steel.E / steel.Fy * 1/12.0
+ spacing_limit = 0.086 * self.section['ry'] * steel.E / steel.Fy * 1 / 12.0
# Start with the number of lateral support equal to 1
# Check whether the current lateral support is enough
# If it is not sufficient, increase the number of lateral support until the requirement is satisfied
number_lateral_support = 1
- while self.length/(number_lateral_support+1) > spacing_limit:
+ while self.length / (number_lateral_support + 1) > spacing_limit:
number_lateral_support += 1
# Check whether the spacing is less than Lp
# If the spacing greater than Lp, then reduce the spacing such that the flexural strength is governed by
# plastic yielding.
- Lp = 1.76 * self.section['ry'] * np.sqrt(steel.E/steel.Fy)
- while (self.length/number_lateral_support+1) > Lp:
+ Lp = 1.76 * self.section['ry'] * np.sqrt(steel.E / steel.Fy) # noqa: N806
+ while (self.length / number_lateral_support + 1) > Lp:
number_lateral_support += 1
- self.spacing = self.length/(number_lateral_support+1)
+ self.spacing = self.length / (number_lateral_support + 1)
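
The loop above simply adds lateral braces until the unbraced length drops below the highly-ductile spacing limit; a compact sketch of that first check (member values are illustrative, and the additional Lp refinement is left out):

import numpy as np

length = 30.0                        # beam length (ft), illustrative
ry, E, Fy = 2.0, 29000.0, 50.0       # radius of gyration (in.) and steel props (ksi)

spacing_limit = 0.086 * ry * E / Fy / 12.0       # allowable brace spacing (ft)
n_supports = 1
while length / (n_supports + 1) > spacing_limit:
    n_supports += 1
print(n_supports, length / (n_supports + 1))     # 3 braces -> 7.5 ft spacing
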
def check_shear_strength(self, steel):
- """
- This method is used to check whether the shear strength of column is sufficient or not
+ """This method is used to check whether the shear strength of column is sufficient or not
:param steel: a class defined in "steel_material.py" file
:return: a float number denoting the shear strength and a flag denoting whether shear strength is sufficient
- """
+ """ # noqa: D205, D400, D401, D404
# Compute shear strength of beam
- Cv = 1.0
- Vn = 0.6 * steel.Fy * (self.section['tw'] * self.section['d']) * Cv
+ Cv = 1.0 # noqa: N806
+ Vn = 0.6 * steel.Fy * (self.section['tw'] * self.section['d']) * Cv # noqa: N806
phi = 1.0
self.strength['shear'] = phi * Vn
# Check whether shear strength is sufficient
@@ -140,51 +153,54 @@ def check_shear_strength(self, steel):
self.is_feasible['shear strength'] = False
def check_flexural_strength(self, steel):
- """
- This method is used to check whether the beam has enough flexural strength.
+ """This method is used to check whether the beam has enough flexural strength.
:return: a float number denoting flexural strength and a flag denoting whether the flexural strength is enough
- """
+ """ # noqa: D205, D400, D401, D404
# Compute plastic modulus at center of RBS
- Z_RBS = self.section['Zx'] - 2 * self.RBS_dimension['c'] * self.section['tf'] \
- * (self.section['d'] - self.section['tf'])
+ Z_RBS = self.section['Zx'] - 2 * self.RBS_dimension['c'] * self.section[ # noqa: N806
+ 'tf'
+ ] * (self.section['d'] - self.section['tf'])
# Calculate the moment capacity governed by plastic yielding at RBS
- Mn_RBS = steel.Fy * Z_RBS
+ Mn_RBS = steel.Fy * Z_RBS # noqa: N806
phi = 0.9
self.strength['flexural RBS'] = phi * Mn_RBS
# Check whether the flexural strength is sufficient
- M_max = np.max([abs(self.demand['moment right']), abs(self.demand['moment left'])])
+ M_max = np.max( # noqa: N806
+ [abs(self.demand['moment right']), abs(self.demand['moment left'])]
+ )
if self.strength['flexural RBS'] >= M_max:
self.is_feasible['flexural strength'] = True
else:
self.is_feasible['flexural strength'] = False
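
A similarly rough sketch of the RBS flexural check above, with invented section numbers:

Fy = 50.0                            # ksi, illustrative
Zx, d, tf, c = 100.0, 18.0, 0.75, 2.0
Z_RBS = Zx - 2 * c * tf * (d - tf)   # plastic modulus at the reduced section (in^3)
phi_Mn_RBS = 0.9 * Fy * Z_RBS        # design flexural strength (kip-in)
M_max = 2000.0                       # governing moment demand (kip-in)
print(phi_Mn_RBS >= M_max)           # True -> flexural strength is adequate
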
def check_flag(self):
- """
- This method is used to test whether beam passes all checks.
+ """This method is used to test whether beam passes all checks.
:return: a bool variable. True ==> passed
- """
+ """ # noqa: D205, D400, D401, D404
self.flag = True
- for key in self.is_feasible.keys():
- if self.is_feasible[key] == False:
+ for key in self.is_feasible.keys(): # noqa: SIM118
+ if self.is_feasible[key] == False: # noqa: E712
self.flag = False
return self.flag
def compute_demand_capacity_ratio(self):
- """
- This method is used to compute demand to capacity ratios.
+ """This method is used to compute demand to capacity ratios.
:return: a dictionary which includes the ratios for shear force and flexural moment.
- """
- self.demand_capacity_ratio['shear'] = self.demand['shear'] / self.strength['shear']
- self.demand_capacity_ratio['flexural'] = max(abs(self.demand['moment left']),
- abs(self.demand['moment right'])) / self.strength['flexural RBS']
+ """ # noqa: D205, D401, D404
+ self.demand_capacity_ratio['shear'] = (
+ self.demand['shear'] / self.strength['shear']
+ )
+ self.demand_capacity_ratio['flexural'] = (
+ max(abs(self.demand['moment left']), abs(self.demand['moment right']))
+ / self.strength['flexural RBS']
+ )
def calculate_hinge_parameters(self, steel):
- """
- This method is used to compute the modeling parameters for plastic hinge using modified IMK material model.
+ """This method is used to compute the modeling parameters for plastic hinge using modified IMK material model.
:return: a dictionary including each parameters required for nonlinear modeling in OpenSees.
- """
+ """ # noqa: D205, D401, D404
# Following content is based on the following reference:
- # [1] Hysteretic models tha incorporate strength and stiffness deterioration
+ # [1] Hysteretic models that incorporate strength and stiffness deterioration
# [2] Deterioration modeling of steel components in support of collapse prediction of steel moment frames under
# earthquake loading
# [3] Global collapse of frame structures under seismic excitations
@@ -203,36 +219,56 @@ def calculate_hinge_parameters(self, steel):
# beam spacing and length is in feet, remember to convert it to inches
c1 = 25.4 # c1_unit
c2 = 6.895 # c2_unit
- McMy = 1.10 # Capping moment to yielding moment ratio. Lignos et al. used 1.05 whereas Prof. Burton used 1.11.
- h = self.section['d'] - 2*self.section['tf'] # Web depth
- self.plastic_hinge['K0'] = 6 * steel.E * self.section['Ix'] / (self.length*12.0)
+ McMy = 1.10 # Capping moment to yielding moment ratio. Lignos et al. used 1.05 whereas Prof. Burton used 1.11. # noqa: N806
+ h = self.section['d'] - 2 * self.section['tf'] # Web depth
+ self.plastic_hinge['K0'] = (
+ 6 * steel.E * self.section['Ix'] / (self.length * 12.0)
+ )
self.plastic_hinge['Myp'] = self.section['Zx'] * steel.Fy
self.plastic_hinge['My'] = 1.00 * self.plastic_hinge['Myp']
- self.plastic_hinge['Lambda'] = 585 * (h/self.section['tw'])**(-1.14) \
- * (self.section['bf']/(2*self.section['tf']))**(-0.632) \
- * (self.spacing*12.0/self.section['ry'])**(-0.205) \
- * (c2*steel.Fy/355)**(-0.391)
+ self.plastic_hinge['Lambda'] = (
+ 585
+ * (h / self.section['tw']) ** (-1.14)
+ * (self.section['bf'] / (2 * self.section['tf'])) ** (-0.632)
+ * (self.spacing * 12.0 / self.section['ry']) ** (-0.205)
+ * (c2 * steel.Fy / 355) ** (-0.391)
+ )
# Pre-capping rotation
- self.plastic_hinge['theta_p'] = 0.19 * (h/self.section['tw'])**(-0.314) \
- * (self.section['bf']/(2*self.section['tf']))**(-0.100) \
- * (self.spacing*12.0/self.section['ry'])**(-0.185) \
- * (self.length*12.0/self.section['d'])**0.113 \
- * (c1*self.section['d']/533)**(-0.760) \
- * (c2*steel.Fy/355)**(-0.070)
+ self.plastic_hinge['theta_p'] = (
+ 0.19
+ * (h / self.section['tw']) ** (-0.314)
+ * (self.section['bf'] / (2 * self.section['tf'])) ** (-0.100)
+ * (self.spacing * 12.0 / self.section['ry']) ** (-0.185)
+ * (self.length * 12.0 / self.section['d']) ** 0.113
+ * (c1 * self.section['d'] / 533) ** (-0.760)
+ * (c2 * steel.Fy / 355) ** (-0.070)
+ )
# Pre-capping rotation is further revised to exclude the elastic deformation
- self.plastic_hinge['theta_p'] = self.plastic_hinge['theta_p'] \
- - (McMy - 1.0) * self.plastic_hinge['My'] / self.plastic_hinge['K0']
+ self.plastic_hinge['theta_p'] = ( # noqa: PLR6104
+ self.plastic_hinge['theta_p']
+ - (McMy - 1.0) * self.plastic_hinge['My'] / self.plastic_hinge['K0']
+ )
# Post-capping rotation
- self.plastic_hinge['theta_pc'] = 9.52 * (h/self.section['tw'])**(-0.513) \
- * (self.section['bf']/(2*self.section['tf']))**(-0.863) \
- * (self.spacing*12.0/self.section['ry'])**(-0.108) \
- * (c2*steel.Fy/355)**(-0.360)
+ self.plastic_hinge['theta_pc'] = (
+ 9.52
+ * (h / self.section['tw']) ** (-0.513)
+ * (self.section['bf'] / (2 * self.section['tf'])) ** (-0.863)
+ * (self.spacing * 12.0 / self.section['ry']) ** (-0.108)
+ * (c2 * steel.Fy / 355) ** (-0.360)
+ )
# Post-capping rotation is further revised to account for elastic deformation
- self.plastic_hinge['theta_y'] = self.plastic_hinge['My'] / self.plastic_hinge['K0']
- self.plastic_hinge['theta_pc'] = self.plastic_hinge['theta_pc'] \
- + self.plastic_hinge['theta_y'] \
- + (McMy - 1.0) * self.plastic_hinge['My'] / self.plastic_hinge['K0']
- self.plastic_hinge['as'] = (McMy-1.0)*self.plastic_hinge['My']\
- /(self.plastic_hinge['theta_p']*self.plastic_hinge['K0'])
+ self.plastic_hinge['theta_y'] = (
+ self.plastic_hinge['My'] / self.plastic_hinge['K0']
+ )
+ self.plastic_hinge['theta_pc'] = (
+ self.plastic_hinge['theta_pc']
+ + self.plastic_hinge['theta_y']
+ + (McMy - 1.0) * self.plastic_hinge['My'] / self.plastic_hinge['K0']
+ )
+ self.plastic_hinge['as'] = (
+ (McMy - 1.0)
+ * self.plastic_hinge['My']
+ / (self.plastic_hinge['theta_p'] * self.plastic_hinge['K0'])
+ )
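+        # 'as' is the strain-hardening ratio of the hinge spring, i.e. the post-yield
+        # stiffness (Mc - My) / theta_p divided by the elastic stiffness K0.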
self.plastic_hinge['residual'] = 0.40
- self.plastic_hinge['theta_u'] = 0.20
\ No newline at end of file
+ self.plastic_hinge['theta_u'] = 0.20
diff --git a/modules/createSAM/AutoSDA/building_information.py b/modules/createSAM/AutoSDA/building_information.py
index c13de2193..ea0a21e72 100644
--- a/modules/createSAM/AutoSDA/building_information.py
+++ b/modules/createSAM/AutoSDA/building_information.py
@@ -1,4 +1,4 @@
-# This file is used to define the class of Building
+# This file is used to define the class of Building # noqa: CPY001, D100, INP001
# Developed by GUAN, XINGQUAN @ UCLA in June 2018
# Updated in Sept. 2018
@@ -6,40 +6,41 @@
# Last revision: 09/2020
import copy
-import numpy as np
import os
-import shutil
-import pandas as pd
from pathlib import Path
-from help_functions import determine_Fa_coefficient
-from help_functions import determine_Fv_coefficient
-from help_functions import calculate_DBE_acceleration
-from help_functions import determine_Cu_coefficient
-from help_functions import determine_floor_height
-from help_functions import calculate_Cs_coefficient
-from help_functions import determine_k_coeficient
-from help_functions import calculate_seismic_force
-from help_functions import find_section_candidate
-from help_functions import search_member_size
-from help_functions import search_section_property
-from help_functions import decrease_member_size
-from help_functions import increase_member_size
-from help_functions import constructability_helper
-from global_variables import SECTION_DATABASE
-from global_variables import COLUMN_DATABASE
-from global_variables import BEAM_DATABASE
-from global_variables import PERIOD_FOR_DRIFT_LIMIT
-from global_variables import RV_ARRAY
+import numpy as np
+import pandas as pd
# #########################################################################
# Global Constants Subjected to Revision #
# #########################################################################
-
-from global_variables import EXTERIOR_INTERIOR_COLUMN_RATIO
-from global_variables import BEAM_TO_COLUMN_RATIO
-from global_variables import IDENTICAL_SIZE_PER_STORY
-
+from global_variables import (
+ BEAM_DATABASE,
+ BEAM_TO_COLUMN_RATIO,
+ COLUMN_DATABASE,
+ EXTERIOR_INTERIOR_COLUMN_RATIO,
+ IDENTICAL_SIZE_PER_STORY,
+ PERIOD_FOR_DRIFT_LIMIT,
+ RV_ARRAY,
+ SECTION_DATABASE,
+)
+from help_functions import (
+ calculate_Cs_coefficient,
+ calculate_DBE_acceleration,
+ calculate_seismic_force,
+ constructability_helper,
+ decrease_member_size,
+ determine_Cu_coefficient,
+ determine_Fa_coefficient,
+ determine_floor_height,
+ determine_Fv_coefficient,
+ determine_k_coeficient,
+ find_section_candidate,
+ increase_member_size,
+ search_member_size,
+ search_section_property,
+)
# #########################################################################
# Open the section database and store it as a global variable #
@@ -56,9 +57,8 @@
# #########################################################################
-class Building(object):
- """
- This class aims to read all the relevant building information from .csv files
+class Building:
+ """This class aims to read all the relevant building information from .csv files
It includes the following methods:
(1) Define paths to different folders which will be used later
(2) Read geometry information
@@ -67,14 +67,13 @@ class Building(object):
(5) Compute lateral force for the building based on ASCE 7-10
(6) Determine possible section sizes for columns and beams based on user-specified section depth
(7) Propose initial beam and column sizes
- """
+ """ # noqa: D205, D400, D404
- def __init__(self, base_directory, pathDataFolder, workingDirectory):
- """
- This function initializes the attributes of a building instance
+ def __init__(self, base_directory, pathDataFolder, workingDirectory): # noqa: N803
+ """This function initializes the attributes of a building instance
:param building_id: a string that used as a UID to label the building
:param base_directory: a string that denotes the path to root folder
- """
+ """ # noqa: D205, D400, D401, D404
# Assign basic information: unique ID for the building and base path
self.base_directory = base_directory
self.dataFolderDirectory = pathDataFolder
@@ -103,87 +102,105 @@ def __init__(self, base_directory, pathDataFolder, workingDirectory):
self.initialize_member()
# self.initialize_member_v2()
- def define_directory(self):
+ def define_directory(self): # noqa: D102
# Define all useful paths based on the path to root folder
# Define path to folder where the baseline .tcl files for elastic analysis are saved
- baseline_elastic_directory = self.base_directory + "/BaselineTclFiles/ElasticAnalysis"
+ baseline_elastic_directory = (
+ self.base_directory + '/BaselineTclFiles/ElasticAnalysis'
+ )
# Define path to folder where the baseline .tcl files for nonlinear analysis are stored
- baseline_nonlinear_directory = self.base_directory + "/BaselineTclFiles/NonlinearAnalysis"
+ baseline_nonlinear_directory = (
+ self.base_directory + '/BaselineTclFiles/NonlinearAnalysis'
+ )
# Define path to folder where the building data (.csv) are saved
building_data_directory = self.dataFolderDirectory
# Define path to folder where the design results are saved
- building_design_results_directory = self.workingDirectory + "/BuildingDesignResults"
+ building_design_results_directory = (
+ self.workingDirectory + '/BuildingDesignResults'
+ )
# Define path to folder where the generated elastic analysis OpenSees model is saved
- building_elastic_model_directory = self.workingDirectory + "/BuildingElasticModels"
+ building_elastic_model_directory = (
+ self.workingDirectory + '/BuildingElasticModels'
+ )
# Define path to folder where the generated nonlinear analysis OpenSees model is saved
- building_nonlinear_model_directory = self.workingDirectory + "/BuildingNonlinearModels"
+ building_nonlinear_model_directory = (
+ self.workingDirectory + '/BuildingNonlinearModels'
+ )
# Store all necessary directories into a dictionary
- self.directory = {'baseline files elastic': baseline_elastic_directory,
- 'baseline files nonlinear': baseline_nonlinear_directory,
- 'building data': building_data_directory,
- 'building elastic model': building_elastic_model_directory,
- 'building design results': building_design_results_directory,
- 'building nonlinear model': building_nonlinear_model_directory}
+ self.directory = {
+ 'baseline files elastic': baseline_elastic_directory,
+ 'baseline files nonlinear': baseline_nonlinear_directory,
+ 'building data': building_data_directory,
+ 'building elastic model': building_elastic_model_directory,
+ 'building design results': building_design_results_directory,
+ 'building nonlinear model': building_nonlinear_model_directory,
+ }
def read_geometry(self):
- """
- This method is used to read the building geometry information from .csv files:
+ """This method is used to read the building geometry information from .csv files:
(1) Change the working directory to the folder where .csv data are stored
(2) Open the .csv file and save all relevant information to the object itself
- """
+ """ # noqa: D205, D400, D401, D404
os.chdir(self.directory['building data'])
- with open('Geometry.csv', 'r') as csvfile:
+ with open('Geometry.csv') as csvfile: # noqa: PLW1514, PTH123
geometry_data = pd.read_csv(csvfile, header=0)
# Each variable is a scalar
number_of_story = geometry_data.loc[0, 'number of story']
- number_of_X_bay = geometry_data.loc[0, 'number of X bay']
- number_of_Z_bay = geometry_data.loc[0, 'number of Z bay']
+ number_of_X_bay = geometry_data.loc[0, 'number of X bay'] # noqa: N806
+ number_of_Z_bay = geometry_data.loc[0, 'number of Z bay'] # noqa: N806
first_story_height = geometry_data.loc[0, 'first story height']
typical_story_height = geometry_data.loc[0, 'typical story height']
- X_bay_width = geometry_data.loc[0, 'X bay width']
- Z_bay_width = geometry_data.loc[0, 'Z bay width']
- number_of_X_LFRS = geometry_data.loc[0, 'number of X LFRS'] # number of lateral resisting frame in X direction
- number_of_Z_LFRS = geometry_data.loc[0, 'number of Z LFRS'] # number of lateral resisting frame in Z direction
+ X_bay_width = geometry_data.loc[0, 'X bay width'] # noqa: N806
+ Z_bay_width = geometry_data.loc[0, 'Z bay width'] # noqa: N806
+ number_of_X_LFRS = geometry_data.loc[ # noqa: N806
+ 0, 'number of X LFRS'
+ ] # number of lateral resisting frame in X direction
+ number_of_Z_LFRS = geometry_data.loc[ # noqa: N806
+ 0, 'number of Z LFRS'
+ ] # number of lateral resisting frame in Z direction
# Call function defined in "help_functions.py" to determine the height for each floor level
- floor_height = determine_floor_height(number_of_story, first_story_height, typical_story_height)
+ floor_height = determine_floor_height(
+ number_of_story, first_story_height, typical_story_height
+ )
# Store all necessary information into a dictionary named geometry
- self.geometry = {'number of story': number_of_story,
- 'number of X bay': number_of_X_bay,
- 'number of Z bay': number_of_Z_bay,
- 'first story height': first_story_height,
- 'typical story height': typical_story_height,
- 'X bay width': X_bay_width,
- 'Z bay width': Z_bay_width,
- 'number of X LFRS': number_of_X_LFRS,
- 'number of Z LFRS': number_of_Z_LFRS,
- 'floor height': floor_height}
+ self.geometry = {
+ 'number of story': number_of_story,
+ 'number of X bay': number_of_X_bay,
+ 'number of Z bay': number_of_Z_bay,
+ 'first story height': first_story_height,
+ 'typical story height': typical_story_height,
+ 'X bay width': X_bay_width,
+ 'Z bay width': Z_bay_width,
+ 'number of X LFRS': number_of_X_LFRS,
+ 'number of Z LFRS': number_of_Z_LFRS,
+ 'floor height': floor_height,
+ }
def read_gravity_loads(self):
- """
- This method is used to read the load information from .csv files
+ """This method is used to read the load information from .csv files
(1) Change the directory to the folder where the load data are stored
(2) Read the .csv files and assign save load values to object values
- """
+ """ # noqa: D205, D400, D401, D404
os.chdir(self.directory['building data'])
- with open('Loads.csv', 'r') as csvfile:
+ with open('Loads.csv') as csvfile: # noqa: PLW1514, PTH123
loads_data = pd.read_csv(csvfile, header=0)
- #for i in loads_data._iter_column_arrays():
- #KZ - a minor replacement avoiding pandas.DataFrame bug when running on stampede
+ # for i in loads_data._iter_column_arrays():
+ # KZ - a minor replacement avoiding pandas.DataFrame bug when running on stampede
for ii in loads_data.columns:
- i = loads_data.loc[:,ii]
+ i = loads_data.loc[:, ii]
for j in range(len(i)):
val = i[j]
try:
float(val)
- except:
+ except: # noqa: E722
rv = RV_ARRAY.get(val, 0)
if rv != 0:
i[j] = rv
else:
- print("Error getting an RV with the key",val)
+ print('Error getting an RV with the key', val) # noqa: T201
return
# All data is a list (array). Length is the number of story
@@ -193,107 +210,168 @@ def read_gravity_loads(self):
floor_live_load = loads_data['floor live load'].astype(float)
beam_dead_load = loads_data['beam dead load'].astype(float)
beam_live_load = loads_data['beam live load'].astype(float)
- leaning_column_dead_load = loads_data['leaning column dead load'].astype(float)
- leaning_column_live_load = loads_data['leaning column live load'].astype(float)
+ leaning_column_dead_load = loads_data['leaning column dead load'].astype(
+ float
+ )
+ leaning_column_live_load = loads_data['leaning column live load'].astype(
+ float
+ )
- print(floor_weight)
+ print(floor_weight) # noqa: T201
# Store all necessary information into a dictionary named gravity_loads
- self.gravity_loads = {'floor weight': floor_weight,
- 'floor dead load': floor_dead_load,
- 'floor live load': floor_live_load,
- 'beam dead load': beam_dead_load,
- 'beam live load': beam_live_load,
- 'leaning column dead load': leaning_column_dead_load,
- 'leaning column live load': leaning_column_live_load}
+ self.gravity_loads = {
+ 'floor weight': floor_weight,
+ 'floor dead load': floor_dead_load,
+ 'floor live load': floor_live_load,
+ 'beam dead load': beam_dead_load,
+ 'beam live load': beam_live_load,
+ 'leaning column dead load': leaning_column_dead_load,
+ 'leaning column live load': leaning_column_live_load,
+ }
def read_elf_parameters(self):
- """
- This method is used to read equivalent lateral force (in short: elf) parameters and calculate SDS and SD1
+ """This method is used to read equivalent lateral force (in short: elf) parameters and calculate SDS and SD1
(1) Read equivalent lateral force parameters
(2) Calculate SMS, SM1, SDS, SD1 values and save them into the attribute
- """
+ """ # noqa: D205, D400, D401, D404
os.chdir(self.directory['building data'])
- with open('ELFParameters.csv', 'r') as csvfile:
+ with open('ELFParameters.csv') as csvfile: # noqa: PLW1514, PTH123
elf_parameters_data = pd.read_csv(csvfile, header=0)
# Determine Fa and Fv coefficient based on site class and Ss and S1 (ASCE 7-10 Table 11.4-1 & 11.4-2)
# Call function defined in "help_functions.py" to calculate two coefficients: Fa and Fv
- Fa = determine_Fa_coefficient(elf_parameters_data.loc[0, 'site class'], elf_parameters_data.loc[0, 'Ss'])
- Fv = determine_Fv_coefficient(elf_parameters_data.loc[0, 'site class'], elf_parameters_data.loc[0, 'S1'])
+ Fa = determine_Fa_coefficient( # noqa: N806
+ elf_parameters_data.loc[0, 'site class'],
+ elf_parameters_data.loc[0, 'Ss'],
+ )
+ Fv = determine_Fv_coefficient( # noqa: N806
+ elf_parameters_data.loc[0, 'site class'],
+ elf_parameters_data.loc[0, 'S1'],
+ )
# Determine SMS, SM1, SDS, SD1 using the defined function in "help_functions.py"
- SMS, SM1, SDS, SD1 = calculate_DBE_acceleration(elf_parameters_data.loc[0, 'Ss'],
- elf_parameters_data.loc[0, 'S1'], Fa, Fv)
+ SMS, SM1, SDS, SD1 = calculate_DBE_acceleration( # noqa: N806
+ elf_parameters_data.loc[0, 'Ss'],
+ elf_parameters_data.loc[0, 'S1'],
+ Fa,
+ Fv,
+ )
# Determine Cu using the defined function in "help_functions.py"
- Cu = determine_Cu_coefficient(SD1)
+ Cu = determine_Cu_coefficient(SD1) # noqa: N806
# Calculate the building period: approximate fundamental period and upper bound period
- approximate_period = elf_parameters_data.loc[0, 'Ct'] \
- * (self.geometry['floor height'][-1] ** elf_parameters_data.loc[0, 'x'])
+ approximate_period = elf_parameters_data.loc[0, 'Ct'] * (
+ self.geometry['floor height'][-1] ** elf_parameters_data.loc[0, 'x']
+ )
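+        # This is the ASCE 7-10 approximate fundamental period, Ta = Ct * hn^x
+        # (Eq. 12.8-7); the upper-bound period below is Cu * Ta per Section 12.8.2.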
upper_period = Cu * approximate_period
# Save all coefficient into the dictionary named elf_parameters
- self.elf_parameters = {'Ss': elf_parameters_data.loc[0, 'Ss'], 'S1': elf_parameters_data.loc[0, 'S1'],
- 'TL': elf_parameters_data.loc[0, 'TL'], 'Cd': elf_parameters_data.loc[0, 'Cd'],
- 'R': elf_parameters_data.loc[0, 'R'], 'Ie': elf_parameters_data.loc[0, 'Ie'],
- 'rho': elf_parameters_data.loc[0, 'rho'],
- 'site class': elf_parameters_data.loc[0, 'site class'],
- 'Ct': elf_parameters_data.loc[0, 'Ct'], 'x': elf_parameters_data.loc[0, 'x'],
- 'Fa': Fa, 'Fv': Fv, 'SMS': SMS, 'SM1': SM1, 'SDS': SDS, 'SD1': SD1, 'Cu': Cu,
- 'approximate period': approximate_period, 'period': upper_period}
+ self.elf_parameters = {
+ 'Ss': elf_parameters_data.loc[0, 'Ss'],
+ 'S1': elf_parameters_data.loc[0, 'S1'],
+ 'TL': elf_parameters_data.loc[0, 'TL'],
+ 'Cd': elf_parameters_data.loc[0, 'Cd'],
+ 'R': elf_parameters_data.loc[0, 'R'],
+ 'Ie': elf_parameters_data.loc[0, 'Ie'],
+ 'rho': elf_parameters_data.loc[0, 'rho'],
+ 'site class': elf_parameters_data.loc[0, 'site class'],
+ 'Ct': elf_parameters_data.loc[0, 'Ct'],
+ 'x': elf_parameters_data.loc[0, 'x'],
+ 'Fa': Fa,
+ 'Fv': Fv,
+ 'SMS': SMS,
+ 'SM1': SM1,
+ 'SDS': SDS,
+ 'SD1': SD1,
+ 'Cu': Cu,
+ 'approximate period': approximate_period,
+ 'period': upper_period,
+ }
def compute_seismic_force(self):
- """
- This method is used to calculate the seismic story force using ELF procedure specified in ASCE 7-10 Section 12.8
+ """This method is used to calculate the seismic story force using ELF procedure specified in ASCE 7-10 Section 12.8
(1) Determine the floor level height and save it in a list (array)
(2) Determine the correct period between first mode period and CuTa
(3) Determine the Cs coefficient
- (4) Determine the lateral force at each floor level (ground to roof) and save it in an arrary
- """
+ (4) Determine the lateral force at each floor level (ground to roof) and save it in an array
+ """ # noqa: D205, D400, D401, D404
# Please note that the period for computing the required strength should be bounded by CuTa
- period_for_strength = min(self.elf_parameters['modal period'], self.elf_parameters['period'])
+ period_for_strength = min(
+ self.elf_parameters['modal period'], self.elf_parameters['period']
+ )
# The period used for computing story drift is not required to be bounded by CuTa
if PERIOD_FOR_DRIFT_LIMIT:
- period_for_drift = min(self.elf_parameters['modal period'], self.elf_parameters['period'])
+ period_for_drift = min(
+ self.elf_parameters['modal period'], self.elf_parameters['period']
+ )
else:
period_for_drift = self.elf_parameters['modal period']
# Call function defined in "help_functions.py" to determine the seismic response coefficient
- Cs_for_strength = calculate_Cs_coefficient(self.elf_parameters['SDS'], self.elf_parameters['SD1'],
- self.elf_parameters['S1'], period_for_strength,
- self.elf_parameters['TL'], self.elf_parameters['R'],
- self.elf_parameters['Ie'])
- Cs_for_drift = calculate_Cs_coefficient(self.elf_parameters['SDS'], self.elf_parameters['SD1'],
- self.elf_parameters['S1'], period_for_drift,
- self.elf_parameters['TL'], self.elf_parameters['R'],
- self.elf_parameters['Ie'])
+ Cs_for_strength = calculate_Cs_coefficient( # noqa: N806
+ self.elf_parameters['SDS'],
+ self.elf_parameters['SD1'],
+ self.elf_parameters['S1'],
+ period_for_strength,
+ self.elf_parameters['TL'],
+ self.elf_parameters['R'],
+ self.elf_parameters['Ie'],
+ )
+ Cs_for_drift = calculate_Cs_coefficient( # noqa: N806
+ self.elf_parameters['SDS'],
+ self.elf_parameters['SD1'],
+ self.elf_parameters['S1'],
+ period_for_drift,
+ self.elf_parameters['TL'],
+ self.elf_parameters['R'],
+ self.elf_parameters['Ie'],
+ )
# Calculate the base shear
- base_shear_for_strength = Cs_for_strength * np.sum(self.gravity_loads['floor weight'])
- base_shear_for_drift = Cs_for_drift * np.sum(self.gravity_loads['floor weight'])
+ base_shear_for_strength = Cs_for_strength * np.sum(
+ self.gravity_loads['floor weight']
+ )
+ base_shear_for_drift = Cs_for_drift * np.sum(
+ self.gravity_loads['floor weight']
+ )
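+        # Both base shears follow ASCE 7-10 Eq. 12.8-1, V = Cs * W, with W taken as
+        # the sum of the floor weights (the effective seismic weight).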
# Call function defined in "help_functions.py" to compute k coefficient
k = determine_k_coeficient(self.elf_parameters['period'])
# Call function defined in "help_functions.py" to determine the lateral force for each floor level
- lateral_story_force_for_strength, story_shear_for_strength \
- = calculate_seismic_force(base_shear_for_strength, self.gravity_loads['floor weight'], \
- self.geometry['floor height'], k)
- lateral_story_force_for_drift, story_shear_for_drift \
- = calculate_seismic_force(base_shear_for_drift, self.gravity_loads['floor weight'], \
- self.geometry['floor height'], k)
+ lateral_story_force_for_strength, story_shear_for_strength = (
+ calculate_seismic_force(
+ base_shear_for_strength,
+ self.gravity_loads['floor weight'],
+ self.geometry['floor height'],
+ k,
+ )
+ )
+ lateral_story_force_for_drift, story_shear_for_drift = (
+ calculate_seismic_force(
+ base_shear_for_drift,
+ self.gravity_loads['floor weight'],
+ self.geometry['floor height'],
+ k,
+ )
+ )
# Store information into class attribute
- self.seismic_force_for_strength = {'lateral story force': lateral_story_force_for_strength, \
- 'story shear': story_shear_for_strength, \
- 'base shear': base_shear_for_strength, 'Cs': Cs_for_strength}
- self.seismic_force_for_drift = {'lateral story force': lateral_story_force_for_drift, \
- 'story shear': story_shear_for_drift, \
- 'base shear': base_shear_for_drift, 'Cs': Cs_for_drift}
+ self.seismic_force_for_strength = {
+ 'lateral story force': lateral_story_force_for_strength,
+ 'story shear': story_shear_for_strength,
+ 'base shear': base_shear_for_strength,
+ 'Cs': Cs_for_strength,
+ }
+ self.seismic_force_for_drift = {
+ 'lateral story force': lateral_story_force_for_drift,
+ 'story shear': story_shear_for_drift,
+ 'base shear': base_shear_for_drift,
+ 'Cs': Cs_for_drift,
+ }
def determine_member_candidate(self):
- """
- This method is used to determine all possible member candidates based on the user-specified section depth
+ """This method is used to determine all possible member candidates based on the user-specified section depth
:return: a dictionary which contains the all possible sizes for exterior columns, interior columns, and beams.
- """
+ """ # noqa: D205, D401, D404
# Read the user-specified depths for interior columns, exterior columns, and beams.
os.chdir(self.directory['building data'])
- with open('MemberDepth.csv', 'r') as csvfile:
+ with open('MemberDepth.csv') as csvfile: # noqa: PLW1514, PTH123
depth_data = pd.read_csv(csvfile, header=0)
# Initialize dictionary that will be used to store all possible section sizes for each member (in each story)
interior_column_candidate = {}
@@ -303,81 +381,110 @@ def determine_member_candidate(self):
interior_column_depth = []
exterior_column_depth = []
beam_depth = []
- for story in range(0, self.geometry['number of story']): # story number
+ for story in range(self.geometry['number of story']): # story number
# Initialize the Series that will be used to store the member sizes for each single story
temp_interior_column = pd.Series()
temp_exterior_column = pd.Series()
temp_beam = pd.Series()
# Convert string (read from csv) to list
- interior_column_depth_list = depth_data.loc[story, 'interior column'].split(', ')
- exterior_column_depth_list = depth_data.loc[story, 'exterior column'].split(', ')
+ interior_column_depth_list = depth_data.loc[
+ story, 'interior column'
+ ].split(', ')
+ exterior_column_depth_list = depth_data.loc[
+ story, 'exterior column'
+ ].split(', ')
beam_depth_list = depth_data.loc[story, 'beam'].split(', ')
# Find the section size candidates associated with a certain depth specified by user
- for item in range(0, len(interior_column_depth_list)):
- temp1 = find_section_candidate(interior_column_depth_list[item], COLUMN_DATABASE)
+ for item in range(len(interior_column_depth_list)):
+ temp1 = find_section_candidate(
+ interior_column_depth_list[item], COLUMN_DATABASE
+ )
temp_interior_column = pd.concat([temp_interior_column, temp1])
- for item in range(0, len(exterior_column_depth_list)):
- temp2 = find_section_candidate(exterior_column_depth_list[item], COLUMN_DATABASE)
+ for item in range(len(exterior_column_depth_list)):
+ temp2 = find_section_candidate(
+ exterior_column_depth_list[item], COLUMN_DATABASE
+ )
temp_exterior_column = pd.concat([temp_exterior_column, temp2])
- for item in range(0, len(beam_depth_list)):
+ for item in range(len(beam_depth_list)):
temp3 = find_section_candidate(beam_depth_list[item], BEAM_DATABASE)
temp_beam = pd.concat([temp_beam, temp3])
# Store the section size candidates for each member per story in a dictionary
# Re-order the Series based on the index (which is further based on descending order of Ix for column
# and Zx for beam). Convert Series to list.
- interior_column_candidate['story %s' % (story + 1)] = list(temp_interior_column.sort_index())
- exterior_column_candidate['story %s' % (story + 1)] = list(temp_exterior_column.sort_index())
- beam_candidate['floor level %s' % (story + 2)] = list(temp_beam.sort_index())
+ interior_column_candidate['story %s' % (story + 1)] = list(
+ temp_interior_column.sort_index()
+ )
+ exterior_column_candidate['story %s' % (story + 1)] = list(
+ temp_exterior_column.sort_index()
+ )
+ beam_candidate['floor level %s' % (story + 2)] = list(
+ temp_beam.sort_index()
+ )
# Store the section depth for each member in each story
interior_column_depth.append(interior_column_depth_list)
exterior_column_depth.append(exterior_column_depth_list)
beam_depth.append(beam_depth_list)
# Summarize all the section candidates to a dictionary
- self.element_candidate = {'interior column': interior_column_candidate,
- 'exterior column': exterior_column_candidate,
- 'beam': beam_candidate}
+ self.element_candidate = {
+ 'interior column': interior_column_candidate,
+ 'exterior column': exterior_column_candidate,
+ 'beam': beam_candidate,
+ }
# Summarize all the section depth to a dictionary
- self.section_depth = {'interior column': interior_column_depth,
- 'exterior column': exterior_column_depth,
- 'beam': beam_depth}
+ self.section_depth = {
+ 'interior column': interior_column_depth,
+ 'exterior column': exterior_column_depth,
+ 'beam': beam_depth,
+ }
def initialize_member(self):
- """
- This method is used to initialize the member size
+ """This method is used to initialize the member size
:return: a dictionary which includes the initial size for interior columns, exterior columns, and beams
- """
+ """ # noqa: D205, D400, D401, D404
# Define initial sizes for columns and beams
interior_column = []
exterior_column = []
beam = []
- for story in range(0, self.geometry['number of story']):
+ for story in range(self.geometry['number of story']):
# The initial column is selected as the greatest sizes in the candidate pool
- initial_interior = self.element_candidate['interior column']['story %s' % (story + 1)][0]
- initial_exterior = self.element_candidate['exterior column']['story %s' % (story + 1)][0]
+ initial_interior = self.element_candidate['interior column'][
+ 'story %s' % (story + 1)
+ ][0]
+ initial_exterior = self.element_candidate['exterior column'][
+ 'story %s' % (story + 1)
+ ][0]
# Merge initial size of each story together
interior_column.append(initial_interior)
exterior_column.append(initial_exterior)
# Compute the section property of the interior column size
- reference_property = search_section_property(initial_interior, SECTION_DATABASE)
+ reference_property = search_section_property(
+ initial_interior, SECTION_DATABASE
+ )
# Determine the beam size based on beam-to-column section modulus ratio
- beam_size = search_member_size('Zx', reference_property['Zx'] * BEAM_TO_COLUMN_RATIO,
- self.element_candidate['beam']['floor level %s' % (story + 2)],
- SECTION_DATABASE)
+ beam_size = search_member_size(
+ 'Zx',
+ reference_property['Zx'] * BEAM_TO_COLUMN_RATIO,
+ self.element_candidate['beam']['floor level %s' % (story + 2)],
+ SECTION_DATABASE,
+ )
# Merge initial beam size of each story together
beam.append(beam_size)
# Store all initial member sizes into the dictionary (which will be updated using optimization algorithm later)
- self.member_size = {'interior column': interior_column,
- 'exterior column': exterior_column,
- 'beam': beam}
+ self.member_size = {
+ 'interior column': interior_column,
+ 'exterior column': exterior_column,
+ 'beam': beam,
+ }
def read_modal_period(self):
- """
- This method is used to read the modal period from OpenSees eigen value analysis results and store it in ELF
+ """This method is used to read the modal period from OpenSees eigen value analysis results and store it in ELF
parameters.
:return: the first mode period stored in self.elf_parameters
- """
+ """ # noqa: D205, D400, D401, D404
# Change the working directory to the folder where the eigen value analysis results are stored
- path_modal_period = self.directory['building elastic model'] + "/EigenAnalysis"
+ path_modal_period = (
+ self.directory['building elastic model'] + '/EigenAnalysis'
+ )
# Create the 'path_modal_period' directory if it does not already exist
Path(path_modal_period).mkdir(parents=True, exist_ok=True)
@@ -386,16 +493,18 @@ def read_modal_period(self):
# Save the first mode period in elf_parameters
# period = np.loadtxt('Periods.out')
period = pd.read_csv('Periods.out', header=None)
- self.elf_parameters['modal period'] = np.float64((period.iloc[0, 0]))
+ self.elf_parameters['modal period'] = np.float64(period.iloc[0, 0])
def read_story_drift(self):
- """
- This method is used to read the story drifts from OpenSees elastic analysis results and stored it as attribute
+ """This method is used to read the story drifts from OpenSees elastic analysis results and stored it as attribute
The load case for story drift is the combination of dead, live, and earthquake loads.
:return: an [story*1] array which includes the story drifts for each story.
- """
+ """ # noqa: D205, D401, D404
# Change the working directory to the folder where story drifts are stored
- path_story_drift = self.directory['building elastic model'] + "/GravityEarthquake/StoryDrifts"
+ path_story_drift = (
+ self.directory['building elastic model']
+ + '/GravityEarthquake/StoryDrifts'
+ )
os.chdir(path_story_drift)
# Save all story drifts in an array
story_drift = np.zeros([self.geometry['number of story'], 1])
@@ -407,55 +516,70 @@ def read_story_drift(self):
self.elastic_response = {'story drift': story_drift}
def optimize_member_for_drift(self):
- """
- This method is used to decrease the member size such that the design is most economic.
+ """This method is used to decrease the member size such that the design is most economic.
:return: update self.member_size
- """
+ """ # noqa: D205, D400, D401, D404
# Find the story which has the smallest drift
- target_story = np.where(self.elastic_response['story drift'] ==
- np.min(self.elastic_response['story drift']))[0][0]
+ target_story = np.where(
+ self.elastic_response['story drift']
+ == np.min(self.elastic_response['story drift'])
+ )[0][0]
# Update the interior column size in target story
- self.member_size['interior column'][target_story] = \
- decrease_member_size(self.element_candidate['interior column']['story %s' % (target_story + 1)],
- self.member_size['interior column'][target_story])
+ self.member_size['interior column'][target_story] = decrease_member_size(
+ self.element_candidate['interior column'][
+ 'story %s' % (target_story + 1)
+ ],
+ self.member_size['interior column'][target_story],
+ )
# Compute the section property of the interior column size
- reference_property = search_section_property(self.member_size['interior column'][target_story],
- SECTION_DATABASE)
+ reference_property = search_section_property(
+ self.member_size['interior column'][target_story], SECTION_DATABASE
+ )
# Determine the beam size based on beam-to-column section modulus ratio
- beam_size = search_member_size('Zx', reference_property['Zx'] * BEAM_TO_COLUMN_RATIO,
- self.element_candidate['beam']['floor level %s' % (target_story + 2)],
- SECTION_DATABASE)
+ beam_size = search_member_size(
+ 'Zx',
+ reference_property['Zx'] * BEAM_TO_COLUMN_RATIO,
+ self.element_candidate['beam']['floor level %s' % (target_story + 2)],
+ SECTION_DATABASE,
+ )
# "Push" the updated beam size back to the class dictionary
self.member_size['beam'][target_story] = beam_size
# Determine the exterior column size based on exterior/interior column moment of inertia ratio
- exterior_size = search_member_size('Ix', reference_property['Ix'] * EXTERIOR_INTERIOR_COLUMN_RATIO,
- self.element_candidate['exterior column']['story %s' % (target_story + 1)],
- SECTION_DATABASE)
+ exterior_size = search_member_size(
+ 'Ix',
+ reference_property['Ix'] * EXTERIOR_INTERIOR_COLUMN_RATIO,
+ self.element_candidate['exterior column'][
+ 'story %s' % (target_story + 1)
+ ],
+ SECTION_DATABASE,
+ )
self.member_size['exterior column'][target_story] = exterior_size
def upscale_column(self, target_story, type_column):
- """
- This method is used to increase column size which might be necessary when column strength is not sufficient
+ """This method is used to increase column size which might be necessary when column strength is not sufficient
or strong column weak beam is not satisfied.
:param target_story: a scalar to denote which story column shall be increased (from 0 to total story # - 1).
:param type_column: a string denoting whether it is an exterior column or interior column
:return: update the column size stored in self.member_size
- """
- temp_size = increase_member_size(self.element_candidate[type_column]['story %s' % (target_story + 1)],
- self.member_size[type_column][target_story])
+ """ # noqa: D205, D400, D401, D404
+ temp_size = increase_member_size(
+ self.element_candidate[type_column]['story %s' % (target_story + 1)],
+ self.member_size[type_column][target_story],
+ )
self.member_size[type_column][target_story] = temp_size
# temp_size_2 = increase_member_size(self.element_candidate['exterior column']['story %x' % (target_story+1)],
# self.member_size['exterior column'][target_story])
# self.member_size['exterior column'][target_story] = temp_size_2
def upscale_beam(self, target_floor):
- """
- This method is used to increase beam size which might be necessary when beam strength is not sufficient
+ """This method is used to increase beam size which might be necessary when beam strength is not sufficient
:param target_floor: a scalar to denote which floor beam shall be improved. (from 0 to total story # - 1)
:return: update the beam size stored in self.member_size
- """
- temp_size = increase_member_size(self.element_candidate['beam']['floor level %s' % (target_floor + 2)],
- self.member_size['beam'][target_floor])
+ """ # noqa: D205, D400, D401, D404
+ temp_size = increase_member_size(
+ self.element_candidate['beam']['floor level %s' % (target_floor + 2)],
+ self.member_size['beam'][target_floor],
+ )
self.member_size['beam'][target_floor] = temp_size
# ************************************* Keep previous version as backup ********************************************
@@ -463,10 +587,10 @@ def upscale_beam(self, target_floor):
# """
# This method is used to update the member size by considering the constructability (ease of construction)
# :return: a dictionary which includes the member sizes after consideration of constructability.
- # Those siezes are considered to be the actual final design.
+ # Those sizes are considered to be the actual final design.
# """
# # Make a deep copy of the member sizes and stored them in a new dictionary named construction_size
- # # Use deep copy to avoid changing the varaiables stored in member size
+ # # Use deep copy to avoid changing the variables stored in member size
# temp_size = copy.deepcopy(self.member_size)
# # Update interior and exterior column size
# member = ['interior column', 'exterior column']
@@ -486,29 +610,35 @@ def upscale_beam(self, target_floor):
# ********************************************* Previous version ends here *****************************************
def constructability_beam(self):
- """
- This method is used to update the beam member size by considering the constructability (ease of construction).
+ """This method is used to update the beam member size by considering the constructability (ease of construction).
:return: update the beam sizes stored in self.member_size['beam']
- """
+ """ # noqa: D205, D400, D401, D404
# Make a deep copy of the member sizes and stored them in a new dictionary named construction_size
# Use deep copy to avoid changing the variables stored in member size
temp_size = copy.deepcopy(self.member_size)
# Update beam size (beam size is updated based on descending order of Zx)
- self.construction_size['beam'] = constructability_helper(temp_size['beam'], IDENTICAL_SIZE_PER_STORY,
- self.geometry['number of story'], 'Ix')
+ self.construction_size['beam'] = constructability_helper(
+ temp_size['beam'],
+ IDENTICAL_SIZE_PER_STORY,
+ self.geometry['number of story'],
+ 'Ix',
+ )
# Column sizes here have not been updated (just directly copy)
self.construction_size['interior column'] = temp_size['interior column']
self.construction_size['exterior column'] = temp_size['exterior column']
def constructability_column(self):
- """
- This method is used to update the column member size by considering the constructability (ease of construction).
+ """This method is used to update the column member size by considering the constructability (ease of construction).
:return: update the column sizes stored in self.member_size
- """
+ """ # noqa: D205, D400, D401, D404
# Make a copy of the member size
temp_size = copy.deepcopy(self.member_size)
# Update column sizes based on the sorted Ix
member_list = ['interior column', 'exterior column']
for mem in member_list:
- self.construction_size[mem] = constructability_helper(temp_size[mem], IDENTICAL_SIZE_PER_STORY,
- self.geometry['number of story'], 'Ix')
+ self.construction_size[mem] = constructability_helper(
+ temp_size[mem],
+ IDENTICAL_SIZE_PER_STORY,
+ self.geometry['number of story'],
+ 'Ix',
+ )
diff --git a/modules/createSAM/AutoSDA/column_component.py b/modules/createSAM/AutoSDA/column_component.py
index 5463ca7de..cbd83b50e 100644
--- a/modules/createSAM/AutoSDA/column_component.py
+++ b/modules/createSAM/AutoSDA/column_component.py
@@ -1,30 +1,37 @@
-# This file is used to define the class of column, which includes the axial, shear, and flexural strengths of column
+# This file is used to define the class of column, which includes the axial, shear, and flexural strengths of column # noqa: CPY001, D100, INP001
# Developed by GUAN, XINGQUAN @ UCLA in Apr. 2018
# Updated in Oct. 2018
import numpy as np
-
-from scipy import interpolate
-
-from help_functions import search_section_property
from global_variables import SECTION_DATABASE
+from help_functions import search_section_property
+from scipy import interpolate
# #########################################################################
# Define a class of column #
# #########################################################################
-class Column(object):
- """
- This class is used to define a column member, which has the following attributes:
+
+class Column:
+ """This class is used to define a column member, which has the following attributes:
(1) Column section, a dictionary including the size and associated properties.
(2) Column demand, a dictionary including axial, shear, and flexural demands.
(3) Column strength, a dictionary including axial, shear, and flexural strengths.
(4) Column flag, an integer with value of zero or nonzero. If it's zero, the column is feasible.
- """
+ """ # noqa: D205, D404
- def __init__(self, section_size, axial_demand, shear_demand, moment_demand_bot, moment_demand_top, Lx, Ly, steel):
- """
- This function initializes the attributes of class of column.
+ def __init__(
+ self,
+ section_size,
+ axial_demand,
+ shear_demand,
+ moment_demand_bot,
+ moment_demand_top,
+ Lx, # noqa: N803
+ Ly, # noqa: N803
+ steel,
+ ):
+ """This function initializes the attributes of class of column.
:param section_size: a string which specifies the size for column.
:param axial_demand: a float number which describes axial demand.
:param shear_demand: a float number which describes shear demand.
@@ -32,11 +39,15 @@ def __init__(self, section_size, axial_demand, shear_demand, moment_demand_bot,
:param moment_demand_top: a float number which describes moment demand at top of column.
:param Lx: unbraced length in x direction.
:param Ly: unbraced length in y direction.
- """
+ """ # noqa: D205, D401, D404
# Assign the necessary information for column class
self.section = search_section_property(section_size, SECTION_DATABASE)
- self.demand = {'axial': axial_demand, 'shear': shear_demand,
- 'moment bottom': moment_demand_bot, 'moment top': moment_demand_top}
+ self.demand = {
+ 'axial': axial_demand,
+ 'shear': shear_demand,
+ 'moment bottom': moment_demand_bot,
+ 'moment top': moment_demand_top,
+ }
self.unbraced_length = {'x': Lx, 'y': Ly}
# Initialize the strength dictionary with an empty dictionary
@@ -62,13 +73,12 @@ def __init__(self, section_size, axial_demand, shear_demand, moment_demand_bot,
self.calculate_hinge_parameters(steel)
def check_flange(self, steel):
- """
- This method is used to check whether the flange is satisfied with highly ductile requirement, as specified in
+ """This method is used to check whether the flange is satisfied with highly ductile requirement, as specified in
Seismic Design Manual Table D1.1.
:param steel: a class defined in "steel_material.py" file.
:return: a boolean variable which denotes the flange check results.
- """
- flange_limit = 0.30 * np.sqrt(steel.E/steel.Fy)
+ """ # noqa: D205, D401, D404
+ flange_limit = 0.30 * np.sqrt(steel.E / steel.Fy)
# If flag is still zero after checking the limitation. Then the highly ductile requirement is met.
# Otherwise, it is not satisfied.
if self.section['bf to tf ratio'] <= flange_limit:
@@ -77,19 +87,23 @@ def check_flange(self, steel):
self.is_feasible['flange limit'] = False
def check_web(self, steel):
- """
- This method is used to check whether the web is satisfied with highly ductile requirement, as specified in
+ """This method is used to check whether the web is satisfied with highly ductile requirement, as specified in
Seismic Design Manual Table D1.1.
:param steel: a class defined in "steel_material.py" file.
:return: a boolean variable which denotes the flange check results.
- """
+ """ # noqa: D205, D401, D404
# Compute the limit for web depth-to-thickness ratio
phi = 0.9
- Ca = self.demand['axial'] / (phi*steel.Fy*self.section['A'])
- if Ca <= 0.125:
- web_limit = 2.45 * np.sqrt(steel.E/steel.Fy) * (1-0.93*Ca)
+ Ca = self.demand['axial'] / (phi * steel.Fy * self.section['A']) # noqa: N806
+ if Ca <= 0.125: # noqa: PLR2004
+ web_limit = 2.45 * np.sqrt(steel.E / steel.Fy) * (1 - 0.93 * Ca)
else:
- web_limit = np.max([0.77*np.sqrt(steel.E/steel.Fy)*(2.93-Ca), 1.49*np.sqrt(steel.E/steel.Fy)])
+ web_limit = np.max(
+ [
+ 0.77 * np.sqrt(steel.E / steel.Fy) * (2.93 - Ca),
+ 1.49 * np.sqrt(steel.E / steel.Fy),
+ ]
+ )
         # Compare the section depth-to-thickness ratio with the limit
if self.section['h to tw ratio'] <= web_limit:
self.is_feasible['web limit'] = True
@@ -97,26 +111,29 @@ def check_web(self, steel):
self.is_feasible['web limit'] = False
def check_axial_strength(self, steel):
- """
- This method is used to check the axial strength of the column.
+ """This method is used to check the axial strength of the column.
:param steel: a class defined in "steel_material.py" file.
:return: a float number denoting the axial strength
and a boolean variable denoting whether the column strength is enough.
- """
+ """ # noqa: D205, D401, D404
# Default values for two coefficient
- Kx = 1.0
- Ky = 1.0
- slenderness_ratio = max([Kx*self.unbraced_length['x']/self.section['rx'],
- Ky*self.unbraced_length['y']/self.section['ry']])
+ Kx = 1.0 # noqa: N806
+ Ky = 1.0 # noqa: N806
+ slenderness_ratio = max(
+ [
+ Kx * self.unbraced_length['x'] / self.section['rx'],
+ Ky * self.unbraced_length['y'] / self.section['ry'],
+ ]
+ )
# Compute elastic buckling stress
- Fe = np.pi**2 * steel.E / (slenderness_ratio**2)
+ Fe = np.pi**2 * steel.E / (slenderness_ratio**2) # noqa: N806
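+        # Fe is the elastic (Euler) buckling stress, AISC 360 Eq. E3-4; the branch below
+        # applies Eq. E3-2 (inelastic buckling) or E3-3 (elastic buckling) based on slenderness.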
# Calculate critical stress
- if slenderness_ratio <= (4.71 * np.sqrt(steel.E/steel.Fy)):
- Fcr = 0.658**(steel.Fy/Fe) * steel.Fy
+ if slenderness_ratio <= (4.71 * np.sqrt(steel.E / steel.Fy)):
+ Fcr = 0.658 ** (steel.Fy / Fe) * steel.Fy # noqa: N806
else:
- Fcr = 0.877 * Fe
+ Fcr = 0.877 * Fe # noqa: N806
# Compute nominal compressive strength
- Pn = Fcr * self.section['A']
+ Pn = Fcr * self.section['A'] # noqa: N806
# Store axial strength into "strength" dictionary
phi = 0.9
self.strength['axial'] = phi * Pn
@@ -127,15 +144,14 @@ def check_axial_strength(self, steel):
self.is_feasible['axial strength'] = False
def check_shear_strength(self, steel):
- """
- This method is used to check the shear strength of single column member.
+ """This method is used to check the shear strength of single column member.
:param steel: a class defined in "steel_material.py" file.
:return: a float number denoting shear strength
and a boolean variable denoting whether shear strength is enough.
- """
- Cv = 1.0
+ """ # noqa: D205, D401, D404
+ Cv = 1.0 # noqa: N806
# Compute nominal shear strength
- Vn = 0.6 * steel.Fy * (self.section['tw'] * self.section['d']) * Cv
+ Vn = 0.6 * steel.Fy * (self.section['tw'] * self.section['d']) * Cv # noqa: N806
phi = 0.9
# Store the shear strength into "strength" dictionary
self.strength['shear'] = phi * Vn
@@ -146,12 +162,11 @@ def check_shear_strength(self, steel):
self.is_feasible['shear strength'] = False
def check_flexural_strength(self, steel):
- """
- This method is used to check the flexural strength of single column member.
+ """This method is used to check the flexural strength of single column member.
:param steel: a class defined in "steel_material.py" file.
:return: a float number denoting the flexural strength
and a boolean denoting whether flexural strength is enough.
- """
+ """ # noqa: D205, D401, D404
# Compute the distance between center lines of top and bottom flanges
h0 = self.section['d'] - self.section['tf']
# Determine coefficient: based whether it is a "W" section
@@ -160,41 +175,54 @@ def check_flexural_strength(self, steel):
else:
c = h0 / 2 * np.sqrt(self.section['Iy'] / self.section['Cw'])
# Compute Lp and Lr, both of which are necessary to determine flexural strength
- Lp = 1.76 * self.section['ry'] * np.sqrt(steel.E/steel.Fy)
- temp1 = np.sqrt((self.section['J']*c/(self.section['Sx']*h0))**2 + 6.76*(0.7*steel.Fy/steel.E)**2)
+ Lp = 1.76 * self.section['ry'] * np.sqrt(steel.E / steel.Fy) # noqa: N806
+ temp1 = np.sqrt(
+ (self.section['J'] * c / (self.section['Sx'] * h0)) ** 2
+ + 6.76 * (0.7 * steel.Fy / steel.E) ** 2
+ )
temp2 = np.sqrt(self.section['J'] * c / (self.section['Sx'] * h0) + temp1)
- Lr = 1.95 * self.section['rts'] * steel.E / (0.7*steel.Fy) * temp2
+ Lr = 1.95 * self.section['rts'] * steel.E / (0.7 * steel.Fy) * temp2 # noqa: N806
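+        # Lp and Lr are the limiting laterally unbraced lengths for yielding and for
+        # inelastic lateral-torsional buckling (AISC 360 Eqs. F2-5 and F2-6).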
# Unbraced length
- Lb = min([self.unbraced_length['x'], self.unbraced_length['y']])
+ Lb = min([self.unbraced_length['x'], self.unbraced_length['y']]) # noqa: N806
# Compute moment capacity governed by plastic yielding
- Mp = steel.Fy * self.section['Zx']
+ Mp = steel.Fy * self.section['Zx'] # noqa: N806
# Compute MA, MB, and MC coefficients, all of which are necessary to compute Cb coefficient
# See page 16.1-46 in Seismic Design Manual
- M_max = np.max([abs(self.demand['moment bottom']), abs(self.demand['moment top'])])
- linear_function = interpolate.interp1d([0, 1],
- [self.demand['moment bottom'], (-1)*self.demand['moment top']])
- [MA, MB, MC] = np.abs(linear_function([0.25, 0.50, 0.75]))
- Cb = 12.5 * M_max / (2.5*M_max + 3*MA + 4*MB + 3*MC)
+ M_max = np.max( # noqa: N806
+ [abs(self.demand['moment bottom']), abs(self.demand['moment top'])]
+ )
+ linear_function = interpolate.interp1d(
+ [0, 1], [self.demand['moment bottom'], (-1) * self.demand['moment top']]
+ )
+ [MA, MB, MC] = np.abs(linear_function([0.25, 0.50, 0.75])) # noqa: N806
+ Cb = 12.5 * M_max / (2.5 * M_max + 3 * MA + 4 * MB + 3 * MC) # noqa: N806
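+        # Cb is the lateral-torsional buckling modification factor (AISC 360 Eq. F1-1),
+        # with MA, MB, MC the absolute moments at the quarter, mid, and three-quarter points.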
# Calculate moment capacity based on unbraced length: case-by-case analysis
# Case I: flexural strength is governed by plastic yielding
# Case II: flexural strength is governed by lateral torsional buckling with Lp < Lb <= Lr
# Case III: flexural strength is governed by lateral torsional buckling with Lb > Lr
if Lb <= Lp:
- Mn = Mp
+ Mn = Mp # noqa: N806
elif Lb <= Lr:
- Mn = Cb * (Mp-(Mp-0.7*steel.Fy*self.section['Sx'])*(Lb-Lp)/(Lr-Lp))
+ Mn = Cb * ( # noqa: N806
+ Mp
+ - (Mp - 0.7 * steel.Fy * self.section['Sx']) * (Lb - Lp) / (Lr - Lp)
+ )
else:
- temp = np.sqrt((1 + 0.078*self.section['J']*c)/(self.section['Sx']*h0)*(Lb/self.section['rts'])**2)
- Fcr = Cb * np.pi**2 * steel.E/((Lb/self.section['rts'])**2) * temp
- Mn = Fcr * self.section['Sx']
+ temp = np.sqrt(
+ (1 + 0.078 * self.section['J'] * c)
+ / (self.section['Sx'] * h0)
+ * (Lb / self.section['rts']) ** 2
+ )
+ Fcr = Cb * np.pi**2 * steel.E / ((Lb / self.section['rts']) ** 2) * temp # noqa: N806
+ Mn = Fcr * self.section['Sx'] # noqa: N806
# Attention no matter which case the column is, the flexural strength cannot exceed plastic moment capacity
- Mn = np.min([Mn, Mp])
+ Mn = np.min([Mn, Mp]) # noqa: N806
# Store the flexural strength into "strength" dictionary
phi = 0.9
- self.strength['flexural'] = phi*Mn
+ self.strength['flexural'] = phi * Mn
# Check whether the flexural strength is sufficient and return it into flag variable
if self.strength['flexural'] >= M_max:
self.is_feasible['flexural strength'] = True
@@ -202,25 +230,26 @@ def check_flexural_strength(self, steel):
self.is_feasible['flexural strength'] = False
def check_combined_loads(self):
- """
- This method is whether the strength is sufficient for column subjected to combined loading.
+ """This method is whether the strength is sufficient for column subjected to combined loading.
:return: a boolean variable denoting whether the strength is sufficient under combined loading.
- """
+ """ # noqa: D205, D401, D404
# Obtain the axial capacity and moment capacity
phi = 0.9
- Pc = self.strength['axial'] / phi
- Mcx = self.strength['flexural'] / phi
- Pr = self.demand['axial']
+ Pc = self.strength['axial'] / phi # noqa: N806
+ Mcx = self.strength['flexural'] / phi # noqa: N806
+ Pr = self.demand['axial'] # noqa: N806
# Determine the governing moment:
# Maximum value from moments at two ends
- Mrx = np.max([abs(self.demand['moment bottom']), abs(self.demand['moment top'])])
+ Mrx = np.max( # noqa: N806
+ [abs(self.demand['moment bottom']), abs(self.demand['moment top'])]
+ )
# Case-by-case analysis:
# Case I: axial load ratio is less than or equal to 0.2
# Case II: axial load ratio is greater than 0.2
- if Pr/Pc <= 0.2:
- combination = Pr/Pc + 8/9 * (Mrx/Mcx)
+ if Pr / Pc <= 0.2: # noqa: PLR2004
+ combination = Pr / Pc + 8 / 9 * (Mrx / Mcx)
else:
- combination = Pr/(2*Pc) + (Mrx/Mcx)
+ combination = Pr / (2 * Pc) + (Mrx / Mcx)
# Check whether the coefficient is less than 1.0 (AISC Specifications Eq. H1-1)
if combination <= 1.0:
self.is_feasible['combined strength'] = True
@@ -228,33 +257,36 @@ def check_combined_loads(self):
self.is_feasible['combined strength'] = False
def check_flag(self):
- """
- This method is used check whether the column passes all checks.
+ """This method is used check whether the column passes all checks.
:return: a boolean variable indicating whether column is feasible or not.
- """
+ """ # noqa: D205, D401, D404
self.flag = True
- for key in self.is_feasible.keys():
- if self.is_feasible[key] == False:
+ for key in self.is_feasible.keys(): # noqa: SIM118
+ if self.is_feasible[key] == False: # noqa: E712
self.flag = False
return self.flag
def compute_demand_capacity_ratio(self):
- """
- This method is used to calculate the demand to capacity ratios for column components
+ """This method is used to calculate the demand to capacity ratios for column components
:return: a dictionary which includes ratios for axial force, shear force, flexural moment, and combined loading.
- """
- self.demand_capacity_ratio['axial'] = self.demand['axial'] / self.strength['axial']
- self.demand_capacity_ratio['shear'] = self.demand['shear'] / self.strength['shear']
- self.demand_capacity_ratio['flexural'] = max(abs(self.demand['moment bottom']), abs(self.demand['moment top']))\
- /self.strength['flexural']
+ """ # noqa: D205, D401, D404
+ self.demand_capacity_ratio['axial'] = (
+ self.demand['axial'] / self.strength['axial']
+ )
+ self.demand_capacity_ratio['shear'] = (
+ self.demand['shear'] / self.strength['shear']
+ )
+ self.demand_capacity_ratio['flexural'] = (
+ max(abs(self.demand['moment bottom']), abs(self.demand['moment top']))
+ / self.strength['flexural']
+ )
def calculate_hinge_parameters(self, steel):
- """
- This method is used to compute the modeling parameters for plastic hinge using modified IMK material model.
+ """This method is used to compute the modeling parameters for plastic hinge using modified IMK material model.
:return: a dictionary including each parameters required for nonlinear modeling in OpenSees.
- """
+ """ # noqa: D205, D401, D404
# Following content is based on the following reference:
- # [1] Hysteretic models tha incorporate strength and stiffness deterioration
+ # [1] Hysteretic models that incorporate strength and stiffness deterioration
# [2] Deterioration modeling of steel components in support of collapse prediction of steel moment frames under
# earthquake loading
# [3] Global collapse of frame structures under seismic excitations
@@ -272,55 +304,91 @@ def calculate_hinge_parameters(self, steel):
# Note that for column, the unbraced length is the column length itself.
# units: kips, inches
# Note that column unbraced length is in feet, remember to convert it to inches
- c1 = 25.4 # c1_unit
- c2 = 6.895 # c2_unit
- h = self.section['d'] - 2*self.section['tf'] # Web depth
+ c1 = 25.4 # c1_unit # noqa: F841
+ c2 = 6.895 # c2_unit # noqa: F841
+ h = self.section['d'] - 2 * self.section['tf'] # Web depth
# Capping moment to yielding moment ratio. Lignos et al. used 1.05 whereas Prof. Burton used 1.11.
- McMy = 12.5 * (h/self.section['tw'])**(-0.2) \
- * (self.unbraced_length['x']*12.0/self.section['ry'])**(-0.4) \
- * (1-self.demand_capacity_ratio['axial']) ** (0.4)
- if McMy < 1.0:
- McMy = 1.0
- if McMy > 1.3:
- McMy = 1.3
+ McMy = ( # noqa: N806
+ 12.5
+ * (h / self.section['tw']) ** (-0.2)
+ * (self.unbraced_length['x'] * 12.0 / self.section['ry']) ** (-0.4)
+ * (1 - self.demand_capacity_ratio['axial']) ** (0.4)
+ )
+ McMy = max(McMy, 1.0) # noqa: N806
+ McMy = min(McMy, 1.3) # noqa: N806
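+        # McMy (capping-to-yield moment ratio) is bounded to [1.0, 1.3], reproducing
+        # the if-blocks of the original implementation replaced above.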
# Beam component rotational stiffness
- self.plastic_hinge['K0'] = 6 * steel.E * self.section['Ix'] / (self.unbraced_length['x']*12.0)
+ self.plastic_hinge['K0'] = (
+ 6 * steel.E * self.section['Ix'] / (self.unbraced_length['x'] * 12.0)
+ )
         # Flexural strength
self.plastic_hinge['Myp'] = self.section['Zx'] * steel.Fy
# Effective flexural strength
- if self.demand_capacity_ratio['axial'] <= 0.2:
- self.plastic_hinge['My'] = 1.15 * steel.Ry * self.plastic_hinge['Myp'] \
- * (1-0.5*self.demand_capacity_ratio['axial'])
+ if self.demand_capacity_ratio['axial'] <= 0.2: # noqa: PLR2004
+ self.plastic_hinge['My'] = (
+ 1.15
+ * steel.Ry
+ * self.plastic_hinge['Myp']
+ * (1 - 0.5 * self.demand_capacity_ratio['axial'])
+ )
else:
- self.plastic_hinge['My'] = 1.15 * steel.Ry * self.plastic_hinge['Myp'] \
- * 9/8 * (1-self.demand_capacity_ratio['axial'])
+ self.plastic_hinge['My'] = (
+ 1.15
+ * steel.Ry
+ * self.plastic_hinge['Myp']
+ * 9
+ / 8
+ * (1 - self.demand_capacity_ratio['axial'])
+ )
# Reference cumulative plastic rotation:
- if self.demand_capacity_ratio['axial'] <= 0.35:
- self.plastic_hinge['Lambda'] = 255000 * (h/self.section['tw'])**(-2.14) \
- * (self.unbraced_length['x']/self.section['ry']) ** (-0.53) \
- * (1-self.demand_capacity_ratio['axial'])**4.92
+ if self.demand_capacity_ratio['axial'] <= 0.35: # noqa: PLR2004
+ self.plastic_hinge['Lambda'] = (
+ 255000
+ * (h / self.section['tw']) ** (-2.14)
+ * (self.unbraced_length['x'] / self.section['ry']) ** (-0.53)
+ * (1 - self.demand_capacity_ratio['axial']) ** 4.92
+ )
else:
- self.plastic_hinge['Lambda'] = 268000 * (h/self.section['tw'])**(-2.30) \
- * (self.unbraced_length['x']/self.section['ry'])**(-1.30) \
- * (1-self.demand_capacity_ratio['axial'])**1.19
+ self.plastic_hinge['Lambda'] = (
+ 268000
+ * (h / self.section['tw']) ** (-2.30)
+ * (self.unbraced_length['x'] / self.section['ry']) ** (-1.30)
+ * (1 - self.demand_capacity_ratio['axial']) ** 1.19
+ )
# Pre-capping rotation:
- self.plastic_hinge['theta_p'] = 294 * (h/self.section['tw'])**(-1.7) \
- * (self.unbraced_length['x']/self.section['ry'])**(-0.7) \
- * (1-self.demand_capacity_ratio['axial'])**(1.6)
+ self.plastic_hinge['theta_p'] = (
+ 294
+ * (h / self.section['tw']) ** (-1.7)
+ * (self.unbraced_length['x'] / self.section['ry']) ** (-0.7)
+ * (1 - self.demand_capacity_ratio['axial']) ** (1.6)
+ )
self.plastic_hinge['theta_p'] = min(self.plastic_hinge['theta_p'], 0.20)
# Pre-capping rotation is further revised to exclude the elastic deformation
- self.plastic_hinge['theta_p'] = self.plastic_hinge['theta_p'] \
- - (McMy-1.0)*self.plastic_hinge['My'] / self.plastic_hinge['K0']
+ self.plastic_hinge['theta_p'] = ( # noqa: PLR6104
+ self.plastic_hinge['theta_p']
+ - (McMy - 1.0) * self.plastic_hinge['My'] / self.plastic_hinge['K0']
+ )
# Post-capping rotation:
- self.plastic_hinge['theta_pc'] = 90 * (h/self.section['tw'])**(-0.8) \
- * (self.unbraced_length['x']/self.section['ry'])**(-0.8) \
- * (1-self.demand_capacity_ratio['axial'])**2.5
+ self.plastic_hinge['theta_pc'] = (
+ 90
+ * (h / self.section['tw']) ** (-0.8)
+ * (self.unbraced_length['x'] / self.section['ry']) ** (-0.8)
+ * (1 - self.demand_capacity_ratio['axial']) ** 2.5
+ )
# Post-capping rotation is further revised to account for elastic deformation
- self.plastic_hinge['theta_y'] = self.plastic_hinge['My'] / self.plastic_hinge['K0']
- self.plastic_hinge['theta_pc'] = self.plastic_hinge['theta_pc'] \
- + self.plastic_hinge['theta_y'] \
- + (McMy-1.0)*self.plastic_hinge['My']/self.plastic_hinge['K0']
- self.plastic_hinge['as'] = (McMy-1.0)*self.plastic_hinge['My']\
- /(self.plastic_hinge['theta_p']*self.plastic_hinge['K0'])
- self.plastic_hinge['residual'] = 0.5 - 0.4*self.demand_capacity_ratio['axial']
+ self.plastic_hinge['theta_y'] = (
+ self.plastic_hinge['My'] / self.plastic_hinge['K0']
+ )
+ self.plastic_hinge['theta_pc'] = (
+ self.plastic_hinge['theta_pc']
+ + self.plastic_hinge['theta_y']
+ + (McMy - 1.0) * self.plastic_hinge['My'] / self.plastic_hinge['K0']
+ )
+ self.plastic_hinge['as'] = (
+ (McMy - 1.0)
+ * self.plastic_hinge['My']
+ / (self.plastic_hinge['theta_p'] * self.plastic_hinge['K0'])
+ )
+ self.plastic_hinge['residual'] = (
+ 0.5 - 0.4 * self.demand_capacity_ratio['axial']
+ )
self.plastic_hinge['theta_u'] = 0.15
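+ # Together, K0, My, McMy, Lambda, theta_p, theta_pc, residual and theta_u define the moment-rotation backbone and cyclic deterioration parameters of this plastic hinge.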
diff --git a/modules/createSAM/AutoSDA/connection_part.py b/modules/createSAM/AutoSDA/connection_part.py
index 5be1275ac..7459d1190 100644
--- a/modules/createSAM/AutoSDA/connection_part.py
+++ b/modules/createSAM/AutoSDA/connection_part.py
@@ -1,4 +1,4 @@
-# This file is used to define the class of beam-column connection, which includes beam/column depth
+# This file is used to define the class of beam-column connection, which includes beam/column depth # noqa: CPY001, D100, INP001
# check, RBS dimensions, moment capacity at column face, strong-column-weak-beam check, and panel zone
# thickness (doubler plate)
@@ -10,45 +10,50 @@
# Please add all the imported modules in the part below
import copy
-import pandas as pd
import sys
-##########################################################################
-# Load User Defined Class and Py Files #
-##########################################################################
-
-from help_functions import extract_depth
-from help_functions import extract_weight
-
# #########################################################################
# Open the section database and store it as a global variable #
# #########################################################################
-
from global_variables import STRONG_COLUMN_WEAK_BEAM_RATIO
+##########################################################################
+# Load User Defined Class and Py Files #
+##########################################################################
+from help_functions import extract_depth, extract_weight
+
# #########################################################################
# Define a class of beam #
# #########################################################################
-class Connection(object):
- """
- This class is used to define a beam-column connection part, which has the following attributes:
+
+class Connection:
+ """This class is used to define a beam-column connection part, which has the following attributes:
(1) Check column and beam depth as well as weight per ANSI Section 5.3.1 prequalified connection.
(2) Extract RBS (reduced beam section) dimension from beam class.
(3) Compute the probable maximum moment at the center of RBS
(4) Calculate shear force at the center of RBS
(5) Compute probable moment at column face
- (6) Compute plastic moment of beam based on expeced yield stress
+ (6) Compute plastic moment of beam based on expected yield stress
(7) Check moment capacity at column face
(8) Check shear strength of beam
(9) Check whether strong column weak beam is satisfied
(10) Calculate doubler plate thickness
- """
+ """ # noqa: D205, D400, D404
- def __init__(self, connection_type, steel, beam_dead_load, beam_live_load, span,
- left_beam=None, right_beam=None, top_column=None, bottom_column=None):
- """
- This function initializes all attributes of Connection class.
+ def __init__(
+ self,
+ connection_type,
+ steel,
+ beam_dead_load,
+ beam_live_load,
+ span,
+ left_beam=None,
+ right_beam=None,
+ top_column=None,
+ bottom_column=None,
+ ):
+ """This function initializes all attributes of Connection class.
:param connection_type: a string which denotes the type of beam-column connection.
"interior": interior beam-column connection with two beams and two columns
"exterior": exterior beam-column connection with one beam and two columns
@@ -66,7 +71,7 @@ def __init__(self, connection_type, steel, beam_dead_load, beam_live_load, span,
upper story of the connection.
:param bottom_column: a class defined in "column_component.py" file which refers the column in
lower story of the connection.
- """
+ """ # noqa: D205, D401, D404
self.connection_type = connection_type
# The dictionary used to store the RBS dimensions
self.left_RBS_dimension = {}
@@ -83,38 +88,64 @@ def __init__(self, connection_type, steel, beam_dead_load, beam_live_load, span,
self.flag = None
# Call methods to initialize the attributes listed above
- self.check_column_beam(connection_type, left_beam, right_beam, top_column, bottom_column)
+ self.check_column_beam(
+ connection_type, left_beam, right_beam, top_column, bottom_column
+ )
self.extract_reduced_beam_section(connection_type, left_beam, right_beam)
- self.compute_probable_moment_RBS(connection_type, steel, left_beam, right_beam)
- self.compute_shear_force_RBS(connection_type, beam_dead_load, beam_live_load, span, bottom_column)
+ self.compute_probable_moment_RBS(
+ connection_type, steel, left_beam, right_beam
+ )
+ self.compute_shear_force_RBS(
+ connection_type, beam_dead_load, beam_live_load, span, bottom_column
+ )
self.compute_probable_moment_column_face(connection_type)
self.compute_plastic_moment(connection_type, steel, left_beam, right_beam)
self.check_moment_column_face(connection_type)
- self.check_shear_strength(connection_type, beam_dead_load, beam_live_load, left_beam, right_beam)
- self.check_column_beam_relationships(connection_type, steel, left_beam, right_beam, top_column, bottom_column)
- self.determine_doubler_plate(connection_type, steel, left_beam, right_beam, bottom_column, top_column)
+ self.check_shear_strength(
+ connection_type, beam_dead_load, beam_live_load, left_beam, right_beam
+ )
+ self.check_column_beam_relationships(
+ connection_type, steel, left_beam, right_beam, top_column, bottom_column
+ )
+ self.determine_doubler_plate(
+ connection_type, steel, left_beam, right_beam, bottom_column, top_column
+ )
- def check_column_beam(self, connection_type, left_beam, right_beam, top_column, bottom_column):
- """
- This method is used to check whether the column and beam depth (weight) is feasible for
+ def check_column_beam(
+ self,
+ connection_type,
+ left_beam,
+ right_beam,
+ top_column,
+ bottom_column,
+ ):
+ """This method is used to check whether the column and beam depth (weight) is feasible for
prequalified connection. (step 1 in ANSI Section 5.8)
The explanations for input arguments are presented in __init__() function.
:return: a boolean result stored in is_feasible dictionary.
Actually, this method should always return true because all beam and column members are selected from a
database from which non-prequalified sizes have been removed.
- """
+ """ # noqa: D205, D401, D404
# Extract the beam depth and weight
if connection_type == 'typical exterior':
# Connection only has one beam and two columns
left_beam_depth = extract_depth(left_beam.section['section size'])
left_beam_weight = extract_weight(left_beam.section['section size'])
top_column_depth = extract_depth(top_column.section['section size'])
- bottom_column_depth = extract_depth(bottom_column.section['section size'])
- if (left_beam_depth <= 36 and left_beam_weight <= 300
- and top_column_depth <= 36 and bottom_column_depth <= 36):
+ bottom_column_depth = extract_depth(
+ bottom_column.section['section size']
+ )
+ if (
+ left_beam_depth <= 36 # noqa: PLR2004
+ and left_beam_weight <= 300 # noqa: PLR2004
+ and top_column_depth <= 36 # noqa: PLR2004
+ and bottom_column_depth <= 36 # noqa: PLR2004
+ ):
self.is_feasible['geometry limits'] = True
else:
- sys.stderr.write('Beam and column depth & weight are not acceptable!\n')
+ sys.stderr.write(
+ 'Beam and column depth & weight are not acceptable!\n'
+ )
self.is_feasible['geometry limits'] = False
elif connection_type == 'top exterior':
# ****************** Debug using only *************************
@@ -125,11 +156,19 @@ def check_column_beam(self, connection_type, left_beam, right_beam, top_column,
# Connection only has one beam and one column
left_beam_depth = extract_depth(left_beam.section['section size'])
left_beam_weight = extract_weight(left_beam.section['section size'])
- bottom_column_depth = extract_depth(bottom_column.section['section size'])
- if left_beam_depth <= 36 and left_beam_weight <= 300 and bottom_column_depth <= 36:
+ bottom_column_depth = extract_depth(
+ bottom_column.section['section size']
+ )
+ if (
+ left_beam_depth <= 36 # noqa: PLR2004
+ and left_beam_weight <= 300 # noqa: PLR2004
+ and bottom_column_depth <= 36 # noqa: PLR2004
+ ):
self.is_feasible['geometry limits'] = True
else:
- sys.stderr.write('Beam and column depth & weight are not acceptable!\n')
+ sys.stderr.write(
+ 'Beam and column depth & weight are not acceptable!\n'
+ )
self.is_feasible['geometry limits'] = False
elif connection_type == 'typical interior':
# Connection has two beams and two columns
@@ -138,13 +177,22 @@ def check_column_beam(self, connection_type, left_beam, right_beam, top_column,
right_beam_depth = extract_depth(right_beam.section['section size'])
right_beam_weight = extract_weight(right_beam.section['section size'])
top_column_depth = extract_depth(top_column.section['section size'])
- bottom_column_depth = extract_depth(bottom_column.section['section size'])
- if (left_beam_depth <= 36 and right_beam_depth <= 36
- and left_beam_weight <= 300 and right_beam_weight <= 300
- and top_column_depth <= 36 and bottom_column_depth <= 36):
+ bottom_column_depth = extract_depth(
+ bottom_column.section['section size']
+ )
+ if (
+ left_beam_depth <= 36 # noqa: PLR0916, PLR2004
+ and right_beam_depth <= 36 # noqa: PLR2004
+ and left_beam_weight <= 300 # noqa: PLR2004
+ and right_beam_weight <= 300 # noqa: PLR2004
+ and top_column_depth <= 36 # noqa: PLR2004
+ and bottom_column_depth <= 36 # noqa: PLR2004
+ ):
self.is_feasible['geometry limits'] = True
else:
- sys.stderr.write('Beam and beam depth & weight are not acceptable!\n')
+ sys.stderr.write(
+ 'Beam and beam depth & weight are not acceptable!\n'
+ )
self.is_feasible['geometry limits'] = False
elif connection_type == 'top interior':
# Connection has two beams and one column
@@ -152,311 +200,512 @@ def check_column_beam(self, connection_type, left_beam, right_beam, top_column,
left_beam_weight = extract_weight(left_beam.section['section size'])
right_beam_depth = extract_depth(right_beam.section['section size'])
right_beam_weight = extract_weight(right_beam.section['section size'])
- bottom_column_depth = extract_depth(bottom_column.section['section size'])
- if (left_beam_depth <= 36 and right_beam_depth <= 36
- and left_beam_weight <= 300 and right_beam_weight <= 300
- and bottom_column_depth <= 36):
+ bottom_column_depth = extract_depth(
+ bottom_column.section['section size']
+ )
+ if (
+ left_beam_depth <= 36 # noqa: PLR2004
+ and right_beam_depth <= 36 # noqa: PLR2004
+ and left_beam_weight <= 300 # noqa: PLR2004
+ and right_beam_weight <= 300 # noqa: PLR2004
+ and bottom_column_depth <= 36 # noqa: PLR2004
+ ):
self.is_feasible['geometry limits'] = True
else:
- sys.stderr.write('Beam and beam depth & weight are not acceptable!\n')
+ sys.stderr.write(
+ 'Beam and beam depth & weight are not acceptable!\n'
+ )
self.is_feasible['geometry limits'] = False
else:
- sys.stderr.write('Error: wrong type of connection specified!\n No such keyword for connection exists!\n')
+ sys.stderr.write(
+ 'Error: wrong type of connection specified!\n No such keyword for connection exists!\n'
+ )
sys.exit(2)
def extract_reduced_beam_section(self, connection_type, left_beam, right_beam):
- """
- This method is used to extract the RBS dimensions into one (or two) dictionary.
+ """This method is used to extract the RBS dimensions into one (or two) dictionary.
The explanations for input arguments are presented in __init__() function.
:return: one (two) dictionary which contains the RBS dimensions.
- """
- if connection_type == 'typical exterior' or connection_type == 'top exterior':
+ """ # noqa: D205, D401, D404
+ if (
+ connection_type == 'typical exterior' # noqa: PLR1714
+ or connection_type == 'top exterior'
+ ):
# The connection only has one beam in this case
self.left_RBS_dimension = copy.deepcopy(left_beam.RBS_dimension)
- elif connection_type == 'typical interior' or connection_type == 'top interior':
+ elif (
+ connection_type == 'typical interior' # noqa: PLR1714
+ or connection_type == 'top interior'
+ ):
# The connection has two beams at both sides
self.left_RBS_dimension = copy.deepcopy(left_beam.RBS_dimension)
self.right_RBS_dimension = copy.deepcopy(right_beam.RBS_dimension)
else:
- sys.stderr.write('Error: wrong type of connection specified!\nNo such keyword for connection exists!\n')
+ sys.stderr.write(
+ 'Error: wrong type of connection specified!\nNo such keyword for connection exists!\n'
+ )
sys.exit(2)
- def compute_probable_moment_RBS(self, connection_type, steel, left_beam, right_beam):
- """
- This method is used to compute section modulus at RBS center (step 2 and 3 in ANSI Section 5.8)
+ def compute_probable_moment_RBS( # noqa: N802
+ self,
+ connection_type,
+ steel,
+ left_beam,
+ right_beam,
+ ):
+ """This method is used to compute section modulus at RBS center (step 2 and 3 in ANSI Section 5.8)
:return: a dictionary which includes the probable moment at RBS center
- """
- Cpr = (steel.Fy+steel.Fu) / (2*steel.Fy)
- if Cpr >= 1.2:
- Cpr = 1.2
- if connection_type == 'typical exterior' or connection_type == 'top exterior':
- left_Z_RBS = left_beam.section['Zx'] - 2 * left_beam.RBS_dimension['c'] * left_beam.section['tf'] \
- * (left_beam.section['d'] - left_beam.section['tf'])
+ """ # noqa: D205, D400, D401, D404
+ Cpr = (steel.Fy + steel.Fu) / (2 * steel.Fy) # noqa: N806
+ Cpr = min(1.2, Cpr) # noqa: N806
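+ # Cpr = (Fy + Fu)/(2*Fy) is the peak connection strength factor and is capped at 1.2, consistent with the prequalified RBS connection provisions referenced in the docstring.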
+ if (
+ connection_type == 'typical exterior' # noqa: PLR1714
+ or connection_type == 'top exterior'
+ ):
+ left_Z_RBS = left_beam.section['Zx'] - 2 * left_beam.RBS_dimension[ # noqa: N806
+ 'c'
+ ] * left_beam.section['tf'] * (
+ left_beam.section['d'] - left_beam.section['tf']
+ )
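+ # left_Z_RBS is the plastic section modulus at the RBS cut: Zx reduced by the flange material removed, 2*c*tf*(d - tf); Mpr = Cpr*Ry*Fy*Z_RBS below is the probable maximum moment at the RBS center.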
self.moment['Mpr1'] = Cpr * steel.Ry * steel.Fy * left_Z_RBS
- elif connection_type == 'typical interior' or connection_type == 'top interior':
- left_Z_RBS = left_beam.section['Zx'] - 2 * left_beam.RBS_dimension['c'] * left_beam.section['tf'] \
- * (left_beam.section['d'] - left_beam.section['tf'])
+ elif (
+ connection_type == 'typical interior' # noqa: PLR1714
+ or connection_type == 'top interior'
+ ):
+ left_Z_RBS = left_beam.section['Zx'] - 2 * left_beam.RBS_dimension[ # noqa: N806
+ 'c'
+ ] * left_beam.section['tf'] * (
+ left_beam.section['d'] - left_beam.section['tf']
+ )
self.moment['Mpr1'] = Cpr * steel.Ry * steel.Fy * left_Z_RBS
- right_Z_RBS = right_beam.section['Zx'] - 2 * right_beam.RBS_dimension['c'] * right_beam.section['tf'] \
- * (right_beam.section['d'] - right_beam.section['tf'])
+ right_Z_RBS = right_beam.section['Zx'] - 2 * right_beam.RBS_dimension[ # noqa: N806
+ 'c'
+ ] * right_beam.section['tf'] * (
+ right_beam.section['d'] - right_beam.section['tf']
+ )
self.moment['Mpr2'] = Cpr * steel.Ry * steel.Fy * right_Z_RBS
else:
- sys.stderr.write('Error: wrong type of connection specified!\nNo such keyword for connection exists!')
+ sys.stderr.write(
+ 'Error: wrong type of connection specified!\nNo such keyword for connection exists!'
+ )
sys.exit(2)
- def compute_shear_force_RBS(self, connection_type, beam_dead_load, beam_live_load, span, bottom_column):
- """
- This method calculates the shear force at the center of RBS (step 4 in ANSI Section 5.8)
+ def compute_shear_force_RBS( # noqa: N802
+ self,
+ connection_type,
+ beam_dead_load,
+ beam_live_load,
+ span,
+ bottom_column,
+ ):
+ """This method calculates the shear force at the center of RBS (step 4 in ANSI Section 5.8)
:return: a dictionary which includes the shear forces
- """
+ """ # noqa: D205, D400, D401, D404
# Be cautious: beam_dead_load read here is in the unit of lb/ft
# The unit should be converted from lb/ft to kips/inch
- wu = 1.2*(beam_dead_load*0.001/12) + 0.5*(beam_live_load*0.001/12) + 0.2*0
- Sh = self.left_RBS_dimension['a'] + self.left_RBS_dimension['b']/2
- Lh = span*12.0 - 2 * bottom_column.section['d'] - 2 * Sh
- if connection_type == 'typical exterior' or connection_type == 'top exterior':
- self.shear_force['VRBS1'] = 2*self.moment['Mpr1']/Lh + wu*Lh/2
- elif connection_type == 'typical interior' or connection_type == 'top interior':
+ wu = (
+ 1.2 * (beam_dead_load * 0.001 / 12)
+ + 0.5 * (beam_live_load * 0.001 / 12)
+ + 0.2 * 0
+ )
+ Sh = self.left_RBS_dimension['a'] + self.left_RBS_dimension['b'] / 2 # noqa: N806
+ Lh = span * 12.0 - 2 * bottom_column.section['d'] - 2 * Sh # noqa: N806
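+ # wu is the factored gravity line load (1.2*DL + 0.5*LL, converted from lb/ft to kip/in); Sh locates the RBS center relative to the column face and Lh is the distance between the two RBS hinge centers.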
+ if (
+ connection_type == 'typical exterior' # noqa: PLR1714
+ or connection_type == 'top exterior'
+ ):
+ self.shear_force['VRBS1'] = 2 * self.moment['Mpr1'] / Lh + wu * Lh / 2
+ elif (
+ connection_type == 'typical interior' # noqa: PLR1714
+ or connection_type == 'top interior'
+ ):
self.shear_force['VRBS1'] = 2 * self.moment['Mpr1'] / Lh + wu * Lh / 2
self.shear_force['VRBS2'] = 2 * self.moment['Mpr2'] / Lh - wu * Lh / 2
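+ # The RBS shear combines the plastic mechanism term 2*Mpr/Lh with the gravity shear wu*Lh/2, which adds on one side of the connection and subtracts on the other.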
else:
- sys.stderr.write('Error: wrong type of connection specified!\nNo such keyword for connection exists!\n')
+ sys.stderr.write(
+ 'Error: wrong type of connection specified!\nNo such keyword for connection exists!\n'
+ )
sys.exit(2)
def compute_probable_moment_column_face(self, connection_type):
- """
- This method calculates the probable maximum moment at the face of the column. (step 5 in ANSI Section 5.8)
+ """This method calculates the probable maximum moment at the face of the column. (step 5 in ANSI Section 5.8)
:return: Store probable maximum moment at column face into the dictionary
- """
- Sh = self.left_RBS_dimension['a'] + self.left_RBS_dimension['b']/2
- if connection_type == 'typical exterior' or connection_type == 'top exterior':
- self.moment['Mf1'] = self.moment['Mpr1'] + self.shear_force['VRBS1']*Sh
- elif connection_type == 'typical interior' or connection_type == 'top interior':
- self.moment['Mf1'] = self.moment['Mpr1'] + self.shear_force['VRBS1']*Sh
- self.moment['Mf2'] = self.moment['Mpr2'] + self.shear_force['VRBS2']*Sh
+ """ # noqa: D205, D400, D401, D404
+ Sh = self.left_RBS_dimension['a'] + self.left_RBS_dimension['b'] / 2 # noqa: N806
+ if (
+ connection_type == 'typical exterior' # noqa: PLR1714
+ or connection_type == 'top exterior'
+ ):
+ self.moment['Mf1'] = self.moment['Mpr1'] + self.shear_force['VRBS1'] * Sh
+ elif (
+ connection_type == 'typical interior' # noqa: PLR1714
+ or connection_type == 'top interior'
+ ):
+ self.moment['Mf1'] = self.moment['Mpr1'] + self.shear_force['VRBS1'] * Sh
+ self.moment['Mf2'] = self.moment['Mpr2'] + self.shear_force['VRBS2'] * Sh
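+ # Mf = Mpr + VRBS*Sh extrapolates the probable moment from the RBS center to the column face using the RBS shear and the offset Sh.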
else:
- sys.stderr.write('Error: wrong type of connection specified!\nNo such keyword for connection exists!\n')
+ sys.stderr.write(
+ 'Error: wrong type of connection specified!\nNo such keyword for connection exists!\n'
+ )
sys.exit(2)
def compute_plastic_moment(self, connection_type, steel, left_beam, right_beam):
- """
- This method calculates the plastic moment of the beam based on expected yield stress.
+ """This method calculates the plastic moment of the beam based on expected yield stress.
(step 6 in ANSI Section 5.8)
:return: Store the plastic moment to the dictionary.
- """
- if connection_type == 'typical exterior' or connection_type == 'top exterior':
+ """ # noqa: D205, D401, D404
+ if (
+ connection_type == 'typical exterior' # noqa: PLR1714
+ or connection_type == 'top exterior'
+ ):
self.moment['Mpe1'] = steel.Ry * steel.Fy * left_beam.section['Zx']
- elif connection_type == 'typical interior' or connection_type == 'top interior':
+ elif (
+ connection_type == 'typical interior' # noqa: PLR1714
+ or connection_type == 'top interior'
+ ):
self.moment['Mpe1'] = steel.Ry * steel.Fy * left_beam.section['Zx']
self.moment['Mpe2'] = steel.Ry * steel.Fy * right_beam.section['Zx']
else:
- sys.stderr.write('Error: wrong type of connection specified!\nNo such keyword for connection exists!\n')
+ sys.stderr.write(
+ 'Error: wrong type of connection specified!\nNo such keyword for connection exists!\n'
+ )
sys.exit(2)
def check_moment_column_face(self, connection_type):
- """
- This method checks whether the plastic moment is greater than the actual moment at column face.
+ """This method checks whether the plastic moment is greater than the actual moment at column face.
(step 7 in ANSI Section 5.8)
:return: boolean result stored in is_feasible dictionary.
- """
+ """ # noqa: D205, D401, D404
phi_d = 1.0
- if connection_type == 'typical exterior' or connection_type == 'top exterior':
- if phi_d*self.moment['Mpe1'] >= self.moment['Mf1']:
+ if (
+ connection_type == 'typical exterior' # noqa: PLR1714
+ or connection_type == 'top exterior'
+ ):
+ if phi_d * self.moment['Mpe1'] >= self.moment['Mf1']:
self.is_feasible['flexural strength'] = True
else:
- sys.stderr.write('Plastic moment at column face is not sufficient!\n')
+ sys.stderr.write(
+ 'Plastic moment at column face is not sufficient!\n'
+ )
self.is_feasible['flexural strength'] = False
- elif connection_type == 'typical interior' or connection_type == 'top interior':
- if (phi_d*self.moment['Mpe1'] >= self.moment['Mf1']
- and phi_d*self.moment['Mpe2'] >= self.moment['Mf2']):
+ elif (
+ connection_type == 'typical interior' # noqa: PLR1714
+ or connection_type == 'top interior'
+ ):
+ if (
+ phi_d * self.moment['Mpe1'] >= self.moment['Mf1']
+ and phi_d * self.moment['Mpe2'] >= self.moment['Mf2']
+ ):
self.is_feasible['flexural strength'] = True
else:
- sys.stderr.write('Plastic moment at column face is not sufficient!\n')
+ sys.stderr.write(
+ 'Plastic moment at column face is not sufficient!\n'
+ )
self.is_feasible['flexural strength'] = False
else:
- sys.stderr.write('Error: wrong type of connection specified!\nNo such keyword for connection exists!\n')
+ sys.stderr.write(
+ 'Error: wrong type of connection specified!\nNo such keyword for connection exists!\n'
+ )
sys.exit(2)
- def check_shear_strength(self, connection_type, beam_dead_load, beam_live_load, left_beam, right_beam):
- """
- This method checks whether the beam shear strength is sufficient for the required shear strength.
+ def check_shear_strength(
+ self,
+ connection_type,
+ beam_dead_load,
+ beam_live_load,
+ left_beam,
+ right_beam,
+ ):
+ """This method checks whether the beam shear strength is sufficient for the required shear strength.
(step 8 in ANSI Section 5.8)
:return: boolean result stored in is_feasible dictionary.
- """
- wu = 1.2 * (beam_dead_load * 0.001 / 12) + 0.5 * (beam_live_load * 0.001 / 12) + 0.2 * 0
- Sh = self.left_RBS_dimension['a'] + self.left_RBS_dimension['b'] / 2
- if connection_type == 'typical exterior' or connection_type == 'top exterior':
- self.shear_force['Vu1'] = self.shear_force['VRBS1'] + wu*Sh
+ """ # noqa: D205, D401, D404
+ wu = (
+ 1.2 * (beam_dead_load * 0.001 / 12)
+ + 0.5 * (beam_live_load * 0.001 / 12)
+ + 0.2 * 0
+ )
+ Sh = self.left_RBS_dimension['a'] + self.left_RBS_dimension['b'] / 2 # noqa: N806
+ if (
+ connection_type == 'typical exterior' # noqa: PLR1714
+ or connection_type == 'top exterior'
+ ):
+ self.shear_force['Vu1'] = self.shear_force['VRBS1'] + wu * Sh
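+ # Required beam shear at the column face: the RBS shear plus the gravity load acting over the Sh segment.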
if left_beam.strength['shear'] >= self.shear_force['Vu1']:
self.is_feasible['shear strength'] = True
else:
sys.stderr.write('Shear strength is not sufficient!\n')
self.is_feasible['shear strength'] = False
- elif connection_type == 'typical interior' or connection_type == 'top interior':
+ elif (
+ connection_type == 'typical interior' # noqa: PLR1714
+ or connection_type == 'top interior'
+ ):
self.shear_force['Vu1'] = self.shear_force['VRBS1'] + wu * Sh
self.shear_force['Vu2'] = self.shear_force['VRBS2'] + wu * Sh
- if (left_beam.strength['shear'] >= self.shear_force['Vu1']
- and right_beam.strength['shear'] >= self.shear_force['Vu2']):
+ if (
+ left_beam.strength['shear'] >= self.shear_force['Vu1']
+ and right_beam.strength['shear'] >= self.shear_force['Vu2']
+ ):
self.is_feasible['shear strength'] = True
else:
sys.stderr.write('Shear strength is not sufficient!\n')
self.is_feasible['shear strength'] = False
else:
- sys.stderr.write('Error: wrong type of connection specified!\nNo such keyword for connection exists!\n')
+ sys.stderr.write(
+ 'Error: wrong type of connection specified!\nNo such keyword for connection exists!\n'
+ )
sys.exit(2)
- def check_column_beam_relationships(self, connection_type, steel, left_beam, right_beam, top_column, bottom_column):
- """
- This method examines whether the "strong-column-weak-beam" criteria is satisfied.
+ def check_column_beam_relationships( # noqa: C901
+ self,
+ connection_type,
+ steel,
+ left_beam,
+ right_beam,
+ top_column,
+ bottom_column,
+ ):
+ """This method examines whether the "strong-column-weak-beam" criterion is satisfied.
(step 11 in ANSI Section 5.8)
:return: boolean result stored in is_feasible dictionary.
- """
+ """ # noqa: D205, D401, D404
if connection_type == 'top exterior':
# For column in one-story building or top story:
# Strong column weak beam is exempted if the column axial load ratio < 0.3 for all load combinations except
# those using amplified seismic load.
# If not the case, still need to check the Mpc/Mpb ratio.
- if bottom_column.demand['axial']/bottom_column.strength['axial'] < 0.3:
+ if bottom_column.demand['axial'] / bottom_column.strength['axial'] < 0.3: # noqa: PLR2004
self.is_feasible['SCWB'] = True
else:
- Puc_bot = bottom_column.demand['axial']
- Ag_bot = bottom_column.section['A']
- ht_bot = bottom_column.unbraced_length['x']*12.2 # Be cautious: convert the unit from ft to inch
- Zx_bot = bottom_column.section['Zx']
+ Puc_bot = bottom_column.demand['axial'] # noqa: N806
+ Ag_bot = bottom_column.section['A'] # noqa: N806
+ ht_bot = (
+ bottom_column.unbraced_length['x'] * 12.0  # 12.0 in/ft, as in the other branches
+ ) # Be cautious: convert the unit from ft to inch
+ Zx_bot = bottom_column.section['Zx'] # noqa: N806
db = left_beam.section['d']
# Compute the moment summation for column
- self.moment['Mpc'] = Zx_bot * (steel.Fy-Puc_bot/Ag_bot) * (ht_bot/(ht_bot-db/2))
+ self.moment['Mpc'] = (
+ Zx_bot
+ * (steel.Fy - Puc_bot / Ag_bot)
+ * (ht_bot / (ht_bot - db / 2))
+ )
# Compute the moment summation for beam
- self.moment['Muv'] = self.shear_force['VRBS1'] * (self.left_RBS_dimension['a']
- + self.left_RBS_dimension['b']/2
- + bottom_column.section['d']/2)
+ self.moment['Muv'] = self.shear_force['VRBS1'] * (
+ self.left_RBS_dimension['a']
+ + self.left_RBS_dimension['b'] / 2
+ + bottom_column.section['d'] / 2
+ )
self.moment['Mpb'] = self.moment['Mpr1'] + self.moment['Muv']
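+ # Muv carries the RBS shear to the column centerline (moment arm a + b/2 + dc/2), so Mpb = Mpr + Muv is the beam moment demand entering the strong-column-weak-beam ratio.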
# Perform the strong column weak beam check
- if self.moment['Mpc']/self.moment['Mpb'] >= STRONG_COLUMN_WEAK_BEAM_RATIO:
+ if (
+ self.moment['Mpc'] / self.moment['Mpb']
+ >= STRONG_COLUMN_WEAK_BEAM_RATIO
+ ):
self.is_feasible['SCWB'] = True
else:
- sys.stderr.write('Strong column weak beam (top exterior) is not satisfied!\n')
+ sys.stderr.write(
+ 'Strong column weak beam (top exterior) is not satisfied!\n'
+ )
self.is_feasible['SCWB'] = False
elif connection_type == 'top interior':
# For column in one-story building or top story:
# Strong column weak beam is exempted if the column axial load ratio < 0.3 for all load combinations except
# those using amplified seismic load.
# If not the case, still need to check the Mpc/Mpb ratio.
- if bottom_column.demand['axial']/bottom_column.strength['axial'] < 0.3:
+ if bottom_column.demand['axial'] / bottom_column.strength['axial'] < 0.3: # noqa: PLR2004
self.is_feasible['SCWB'] = True
else:
- Puc_bot = bottom_column.demand['axial']
- Ag_bot = bottom_column.section['A']
- h_bot = bottom_column.unbraced_length['x']*12.0 # Be cautious: convert the unit from ft to inch
- Zx_bot = bottom_column.section['Zx']
+ Puc_bot = bottom_column.demand['axial'] # noqa: N806
+ Ag_bot = bottom_column.section['A'] # noqa: N806
+ h_bot = (
+ bottom_column.unbraced_length['x'] * 12.0
+ ) # Be cautious: convert the unit from ft to inch
+ Zx_bot = bottom_column.section['Zx'] # noqa: N806
# Generally the left and right beams have identical sizes
db = (left_beam.section['d'] + right_beam.section['d']) / 2
# Compute the moment summation for column
- self.moment['Mpc'] = Zx_bot * (steel.Fy-Puc_bot/Ag_bot) * (h_bot/(h_bot-db/2))
+ self.moment['Mpc'] = (
+ Zx_bot
+ * (steel.Fy - Puc_bot / Ag_bot)
+ * (h_bot / (h_bot - db / 2))
+ )
# Compute the moment summation for beam
- self.moment['Muv'] = (self.shear_force['VRBS1']+self.shear_force['VRBS2']) \
- * (self.left_RBS_dimension['a']+self.left_RBS_dimension['b']/2
- +bottom_column.section['d']/2)
- self.moment['Mpb'] = self.moment['Mpr1'] + self.moment['Mpr2'] + self.moment['Muv']
+ self.moment['Muv'] = (
+ self.shear_force['VRBS1'] + self.shear_force['VRBS2']
+ ) * (
+ self.left_RBS_dimension['a']
+ + self.left_RBS_dimension['b'] / 2
+ + bottom_column.section['d'] / 2
+ )
+ self.moment['Mpb'] = (
+ self.moment['Mpr1'] + self.moment['Mpr2'] + self.moment['Muv']
+ )
# Perform the strong column weak beam check
- if self.moment['Mpc']/self.moment['Mpb'] >= STRONG_COLUMN_WEAK_BEAM_RATIO:
+ if (
+ self.moment['Mpc'] / self.moment['Mpb']
+ >= STRONG_COLUMN_WEAK_BEAM_RATIO
+ ):
self.is_feasible['SCWB'] = True
else:
- sys.stderr.write('Strong column weak beam (top interior) is not satisfied!\n')
+ sys.stderr.write(
+ 'Strong column weak beam (top interior) is not satisfied!\n'
+ )
self.is_feasible['SCWB'] = False
elif connection_type == 'typical exterior':
# This connection has two columns and one beam
- Puc_top = top_column.demand['axial']
- Puc_bot = bottom_column.demand['axial']
- Ag_top = top_column.section['A']
- Ag_bot = bottom_column.section['A']
- ht_top = top_column.unbraced_length['x']*12.0 # Be cautious: convert the unit from ft to inch
- ht_bot = bottom_column.unbraced_length['x']*12.0 # Be cautious: convert the unit from ft to inch
- Zx_top = top_column.section['Zx']
- Zx_bot = bottom_column.section['Zx']
+ Puc_top = top_column.demand['axial'] # noqa: N806
+ Puc_bot = bottom_column.demand['axial'] # noqa: N806
+ Ag_top = top_column.section['A'] # noqa: N806
+ Ag_bot = bottom_column.section['A'] # noqa: N806
+ ht_top = (
+ top_column.unbraced_length['x'] * 12.0
+ ) # Be cautious: convert the unit from ft to inch
+ ht_bot = (
+ bottom_column.unbraced_length['x'] * 12.0
+ ) # Be cautious: convert the unit from ft to inch
+ Zx_top = top_column.section['Zx'] # noqa: N806
+ Zx_bot = bottom_column.section['Zx'] # noqa: N806
db = left_beam.section['d']
# Compute the moment summation for column
- self.moment['Mpc'] = Zx_top * (steel.Fy-Puc_top/Ag_top) * (ht_top/(ht_top-db/2)) \
- + Zx_bot * (steel.Fy-Puc_bot/Ag_bot) * (ht_bot/(ht_bot-db/2))
+ self.moment['Mpc'] = Zx_top * (steel.Fy - Puc_top / Ag_top) * (
+ ht_top / (ht_top - db / 2)
+ ) + Zx_bot * (steel.Fy - Puc_bot / Ag_bot) * (ht_bot / (ht_bot - db / 2))
# Compute the moment summation for beam
- self.moment['Muv'] = self.shear_force['VRBS1'] * (self.left_RBS_dimension['a']
- + self.left_RBS_dimension['b']/2
- + bottom_column.section['d']/2)
+ self.moment['Muv'] = self.shear_force['VRBS1'] * (
+ self.left_RBS_dimension['a']
+ + self.left_RBS_dimension['b'] / 2
+ + bottom_column.section['d'] / 2
+ )
self.moment['Mpb'] = self.moment['Mpr1'] + self.moment['Muv']
# Perform the strong column weak beam check
- if self.moment['Mpc']/self.moment['Mpb'] >= STRONG_COLUMN_WEAK_BEAM_RATIO:
+ if (
+ self.moment['Mpc'] / self.moment['Mpb']
+ >= STRONG_COLUMN_WEAK_BEAM_RATIO
+ ):
self.is_feasible['SCWB'] = True
else:
sys.stderr.write('Strong column weak beam is not satisfied!\n')
self.is_feasible['SCWB'] = False
elif connection_type == 'typical interior':
# This connection has two columns and two beams
- Puc_top = top_column.demand['axial']
- Puc_bot = bottom_column.demand['axial']
- Ag_top = top_column.section['A']
- Ag_bot = bottom_column.section['A']
- h_top = top_column.unbraced_length['x']*12.0 # Be cautious: convert the unit from ft to inch
- h_bot = bottom_column.unbraced_length['x']*12.0 # Be cautious: convert the unit from ft to inch
- Zx_top = top_column.section['Zx']
- Zx_bot = bottom_column.section['Zx']
+ Puc_top = top_column.demand['axial'] # noqa: N806
+ Puc_bot = bottom_column.demand['axial'] # noqa: N806
+ Ag_top = top_column.section['A'] # noqa: N806
+ Ag_bot = bottom_column.section['A'] # noqa: N806
+ h_top = (
+ top_column.unbraced_length['x'] * 12.0
+ ) # Be cautious: convert the unit from ft to inch
+ h_bot = (
+ bottom_column.unbraced_length['x'] * 12.0
+ ) # Be cautious: convert the unit from ft to inch
+ Zx_top = top_column.section['Zx'] # noqa: N806
+ Zx_bot = bottom_column.section['Zx'] # noqa: N806
# Generally the left and right beams have identical sizes
db = (left_beam.section['d'] + right_beam.section['d']) / 2
# Compute the moment summation for column
- self.moment['Mpc'] = Zx_top * (steel.Fy - Puc_top / Ag_top) * (h_top / (h_top - db / 2)) \
- + Zx_bot * (steel.Fy - Puc_bot / Ag_bot) * (h_bot / (h_bot - db / 2))
+ self.moment['Mpc'] = Zx_top * (steel.Fy - Puc_top / Ag_top) * (
+ h_top / (h_top - db / 2)
+ ) + Zx_bot * (steel.Fy - Puc_bot / Ag_bot) * (h_bot / (h_bot - db / 2))
# Compute the moment summation for beam
- self.moment['Muv'] = (self.shear_force['VRBS1']+self.shear_force['VRBS2']) \
- * (self.left_RBS_dimension['a']+self.left_RBS_dimension['b']/2
- + bottom_column.section['d']/2)
- self.moment['Mpb'] = self.moment['Mpr1'] + self.moment['Mpr2'] + self.moment['Muv']
+ self.moment['Muv'] = (
+ self.shear_force['VRBS1'] + self.shear_force['VRBS2']
+ ) * (
+ self.left_RBS_dimension['a']
+ + self.left_RBS_dimension['b'] / 2
+ + bottom_column.section['d'] / 2
+ )
+ self.moment['Mpb'] = (
+ self.moment['Mpr1'] + self.moment['Mpr2'] + self.moment['Muv']
+ )
# Perform the strong column weak beam check
- if self.moment['Mpc'] / self.moment['Mpb'] >= STRONG_COLUMN_WEAK_BEAM_RATIO:
+ if (
+ self.moment['Mpc'] / self.moment['Mpb']
+ >= STRONG_COLUMN_WEAK_BEAM_RATIO
+ ):
self.is_feasible['SCWB'] = True
else:
sys.stderr.write('Strong column weak beam is not satisfied!\n')
self.is_feasible['SCWB'] = False
else:
- sys.stderr.write('Error: wrong type of connection specified!\nNo such keyword for connection exists!\n')
+ sys.stderr.write(
+ 'Error: wrong type of connection specified!\nNo such keyword for connection exists!\n'
+ )
sys.exit(2)
- def determine_doubler_plate(self, connection_type, steel, left_beam, right_beam, bottom_column, top_column):
- """
- This method determines the panel zone thickness (doubler plates).
+ def determine_doubler_plate(
+ self,
+ connection_type,
+ steel,
+ left_beam,
+ right_beam,
+ bottom_column,
+ top_column,
+ ):
+ """This method determines the panel zone thickness (doubler plates).
:return: a scalar which denotes the doubler plate thickness.
- """
+ """ # noqa: D205, D401, D404
if connection_type == 'top exterior':
# Connection has one left beam and one bottom column
- h_bot = bottom_column.unbraced_length['x']*12.0 # Be cautious: convert the unit from ft to inch
+ h_bot = (
+ bottom_column.unbraced_length['x'] * 12.0
+ ) # Be cautious: convert the unit from ft to inch
db = left_beam.section['d']
tf = left_beam.section['tf']
- self.shear_force['Vc'] = (self.moment['Mf1']+0) / (h_bot/2+0)
- self.shear_force['Ru'] = (self.moment['Mf1']+0)/(db-tf) - self.shear_force['Vc']
+ self.shear_force['Vc'] = (self.moment['Mf1'] + 0) / (h_bot / 2 + 0)
+ self.shear_force['Ru'] = (self.moment['Mf1'] + 0) / (
+ db - tf
+ ) - self.shear_force['Vc']
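+ # Vc is the column shear consistent with the column-face moment acting over the story height, and Ru = Mf/(db - tf) - Vc is the panel zone shear demand from the beam flange force couple.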
elif connection_type == 'typical exterior':
# Connection has one left beam and two columns
- h_bot = bottom_column.unbraced_length['x']*12.0 # Be cautious: convert the unit from ft to inch
- h_top = top_column.unbraced_length['x']*12.0 # Be cautious: convert the unit from ft to inch
+ h_bot = (
+ bottom_column.unbraced_length['x'] * 12.0
+ ) # Be cautious: convert the unit from ft to inch
+ h_top = (
+ top_column.unbraced_length['x'] * 12.0
+ ) # Be cautious: convert the unit from ft to inch
db = left_beam.section['d']
tf = left_beam.section['tf']
- self.shear_force['Vc'] = (self.moment['Mf1']+0) / (h_bot/2+h_top/2)
- self.shear_force['Ru'] = (self.moment['Mf1']+0)/(db-tf) - self.shear_force['Vc']
+ self.shear_force['Vc'] = (self.moment['Mf1'] + 0) / (
+ h_bot / 2 + h_top / 2
+ )
+ self.shear_force['Ru'] = (self.moment['Mf1'] + 0) / (
+ db - tf
+ ) - self.shear_force['Vc']
elif connection_type == 'top interior':
# Connection has two beams and one bottom column
- h_bot = bottom_column.unbraced_length['x']*12.0 # Be cautious: convert the unit from ft to inch
+ h_bot = (
+ bottom_column.unbraced_length['x'] * 12.0
+ ) # Be cautious: convert the unit from ft to inch
# Actually, the left and right beams have identical sizes
- db = (left_beam.section['d'] + right_beam.section['d'])/2
- tf = (left_beam.section['tf'] + right_beam.section['tf'])/2
- self.shear_force['Vc'] = (self.moment['Mf1']+self.moment['Mf2']) / (h_bot/2)
- self.shear_force['Ru'] = (self.moment['Mf1']+self.moment['Mf2'])/(db-tf) - self.shear_force['Vc']
+ db = (left_beam.section['d'] + right_beam.section['d']) / 2
+ tf = (left_beam.section['tf'] + right_beam.section['tf']) / 2
+ self.shear_force['Vc'] = (self.moment['Mf1'] + self.moment['Mf2']) / (
+ h_bot / 2
+ )
+ self.shear_force['Ru'] = (self.moment['Mf1'] + self.moment['Mf2']) / (
+ db - tf
+ ) - self.shear_force['Vc']
elif connection_type == 'typical interior':
# Connection has two beams and two columns
- h_bot = bottom_column.unbraced_length['x']*12.0 # Be cautious: convert the unit from ft to inch
- h_top = top_column.unbraced_length['x']*12.0 # Be cautious: convert the unit from ft to inch
+ h_bot = (
+ bottom_column.unbraced_length['x'] * 12.0
+ ) # Be cautious: convert the unit from ft to inch
+ h_top = (
+ top_column.unbraced_length['x'] * 12.0
+ ) # Be cautious: convert the unit from ft to inch
db = (left_beam.section['d'] + right_beam.section['d']) / 2
tf = (left_beam.section['tf'] + right_beam.section['tf']) / 2
- self.shear_force['Vc'] = (self.moment['Mf1']+self.moment['Mf2']) / (h_bot/2+h_top/2)
- self.shear_force['Ru'] = (self.moment['Mf1']+self.moment['Mf2'])/(db-tf) - self.shear_force['Vc']
+ self.shear_force['Vc'] = (self.moment['Mf1'] + self.moment['Mf2']) / (
+ h_bot / 2 + h_top / 2
+ )
+ self.shear_force['Ru'] = (self.moment['Mf1'] + self.moment['Mf2']) / (
+ db - tf
+ ) - self.shear_force['Vc']
else:
- sys.stderr.write('Error: wrong type of connection specified!\nNo such keyword for connection exists!\n')
+ sys.stderr.write(
+ 'Error: wrong type of connection specified!\nNo such keyword for connection exists!\n'
+ )
sys.exit(2)
# Compute the shear strength of the panel zone
phi = 1.0
@@ -465,27 +714,30 @@ def determine_doubler_plate(self, connection_type, steel, left_beam, right_beam,
bcf = bottom_column.section['bf']
tcf = bottom_column.section['tf']
db = left_beam.section['d']
- self.shear_force['Rn'] = 0.60 * steel.Fy * dc * tw * (1+(3*bcf*tcf**2)/(db*dc*tw))
+ self.shear_force['Rn'] = (
+ 0.60 * steel.Fy * dc * tw * (1 + (3 * bcf * tcf**2) / (db * dc * tw))
+ )
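+ # Nominal panel zone shear strength; the (1 + 3*bcf*tcf**2/(db*dc*tw)) term adds the column flange bending contribution, in the form used by the AISC panel zone provisions.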
# Compute the doubler plate thickness
- if phi*self.shear_force['Rn'] >= self.shear_force['Ru']:
+ if phi * self.shear_force['Rn'] >= self.shear_force['Ru']:
# Panel zone shear strength is sufficient ==> no need for doubler plate
self.doubler_plate_thickness = 0
else:
# Panel zone shear strength is not sufficient ==> need doubler plate
- required_tp = (self.shear_force['Ru'] - 0.60*steel.Fy*(3*bcf*tcf**2)/db) / (0.60*steel.Fy*dc)
+ required_tp = (
+ self.shear_force['Ru'] - 0.60 * steel.Fy * (3 * bcf * tcf**2) / db
+ ) / (0.60 * steel.Fy * dc)
tp = 0.25 # Assumed doubler plate thickness
while tp < required_tp:
tp += 0.25 # Update the thickness at an increment of 0.25 until it reaches the requirement
self.doubler_plate_thickness = tp
def check_flag(self):
- """
- This method is used to test whether the connection passed all checks.
+ """This method is used to test whether the connection passed all checks.
:return: a boolean variable indicating whether the connection is feasible or not.
- """
+ """ # noqa: D205, D401, D404
# Loop over each checking result to see if it is feasible or not
self.flag = True
- for key in self.is_feasible.keys():
- if self.is_feasible[key] == False:
+ for key in self.is_feasible.keys(): # noqa: SIM118
+ if self.is_feasible[key] == False: # noqa: E712
self.flag = False
return self.flag
diff --git a/modules/createSAM/AutoSDA/elastic_analysis.py b/modules/createSAM/AutoSDA/elastic_analysis.py
index 651278955..662254ee0 100644
--- a/modules/createSAM/AutoSDA/elastic_analysis.py
+++ b/modules/createSAM/AutoSDA/elastic_analysis.py
@@ -1,4 +1,4 @@
-# This file is used to include all user defined classes and functions
+# This file is used to include all user defined classes and functions # noqa: CPY001, D100, INP001
# Developed by GUAN, XINGQUAN @ UCLA in June 2018
# Updated in Sept. 2018
@@ -6,19 +6,19 @@
# Last revision: 09/2020
import os
-import subprocess
import shutil
+import subprocess # noqa: S404
+from pathlib import Path
from global_variables import ACCIDENTAL_TORSION
-from pathlib import Path
# #########################################################################
# Generate OpenSees model (write .tcl files) #
# #########################################################################
-class ElasticAnalysis(object):
- """
- This class generates the .tcl files required for elastic analysis. It includes .tcl files for the following modules:
+
+class ElasticAnalysis:
+ """This class generates the .tcl files required for elastic analysis. It includes .tcl files for the following modules:
(1) OpenSees nodes
(2) boundary condition
(3) floor constraint
@@ -37,11 +37,10 @@ class ElasticAnalysis(object):
(16) gravity and earthquake loads combination
(17) copy baseline .tcl files
(18) run OpenSees.exe
- """
+ """ # noqa: D205, D400, D404
- def __init__(self, building, for_drift_only=False, for_period_only=False):
- """
- This function is used to call all methods to write .tcl files required for an elastic analysis OpenSees model.
+ def __init__(self, building, for_drift_only=False, for_period_only=False): # noqa: FBT002
+ """This function is used to call all methods to write .tcl files required for an elastic analysis OpenSees model.
:param building: a class defined in "building_information.py" file
:param for_drift_only: a boolean variable.
True means we only perform the elastic analysis under GravityEarthquake loads.
@@ -49,9 +48,11 @@ def __init__(self, building, for_drift_only=False, for_period_only=False):
:param for_period_only: a boolean variable.
True means we only perform the eigen value analysis to obtain the period
Otherwise, all load types will be considered.
- """
+ """ # noqa: D205, D401, D404
# Change the working directory to folder where .tcl files will be saved
- Path(building.directory['building elastic model']).mkdir(parents=True, exist_ok=True)
+ Path(building.directory['building elastic model']).mkdir(
+ parents=True, exist_ok=True
+ )
os.chdir(building.directory['building elastic model'])
@@ -78,660 +79,993 @@ def __init__(self, building, for_drift_only=False, for_period_only=False):
# Call method to run OpenSees.exe for performing elastic analysis
self.run_OpenSees_program(building)
- def write_nodes(self, building):
+ def write_nodes(self, building): # noqa: D102, PLR6301
# Create a .tcl file and write the node information
- with open('DefineNodes2DModel.tcl', 'w') as tclfile:
- tclfile.write("# This file will be used to define all nodes \n") # Introduce the file usage
- tclfile.write("# Units: inch \n\n\n") # Explain the units
-
- tclfile.write("# Set bay width and story height \n")
- tclfile.write("set\tBayWidth\t[expr %.2f*12]; \n" % (building.geometry['X bay width']))
- tclfile.write("set\tFirstStory\t[expr %.2f*12]; \n" % (building.geometry['first story height']))
- tclfile.write("set\tTypicalStory\t[expr %.2f*12]; \n\n\n" % (building.geometry['typical story height']))
+ with open('DefineNodes2DModel.tcl', 'w') as tclfile: # noqa: PLW1514, PTH123
+ tclfile.write(
+ '# This file will be used to define all nodes \n'
+ ) # Introduce the file usage
+ tclfile.write('# Units: inch \n\n\n') # Explain the units
+
+ tclfile.write('# Set bay width and story height \n')
+ tclfile.write(
+ 'set\tBayWidth\t[expr %.2f*12]; \n' # noqa: UP031
+ % (building.geometry['X bay width'])
+ )
+ tclfile.write(
+ 'set\tFirstStory\t[expr %.2f*12]; \n' # noqa: UP031
+ % (building.geometry['first story height'])
+ )
+ tclfile.write(
+ 'set\tTypicalStory\t[expr %.2f*12]; \n\n\n' # noqa: UP031
+ % (building.geometry['typical story height'])
+ )
# Write the nodes at beam column intersection points
- tclfile.write("# Define nodes at corner of frames \n")
- for i in range(1, building.geometry['number of story']+2): # i is floor level number
- tclfile.write("# Level %i \n" % i)
- for j in range(1, building.geometry['number of X bay']+2): # j is column label
- tclfile.write("node\t%i%i%i" % (j, i, 1)) # Node label
- tclfile.write("\t[expr %i*$BayWidth]" % (j - 1)) # X coordinate
- if i <= 2:
- tclfile.write("\t[expr %i*$FirstStory];" % (i - 1)) # Y coordinate
- tclfile.write("\t # Column #%i \n" % j) # Comments to explain the column label
+ tclfile.write('# Define nodes at corner of frames \n')
+ for i in range(
+ 1, building.geometry['number of story'] + 2
+ ): # i is floor level number
+ tclfile.write('# Level %i \n' % i)
+ for j in range(
+ 1, building.geometry['number of X bay'] + 2
+ ): # j is column label
+ tclfile.write('node\t%i%i%i' % (j, i, 1)) # Node label
+ tclfile.write('\t[expr %i*$BayWidth]' % (j - 1)) # X coordinate
+ if i <= 2: # noqa: PLR2004
+ tclfile.write(
+ '\t[expr %i*$FirstStory];' % (i - 1)
+ ) # Y coordinate
+ tclfile.write(
+ '\t # Column #%i \n' % j
+ ) # Comments to explain the column label
else:
- tclfile.write("\t[expr 1*$FirstStory+%i*$TypicalStory];" % (i - 2))
- tclfile.write("\t # Column #%i \n" % j)
- tclfile.write("\n")
- tclfile.write("# puts \"Nodes at frame corner defined\" \n\n")
+ tclfile.write(
+ '\t[expr 1*$FirstStory+%i*$TypicalStory];' % (i - 2)
+ )
+ tclfile.write('\t # Column #%i \n' % j)
+ tclfile.write('\n')
+ tclfile.write('# puts "Nodes at frame corner defined" \n\n')
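+ # Frame corner nodes are tagged as <pier><level>1, matching the '%i%i%i' % (j, i, 1) pattern above.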
# Write the nodes for leaning column
- tclfile.write("# Define nodes for leaning column \n")
- for i in range(1, building.geometry['number of story']+2):
- tclfile.write("node\t%i%i" % (building.geometry['number of X bay']+2, i)) # Node label
- tclfile.write("\t[expr %i*$BayWidth]" % (building.geometry['number of X bay']+1)) # X coordinate
- if i <= 2:
- tclfile.write("\t[expr %i*$FirstStory]; " % (i-1)) # Y coordinate
- tclfile.write("\t# Level %i\n" % i) # Comments to explain floor level
+ tclfile.write('# Define nodes for leaning column \n')
+ for i in range(1, building.geometry['number of story'] + 2):
+ tclfile.write(
+ 'node\t%i%i' % (building.geometry['number of X bay'] + 2, i)
+ ) # Node label
+ tclfile.write(
+ '\t[expr %i*$BayWidth]'
+ % (building.geometry['number of X bay'] + 1)
+ ) # X coordinate
+ if i <= 2: # noqa: PLR2004
+ tclfile.write(
+ '\t[expr %i*$FirstStory]; ' % (i - 1)
+ ) # Y coordinate
+ tclfile.write(
+ '\t# Level %i\n' % i
+ ) # Comments to explain floor level
else:
- tclfile.write("\t[expr 1*$FirstStory+%i*$TypicalStory];" % (i-2))
- tclfile.write("\t# Level %i\n" % i)
- tclfile.write("\n")
- tclfile.write("# puts \"Nodes for leaning column defined\" \n\n")
+ tclfile.write(
+ '\t[expr 1*$FirstStory+%i*$TypicalStory];' % (i - 2)
+ )
+ tclfile.write('\t# Level %i\n' % i)
+ tclfile.write('\n')
+ tclfile.write('# puts "Nodes for leaning column defined" \n\n')
# Write the extra nodes for leaning column springs
- tclfile.write("# Define extra nodes needed to define leaning column springs \n")
- for i in range(2, building.geometry['number of story']+2):
+ tclfile.write(
+ '# Define extra nodes needed to define leaning column springs \n'
+ )
+ for i in range(2, building.geometry['number of story'] + 2):
# The node below floor level
- tclfile.write("node\t%i%i%i" % (building.geometry['number of X bay']+2, i, 2)) # Node label
- tclfile.write("\t[expr %i*$BayWidth]" % (building.geometry['number of X bay'] + 1)) # X coordinate
- tclfile.write("\t[expr 1*$FirstStory+%i*$TypicalStory];" % (i-2)) # Y coordinate
- tclfile.write("\t# Node below floor level %i\n" % i)
+ tclfile.write(
+ 'node\t%i%i%i' % (building.geometry['number of X bay'] + 2, i, 2)
+ ) # Node label
+ tclfile.write(
+ '\t[expr %i*$BayWidth]'
+ % (building.geometry['number of X bay'] + 1)
+ ) # X coordinate
+ tclfile.write(
+ '\t[expr 1*$FirstStory+%i*$TypicalStory];' % (i - 2)
+ ) # Y coordinate
+ tclfile.write('\t# Node below floor level %i\n' % i)
# If it's top story, node above roof is not needed
# because no leaning column above roof
- if i < building.geometry['number of story']+1:
+ if i < building.geometry['number of story'] + 1:
# The node above floor level
- tclfile.write("node\t%i%i%i" % (building.geometry['number of X bay']+2, i, 4)) # Nodel label
- tclfile.write("\t[expr %i*$BayWidth]" % (building.geometry['number of X bay']+1)) # X coordinate
- tclfile.write("\t[expr 1*$FirstStory+%i*$TypicalStory];" % (i-2)) # Y coordinate
- tclfile.write("\t# Node above floor level %i\n" % i)
+ tclfile.write(
+ 'node\t%i%i%i'
+ % (building.geometry['number of X bay'] + 2, i, 4)
+ ) # Node label
+ tclfile.write(
+ '\t[expr %i*$BayWidth]'
+ % (building.geometry['number of X bay'] + 1)
+ ) # X coordinate
+ tclfile.write(
+ '\t[expr 1*$FirstStory+%i*$TypicalStory];' % (i - 2)
+ ) # Y coordinate
+ tclfile.write('\t# Node above floor level %i\n' % i)
else:
pass
- tclfile.write("\n")
- tclfile.write("# puts \"Extra nodes for leaning column springs defined\"\n")
+ tclfile.write('\n')
+ tclfile.write(
+ '# puts "Extra nodes for leaning column springs defined"\n'
+ )
- def write_fixities(self, building):
+ def write_fixities(self, building): # noqa: D102, PLR6301
# Create a .tcl file to write boundary for the model
- with open('DefineFixities2DModel.tcl', 'w') as tclfile:
- tclfile.write("# This file will be used to define the fixity at all column bases \n\n\n")
- tclfile.write("# Defining fixity at column base \n")
- for j in range(1, building.geometry['number of X bay']+2):
- tclfile.write("fix\t%i%i%i\t1\t1\t1; \n" % (j, 1, 1))
+ with open('DefineFixities2DModel.tcl', 'w') as tclfile: # noqa: PLW1514, PTH123
+ tclfile.write(
+ '# This file will be used to define the fixity at all column bases \n\n\n'
+ )
+ tclfile.write('# Defining fixity at column base \n')
+ for j in range(1, building.geometry['number of X bay'] + 2):
+ tclfile.write('fix\t%i%i%i\t1\t1\t1; \n' % (j, 1, 1))
# Leaning column base
- tclfile.write("fix\t%i%i\t1\t1\t0; \n\n" % (building.geometry['number of X bay']+2, 1))
- tclfile.write("# puts \"All column base fixities have been defined\"")
+ tclfile.write(
+ 'fix\t%i%i\t1\t1\t0; \n\n'
+ % (building.geometry['number of X bay'] + 2, 1)
+ )
+ tclfile.write('# puts "All column base fixities have been defined"')
- def write_floor_constraint(self, building):
+ def write_floor_constraint(self, building): # noqa: D102, PLR6301
# Create a .tcl file to write the floor constraint, i.e., equal DOF
- with open('DefineFloorConstraint2DModel.tcl', 'w') as tclfile:
- tclfile.write("# This file will be used to define floor constraint \n\n")
- tclfile.write("set\tConstrainDOF\t1;\t# Nodes at same floor level have identical lateral displacement \n\n")
+ with open('DefineFloorConstraint2DModel.tcl', 'w') as tclfile: # noqa: PLW1514, PTH123
+ tclfile.write('# This file will be used to define floor constraint \n\n')
+ tclfile.write(
+ 'set\tConstrainDOF\t1;\t# Nodes at same floor level have identical lateral displacement \n\n'
+ )
# Constraint starts from floor level 2
- for i in range(2, building.geometry['number of story']+2):
- tclfile.write("# Level %i \n" % i)
- for j in range(2, building.geometry['number of X bay']+2):
- tclfile.write("equalDOF\t%i%i%i\t%i%i%i\t$ConstrainDOF;" % (1, i, 1, j, i, 1))
- tclfile.write("\t# Pier 1 to Pier %i\n" % j)
+ for i in range(2, building.geometry['number of story'] + 2):
+ tclfile.write('# Level %i \n' % i)
+ for j in range(2, building.geometry['number of X bay'] + 2):
+ tclfile.write(
+ 'equalDOF\t%i%i%i\t%i%i%i\t$ConstrainDOF;'
+ % (1, i, 1, j, i, 1)
+ )
+ tclfile.write('\t# Pier 1 to Pier %i\n' % j)
# Include the leaning column nodes in the floor constraint
- tclfile.write("equalDOF\t%i%i%i\t%i%i\t$ConstrainDOF;"
- % (1, i, 1, building.geometry['number of X bay']+2, i))
- tclfile.write("\t# Pier 1 to Leaning column\n\n")
- tclfile.write("# puts \"Floor constraint defined\"")
-
- def write_beam(self, building):
+ tclfile.write(
+ 'equalDOF\t%i%i%i\t%i%i\t$ConstrainDOF;'
+ % (1, i, 1, building.geometry['number of X bay'] + 2, i)
+ )
+ tclfile.write('\t# Pier 1 to Leaning column\n\n')
+ tclfile.write('# puts "Floor constraint defined"')
+
+ def write_beam(self, building): # noqa: D102, PLR6301
# Create a .tcl file to write beam elements
- with open('DefineBeams2DModel.tcl', 'w') as tclfile:
- tclfile.write("# This file will be used to define beam elements \n\n\n")
- tclfile.write("# Define beam section sizes \n")
- for i in range(2, building.geometry['number of story']+2):
- tclfile.write("set\tBeamLevel%i\t[SectionProperty %s]; \n" % (i, building.member_size['beam'][i-2]))
- tclfile.write("\n\n# Define beams \n")
- for i in range(2, building.geometry['number of story']+2):
- tclfile.write("# Level %i\n" % i)
+ with open('DefineBeams2DModel.tcl', 'w') as tclfile: # noqa: PLW1514, PTH123
+ tclfile.write('# This file will be used to define beam elements \n\n\n')
+ tclfile.write('# Define beam section sizes \n')
+ for i in range(2, building.geometry['number of story'] + 2):
+ tclfile.write(
+ 'set\tBeamLevel%i\t[SectionProperty %s]; \n'
+ % (i, building.member_size['beam'][i - 2])
+ )
+ tclfile.write('\n\n# Define beams \n')
+ for i in range(2, building.geometry['number of story'] + 2):
+ tclfile.write('# Level %i\n' % i)
# Beam elements in frame
- for j in range(1, building.geometry['number of X bay']+1):
- tclfile.write("element\telasticBeamColumn") # elastic beam-column command
- tclfile.write("\t%i%i%i%i%i%i%i" % (2, j, i, 1, j+1, i, 1)) # Beam element tag
- tclfile.write("\t%i%i%i" % (j, i, 1)) # Starting node
- tclfile.write("\t%i%i%i" % (j+1, i, 1)) # Ending node
- tclfile.write("\t[lindex $BeamLevel%i 2]" % i) # Area of beam section
- tclfile.write("\t$Es") # Young's modulus of steel material
- tclfile.write("\t[lindex $BeamLevel%i 6]" % i) # Moment of inertia of beam section
- tclfile.write("\t$LinearTransf; \n") # Geometric transformation
+ for j in range(1, building.geometry['number of X bay'] + 1):
+ tclfile.write(
+ 'element\telasticBeamColumn'
+ ) # elastic beam-column command
+ tclfile.write(
+ '\t%i%i%i%i%i%i%i' % (2, j, i, 1, j + 1, i, 1)
+ ) # Beam element tag
+ tclfile.write('\t%i%i%i' % (j, i, 1)) # Starting node
+ tclfile.write('\t%i%i%i' % (j + 1, i, 1)) # Ending node
+ tclfile.write(
+ '\t[lindex $BeamLevel%i 2]' % i
+ ) # Area of beam section
+ tclfile.write('\t$Es') # Young's modulus of steel material
+ tclfile.write(
+ '\t[lindex $BeamLevel%i 6]' % i
+ ) # Moment of inertia of beam section
+ tclfile.write('\t$LinearTransf; \n') # Geometric transformation
# Beam elements connection frame and leaning column
- tclfile.write("element\ttruss") # elastic beam-column command
- tclfile.write("\t%i%i%i%i%i%i" % (2, building.geometry['number of X bay']+1, i, 1,
- building.geometry['number of X bay']+2, i))
- tclfile.write("\t%i%i%i" % (building.geometry['number of X bay']+1, i, 1)) # Starting node in frame
- tclfile.write("\t%i%i" % (building.geometry['number of X bay']+2, i)) # Ending node in leaning column
- tclfile.write("\t$AreaRigid\t$TrussMatID; \n") # Large area and truss element material
- tclfile.write("\n")
- tclfile.write("# puts \"Beams defined\"")
-
- def write_column(self, building):
+ tclfile.write('element\ttruss') # truss element command
+ tclfile.write(
+ '\t%i%i%i%i%i%i'
+ % (
+ 2,
+ building.geometry['number of X bay'] + 1,
+ i,
+ 1,
+ building.geometry['number of X bay'] + 2,
+ i,
+ )
+ )
+ tclfile.write(
+ '\t%i%i%i' % (building.geometry['number of X bay'] + 1, i, 1)
+ ) # Starting node in frame
+ tclfile.write(
+ '\t%i%i' % (building.geometry['number of X bay'] + 2, i)
+ ) # Ending node in leaning column
+ tclfile.write(
+ '\t$AreaRigid\t$TrussMatID; \n'
+ ) # Large area and truss element material
+ tclfile.write('\n')
+ tclfile.write('# puts "Beams defined"')
+
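
As a sanity check on the tag arithmetic in write_beam, a short sketch (bay j = 1 and level i = 2 are assumed for illustration) of the single line the inner loop emits:

    # Sketch only: the beam-element line assembled for j = 1, i = 2.
    j, i = 1, 2
    parts = [
        'element\telasticBeamColumn',
        '\t%i%i%i%i%i%i%i' % (2, j, i, 1, j + 1, i, 1),  # element tag 2121221
        '\t%i%i%i' % (j, i, 1),      # start node 121
        '\t%i%i%i' % (j + 1, i, 1),  # end node 221
        '\t[lindex $BeamLevel%i 2]' % i,
        '\t$Es',
        '\t[lindex $BeamLevel%i 6]' % i,
        '\t$LinearTransf; \n',
    ]
    beam_line = ''.join(parts)
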
+ def write_column(self, building): # noqa: D102, PLR6301
# Create a .tcl file to define all column elements
- with open('DefineColumns2DModel.tcl', 'w') as tclfile:
- tclfile.write("# This file will be used to define columns \n\n\n")
+ with open('DefineColumns2DModel.tcl', 'w') as tclfile: # noqa: PLW1514, PTH123
+ tclfile.write('# This file will be used to define columns \n\n\n')
# Define exterior column sizes
- tclfile.write("# Define exterior column section sizes \n")
- for i in range(1, building.geometry['number of story']+1):
- tclfile.write("set\tExteriorColumnStory%i\t[SectionProperty %s]; \n"
- % (i, building.member_size['exterior column'][i-1]))
+ tclfile.write('# Define exterior column section sizes \n')
+ for i in range(1, building.geometry['number of story'] + 1):
+ tclfile.write(
+ 'set\tExteriorColumnStory%i\t[SectionProperty %s]; \n'
+ % (i, building.member_size['exterior column'][i - 1])
+ )
- tclfile.write("\n\n")
+ tclfile.write('\n\n')
# Define interior column sizes
- tclfile.write("# Define interior column section sizes \n")
- for i in range(1, building.geometry['number of story']+1):
- tclfile.write("set\tInteriorColumnStory%i\t[SectionProperty %s]; \n"
- % (i, building.member_size['interior column'][i-1]))
-
- tclfile.write("\n\n# Define columns\n")
- for i in range(1, building.geometry['number of story']+1):
- tclfile.write("# Story %i \n" % i)
+ tclfile.write('# Define interior column section sizes \n')
+ for i in range(1, building.geometry['number of story'] + 1):
+ tclfile.write(
+ 'set\tInteriorColumnStory%i\t[SectionProperty %s]; \n'
+ % (i, building.member_size['interior column'][i - 1])
+ )
+
+ tclfile.write('\n\n# Define columns\n')
+ for i in range(1, building.geometry['number of story'] + 1):
+ tclfile.write('# Story %i \n' % i)
# Columns in frame
- for j in range(1, building.geometry['number of X bay']+2):
- tclfile.write("element\telasticBeamColumn") # element command
- tclfile.write("\t%i%i%i%i%i%i%i" % (3, j, i, 1, j, i+1, 1)) # element tag
- tclfile.write("\t%i%i%i" % (j, i, 1)) # Starting node
- tclfile.write("\t%i%i%i" % (j, i+1, 1)) # Ending node
+ for j in range(1, building.geometry['number of X bay'] + 2):
+ tclfile.write('element\telasticBeamColumn') # element command
+ tclfile.write(
+ '\t%i%i%i%i%i%i%i' % (3, j, i, 1, j, i + 1, 1)
+ ) # element tag
+ tclfile.write('\t%i%i%i' % (j, i, 1)) # Starting node
+ tclfile.write('\t%i%i%i' % (j, i + 1, 1)) # Ending node
# Determine whether the column is interior or exterior column
# This would affect the column section size
if 1 < j < building.geometry['number of X bay'] + 1:
- tclfile.write("\t[lindex $InteriorColumnStory%i 2]" % i) # Area of section
- tclfile.write("\t$Es") # Young's modulus of steel material
- tclfile.write("\t[lindex $InteriorColumnStory%i 6]" % i) # Moment of inertia of column section
+ tclfile.write(
+ '\t[lindex $InteriorColumnStory%i 2]' % i
+ ) # Area of section
+ tclfile.write('\t$Es') # Young's modulus of steel material
+ tclfile.write(
+ '\t[lindex $InteriorColumnStory%i 6]' % i
+ ) # Moment of inertia of column section
else:
- tclfile.write("\t[lindex $ExteriorColumnStory%i 2]" % i) # Area of section
- tclfile.write("\t$Es")
- tclfile.write("\t[lindex $ExteriorColumnStory%i 6]" % i) # Moment of inertia of column section
- tclfile.write("\t$PDeltaTransf; \n") # Geometric transformation
+ tclfile.write(
+ '\t[lindex $ExteriorColumnStory%i 2]' % i
+ ) # Area of section
+ tclfile.write('\t$Es')
+ tclfile.write(
+ '\t[lindex $ExteriorColumnStory%i 6]' % i
+ ) # Moment of inertia of column section
+ tclfile.write('\t$PDeltaTransf; \n') # Geometric transformation
# Leaning column elements
- tclfile.write("element\telasticBeamColumn") # element command
+ tclfile.write('element\telasticBeamColumn') # element command
if i == 1:
- tclfile.write("\t%i%i%i%i%i%i" % (3, building.geometry['number of X bay']+2, i,
- building.geometry['number of X bay']+2, i+1, 2))
- tclfile.write("\t%i%i" % (building.geometry['number of X bay']+2, i))
- tclfile.write("\t%i%i%i" % (building.geometry['number of X bay']+2, i+1, 2))
+ tclfile.write(
+ '\t%i%i%i%i%i%i'
+ % (
+ 3,
+ building.geometry['number of X bay'] + 2,
+ i,
+ building.geometry['number of X bay'] + 2,
+ i + 1,
+ 2,
+ )
+ )
+ tclfile.write(
+ '\t%i%i' % (building.geometry['number of X bay'] + 2, i)
+ )
+ tclfile.write(
+ '\t%i%i%i'
+ % (building.geometry['number of X bay'] + 2, i + 1, 2)
+ )
else:
- tclfile.write("\t%i%i%i%i%i%i%i" % (3, building.geometry['number of X bay']+2, i, 4,
- building.geometry['number of X bay']+2, i+1, 2))
- tclfile.write("\t%i%i%i" % (building.geometry['number of X bay']+2, i, 4))
- tclfile.write("\t%i%i%i" % (building.geometry['number of X bay']+2, i+1, 2))
- tclfile.write("\t$AreaRigid\t$Es\t$IRigid\t$PDeltaTransf; \n\n")
- tclfile.write("# puts \"Columns defined\"")
-
- def write_leaning_column_spring(self, building):
+ tclfile.write(
+ '\t%i%i%i%i%i%i%i'
+ % (
+ 3,
+ building.geometry['number of X bay'] + 2,
+ i,
+ 4,
+ building.geometry['number of X bay'] + 2,
+ i + 1,
+ 2,
+ )
+ )
+ tclfile.write(
+ '\t%i%i%i' % (building.geometry['number of X bay'] + 2, i, 4)
+ )
+ tclfile.write(
+ '\t%i%i%i'
+ % (building.geometry['number of X bay'] + 2, i + 1, 2)
+ )
+ tclfile.write('\t$AreaRigid\t$Es\t$IRigid\t$PDeltaTransf; \n\n')
+ tclfile.write('# puts "Columns defined"')
+
+ def write_leaning_column_spring(self, building): # noqa: D102, PLR6301
# Create a .tcl file to write all rotational springs for leaning column
- with open('DefineLeaningColumnSpring.tcl', 'w') as tclfile:
- tclfile.write("# This file will be used to define column hinges \n\n")
- for i in range(2, building.geometry['number of story']+2):
+ with open('DefineLeaningColumnSpring.tcl', 'w') as tclfile: # noqa: PLW1514, PTH123
+ tclfile.write('# This file will be used to define column hinges \n\n')
+ for i in range(2, building.geometry['number of story'] + 2):
# Spring below the floor level i
- tclfile.write("rotLeaningCol") # rotLeaningCol is user-defined process in OpenSees
- tclfile.write("\t%i%i%i%i%i" % (building.geometry['number of X bay']+2, i,
- building.geometry['number of X bay']+2, i, 2)) # Spring tag
- tclfile.write("\t%i%i" % (building.geometry['number of X bay']+2, i)) # Node at floor level
- tclfile.write("\t%i%i%i;" % (building.geometry['number of X bay']+2, i, 2)) # Node below floor level
- tclfile.write("\t# Spring below floor level %i \n" % i)
+ tclfile.write(
+ 'rotLeaningCol'
+ ) # rotLeaningCol is a user-defined procedure in OpenSees
+ tclfile.write(
+ '\t%i%i%i%i%i'
+ % (
+ building.geometry['number of X bay'] + 2,
+ i,
+ building.geometry['number of X bay'] + 2,
+ i,
+ 2,
+ )
+ ) # Spring tag
+ tclfile.write(
+ '\t%i%i' % (building.geometry['number of X bay'] + 2, i)
+ ) # Node at floor level
+ tclfile.write(
+ '\t%i%i%i;' % (building.geometry['number of X bay'] + 2, i, 2)
+ ) # Node below floor level
+ tclfile.write('\t# Spring below floor level %i \n' % i)
# Spring above floor level i
# If it's roof, no rotational spring exists above roof
- if i < building.geometry['number of story']+1:
- tclfile.write("rotLeaningCol") # rotLeaningCol is user-defined process in OpenSees
- tclfile.write("\t%i%i%i%i%i" % (building.geometry['number of X bay']+2, i,
- building.geometry['number of X bay'], i, 4)) # Spring tag
- tclfile.write("\t%i%i" % (building.geometry['number of X bay']+2, i)) # Node at floor level
+ if i < building.geometry['number of story'] + 1:
+ tclfile.write(
+ 'rotLeaningCol'
+ ) # rotLeaningCol is a user-defined procedure in OpenSees
+ tclfile.write(
+ '\t%i%i%i%i%i'
+ % (
+ building.geometry['number of X bay'] + 2,
+ i,
+ building.geometry['number of X bay'],
+ i,
+ 4,
+ )
+ ) # Spring tag
+ tclfile.write(
+ '\t%i%i' % (building.geometry['number of X bay'] + 2, i)
+ ) # Node at floor level
# Node above floor level
- tclfile.write("\t%i%i%i;" % (building.geometry['number of X bay']+2, i, 4))
- tclfile.write("\t# Spring above floor level %i \n" % i)
+ tclfile.write(
+ '\t%i%i%i;'
+ % (building.geometry['number of X bay'] + 2, i, 4)
+ )
+ tclfile.write('\t# Spring above floor level %i \n' % i)
else:
pass
- tclfile.write("\n")
- tclfile.write("# puts \"Leaning column springs defined\"")
+ tclfile.write('\n')
+ tclfile.write('# puts "Leaning column springs defined"')
- def write_mass(self, building):
+ def write_mass(self, building): # noqa: D102, PLR6301
# Create a .tcl file to write nodal mass
- with open('DefineMasses2DModel.tcl', 'w') as tclfile:
- tclfile.write("# This file will be used to define all nodal masses \n\n")
+ with open('DefineMasses2DModel.tcl', 'w') as tclfile: # noqa: PLW1514, PTH123
+ tclfile.write('# This file will be used to define all nodal masses \n\n')
# Write values for floor weights, tributary mass ratio, and nodal mass
- tclfile.write("# Define floor weights and each nodal mass \n")
- for i in range(2, building.geometry['number of story']+2):
- tclfile.write("set\tFloor%iWeight\t%.2f; \n" % (i, building.gravity_loads['floor weight'][i-2]))
- tclfile.write("set\tFrameTributaryMassRatio\t%s; \n" % (1.0 / building.geometry['number of X LFRS']))
- tclfile.write("set\tTotalNodesPerFloor\t%i; \n" % (building.geometry['number of X bay']+2))
- for i in range(2, building.geometry['number of story']+2):
- tclfile.write("set\tNodalMassFloor%i" % i)
- tclfile.write("\t[expr $Floor%iWeight*$FrameTributaryMassRatio/$TotalNodesPerFloor/$g]; \n" % i)
- tclfile.write("\n\n")
+ tclfile.write('# Define floor weights and each nodal mass \n')
+ for i in range(2, building.geometry['number of story'] + 2):
+ tclfile.write(
+ 'set\tFloor%iWeight\t%.2f; \n'
+ % (i, building.gravity_loads['floor weight'][i - 2])
+ )
+ tclfile.write(
+ 'set\tFrameTributaryMassRatio\t%s; \n'
+ % (1.0 / building.geometry['number of X LFRS'])
+ )
+ tclfile.write(
+ 'set\tTotalNodesPerFloor\t%i; \n'
+ % (building.geometry['number of X bay'] + 2)
+ )
+ for i in range(2, building.geometry['number of story'] + 2):
+ tclfile.write('set\tNodalMassFloor%i' % i)
+ tclfile.write(
+ '\t[expr $Floor%iWeight*$FrameTributaryMassRatio/$TotalNodesPerFloor/$g]; \n'
+ % i
+ )
+ tclfile.write('\n\n')
# Write nodal masses for each floor level
- for i in range(2, building.geometry['number of story']+2):
- tclfile.write("# Level %i \n" % i)
- for j in range(1, building.geometry['number of X bay']+3):
- if j < building.geometry['number of X bay']+2:
- tclfile.write("mass\t%i%i%i" % (j, i, 1)) # Nodal mass command and node tag
+ for i in range(2, building.geometry['number of story'] + 2):
+ tclfile.write('# Level %i \n' % i)
+ for j in range(1, building.geometry['number of X bay'] + 3):
+ if j < building.geometry['number of X bay'] + 2:
+ tclfile.write(
+ 'mass\t%i%i%i' % (j, i, 1)
+ ) # Nodal mass command and node tag
else:
- tclfile.write("mass\t%i%i" % (j, i)) # Nodal mass for leaning column
- tclfile.write("\t$NodalMassFloor%i" % i) # Mass along X direction
- tclfile.write("\t$Negligible\t$Negligible\n") # Mass along Y and RotZ doesn't matter
- tclfile.write("\n")
- tclfile.write("# puts \"Nodal mass defined\"") # Write puts command which denotes the ending of the .tcl file
-
- def write_all_recorder(self):
+ tclfile.write(
+ 'mass\t%i%i' % (j, i)
+ ) # Nodal mass for leaning column
+ tclfile.write(
+ '\t$NodalMassFloor%i' % i
+ ) # Mass along X direction
+ tclfile.write(
+ '\t$Negligible\t$Negligible\n'
+ ) # Mass along Y and RotZ doesn't matter
+ tclfile.write('\n')
+ tclfile.write(
+ '# puts "Nodal mass defined"'
+ ) # Write puts command which denotes the ending of the .tcl file
+
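
The 'expr' strings written by write_mass evaluate FloorWeight * FrameTributaryMassRatio / TotalNodesPerFloor / $g on the OpenSees side; a rough sketch of the same arithmetic in Python, with every number assumed for illustration:

    # Sketch only: the nodal-mass arithmetic behind the generated 'expr' string.
    floor_weight = 1000.0      # kip, assumed floor weight
    tributary_ratio = 1.0 / 2  # assumed: two lateral frames in the X direction
    nodes_per_floor = 3 + 2    # assumed: 3 bays -> 4 frame nodes + 1 leaning-column node
    g = 386.4                  # in/s^2, a value commonly used for $g (assumed here)
    nodal_mass = floor_weight * tributary_ratio / nodes_per_floor / g  # kip*s^2/in
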
+ def write_all_recorder(self): # noqa: D102, PLR6301
# Create a .tcl file to write all recorders for output
- with open('DefineAllRecorders2DModel.tcl', 'w') as tclfile:
- tclfile.write("# This file will be used to define all recorders \n\n\n") # File explanation
- tclfile.write("# Setting up main folders for different load scenarios\n")
- tclfile.write("set\tbaseDir\t[pwd]\n") # OpenSees base directory
- tclfile.write("set\tdataDir\t$LoadType\n") # OpenSees output data folder, based on type of applied load
- tclfile.write("file\tmkdir\t$dataDir\n") # Create the data folder
- tclfile.write("cd\t$baseDir/$dataDir\n\n") # Go to data folder
-
- tclfile.write("# Creating all the sub-folders for different quantities\n")
- tclfile.write("file\tmkdir\tStoryDrifts\n")
- tclfile.write("file\tmkdir\tNodeDisplacements\n")
- tclfile.write("file\tmkdir\tGlobalBeamForces\n")
- tclfile.write("file\tmkdir\tGlobalColumnForces\n\n")
-
- tclfile.write("# Source all the tcl files that define the output\n")
- tclfile.write("cd\t$baseDir\n")
- tclfile.write("source\tDefineStoryDriftRecorders2DModel.tcl\n\n")
-
- tclfile.write("cd\t$baseDir\n")
- tclfile.write("source\tDefineNodeDisplacementRecorders2DModel.tcl\n\n")
-
- tclfile.write("cd\t$baseDir\n")
- tclfile.write("source\tDefineGlobalBeamForceRecorders2DModel.tcl\n\n")
-
- tclfile.write("cd\t$baseDir\n")
- tclfile.write("source\tDefineGlobalColumnForceRecorders2DModel.tcl\n\n")
-
- tclfile.write("cd\t$baseDir\n")
- tclfile.write("# puts \"All recorders defined\"")
-
- def write_story_drift_recorder(self, building):
+ with open('DefineAllRecorders2DModel.tcl', 'w') as tclfile: # noqa: PLW1514, PTH123
+ tclfile.write(
+ '# This file will be used to define all recorders \n\n\n'
+ ) # File explanation
+ tclfile.write('# Setting up main folders for different load scenarios\n')
+ tclfile.write('set\tbaseDir\t[pwd]\n') # OpenSees base directory
+ tclfile.write(
+ 'set\tdataDir\t$LoadType\n'
+ ) # OpenSees output data folder, based on type of applied load
+ tclfile.write('file\tmkdir\t$dataDir\n') # Create the data folder
+ tclfile.write('cd\t$baseDir/$dataDir\n\n') # Go to data folder
+
+ tclfile.write(
+ '# Creating all the sub-folders for different quantities\n'
+ )
+ tclfile.write('file\tmkdir\tStoryDrifts\n')
+ tclfile.write('file\tmkdir\tNodeDisplacements\n')
+ tclfile.write('file\tmkdir\tGlobalBeamForces\n')
+ tclfile.write('file\tmkdir\tGlobalColumnForces\n\n')
+
+ tclfile.write('# Source all the tcl files that define the output\n')
+ tclfile.write('cd\t$baseDir\n')
+ tclfile.write('source\tDefineStoryDriftRecorders2DModel.tcl\n\n')
+
+ tclfile.write('cd\t$baseDir\n')
+ tclfile.write('source\tDefineNodeDisplacementRecorders2DModel.tcl\n\n')
+
+ tclfile.write('cd\t$baseDir\n')
+ tclfile.write('source\tDefineGlobalBeamForceRecorders2DModel.tcl\n\n')
+
+ tclfile.write('cd\t$baseDir\n')
+ tclfile.write('source\tDefineGlobalColumnForceRecorders2DModel.tcl\n\n')
+
+ tclfile.write('cd\t$baseDir\n')
+ tclfile.write('# puts "All recorders defined"')
+
+ def write_story_drift_recorder(self, building): # noqa: D102, PLR6301
# Create a .tcl file to write story drift recorder for output
- with open('DefineStoryDriftRecorders2DModel.tcl', 'w') as tclfile:
- tclfile.write("# Define story drift recorders\n\n\n")
- tclfile.write("cd\t$baseDir/$dataDir/StoryDrifts\n\n")
+ with open('DefineStoryDriftRecorders2DModel.tcl', 'w') as tclfile: # noqa: PLW1514, PTH123
+ tclfile.write('# Define story drift recorders\n\n\n')
+ tclfile.write('cd\t$baseDir/$dataDir/StoryDrifts\n\n')
# Write the story drift recorder for each story
- for i in range(1, building.geometry['number of story']+1):
- tclfile.write("recorder\tDrift\t-file")
- tclfile.write("\t$baseDir/$dataDir/StoryDrifts/Story%i.out" % i)
+ for i in range(1, building.geometry['number of story'] + 1):
+ tclfile.write('recorder\tDrift\t-file')
+ tclfile.write('\t$baseDir/$dataDir/StoryDrifts/Story%i.out' % i)
# Always use nodes on column #1 to calculate story drift
- tclfile.write("\t-time\t-iNode\t%i%i%i" % (1, i, 1)) # node at bottom of current story
- tclfile.write("\t-jNode\t%i%i%i" % (1, i+1, 1)) # node at top of current story
- tclfile.write("\t-dof\t1\t-perpDirn\t2; \n")
+ tclfile.write(
+ '\t-time\t-iNode\t%i%i%i' % (1, i, 1)
+ ) # node at bottom of current story
+ tclfile.write(
+ '\t-jNode\t%i%i%i' % (1, i + 1, 1)
+ ) # node at top of current story
+ tclfile.write('\t-dof\t1\t-perpDirn\t2; \n')
# Write the story drift recorder for roof
- tclfile.write("recorder\tDrift\t-file")
- tclfile.write("\t$baseDir/$dataDir/StoryDrifts/Roof.out")
- tclfile.write("\t-time\t-iNode\t%i%i%i" % (1, 1, 1))
- tclfile.write("\t-jNode\t%i%i%i" % (1, building.geometry['number of story']+1, 1))
- tclfile.write("\t-dof\t1\t-perpDirn\t2; \n")
-
- def write_node_displacement_recorder(self, building):
+ tclfile.write('recorder\tDrift\t-file')
+ tclfile.write('\t$baseDir/$dataDir/StoryDrifts/Roof.out')
+ tclfile.write('\t-time\t-iNode\t%i%i%i' % (1, 1, 1))
+ tclfile.write(
+ '\t-jNode\t%i%i%i' % (1, building.geometry['number of story'] + 1, 1)
+ )
+ tclfile.write('\t-dof\t1\t-perpDirn\t2; \n')
+
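
For reference, a sketch (story i = 1 assumed) of the drift-recorder line this loop produces; the roof recorder below follows the same pattern with the top-level node:

    # Sketch only: the recorder line assembled for story i = 1.
    i = 1
    drift_line = (
        'recorder\tDrift\t-file'
        + '\t$baseDir/$dataDir/StoryDrifts/Story%i.out' % i
        + '\t-time\t-iNode\t%i%i%i' % (1, i, 1)  # node 111 at the story base
        + '\t-jNode\t%i%i%i' % (1, i + 1, 1)     # node 121 at the story top
        + '\t-dof\t1\t-perpDirn\t2; \n'
    )
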
+ def write_node_displacement_recorder(self, building): # noqa: D102, PLR6301
# Create a .tcl file to write node displacement recorder for output
- with open('DefineNodeDisplacementRecorders2DModel.tcl', 'w') as tclfile:
- tclfile.write("# Define node displacement recorders\n\n\n")
- tclfile.write("cd\t$baseDir/$dataDir/NodeDisplacements\n\n")
+ with open('DefineNodeDisplacementRecorders2DModel.tcl', 'w') as tclfile: # noqa: PLW1514, PTH123
+ tclfile.write('# Define node displacement recorders\n\n\n')
+ tclfile.write('cd\t$baseDir/$dataDir/NodeDisplacements\n\n')
# Write the node displacement recorder for node at each floor level
- for i in range(1, building.geometry['number of story']+2):
- tclfile.write("recorder\tNode\t-file")
- tclfile.write("\tNodeDisplacementLevel%i.out" % i)
- tclfile.write("\t-time\t-node")
- for j in range(1, building.geometry['number of X bay']+2):
- tclfile.write("\t%i%i%i" % (j, i, 1))
- tclfile.write("\t-dof\t1\t2\t3\tdisp; \n")
-
- def write_beam_force_recorder(self, building):
+ for i in range(1, building.geometry['number of story'] + 2):
+ tclfile.write('recorder\tNode\t-file')
+ tclfile.write('\tNodeDisplacementLevel%i.out' % i)
+ tclfile.write('\t-time\t-node')
+ for j in range(1, building.geometry['number of X bay'] + 2):
+ tclfile.write('\t%i%i%i' % (j, i, 1))
+ tclfile.write('\t-dof\t1\t2\t3\tdisp; \n')
+
+ def write_beam_force_recorder(self, building): # noqa: D102, PLR6301
# Create a .tcl file to write beam force recorder for output
- with open('DefineGlobalBeamForceRecorders2DModel.tcl', 'w') as tclfile:
- tclfile.write("# Define global beam force recorders\n\n\n")
- tclfile.write("cd\t$baseDir/$dataDir/GlobalBeamForces\n\n")
-
- tclfile.write("# Beam element global force recorders\n")
- for i in range(2, building.geometry['number of story']+2):
- tclfile.write("recorder\tElement\t-file\tGlobalXBeamForcesLevel%i.out" % i)
- tclfile.write("\t-time\t-ele")
- for j in range(1, building.geometry['number of X bay']+1):
- tclfile.write("\t%i%i%i%i%i%i%i" % (2, j, i, 1, j+1, i, 1))
- tclfile.write("\tforce; \n")
-
- def write_column_force_recorder(self, building):
+ with open('DefineGlobalBeamForceRecorders2DModel.tcl', 'w') as tclfile: # noqa: PLW1514, PTH123
+ tclfile.write('# Define global beam force recorders\n\n\n')
+ tclfile.write('cd\t$baseDir/$dataDir/GlobalBeamForces\n\n')
+
+ tclfile.write('# Beam element global force recorders\n')
+ for i in range(2, building.geometry['number of story'] + 2):
+ tclfile.write(
+ 'recorder\tElement\t-file\tGlobalXBeamForcesLevel%i.out' % i
+ )
+ tclfile.write('\t-time\t-ele')
+ for j in range(1, building.geometry['number of X bay'] + 1):
+ tclfile.write('\t%i%i%i%i%i%i%i' % (2, j, i, 1, j + 1, i, 1))
+ tclfile.write('\tforce; \n')
+
+ def write_column_force_recorder(self, building): # noqa: D102, PLR6301
# Create a .tcl file to write column force recorder for output
- with open('DefineGlobalColumnForceRecorders2DModel.tcl', 'w') as tclfile:
- tclfile.write("# Define global column force recorders\n\n\n")
- tclfile.write("cd\t$baseDir/$dataDir/GlobalColumnForces\n\n")
-
- tclfile.write("# X-Direction frame column element global force recorders\n")
- for i in range(1, building.geometry['number of story']+1):
- tclfile.write("recorder\tElement\t-file\tGlobalColumnForcesStory%i.out" % i)
- tclfile.write("\t-time\t-ele")
- for j in range(1, building.geometry['number of X bay']+2):
- tclfile.write("\t%i%i%i%i%i%i%i" % (3, j, i, 1, j, i+1, 1))
- tclfile.write("\tforce;\n")
-
- def write_gravity_dead_load(self, building):
+ with open('DefineGlobalColumnForceRecorders2DModel.tcl', 'w') as tclfile: # noqa: PLW1514, PTH123
+ tclfile.write('# Define global column force recorders\n\n\n')
+ tclfile.write('cd\t$baseDir/$dataDir/GlobalColumnForces\n\n')
+
+ tclfile.write(
+ '# X-Direction frame column element global force recorders\n'
+ )
+ for i in range(1, building.geometry['number of story'] + 1):
+ tclfile.write(
+ 'recorder\tElement\t-file\tGlobalColumnForcesStory%i.out' % i
+ )
+ tclfile.write('\t-time\t-ele')
+ for j in range(1, building.geometry['number of X bay'] + 2):
+ tclfile.write('\t%i%i%i%i%i%i%i' % (3, j, i, 1, j, i + 1, 1))
+ tclfile.write('\tforce;\n')
+
+ def write_gravity_dead_load(self, building): # noqa: D102, PLR6301
# Create a .tcl file that writes the gravity dead load on the model
- with open('DefineGravityDeadLoads2DModel.tcl', 'w') as tclfile:
- tclfile.write("# Define gravity dead loads\n\n\n")
+ with open('DefineGravityDeadLoads2DModel.tcl', 'w') as tclfile: # noqa: PLW1514, PTH123
+ tclfile.write('# Define gravity dead loads\n\n\n')
# Assign the beam dead load values
- tclfile.write("# Assign uniform beam dead load values (kip/inch)\n")
- for i in range(2, building.geometry['number of story']+2):
+ tclfile.write('# Assign uniform beam dead load values (kip/inch)\n')
+ for i in range(2, building.geometry['number of story'] + 2):
# Be cautious: convert the unit from lb/ft to kip/inch
- tclfile.write("set\tBeamDeadLoadFloor%i\t%f; \n"
- % (i, building.gravity_loads['beam dead load'][i-2]*0.001/12))
- tclfile.write("\n")
+ tclfile.write(
+ 'set\tBeamDeadLoadFloor%i\t%f; \n'
+ % (
+ i,
+ building.gravity_loads['beam dead load'][i - 2] * 0.001 / 12,
+ )
+ )
+ tclfile.write('\n')
# Assign the beam live load values
- tclfile.write("# Assign uniform beam live load values (kip/inch)\n")
- for i in range(2, building.geometry['number of story']+2):
+ tclfile.write('# Assign uniform beam live load values (kip/inch)\n')
+ for i in range(2, building.geometry['number of story'] + 2):
# Be cautious: convert the unit from lb/ft to kip/inch
- tclfile.write("set\tBeamLiveLoadFloor%i\t%f; \n"
- % (i, building.gravity_loads['beam live load'][i-2]*0.001/12))
- tclfile.write("\n")
+ tclfile.write(
+ 'set\tBeamLiveLoadFloor%i\t%f; \n'
+ % (
+ i,
+ building.gravity_loads['beam live load'][i - 2] * 0.001 / 12,
+ )
+ )
+ tclfile.write('\n')
# Assign the values for point dead load acting on leaning column
- tclfile.write("# Assign point dead load values on leaning column: kip\n")
- for i in range(2, building.geometry['number of story']+2):
- tclfile.write("set\tLeaningColumnDeadLoadFloor%i\t%f; \n"
- % (i, building.gravity_loads['leaning column dead load'][i-2]))
- tclfile.write("\n")
+ tclfile.write('# Assign point dead load values on leaning column: kip\n')
+ for i in range(2, building.geometry['number of story'] + 2):
+ tclfile.write(
+ 'set\tLeaningColumnDeadLoadFloor%i\t%f; \n'
+ % (i, building.gravity_loads['leaning column dead load'][i - 2])
+ )
+ tclfile.write('\n')
# Assign the values for point live load acting on leaning column
- tclfile.write("# Assign point live load values on leaning column (kip)\n")
- for i in range(2, building.geometry['number of story']+2):
- tclfile.write("set\tLeaningColumnLiveLoadFloor%i\t%f; \n"
- % (i, building.gravity_loads['leaning column live load'][i-2]))
- tclfile.write("\n")
+ tclfile.write(
+ '# Assign point live load values on leaning column (kip)\n'
+ )
+ for i in range(2, building.geometry['number of story'] + 2):
+ tclfile.write(
+ 'set\tLeaningColumnLiveLoadFloor%i\t%f; \n'
+ % (i, building.gravity_loads['leaning column live load'][i - 2])
+ )
+ tclfile.write('\n')
# Assign the lateral load values caused by earthquake
# Please note this is used to verify the strength -> use seismic_force_for_strength
- tclfile.write("# Assign lateral load values caused by earthquake (kip)\n")
- tclfile.write("set\tLateralLoad\t[list")
+ tclfile.write(
+ '# Assign lateral load values caused by earthquake (kip)\n'
+ )
+ tclfile.write('set\tLateralLoad\t[list')
for i in range(building.geometry['number of story']):
- tclfile.write("\t%f"
- % (building.seismic_force_for_strength['lateral story force'][i] \
- /building.geometry['number of X LFRS'] * ACCIDENTAL_TORSION))
- tclfile.write("];\n\n\n")
+ tclfile.write(
+ '\t%f'
+ % (
+ building.seismic_force_for_strength['lateral story force'][i]
+ / building.geometry['number of X LFRS']
+ * ACCIDENTAL_TORSION
+ )
+ )
+ tclfile.write('];\n\n\n')
# Define the load pattern in OpenSees
- tclfile.write("# Define uniform loads on beams\n")
- tclfile.write("# Load combinations:\n")
- tclfile.write("# 101 Dead load only\n")
- tclfile.write("# 102 Live load only\n")
- tclfile.write("# 103 Earthquake load only\n")
- tclfile.write("# 104 Gravity and earthquake (for calculation of drift)\n")
- tclfile.write("pattern\tPlain\t101\tConstant\t{")
+ tclfile.write('# Define uniform loads on beams\n')
+ tclfile.write('# Load combinations:\n')
+ tclfile.write('# 101 Dead load only\n')
+ tclfile.write('# 102 Live load only\n')
+ tclfile.write('# 103 Earthquake load only\n')
+ tclfile.write(
+ '# 104 Gravity and earthquake (for calculation of drift)\n'
+ )
+ tclfile.write('pattern\tPlain\t101\tConstant\t{')
# Dead loads on beam
- for i in range(2, building.geometry['number of story']+2):
- tclfile.write("# Level%i\n" % i)
- for j in range(1, building.geometry['number of X bay']+1):
- tclfile.write("eleLoad\t-ele")
- tclfile.write("\t%i%i%i%i%i%i%i" % (2, j, i, 1, j+1, i, 1))
- tclfile.write("\t-type\t-beamUniform\t[expr -1*$BeamDeadLoadFloor%i]; \n" % i)
- tclfile.write("\n")
- tclfile.write("\n\n")
+ for i in range(2, building.geometry['number of story'] + 2):
+ tclfile.write('# Level%i\n' % i)
+ for j in range(1, building.geometry['number of X bay'] + 1):
+ tclfile.write('eleLoad\t-ele')
+ tclfile.write('\t%i%i%i%i%i%i%i' % (2, j, i, 1, j + 1, i, 1))
+ tclfile.write(
+ '\t-type\t-beamUniform\t[expr -1*$BeamDeadLoadFloor%i]; \n'
+ % i
+ )
+ tclfile.write('\n')
+ tclfile.write('\n\n')
# Dead loads on leaning column
- tclfile.write("# Define point loads on leaning column\n")
- for i in range(2, building.geometry['number of story']+2):
- tclfile.write("load\t%i%i\t0\t[expr -1*$LeaningColumnDeadLoadFloor%i]\t0; \n"
- %(building.geometry['number of X bay']+2, i, i))
- tclfile.write("\n}\n")
+ tclfile.write('# Define point loads on leaning column\n')
+ for i in range(2, building.geometry['number of story'] + 2):
+ tclfile.write(
+ 'load\t%i%i\t0\t[expr -1*$LeaningColumnDeadLoadFloor%i]\t0; \n'
+ % (building.geometry['number of X bay'] + 2, i, i)
+ )
+ tclfile.write('\n}\n')
- tclfile.write("# puts \"Dead load defined\"")
+ tclfile.write('# puts "Dead load defined"')
- def write_gravity_live_load(self, building):
+ def write_gravity_live_load(self, building): # noqa: D102, PLR6301
# Create a .tcl file to write live load
- with open('DefineGravityLiveLoads2DModel.tcl', 'w') as tclfile:
- tclfile.write("# Define gravity live loads\n\n\n")
+ with open('DefineGravityLiveLoads2DModel.tcl', 'w') as tclfile: # noqa: PLW1514, PTH123
+ tclfile.write('# Define gravity live loads\n\n\n')
# Assign the beam dead load values
- tclfile.write("# Assign uniform beam dead load values (kip/inch)\n")
- for i in range(2, building.geometry['number of story']+2):
+ tclfile.write('# Assign uniform beam dead load values (kip/inch)\n')
+ for i in range(2, building.geometry['number of story'] + 2):
# Be cautious: convert the unit from lb/ft to kip/inch
- tclfile.write("set\tBeamDeadLoadFloor%i\t%f; \n"
- % (i, building.gravity_loads['beam dead load'][i-2]*0.001/12))
- tclfile.write("\n")
+ tclfile.write(
+ 'set\tBeamDeadLoadFloor%i\t%f; \n'
+ % (
+ i,
+ building.gravity_loads['beam dead load'][i - 2] * 0.001 / 12,
+ )
+ )
+ tclfile.write('\n')
# Assign the beam live load values
- tclfile.write("# Assign uniform beam live load values (kip/inch)\n")
- for i in range(2, building.geometry['number of story']+2):
+ tclfile.write('# Assign uniform beam live load values (kip/inch)\n')
+ for i in range(2, building.geometry['number of story'] + 2):
# Be cautious: convert the unit from lb/ft to kip/inch
- tclfile.write("set\tBeamLiveLoadFloor%i\t%f; \n"
- % (i, building.gravity_loads['beam live load'][i-2]*0.001/12))
- tclfile.write("\n")
+ tclfile.write(
+ 'set\tBeamLiveLoadFloor%i\t%f; \n'
+ % (
+ i,
+ building.gravity_loads['beam live load'][i - 2] * 0.001 / 12,
+ )
+ )
+ tclfile.write('\n')
# Assign the values for point dead load acting on leaning column
- tclfile.write("# Assign point dead load values on leaning column: kip\n")
- for i in range(2, building.geometry['number of story']+2):
- tclfile.write("set\tLeaningColumnDeadLoadFloor%i\t%f; \n"
- % (i, building.gravity_loads['leaning column dead load'][i-2]))
- tclfile.write("\n")
+ tclfile.write('# Assign point dead load values on leaning column: kip\n')
+ for i in range(2, building.geometry['number of story'] + 2):
+ tclfile.write(
+ 'set\tLeaningColumnDeadLoadFloor%i\t%f; \n'
+ % (i, building.gravity_loads['leaning column dead load'][i - 2])
+ )
+ tclfile.write('\n')
# Assign the values for point live load acting on leaning column
- tclfile.write("# Assign point live load values on leaning column (kip)\n")
- for i in range(2, building.geometry['number of story']+2):
- tclfile.write("set\tLeaningColumnLiveLoadFloor%i\t%f; \n"
- %(i, building.gravity_loads['leaning column live load'][i-2]))
- tclfile.write("\n")
+ tclfile.write(
+ '# Assign point live load values on leaning column (kip)\n'
+ )
+ for i in range(2, building.geometry['number of story'] + 2):
+ tclfile.write(
+ 'set\tLeaningColumnLiveLoadFloor%i\t%f; \n'
+ % (i, building.gravity_loads['leaning column live load'][i - 2])
+ )
+ tclfile.write('\n')
# Assign the lateral load values caused by earthquake
# This is used to compute the required strength -> use seismic_force_for_strength
- tclfile.write("# Assign lateral load values caused by earthquake (kip)\n")
- tclfile.write("set\tLateralLoad\t[list")
+ tclfile.write(
+ '# Assign lateral load values caused by earthquake (kip)\n'
+ )
+ tclfile.write('set\tLateralLoad\t[list')
for i in range(building.geometry['number of story']):
- tclfile.write("\t%f"
- % (building.seismic_force_for_strength['lateral story force'][i]
- / building.geometry['number of X LFRS'] * ACCIDENTAL_TORSION))
- tclfile.write("];\n\n\n")
+ tclfile.write(
+ '\t%f'
+ % (
+ building.seismic_force_for_strength['lateral story force'][i]
+ / building.geometry['number of X LFRS']
+ * ACCIDENTAL_TORSION
+ )
+ )
+ tclfile.write('];\n\n\n')
# Define the load pattern in OpenSees
- tclfile.write("# Define uniform loads on beams\n")
- tclfile.write("# Load combinations:\n")
- tclfile.write("# 101 Dead load only\n")
- tclfile.write("# 102 Live load only\n")
- tclfile.write("# 103 Earthquake load only\n")
- tclfile.write("# 104 Gravity and earthquake (for calculation of drift)\n")
- tclfile.write("pattern\tPlain\t102\tConstant\t{")
+ tclfile.write('# Define uniform loads on beams\n')
+ tclfile.write('# Load combinations:\n')
+ tclfile.write('# 101 Dead load only\n')
+ tclfile.write('# 102 Live load only\n')
+ tclfile.write('# 103 Earthquake load only\n')
+ tclfile.write(
+ '# 104 Gravity and earthquake (for calculation of drift)\n'
+ )
+ tclfile.write('pattern\tPlain\t102\tConstant\t{')
# Live loads on beam
- for i in range(2, building.geometry['number of story']+2):
- tclfile.write("# Level%i\n" % i)
- for j in range(1, building.geometry['number of X bay']+1):
- tclfile.write("eleLoad\t-ele")
- tclfile.write("\t%i%i%i%i%i%i%i" % (2, j, i, 1, j+1, i, 1))
- tclfile.write("\t-type\t-beamUniform\t[expr -1*$BeamLiveLoadFloor%i]; \n" % i)
- tclfile.write("\n")
- tclfile.write("\n\n")
+ for i in range(2, building.geometry['number of story'] + 2):
+ tclfile.write('# Level%i\n' % i)
+ for j in range(1, building.geometry['number of X bay'] + 1):
+ tclfile.write('eleLoad\t-ele')
+ tclfile.write('\t%i%i%i%i%i%i%i' % (2, j, i, 1, j + 1, i, 1))
+ tclfile.write(
+ '\t-type\t-beamUniform\t[expr -1*$BeamLiveLoadFloor%i]; \n'
+ % i
+ )
+ tclfile.write('\n')
+ tclfile.write('\n\n')
# Live loads on leaning column
- tclfile.write("# Define point loads on leaning column\n")
- for i in range(2, building.geometry['number of story']+2):
- tclfile.write("load\t%i%i\t0\t[expr -1*$LeaningColumnLiveLoadFloor%i]\t0; \n"
- % (building.geometry['number of X bay']+2, i, i))
- tclfile.write("\n}\n")
+ tclfile.write('# Define point loads on leaning column\n')
+ for i in range(2, building.geometry['number of story'] + 2):
+ tclfile.write(
+ 'load\t%i%i\t0\t[expr -1*$LeaningColumnLiveLoadFloor%i]\t0; \n'
+ % (building.geometry['number of X bay'] + 2, i, i)
+ )
+ tclfile.write('\n}\n')
- tclfile.write("# puts \"Live load defined\"")
+ tclfile.write('# puts "Live load defined"')
- def write_earthquake_load(self, building):
+ def write_earthquake_load(self, building): # noqa: D102, PLR6301
# Create a .tcl file to write earthquake load
- with open('DefineEarthquakeLaterLoads2DModel.tcl', 'w') as tclfile:
- tclfile.write("# Define gravity live loads\n\n\n")
+ with open('DefineEarthquakeLaterLoads2DModel.tcl', 'w') as tclfile: # noqa: PLW1514, PTH123
+ tclfile.write('# Define earthquake lateral loads\n\n\n')
# Assign the beam dead load values
- tclfile.write("# Assign uniform beam dead load values (kip/inch)\n")
- for i in range(2, building.geometry['number of story']+2):
+ tclfile.write('# Assign uniform beam dead load values (kip/inch)\n')
+ for i in range(2, building.geometry['number of story'] + 2):
# Be cautious: convert the unit from lb/ft to kip/inch
- tclfile.write("set\tBeamDeadLoadFloor%i\t%f; \n"
- % (i, building.gravity_loads['beam dead load'][i-2]*0.001/12))
- tclfile.write("\n")
+ tclfile.write(
+ 'set\tBeamDeadLoadFloor%i\t%f; \n'
+ % (
+ i,
+ building.gravity_loads['beam dead load'][i - 2] * 0.001 / 12,
+ )
+ )
+ tclfile.write('\n')
# Assign the beam live load values
- tclfile.write("# Assign uniform beam live load values (kip/inch)\n")
- for i in range(2, building.geometry['number of story']+2):
+ tclfile.write('# Assign uniform beam live load values (kip/inch)\n')
+ for i in range(2, building.geometry['number of story'] + 2):
# Be cautious: convert the unit from lb/ft to kip/inch
- tclfile.write("set\tBeamLiveLoadFloor%i\t%f; \n"
- % (i, building.gravity_loads['beam live load'][i-2]*0.001/12))
- tclfile.write("\n")
+ tclfile.write(
+ 'set\tBeamLiveLoadFloor%i\t%f; \n'
+ % (
+ i,
+ building.gravity_loads['beam live load'][i - 2] * 0.001 / 12,
+ )
+ )
+ tclfile.write('\n')
# Assign the values for point dead load acting on leaning column
- tclfile.write("# Assign point dead load values on leaning column: kip\n")
- for i in range(2, building.geometry['number of story']+2):
- tclfile.write("set\tLeaningColumnDeadLoadFloor%i\t%f; \n"
- % (i, building.gravity_loads['leaning column dead load'][i-2]))
- tclfile.write("\n")
+ tclfile.write('# Assign point dead load values on leaning column: kip\n')
+ for i in range(2, building.geometry['number of story'] + 2):
+ tclfile.write(
+ 'set\tLeaningColumnDeadLoadFloor%i\t%f; \n'
+ % (i, building.gravity_loads['leaning column dead load'][i - 2])
+ )
+ tclfile.write('\n')
# Assign the values for point live load acting on leaning column
- tclfile.write("# Assign point live load values on leaning column (kip)\n")
- for i in range(2, building.geometry['number of story']+2):
- tclfile.write("set\tLeaningColumnLiveLoadFloor%i\t%f; \n"
- % (i, building.gravity_loads['leaning column live load'][i-2]))
- tclfile.write("\n")
+ tclfile.write(
+ '# Assign point live load values on leaning column (kip)\n'
+ )
+ for i in range(2, building.geometry['number of story'] + 2):
+ tclfile.write(
+ 'set\tLeaningColumnLiveLoadFloor%i\t%f; \n'
+ % (i, building.gravity_loads['leaning column live load'][i - 2])
+ )
+ tclfile.write('\n')
# Assign the lateral load values caused by earthquake
# This is used to compute the required strength -> use seismic_force_for_strength
- tclfile.write("# Assign lateral load values caused by earthquake (kip)\n")
- tclfile.write("set\tLateralLoad\t[list")
+ tclfile.write(
+ '# Assign lateral load values caused by earthquake (kip)\n'
+ )
+ tclfile.write('set\tLateralLoad\t[list')
for i in range(building.geometry['number of story']):
- tclfile.write("\t%f"
- % (building.seismic_force_for_strength['lateral story force'][i]
- / building.geometry['number of X LFRS'] * ACCIDENTAL_TORSION))
- tclfile.write("];\n\n\n")
+ tclfile.write(
+ '\t%f'
+ % (
+ building.seismic_force_for_strength['lateral story force'][i]
+ / building.geometry['number of X LFRS']
+ * ACCIDENTAL_TORSION
+ )
+ )
+ tclfile.write('];\n\n\n')
# Define the load pattern in OpenSees
- tclfile.write("# Define uniform loads on beams\n")
- tclfile.write("# Load combinations:\n")
- tclfile.write("# 101 Dead load only\n")
- tclfile.write("# 102 Live load only\n")
- tclfile.write("# 103 Earthquake load only\n")
- tclfile.write("# 104 Gravity and earthquake (for calculation of drift)\n")
- tclfile.write("pattern\tPlain\t103\tLinear\t{")
- tclfile.write("\n\n")
- for i in range(2, building.geometry['number of story']+2):
- tclfile.write("load\t%i%i%i\t[lindex $LateralLoad %i] 0.0 0.0;\t# Level%i\n" % (1, i, 1, i-2, i))
- tclfile.write("\n}\n")
- tclfile.write("# puts \"Earthquake load defined\"")
-
- def write_gravity_earthquake_load(self, building):
+ tclfile.write('# Define uniform loads on beams\n')
+ tclfile.write('# Load combinations:\n')
+ tclfile.write('# 101 Dead load only\n')
+ tclfile.write('# 102 Live load only\n')
+ tclfile.write('# 103 Earthquake load only\n')
+ tclfile.write(
+ '# 104 Gravity and earthquake (for calculation of drift)\n'
+ )
+ tclfile.write('pattern\tPlain\t103\tLinear\t{')
+ tclfile.write('\n\n')
+ for i in range(2, building.geometry['number of story'] + 2):
+ tclfile.write(
+ 'load\t%i%i%i\t[lindex $LateralLoad %i] 0.0 0.0;\t# Level%i\n'
+ % (1, i, 1, i - 2, i)
+ )
+ tclfile.write('\n}\n')
+ tclfile.write('# puts "Earthquake load defined"')
+
+ def write_gravity_earthquake_load(self, building): # noqa: D102, PLR6301
# Create a .tcl file to write the combination of earthquake and gravity loads
# This load case is used to calculate story drift
- with open('DefineGravityEarthquakeLoads2DModel.tcl', 'w') as tclfile:
- tclfile.write("# Define gravity live loads\n\n\n")
+ with open('DefineGravityEarthquakeLoads2DModel.tcl', 'w') as tclfile: # noqa: PLW1514, PTH123
+ tclfile.write('# Define gravity and earthquake loads\n\n\n')
# Assign the beam dead load values
- tclfile.write("# Assign uniform beam dead load values (kip/inch)\n")
- for i in range(2, building.geometry['number of story']+2):
+ tclfile.write('# Assign uniform beam dead load values (kip/inch)\n')
+ for i in range(2, building.geometry['number of story'] + 2):
# Be cautious: convert the unit from lb/ft to kip/inch
- tclfile.write("set\tBeamDeadLoadFloor%i\t%f; \n"
- % (i, building.gravity_loads['beam dead load'][i-2]*0.001/12))
- tclfile.write("\n")
+ tclfile.write(
+ 'set\tBeamDeadLoadFloor%i\t%f; \n'
+ % (
+ i,
+ building.gravity_loads['beam dead load'][i - 2] * 0.001 / 12,
+ )
+ )
+ tclfile.write('\n')
# Assign the beam live load values
- tclfile.write("# Assign uniform beam live load values (kip/inch)\n")
- for i in range(2, building.geometry['number of story']+2):
+ tclfile.write('# Assign uniform beam live load values (kip/inch)\n')
+ for i in range(2, building.geometry['number of story'] + 2):
# Be cautious: convert the unit from lb/ft to kip/inch
- tclfile.write("set\tBeamLiveLoadFloor%i\t%f; \n"
- % (i, building.gravity_loads['beam live load'][i-2]*0.001/12))
- tclfile.write("\n")
+ tclfile.write(
+ 'set\tBeamLiveLoadFloor%i\t%f; \n'
+ % (
+ i,
+ building.gravity_loads['beam live load'][i - 2] * 0.001 / 12,
+ )
+ )
+ tclfile.write('\n')
# Assign the values for point dead load acting on leaning column
- tclfile.write("# Assign point dead load values on leaning column: kip\n")
- for i in range(2, building.geometry['number of story']+2):
- tclfile.write("set\tLeaningColumnDeadLoadFloor%i\t%f; \n"
- % (i, building.gravity_loads['leaning column dead load'][i-2]))
- tclfile.write("\n")
+ tclfile.write('# Assign point dead load values on leaning column: kip\n')
+ for i in range(2, building.geometry['number of story'] + 2):
+ tclfile.write(
+ 'set\tLeaningColumnDeadLoadFloor%i\t%f; \n'
+ % (i, building.gravity_loads['leaning column dead load'][i - 2])
+ )
+ tclfile.write('\n')
# Assign the values for point live load acting on leaning column
- tclfile.write("# Assign point live load values on leaning column (kip)\n")
- for i in range(2, building.geometry['number of story']+2):
- tclfile.write("set\tLeaningColumnLiveLoadFloor%i\t%f; \n"
- % (i, building.gravity_loads['leaning column live load'][i-2]))
- tclfile.write("\n")
+ tclfile.write(
+ '# Assign point live load values on leaning column (kip)\n'
+ )
+ for i in range(2, building.geometry['number of story'] + 2):
+ tclfile.write(
+ 'set\tLeaningColumnLiveLoadFloor%i\t%f; \n'
+ % (i, building.gravity_loads['leaning column live load'][i - 2])
+ )
+ tclfile.write('\n')
# Assign the lateral load values caused by earthquake
- tclfile.write("# Assign lateral load values caused by earthquake (kip)\n")
- tclfile.write("set\tLateralLoad\t[list")
+ tclfile.write(
+ '# Assign lateral load values caused by earthquake (kip)\n'
+ )
+ tclfile.write('set\tLateralLoad\t[list')
for i in range(building.geometry['number of story']):
- tclfile.write("\t%f"
- % (building.seismic_force_for_drift['lateral story force'][i]
- / building.geometry['number of X LFRS'] * ACCIDENTAL_TORSION))
- tclfile.write("];\n\n\n")
+ tclfile.write(
+ '\t%f'
+ % (
+ building.seismic_force_for_drift['lateral story force'][i]
+ / building.geometry['number of X LFRS']
+ * ACCIDENTAL_TORSION
+ )
+ )
+ tclfile.write('];\n\n\n')
# Define the load pattern in OpenSees
- tclfile.write("# Load combinations:\n"
- "# 101 Dead load only\n")
- tclfile.write("# 102 Live load only\n")
- tclfile.write("# 103 Earthquake load only\n")
- tclfile.write("# 104 Gravity and earthquake (for calculation of drift)\n")
- tclfile.write("pattern\tPlain\t104\tConstant\t{")
+ tclfile.write('# Load combinations:\n' '# 101 Dead load only\n')
+ tclfile.write('# 102 Live load only\n')
+ tclfile.write('# 103 Earthquake load only\n')
+ tclfile.write(
+ '# 104 Gravity and earthquake (for calculation of drift)\n'
+ )
+ tclfile.write('pattern\tPlain\t104\tConstant\t{')
# Gravity load on beam
- tclfile.write("# Define uniform loads on beams\n\n")
- for i in range(2, building.geometry['number of story']+2):
- tclfile.write("# Level%i\n" % i)
- for j in range(1, building.geometry['number of X bay']+1):
- tclfile.write("eleLoad\t-ele")
- tclfile.write("\t%i%i%i%i%i%i%i" % (2, j, i, 1, j+1, i, 1))
- tclfile.write("\t-type\t-beamUniform"
- "\t[expr -(1.2+0.2*%.2f)*$BeamDeadLoadFloor%i - "
- "0.5*$BeamLiveLoadFloor%i]; \n"
- % (building.elf_parameters['SDS'], i, i))
- tclfile.write("\n")
- tclfile.write("\n")
+ tclfile.write('# Define uniform loads on beams\n\n')
+ for i in range(2, building.geometry['number of story'] + 2):
+ tclfile.write('# Level%i\n' % i)
+ for j in range(1, building.geometry['number of X bay'] + 1):
+ tclfile.write('eleLoad\t-ele')
+ tclfile.write('\t%i%i%i%i%i%i%i' % (2, j, i, 1, j + 1, i, 1))
+ tclfile.write(
+ '\t-type\t-beamUniform'
+ '\t[expr -(1.2+0.2*%.2f)*$BeamDeadLoadFloor%i - '
+ '0.5*$BeamLiveLoadFloor%i]; \n'
+ % (building.elf_parameters['SDS'], i, i)
+ )
+ tclfile.write('\n')
+ tclfile.write('\n')
# Gravity load on leaning column
- tclfile.write("# Define point loads on leaning column\n")
- for i in range(2, building.geometry['number of story']+2):
- tclfile.write("load\t%i%i\t0\t[expr -(1.2+0.2*%.2f)*$LeaningColumnDeadLoadFloor%i -"
- "0.5*$LeaningColumnLiveLoadFloor%i]\t0;\n"
- % (building.geometry['number of X bay']+2, i,
- building.elf_parameters['SDS'], i, i))
- tclfile.write("\n")
+ tclfile.write('# Define point loads on leaning column\n')
+ for i in range(2, building.geometry['number of story'] + 2):
+ tclfile.write(
+ 'load\t%i%i\t0\t[expr -(1.2+0.2*%.2f)*$LeaningColumnDeadLoadFloor%i -'
+ '0.5*$LeaningColumnLiveLoadFloor%i]\t0;\n'
+ % (
+ building.geometry['number of X bay'] + 2,
+ i,
+ building.elf_parameters['SDS'],
+ i,
+ i,
+ )
+ )
+ tclfile.write('\n')
# Earthquake lateral load
- tclfile.write("# Define earthquake lateral loads\n")
- for i in range(2, building.geometry['number of story']+2):
- tclfile.write("load\t%i%i%i\t[lindex $LateralLoad %i]\t0.0\t0.0;\t# Level%i\n" % (1, i, 1, i-2, i))
- tclfile.write("\n\n}\n")
-
- tclfile.write("# puts \"Gravity and earthquake loads defined\"")
-
- def copy_baseline_files(self, building, for_drift_only, for_period_only):
- """
- Some .tcl files are fixed, i.e., no need to change for different OpenSees models.
+ tclfile.write('# Define earthquake lateral loads\n')
+ for i in range(2, building.geometry['number of story'] + 2):
+ tclfile.write(
+ 'load\t%i%i%i\t[lindex $LateralLoad %i]\t0.0\t0.0;\t# Level%i\n'
+ % (1, i, 1, i - 2, i)
+ )
+ tclfile.write('\n\n}\n')
+
+ tclfile.write('# puts "Gravity and earthquake loads defined"')
+
+ def copy_baseline_files(self, building, for_drift_only, for_period_only): # noqa: PLR6301
+ """Some .tcl files are fixed, i.e., no need to change for different OpenSees models.
Therefore, just copy these .tcl files from the baseline folder
- """
+ """ # noqa: D205, D400, D401
# define a list which includes all baseline files' names
- file_list = ['Database.csv', 'DefineFunctionsAndProcedures.tcl', 'DefineVariables.tcl',
- 'EigenValueAnalysis.tcl', 'Model.tcl', 'PerformLoadsAnalysis.tcl']
+ file_list = [
+ 'Database.csv',
+ 'DefineFunctionsAndProcedures.tcl',
+ 'DefineVariables.tcl',
+ 'EigenValueAnalysis.tcl',
+ 'Model.tcl',
+ 'PerformLoadsAnalysis.tcl',
+ ]
# Change the working directory to the folder where baseline .tcl files are stored
os.chdir(building.directory['baseline files elastic'])
# Create the 'building elastic model' directory if it does not already exist
- Path(building.directory['building elastic model']).mkdir(parents=True, exist_ok=True)
+ Path(building.directory['building elastic model']).mkdir(
+ parents=True, exist_ok=True
+ )
# Copy all baseline .tcl files to building model directory
for file in file_list:
- target_file = building.directory['building elastic model'] + "/" + file
+ target_file = building.directory['building elastic model'] + '/' + file
shutil.copyfile(file, target_file)
# Remember to change the working directory to building model directory
os.chdir(building.directory['building elastic model'])
# Revise "Model.tcl" file if we only want to obtain drifts
# Firstly read all content in "Model.tcl", then revise the content, and store it back to "Model.tcl"
- old_string = '[list EigenValue DeadLoad LiveLoad EarthquakeLoad GravityEarthquake]'
+ old_string = (
+ '[list EigenValue DeadLoad LiveLoad EarthquakeLoad GravityEarthquake]'
+ )
new_string_for_drift = '[list GravityEarthquake]'
if for_drift_only:
- with open('Model.tcl', 'r') as file:
+ with open('Model.tcl') as file: # noqa: FURB101, PLW1514, PTH123
content = file.read()
new_content = content.replace(old_string, new_string_for_drift)
- with open('Model.tcl', 'w') as file:
+ with open('Model.tcl', 'w') as file: # noqa: FURB103, PLW1514, PTH123
file.write(new_content)
# Revise "Model.tcl" file if we only want to obtain period
new_string_for_period = '[list EigenValue]'
if for_period_only:
- with open('Model.tcl', 'r') as file:
+ with open('Model.tcl') as file: # noqa: FURB101, PLW1514, PTH123
content = file.read()
new_content = content.replace(old_string, new_string_for_period)
- with open('Model.tcl', 'w') as file:
+ with open('Model.tcl', 'w') as file: # noqa: FURB103, PLW1514, PTH123
file.write(new_content)
-
- def run_OpenSees_program(self, building):
+ def run_OpenSees_program(self, building): # noqa: ARG002, D102, N802, PLR6301
# This method runs "Model.tcl" through the OpenSees program via a shell command.
- cmd = "OpenSees Model.tcl"
- subprocess.Popen(cmd, shell=True).wait()
+ cmd = 'OpenSees Model.tcl'
+ subprocess.Popen(cmd, shell=True).wait() # noqa: S602
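
The call above is silenced with S602 because of shell=True; if that ever needs tightening, one shell-free alternative (a sketch only, not part of this change set) would be:

    import subprocess

    # Sketch only: equivalent invocation without a shell; assumes the OpenSees
    # executable is on PATH, exactly as the original command does.
    subprocess.run(['OpenSees', 'Model.tcl'], check=True)
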
diff --git a/modules/createSAM/AutoSDA/elastic_output.py b/modules/createSAM/AutoSDA/elastic_output.py
index 162691886..c1bd3e300 100644
--- a/modules/createSAM/AutoSDA/elastic_output.py
+++ b/modules/createSAM/AutoSDA/elastic_output.py
@@ -1,4 +1,4 @@
-# This file is used to define the class of Building
+# This file is used to define the ElasticOutput class # noqa: CPY001, D100, INP001
# Developed by GUAN, XINGQUAN @ UCLA in Aug. 2018
# Updated on Sept. 28 2018
@@ -6,9 +6,10 @@
# Last revision: 09/2020
import os
-import numpy as np
from pathlib import Path
+import numpy as np
+
# #########################################################################
# Define a list of load sequence as global constant #
# #########################################################################
@@ -22,9 +23,8 @@
# #########################################################################
-class ElasticOutput(object):
- """
- This class is used to the following jobs:
+class ElasticOutput:
+ """This class is used to the following jobs:
(1) Read load from OpenSees output files
(2) Extract axial force, shear force, and moment for columns from matrix read in method (1)
(3) Extract shear force and moment for beams from matrix read in method (1)
@@ -36,7 +36,7 @@ class ElasticOutput(object):
Load combination #5: (0.9 - 0.2SDS)D + rho*E
Load combination #6: (0.9 - 0.2SDS)D - rho*E
(5) Determine governing load cases
- """
+ """ # noqa: D205, D400, D404
def __init__(self, building):
# Initialize attributes of elastic_output class
@@ -61,77 +61,114 @@ def __init__(self, building):
self.determine_dominate_load()
def read_raw_load(self, building):
- """
- This method is used to read the load demand for the structure subjected to certain type of load:
+ """This method is used to read the load demand for the structure subjected to certain type of load:
dead load, live load or earthquake load
:param building: user-defined class in "building_information.py" file
:return: a dictionary which contains load demands under three load scenarios
- """
+ """ # noqa: D205, D400, D401, D404
for load_type in LOAD_TYPE:
# Define the directory where the column force output is stored
- path_output = building.directory['building elastic model'] + "/" + load_type+"/GlobalColumnForces"
+ path_output = (
+ building.directory['building elastic model']
+ + '/'
+ + load_type
+ + '/GlobalColumnForces'
+ )
# Make the path if it does not exist
Path(path_output).mkdir(parents=True, exist_ok=True)
os.chdir(path_output)
# Initialize a matrix to store all column component forces: axial, shear and moment.
- column_load = np.zeros([building.geometry['number of story'], (building.geometry['number of X bay']+1)*6])
+ column_load = np.zeros(
+ [
+ building.geometry['number of story'],
+ (building.geometry['number of X bay'] + 1) * 6,
+ ]
+ )
# Read output txt files
- for story in range(0, building.geometry['number of story']):
+ for story in range(building.geometry['number of story']):
# Define the output txt file name
- file_name = 'GlobalColumnForcesStory' + str(story+1) + '.out'
+ file_name = 'GlobalColumnForcesStory' + str(story + 1) + '.out'
read_data = np.loadtxt(file_name)
column_load[story, :] = read_data[-1, 1:]
# Store column forces into different load cases
self.raw_column_load[load_type] = column_load
# Define the directory where the beam force is stored
- path_output = building.directory['building elastic model'] + "/" + load_type + "/GlobalBeamForces"
+ path_output = (
+ building.directory['building elastic model']
+ + '/'
+ + load_type
+ + '/GlobalBeamForces'
+ )
os.chdir(path_output)
# Initialize a matrix to store all beam component forces: axial, shear and moment
- beam_load = np.zeros([building.geometry['number of story'], building.geometry['number of X bay']*6])
+ beam_load = np.zeros(
+ [
+ building.geometry['number of story'],
+ building.geometry['number of X bay'] * 6,
+ ]
+ )
# Read beam load from output txt files
- for story in range(0, building.geometry['number of story']):
+ for story in range(building.geometry['number of story']):
# Define output txt file name
- file_name = 'GlobalXBeamForcesLevel' + str(story+2) + '.out'
+ file_name = 'GlobalXBeamForcesLevel' + str(story + 2) + '.out'
read_data = np.loadtxt(file_name)
beam_load[story, :] = read_data[-1, 1:]
# Store beam forces based on load scenario
self.raw_beam_load[load_type] = beam_load
- def extract_column_load(self):
+ def extract_column_load(self): # noqa: D102
# Extract axial force, shear force, and moment from the variable obtained in the previous step
# Forces at both ends of columns are stored
- N = self.raw_column_load['DeadLoad'].shape[1]
- axial_index = range(1, N, 3) # In column matrix, axial force is in column #2, 5, 8, ...
- shear_index = range(0, N, 3) # In column matrix, shear force is in column #1, 4, 7, ...
- moment_index = range(2, N, 3) # In column matrix, moment is in column #3, 6, 9, ...
+ N = self.raw_column_load['DeadLoad'].shape[1] # noqa: N806
+ axial_index = range(
+ 1, N, 3
+ ) # In column matrix, axial force is in column #2, 5, 8, ...
+ shear_index = range(
+ 0, N, 3
+ ) # In column matrix, shear force is in column #1, 4, 7, ...
+ moment_index = range(
+ 2, N, 3
+ ) # In column matrix, moment is in column #3, 6, 9, ...
for load_type in LOAD_TYPE:
axial_force = self.raw_column_load[load_type][:, axial_index]
shear_force = self.raw_column_load[load_type][:, shear_index]
moment = self.raw_column_load[load_type][:, moment_index]
if load_type == 'DeadLoad':
- self.dead_load_case = {'column axial': axial_force,
- 'column shear': shear_force,
- 'column moment': moment}
+ self.dead_load_case = {
+ 'column axial': axial_force,
+ 'column shear': shear_force,
+ 'column moment': moment,
+ }
elif load_type == 'LiveLoad':
- self.live_load_case = {'column axial': axial_force,
- 'column shear': shear_force,
- 'column moment': moment}
+ self.live_load_case = {
+ 'column axial': axial_force,
+ 'column shear': shear_force,
+ 'column moment': moment,
+ }
elif load_type == 'EarthquakeLoad':
- self.earthquake_load_case = {'column axial': axial_force,
- 'column shear': shear_force,
- 'column moment': moment}
+ self.earthquake_load_case = {
+ 'column axial': axial_force,
+ 'column shear': shear_force,
+ 'column moment': moment,
+ }
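
The strided ranges above pick one force component out of each per-node triplet in the .out matrices; a tiny sketch of that slicing on a dummy array (shape assumed for illustration):

    import numpy as np

    # Sketch only: pulling every third column, as extract_column_load does.
    raw = np.arange(12).reshape(1, 12)  # assumed: 1 story, 2 columns x 6 force entries each
    shear = raw[:, range(0, 12, 3)]     # columns 0, 3, 6, 9
    axial = raw[:, range(1, 12, 3)]     # columns 1, 4, 7, 10
    moment = raw[:, range(2, 12, 3)]    # columns 2, 5, 8, 11 -> [[ 2  5  8 11]]
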
- def extract_beam_load(self):
+ def extract_beam_load(self): # noqa: D102
# Extract shear and moment from variables obtained in previous step
# Forces at both ends of beams are stored
- N = self.raw_beam_load['DeadLoad'].shape[1]
- axial_index = range(0, N, 3) # In beam matrix, axial force is in column #1, 4, 7, ...
- shear_index = range(1, N, 3) # In beam matrix, shear force is in column #2, 5, 8, ...
- moment_index = range(2, N, 3) # In beam matrix, moment is in column #3, 6, 9, ...
+ N = self.raw_beam_load['DeadLoad'].shape[1] # noqa: N806
+ axial_index = range(
+ 0, N, 3
+ ) # In beam matrix, axial force is in column #1, 4, 7, ...
+ shear_index = range(
+ 1, N, 3
+ ) # In beam matrix, shear force is in column #2, 5, 8, ...
+ moment_index = range(
+ 2, N, 3
+ ) # In beam matrix, moment is in column #3, 6, 9, ...
# Obtain the forces and store them into existing dictionary
for load_type in LOAD_TYPE:
axial_force = self.raw_beam_load[load_type][:, axial_index]
@@ -150,88 +187,122 @@ def extract_beam_load(self):
self.earthquake_load_case['beam shear'] = shear_force
self.earthquake_load_case['beam moment'] = moment
- def perform_load_combination(self, building):
- """
- This method is used to perform the load combinations, which will be used to extract the dominate load.
+ def perform_load_combination(self, building): # noqa: C901
+ """This method is used to perform the load combinations, which will be used to extract the dominate load.
There are six load combinations in total according to ASCE 7-10.
:param building: user-defined class in "building_information.py" file
:return: six dictionaries which individually represents a single load combination result.
- """
+ """ # noqa: D205, D401, D404
# Load combination 1: 1.4*D
for force in self.dead_load_case:
- self.load_combination_1[force] = 1.4*self.dead_load_case[force]
+ self.load_combination_1[force] = 1.4 * self.dead_load_case[force]
# Load combination 2: 1.2*D + 1.6*L
for force in self.dead_load_case:
- self.load_combination_2[force] = 1.2*self.dead_load_case[force] + 1.6*self.live_load_case[force]
+ self.load_combination_2[force] = (
+ 1.2 * self.dead_load_case[force] + 1.6 * self.live_load_case[force]
+ )
# Load combination 3: (1.2*D + 0.2*SDS) + 1.0(0.5)*L + rho*E
# For load combinations 3 through 6, omega is used in place of rho for the column axial force
- SDS = building.elf_parameters['SDS']
+ SDS = building.elf_parameters['SDS'] # noqa: N806
rho = 1.0
omega = 3.0
for force in self.dead_load_case:
if force != 'column axial':
- self.load_combination_3[force] = (1.2+0.2*SDS)*self.dead_load_case[force] \
- + 0.5*self.live_load_case[force] \
- + rho*self.earthquake_load_case[force]
+ self.load_combination_3[force] = (
+ (1.2 + 0.2 * SDS) * self.dead_load_case[force]
+ + 0.5 * self.live_load_case[force]
+ + rho * self.earthquake_load_case[force]
+ )
else:
- self.load_combination_3[force] = (1.2+0.2*SDS)*self.dead_load_case[force] \
- + 0.5*self.live_load_case[force] \
- + omega*self.earthquake_load_case[force]
+ self.load_combination_3[force] = (
+ (1.2 + 0.2 * SDS) * self.dead_load_case[force]
+ + 0.5 * self.live_load_case[force]
+ + omega * self.earthquake_load_case[force]
+ )
# Load combination 4: (1.2*D + 0.2*SDS) + 1.0(0.5)*L - rho*E
for force in self.dead_load_case:
if force != 'column axial':
- self.load_combination_4[force] = (1.2+0.2*SDS)*self.dead_load_case[force] \
- + 0.5*self.live_load_case[force] \
- - rho*self.earthquake_load_case[force]
+ self.load_combination_4[force] = (
+ (1.2 + 0.2 * SDS) * self.dead_load_case[force]
+ + 0.5 * self.live_load_case[force]
+ - rho * self.earthquake_load_case[force]
+ )
else:
- self.load_combination_4[force] = (1.2+0.2*SDS)*self.dead_load_case[force] \
- + 0.5*self.live_load_case[force] \
- - omega*self.earthquake_load_case[force]
+ self.load_combination_4[force] = (
+ (1.2 + 0.2 * SDS) * self.dead_load_case[force]
+ + 0.5 * self.live_load_case[force]
+ - omega * self.earthquake_load_case[force]
+ )
# Load combination 5: (0.9 - 0.2*SDS) + rho * E
for force in self.dead_load_case:
if force != 'column axial':
- self.load_combination_5[force] = (0.9-0.2*SDS)*self.dead_load_case[force] \
- + rho*self.earthquake_load_case[force]
+ self.load_combination_5[force] = (
+ 0.9 - 0.2 * SDS
+ ) * self.dead_load_case[force] + rho * self.earthquake_load_case[
+ force
+ ]
else:
- self.load_combination_5[force] = (0.9-0.2*SDS)*self.dead_load_case[force] \
- + omega*self.earthquake_load_case[force]
+ self.load_combination_5[force] = (
+ 0.9 - 0.2 * SDS
+ ) * self.dead_load_case[force] + omega * self.earthquake_load_case[
+ force
+ ]
# Load combination 6: (0.9 - 0.2*SDS) - rho * E
for force in self.dead_load_case:
if force != 'column axial':
- self.load_combination_6[force] = (0.9-0.2*SDS)*self.dead_load_case[force] \
- - rho*self.earthquake_load_case[force]
+ self.load_combination_6[force] = (
+ 0.9 - 0.2 * SDS
+ ) * self.dead_load_case[force] - rho * self.earthquake_load_case[
+ force
+ ]
else:
- self.load_combination_6[force] = (0.9-0.2*SDS)*self.dead_load_case[force] \
- - omega*self.earthquake_load_case[force]
+ self.load_combination_6[force] = (
+ 0.9 - 0.2 * SDS
+ ) * self.dead_load_case[force] - omega * self.earthquake_load_case[
+ force
+ ]
def determine_dominate_load(self):
- """
- This method is used to determine the governing load for beam and column components.
+ """This method is used to determine the governing load for beam and column components.
:return: a dictionary which includes all six keys and associated matrices.
six keys: column axial, column shear, column moment, beam axial, beam shear, beam moment
- """
+ """ # noqa: D205, D400, D401, D404
dominate_load = {}
# Find the maximum load demand among six load cases
- for force in self.load_combination_1.keys():
- M, N = self.load_combination_1[force].shape
+ for force in self.load_combination_1.keys(): # noqa: SIM118
+ M, N = self.load_combination_1[force].shape # noqa: N806
dominate_load[force] = np.zeros([M, N])
for m in range(M):
for n in range(N):
# The demand might be either positive or negative, try to find the one with maximum absolute value
- temp_1 = np.max([self.load_combination_1[force][m, n], self.load_combination_2[force][m, n],
- self.load_combination_3[force][m, n], self.load_combination_4[force][m, n],
- self.load_combination_5[force][m, n], self.load_combination_6[force][m, n]])
+ temp_1 = np.max(
+ [
+ self.load_combination_1[force][m, n],
+ self.load_combination_2[force][m, n],
+ self.load_combination_3[force][m, n],
+ self.load_combination_4[force][m, n],
+ self.load_combination_5[force][m, n],
+ self.load_combination_6[force][m, n],
+ ]
+ )
- temp_2 = np.min([self.load_combination_1[force][m, n], self.load_combination_2[force][m, n],
- self.load_combination_3[force][m, n], self.load_combination_4[force][m, n],
- self.load_combination_5[force][m, n], self.load_combination_6[force][m, n]])
+ temp_2 = np.min(
+ [
+ self.load_combination_1[force][m, n],
+ self.load_combination_2[force][m, n],
+ self.load_combination_3[force][m, n],
+ self.load_combination_4[force][m, n],
+ self.load_combination_5[force][m, n],
+ self.load_combination_6[force][m, n],
+ ]
+ )
- if (abs(temp_1) > abs(temp_2)):
+ if abs(temp_1) > abs(temp_2):
dominate_load[force][m, n] = temp_1
else:
dominate_load[force][m, n] = temp_2
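
For reference, the entry-by-entry selection in determine_dominate_load above (keep whichever of the six combinations has the largest absolute value, with its sign) can be sketched as a vectorized numpy reduction. The 2x2 arrays below are made-up placeholders, not results produced by the framework:

import numpy as np

# Six hypothetical demand matrices standing in for load_combination_1 .. load_combination_6[force].
combos = [
    np.array([[10.0, -3.0], [4.0, 8.0]]),
    np.array([[-12.0, 2.0], [5.0, -9.0]]),
    np.array([[6.0, 1.0], [-7.0, 3.0]]),
    np.array([[2.0, -4.0], [1.0, 2.0]]),
    np.array([[-1.0, 5.0], [0.0, -2.0]]),
    np.array([[3.0, 0.5], [2.0, 1.0]]),
]
stacked = np.stack(combos)                  # shape (6, M, N)
pick = np.argmax(np.abs(stacked), axis=0)   # which combination governs each entry
dominant = np.take_along_axis(stacked, pick[None, ...], axis=0)[0]  # signed governing demand
print(dominant)   # e.g. entry (0, 0) is -12.0, taken from the second combination
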
diff --git a/modules/createSAM/AutoSDA/global_variables.py b/modules/createSAM/AutoSDA/global_variables.py
index c4c23ed3b..a7a0637ca 100644
--- a/modules/createSAM/AutoSDA/global_variables.py
+++ b/modules/createSAM/AutoSDA/global_variables.py
@@ -1,4 +1,4 @@
-# This file is used to declare all global constants.
+# This file is used to declare all global constants. # noqa: CPY001, D100, INP001
# All user input parameters are summarized here.
# Developed by GUAN, XINGQUAN @ UCLA in Feb 2019
# Be cautious with line 19 - 25:
@@ -9,9 +9,8 @@
# Last revision: 09/2020
import os
-import pandas as pd
-import pathlib
+import pandas as pd
from steel_material import SteelMaterial
##########################################################################
@@ -21,10 +20,12 @@
# Variables defined in this section is used in "seismic_design.py" file
# Define the current utilized steel type:
-steel = SteelMaterial(yield_stress=50, ultimate_stress=65, elastic_modulus=29000, Ry_value=1.1) # Unit: ksi
+steel = SteelMaterial(
+ yield_stress=50, ultimate_stress=65, elastic_modulus=29000, Ry_value=1.1
+) # Unit: ksi
# The path where THIS file is located is the base directory
-baseDirectory = os.path.dirname(os.path.realpath(__file__))
+baseDirectory = os.path.dirname(os.path.realpath(__file__)) # noqa: PTH120, N816
##########################################################################
# User Defined Ratios Involved in Design #
@@ -58,7 +59,7 @@
# If the accidental torsion is considered -> one frame will take 0.55 of total lateral force
# since the center is assumed to be deviated from its actual location by 5% of the building dimension
# Then the ACCIDENTAL_TORSION = 0.55/0.50 = 1.1
-ACCIDENTAL_TORSION = 0.55/0.50
+ACCIDENTAL_TORSION = 0.55 / 0.50
# Define a boolean variable to determine whether the Section 12.8.6.2 is enforced or not
# Section 12.8.6.2:
@@ -99,4 +100,4 @@
# Array to store the random variables, if any #
# #########################################################################
-RV_ARRAY = {}
\ No newline at end of file
+RV_ARRAY = {}
diff --git a/modules/createSAM/AutoSDA/help_functions.py b/modules/createSAM/AutoSDA/help_functions.py
index 5b2b4fb27..6fbdb23c6 100644
--- a/modules/createSAM/AutoSDA/help_functions.py
+++ b/modules/createSAM/AutoSDA/help_functions.py
@@ -1,107 +1,104 @@
-# This file is used to define helpful functions that are used in either main program or user defined class
+# This file is used to define helpful functions that are used in either main program or user defined class # noqa: CPY001, D100, INP001
# Developed by GUAN, XINGQUAN @ UCLA in June 2018
# Updated in Sept. 2018
-import numpy as np
import re
import sys
+import numpy as np
from global_variables import SECTION_DATABASE
-def determine_Fa_coefficient(site_class, Ss):
- """
- This function is used to determine Fa coefficient, which is based on ASCE 7-10 Table 11.4-1
+def determine_Fa_coefficient(site_class, Ss): # noqa: C901, N802, N803
+ """This function is used to determine Fa coefficient, which is based on ASCE 7-10 Table 11.4-1
:param Ss: a scalar given in building class
:param site_class: a string: 'A', 'B', 'C', 'D', or 'E' given in building information
:return: a scalar which is Fa coefficient
- """
+ """ # noqa: D205, D400, D401, D404
if site_class == 'A':
- Fa = 0.8
+ Fa = 0.8 # noqa: N806
elif site_class == 'B':
- Fa = 1.0
+ Fa = 1.0 # noqa: N806
elif site_class == 'C':
- if Ss <= 0.5:
- Fa = 1.2
+ if Ss <= 0.5: # noqa: PLR2004
+ Fa = 1.2 # noqa: N806
elif Ss <= 1.0:
- Fa = 1.2 - 0.4*(Ss - 0.5)
+ Fa = 1.2 - 0.4 * (Ss - 0.5) # noqa: N806
else:
- Fa = 1.0
+ Fa = 1.0 # noqa: N806
elif site_class == 'D':
- if Ss <= 0.25:
- Fa = 1.6
- elif Ss <= 0.75:
- Fa = 1.6 - 0.8*(Ss - 0.25)
- elif Ss <= 1.25:
- Fa = 1.2 - 0.4*(Ss - 0.75)
+ if Ss <= 0.25: # noqa: PLR2004
+ Fa = 1.6 # noqa: N806
+ elif Ss <= 0.75: # noqa: PLR2004
+ Fa = 1.6 - 0.8 * (Ss - 0.25) # noqa: N806
+ elif Ss <= 1.25: # noqa: PLR2004
+ Fa = 1.2 - 0.4 * (Ss - 0.75) # noqa: N806
else:
- Fa = 1.0
+ Fa = 1.0 # noqa: N806
elif site_class == 'E':
- if Ss <= 0.25:
- Fa = 2.5
- elif Ss <= 0.5:
- Fa = 2.5 - 3.2*(Ss - 0.25)
- elif Ss <= 0.75:
- Fa = 1.7 - 2.0*(Ss - 0.5)
+ if Ss <= 0.25: # noqa: PLR2004
+ Fa = 2.5 # noqa: N806
+ elif Ss <= 0.5: # noqa: PLR2004
+ Fa = 2.5 - 3.2 * (Ss - 0.25) # noqa: N806
+ elif Ss <= 0.75: # noqa: PLR2004
+ Fa = 1.7 - 2.0 * (Ss - 0.5) # noqa: N806
elif Ss <= 1.0:
- Fa = 1.2 - 1.2*(Ss - 0.75)
+ Fa = 1.2 - 1.2 * (Ss - 0.75) # noqa: N806
else:
- Fa = 0.9
+ Fa = 0.9 # noqa: N806
else:
- Fa = None
- print("Site class is entered with an invalid value")
+ Fa = None # noqa: N806
+ print('Site class is entered with an invalid value') # noqa: T201
return Fa
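
As a quick sanity check of the Table 11.4-1 lookup above: the site class 'D' branch interpolates linearly for Ss between 0.25 and 0.75. A small illustrative call with arbitrary values, assuming the function above is in scope:

# Ss = 0.5 lands on the interpolated branch: Fa = 1.6 - 0.8 * (0.5 - 0.25) = 1.4
print(determine_Fa_coefficient('D', 0.5))   # 1.4
print(determine_Fa_coefficient('B', 1.2))   # 1.0 (site class 'B' is constant)
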
-def determine_Fv_coefficient(site_class, S1):
- """
- This function is used to determine Fv coefficient, which is based on ASCE 7-10 Table 11.4-2
+def determine_Fv_coefficient(site_class, S1): # noqa: C901, N802, N803
+ """This function is used to determine Fv coefficient, which is based on ASCE 7-10 Table 11.4-2
:param S1: a scalar given in building class
:param site_class: a string 'A', 'B', 'C', 'D' or 'E' given in building class
:return: a scalar which is Fv coefficient
- """
+ """ # noqa: D205, D400, D401, D404
if site_class == 'A':
- Fv = 0.8
+ Fv = 0.8 # noqa: N806
elif site_class == 'B':
- Fv = 1.0
+ Fv = 1.0 # noqa: N806
elif site_class == 'C':
- if S1 <= 0.1:
- Fv = 1.7
- elif S1 <= 0.5:
- Fv = 1.7 - 1.0*(S1 - 0.1)
+ if S1 <= 0.1: # noqa: PLR2004
+ Fv = 1.7 # noqa: N806
+ elif S1 <= 0.5: # noqa: PLR2004
+ Fv = 1.7 - 1.0 * (S1 - 0.1) # noqa: N806
else:
- Fv = 1.3
+ Fv = 1.3 # noqa: N806
elif site_class == 'D':
- if S1 <= 0.1:
- Fv = 2.4
- elif S1 <= 0.2:
- Fv = 2.4 - 4*(S1 - 0.1)
- elif S1 <= 0.4:
- Fv = 2.0 - 2*(S1 - 0.2)
- elif S1 <= 0.5:
- Fv = 1.6 - 1*(S1 - 0.4)
+ if S1 <= 0.1: # noqa: PLR2004
+ Fv = 2.4 # noqa: N806
+ elif S1 <= 0.2: # noqa: PLR2004
+ Fv = 2.4 - 4 * (S1 - 0.1) # noqa: N806
+ elif S1 <= 0.4: # noqa: PLR2004
+ Fv = 2.0 - 2 * (S1 - 0.2) # noqa: N806
+ elif S1 <= 0.5: # noqa: PLR2004
+ Fv = 1.6 - 1 * (S1 - 0.4) # noqa: N806
else:
- Fv = 1.5
+ Fv = 1.5 # noqa: N806
elif site_class == 'E':
- if S1 <= 0.1:
- Fv = 3.5
- elif S1 <= 0.2:
- Fv = 3.5 - 3*(S1 - 0.1)
- elif S1 <= 0.4:
- Fv = 3.2 - 4*(S1 - 0.2)
+ if S1 <= 0.1: # noqa: PLR2004
+ Fv = 3.5 # noqa: N806
+ elif S1 <= 0.2: # noqa: PLR2004
+ Fv = 3.5 - 3 * (S1 - 0.1) # noqa: N806
+ elif S1 <= 0.4: # noqa: PLR2004
+ Fv = 3.2 - 4 * (S1 - 0.2) # noqa: N806
else:
- Fv = 2.4
+ Fv = 2.4 # noqa: N806
else:
- Fv = None
- print("Site class is entered with an invalid value")
+ Fv = None # noqa: N806
+ print('Site class is entered with an invalid value') # noqa: T201
return Fv
-def calculate_DBE_acceleration(Ss, S1, Fa, Fv):
- """
- This function is used to calculate design spectrum acceleration parameters,
+def calculate_DBE_acceleration(Ss, S1, Fa, Fv): # noqa: N802, N803
+ """This function is used to calculate design spectrum acceleration parameters,
which is based on ASCE 7-10 Section 11.4
Note: All notations for these variables can be found in ASCE 7-10.
:param Ss: a scalar given in building information (problem statement)
@@ -109,40 +106,42 @@ def calculate_DBE_acceleration(Ss, S1, Fa, Fv):
:param Fa: a scalar computed from determine_Fa_coefficient
:param Fv: a scalar computed from determine_Fv_coefficient
:return: SMS, SM1, SDS, SD1: four scalars which are required for lateral force calculation
- """
- SMS = Fa * Ss
- SM1 = Fv * S1
- SDS = 2/3 * SMS
- SD1 = 2/3 * SM1
+ """ # noqa: D205, D400, D401, D404
+ SMS = Fa * Ss # noqa: N806
+ SM1 = Fv * S1 # noqa: N806
+ SDS = 2 / 3 * SMS # noqa: N806
+ SD1 = 2 / 3 * SM1 # noqa: N806
return SMS, SM1, SDS, SD1
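
A worked example of the Section 11.4 parameters above, using arbitrary inputs (site class 'D', Ss = 1.5 g, S1 = 0.6 g) rather than values from any building in the repository:

Fa = determine_Fa_coefficient('D', 1.5)   # 1.0  (Ss > 1.25 branch)
Fv = determine_Fv_coefficient('D', 0.6)   # 1.5  (S1 > 0.5 branch)
SMS, SM1, SDS, SD1 = calculate_DBE_acceleration(1.5, 0.6, Fa, Fv)
print(SMS, SM1, SDS, SD1)                 # 1.5, 0.9, 1.0, 0.6
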
-def determine_Cu_coefficient(SD1):
- """
- This function is used to determine Cu coefficient, which is based on ASCE 7-10 Table 12.8-1
+def determine_Cu_coefficient(SD1): # noqa: N802, N803
+ """This function is used to determine Cu coefficient, which is based on ASCE 7-10 Table 12.8-1
Note: All notations for these variables can be found in ASCE 7-10.
- :param SD1: a scalar calculated from funtion determine_DBE_acceleration
+ :param SD1: a scalar calculated from function determine_DBE_acceleration
:return: Cu: a scalar
- """
- if SD1 <= 0.1:
- Cu = 1.7
- elif SD1 <= 0.15:
- Cu = 1.7 - 2 * (SD1 - 0.1)
- elif SD1 <= 0.2:
- Cu = 1.6 - 2 * (SD1 - 0.15)
- elif SD1 <= 0.3:
- Cu = 1.5 - 1 * (SD1 - 0.2)
- elif SD1 <= 0.4:
- Cu = 1.4
+ """ # noqa: D205, D400, D401, D404
+ if SD1 <= 0.1: # noqa: PLR2004
+ Cu = 1.7 # noqa: N806
+ elif SD1 <= 0.15: # noqa: PLR2004
+ Cu = 1.7 - 2 * (SD1 - 0.1) # noqa: N806
+ elif SD1 <= 0.2: # noqa: PLR2004
+ Cu = 1.6 - 2 * (SD1 - 0.15) # noqa: N806
+ elif SD1 <= 0.3: # noqa: PLR2004
+ Cu = 1.5 - 1 * (SD1 - 0.2) # noqa: N806
+ elif SD1 <= 0.4: # noqa: PLR2004
+ Cu = 1.4 # noqa: N806
else:
- Cu = 1.4
+ Cu = 1.4 # noqa: N806
return Cu
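
Likewise for the Table 12.8-1 lookup: with the SD1 = 0.6 from the example above, Cu falls in the final branch, while an SD1 of 0.25 interpolates:

print(determine_Cu_coefficient(0.6))    # 1.4
print(determine_Cu_coefficient(0.25))   # 1.5 - 1 * (0.25 - 0.2) = 1.45
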
-def determine_floor_height(number_of_story, first_story_height, typical_story_height):
- """
- This function is used to calculate the height for each floor level: from ground floor to roof
+def determine_floor_height(
+ number_of_story,
+ first_story_height,
+ typical_story_height,
+):
+ """This function is used to calculate the height for each floor level: from ground floor to roof
Obviously, the height for ground floor level is zero
Unit: foot (ft)
:param number_of_story: a scalar which describes the number of stories for a certain building
@@ -150,22 +149,23 @@ def determine_floor_height(number_of_story, first_story_height, typical_story_he
:param typical_story_height: a scalar which describes the typical story height for other stories
except 1st story
:return: an array which includes the height for each floor level (ground to roof)
- """
+ """ # noqa: D205, D400, D401, D404
floor_height = np.zeros([number_of_story + 1, 1])
for level in range(1, number_of_story + 2):
if level == 1:
floor_height[level - 1] = 0
- elif level == 2:
+ elif level == 2: # noqa: PLR2004
floor_height[level - 1] = 0 + first_story_height
else:
- floor_height[level - 1] = first_story_height + typical_story_height * (level - 2)
+ floor_height[level - 1] = first_story_height + typical_story_height * (
+ level - 2
+ )
return floor_height
-def calculate_Cs_coefficient(SDS, SD1, S1, T, TL, R, Ie):
- """
- This function is used to calculate the seismic response coefficient based on ASCE 7-10 Section 12.8.1
+def calculate_Cs_coefficient(SDS, SD1, S1, T, TL, R, Ie): # noqa: N802, N803
+ """This function is used to calculate the seismic response coefficient based on ASCE 7-10 Section 12.8.1
Unit: kips, g (gravity constant), second
Note: All notations for these variables can be found in ASCE 7-10.
:param SDS: a scalar determined using Equation 11.4-3; output from "calculate_DBE_acceleration" function
@@ -177,38 +177,38 @@ def calculate_Cs_coefficient(SDS, SD1, S1, T, TL, R, Ie):
:param R: a scalar given in building information
:param Ie: a scalar given in building information
:return: Cs: seismic response coefficient; determined using Equations 12.8-2 to 12.8-6
- """
+ """ # noqa: D205, D400, D401, D404
# Equation 12.8-2
- Cs_initial = SDS/(R/Ie)
+ Cs_initial = SDS / (R / Ie) # noqa: N806
# Equation 12.8-3 or 12.8-4, Cs coefficient should not exceed the following value
if T <= TL:
- Cs_upper = SD1/(T * (R/Ie))
+ Cs_upper = SD1 / (T * (R / Ie)) # noqa: N806
else:
- Cs_upper = SD1 * TL/(T ** 2 * (R/Ie))
+ Cs_upper = SD1 * TL / (T**2 * (R / Ie)) # noqa: N806
# Equation 12.8-2 results shall be smaller than upper bound of Cs
if Cs_initial <= Cs_upper:
- Cs = Cs_initial
+ Cs = Cs_initial # noqa: N806
else:
- Cs = Cs_upper
+ Cs = Cs_upper # noqa: N806
# Equation 12.8-5, Cs shall not be less than the following value
- Cs_lower_1 = np.max([0.044*SDS*Ie, 0.01])
+ Cs_lower_1 = np.max([0.044 * SDS * Ie, 0.01]) # noqa: N806
# Compare the Cs value with lower bound
if Cs >= Cs_lower_1:
pass
else:
- Cs = Cs_lower_1
+ Cs = Cs_lower_1 # noqa: N806
# Equation 12.8-6. if S1 is equal to or greater than 0.6g, Cs shall not be less than the following value
- if S1 >= 0.6:
- Cs_lower_2 = 0.5*S1/(R/Ie)
+ if S1 >= 0.6: # noqa: PLR2004
+ Cs_lower_2 = 0.5 * S1 / (R / Ie) # noqa: N806
if Cs >= Cs_lower_2:
pass
else:
- Cs = Cs_lower_2
+ Cs = Cs_lower_2 # noqa: N806
else:
pass
@@ -216,75 +216,75 @@ def calculate_Cs_coefficient(SDS, SD1, S1, T, TL, R, Ie):
def determine_k_coeficient(period):
- """
- This function is used to determine the coefficient k based on ASCE 7-10 Section 12.8.3
+ """This function is used to determine the coefficient k based on ASCE 7-10 Section 12.8.3
:param period: building period;
:return: k: a scalar will be used in Equation 12.8-12 in ASCE 7-10
- """
- if period <= 0.5:
+ """ # noqa: D205, D400, D401, D404
+ if period <= 0.5: # noqa: PLR2004
k = 1
- elif period >= 2.5:
+ elif period >= 2.5: # noqa: PLR2004
k = 2
else:
- k = 1 + 0.5*(period - 0.5)
+ k = 1 + 0.5 * (period - 0.5)
return k
def calculate_seismic_force(base_shear, floor_weight, floor_height, k):
- """
- This function is used to calculate the seismic story force for each floor level
+ """This function is used to calculate the seismic story force for each floor level
Unit: kip, foot
:param base_shear: a scalar, total base shear for the building
:param floor_weight: a vector with a length of number_of_story
:param floor_height: a vector with a length of (number_of_story+1)
:param k: a scalar given by "determine_k_coefficient"
:return: Fx: a vector describes the lateral force for each floor level
- """
+ """ # noqa: D205, D400, D401, D404
# Calculate the product of floor weight and floor height
# Note that floor height includes ground floor, which will not be used in the actual calculation.
# Ground floor is stored here for completeness.
- weight_floor_height = floor_weight * floor_height[1:, 0]**k
+ weight_floor_height = floor_weight * floor_height[1:, 0] ** k
# Equation 12.8-12 in ASCE 7-10
- Cvx = weight_floor_height/np.sum(weight_floor_height)
+ Cvx = weight_floor_height / np.sum(weight_floor_height) # noqa: N806
# Calculate the seismic story force
seismic_force = Cvx * base_shear
# Calculate the shear force for each story: from top story to bottom story
story_shear = np.zeros([len(floor_weight), 1])
- for story in range(len(floor_weight)-1, -1, -1):
+ for story in range(len(floor_weight) - 1, -1, -1):
story_shear[story] = np.sum(seismic_force[story:])
return seismic_force, story_shear
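
Putting the helpers above together, a minimal end-to-end sketch of the equivalent lateral force calculation: compute Cs, multiply by the total seismic weight to get the base shear, then distribute it over the floors. Every number below is a placeholder for a hypothetical three-story frame (units: kip, ft, sec):

import numpy as np

SDS, SD1, S1 = 1.0, 0.6, 0.6          # placeholder spectral parameters
T, TL, R, Ie = 0.8, 8.0, 8.0, 1.0     # placeholder period and system factors

Cs = calculate_Cs_coefficient(SDS, SD1, S1, T, TL, R, Ie)
k = determine_k_coeficient(T)          # note the source spelling of this helper

floor_weight = np.array([1000.0, 1000.0, 800.0])       # one entry per story
floor_height = determine_floor_height(3, 13.0, 13.0)   # ground floor plus 3 levels
base_shear = Cs * np.sum(floor_weight)

seismic_force, story_shear = calculate_seismic_force(
    base_shear, floor_weight, floor_height, k
)
print(Cs, base_shear)     # 0.09375, 262.5 for these placeholder inputs
print(seismic_force)      # lateral force at each floor level
print(story_shear)        # cumulative story shear, top story down
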
def find_section_candidate(target_depth, section_database):
- """
- This function is used to find all possible section sizes that satisfies the user-specified depth.
+ """This function is used to find all possible section sizes that satisfy the user-specified depth.
:param target_depth: a string which defines the depth of columns or beams, e.g. W14
:param section_database: a dataframe read from SMF_Section_Property.csv in Library folder
:return: a pandas Series of strings which denotes all possible sizes based on depth
- """
+ """ # noqa: D205, D400, D401, D404
candidate_index = []
for indx in section_database['index']:
match = re.search(target_depth, section_database.loc[indx, 'section size'])
if match:
candidate_index.append(indx)
candidates = section_database.loc[candidate_index, 'section size']
- return candidates
+ return candidates # noqa: RET504
def search_member_size(target_name, target_quantity, candidate, section_database):
- """
- This function is used to find an appropriate member size based on a certain section property
+ """This function is used to find an appropriate member size based on a certain section property
:param target_name: a string which labels the name of target quantity.
Options for this string are headers of SMF_Section_Property.csv
:param target_quantity: a scalar value of the section property, such as the value of Ix or Zx
:param candidate: a list of strings which defines potential section sizes for beams or columns
:param section_database: a dataframe read from "Library" SMF_Section_Property.csv
:return: a string which states the member sizes (e.g., W14X730)
- """
+ """ # noqa: D205, D400, D401, D404
# Find the index for the candidate
- candidate_index = list(section_database.loc[section_database['section size'].isin(candidate), 'index'])
+ candidate_index = list(
+ section_database.loc[
+ section_database['section size'].isin(candidate), 'index'
+ ]
+ )
# Calculate the difference between target moment of inertia and moment of inertia of each section
difference = section_database.loc[candidate_index, target_name] - target_quantity
# Find the index which gives the minimum difference
@@ -296,18 +296,19 @@ def search_member_size(target_name, target_quantity, candidate, section_database
if not list(min_index[0]):
section_size = section_database.loc[candidate_index[0], 'section size']
else:
- section_size = section_database.loc[candidate_index[min_index[0][0]], 'section size']
+ section_size = section_database.loc[
+ candidate_index[min_index[0][0]], 'section size'
+ ]
return section_size
def search_section_property(target_size, section_database):
- """
- This function is used to obtain the section property when section size is given.
+ """This function is used to obtain the section property when section size is given.
The output will be stored in a dictionary.
:param target_size: a string which defines section size, e.g. 'W14X500'
:param section_database: a dataframe read from SMF_Section_Property.csv in "Library" folder
:return: section_info: a dictionary which includes section size, index, and associated properties.
- """
+ """ # noqa: D205, D401, D404
# Loop over the sections in the SMF section database and find the one which matches the target size
# Then the property of the target section is returned as a dictionary.
# If target size cannot match any existing sizes in database, a warning message should be given.
@@ -316,18 +317,19 @@ def search_section_property(target_size, section_database):
if target_size == section_database.loc[indx, 'section size']:
section_info = section_database.loc[indx, :]
return section_info.to_dict()
- except:
- sys.stderr.write('Error: wrong size nominated!\nNo such size exists in section database!')
+ except: # noqa: E722
+ sys.stderr.write(
+ 'Error: wrong size nominated!\nNo such size exists in section database!'
+ )
sys.exit(1)
def decrease_member_size(candidate, current_size):
- """
- This function is used to decrease the member size one step downward
+ """This function is used to decrease the member size one step downward
:param candidate: a list of strings which defines the possible sizes
:param current_size: a string which defines current member size
:return: optimized_size: a string which defines the member size after decrease
- """
+ """ # noqa: D205, D400, D401, D404
# Find the index of the current section size in candidate pool and move it to the next one
candidate_pool_index = candidate.index(current_size)
if candidate_pool_index + 1 > len(candidate):
@@ -338,31 +340,33 @@ def decrease_member_size(candidate, current_size):
def extract_depth(size):
- """
- This function is used to extract the depth of a section size when a size string is given.
+ """This function is used to extract the depth of a section size when a size string is given.
:param size: a string denoting a member size, e.g. 'W14X550'
:return: an integer which denotes the depth of the section, e.g. 'W14X550' ==> 14
- """
+ """ # noqa: D205, D400, D401, D404
# Use Python regular expression to extract the char between 'W' and 'X', which then become depth
output = re.findall(r'.*W(.*)X.*', size)
return int(output[0])
def extract_weight(size):
- """
- This function is used to extract the weight of a section size when a size string is given.
+ """This function is used to extract the weight of a section size when a size string is given.
:param size: a string denoting a member size, e.g. 'W14X550'
:return: an integer which denotes the weight of the section, e.g. 'W14X550' ==> 550
- """
+ """ # noqa: D205, D400, D401, D404
# Use Python regular expression to extract the char after 'W' to the end of the string,
# which then becomes weight
output = re.findall(r'.X(.*)', size)
return int(output[0])
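
The two regular expressions above split an AISC-style W-shape label into its nominal depth and weight; for example:

print(extract_depth('W14X550'))    # 14
print(extract_weight('W14X550'))   # 550
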
-def constructability_helper(section_size, identical_size_per_story, total_story, sorted_quantity):
- """
- This function is used to make every adjacent N stories have the same size and ensure that the whole list
+def constructability_helper( # noqa: C901
+ section_size,
+ identical_size_per_story,
+ total_story,
+ sorted_quantity,
+):
+ """This function is used to make every adjacent N stories have the same size and ensure that the whole list
is in a descending order.
:param section_size: a list of strings. e.g. ['W14X500', 'W14X550']
:param identical_size_per_story: a scalar to denote how many stories are supposed to have same size
@@ -370,7 +374,7 @@ def constructability_helper(section_size, identical_size_per_story, total_story,
:param sorted_quantity:a string to indicate the members are sorted based on which quantity,
options: 'Ix' or 'Zx'
:return: a list in which every group of N adjacent stories shares the same size string and the whole list is in descending order
- """
+ """ # noqa: D205, D400, D401, D404, RUF002
# Determine the number of stories that have the identical member size for constructability
if identical_size_per_story > total_story:
per_story = total_story
@@ -378,30 +382,38 @@ def constructability_helper(section_size, identical_size_per_story, total_story,
per_story = identical_size_per_story
# Compute the index where the member size is supposed to be varied
variation_story = []
- for i in range(0, total_story):
+ for i in range(total_story):
if i % per_story == 0:
- variation_story.append(i)
+ variation_story.append(i) # noqa: PERF401
# Pre-process the section size list:
# Sometimes, we may have the following case for the section list (M < N < K)
# Story N has larger depth than M and K, but it has smaller Ix or Zx than M.
# In this case, simply re-assign size for story N such that it has same depth with M
# and comparable Ix or Zx with old itself.
i = 0
- while (i < total_story - 1):
+ while i < total_story - 1:
# Find the index [i, j) such that they have same depths (j is exclusive)
for j in range(i + 1, total_story):
- if (extract_depth(section_size[j]) != extract_depth(section_size[i])):
+ if extract_depth(section_size[j]) != extract_depth(section_size[i]):
break
# If the current story chunk (with same depth) is not at the beginning nor end.
- if (i > 0 and j < total_story):
+ if i > 0 and j < total_story:
temp_property = []
# Find the maximum Ix or Zx in current story chunk
for k in range(i, j):
- temp_property.append(search_section_property(section_size[k], SECTION_DATABASE)[sorted_quantity])
+ temp_property.append( # noqa: PERF401
+ search_section_property(section_size[k], SECTION_DATABASE)[
+ sorted_quantity
+ ]
+ )
current_property = max(temp_property)
# Obtain the Ix or Zx for the stories just below and above the current story chunk.
- lower_property = search_section_property(section_size[i - 1], SECTION_DATABASE)[sorted_quantity]
- upper_property = search_section_property(section_size[j], SECTION_DATABASE)[sorted_quantity]
+ lower_property = search_section_property(
+ section_size[i - 1], SECTION_DATABASE
+ )[sorted_quantity]
+ upper_property = search_section_property(
+ section_size[j], SECTION_DATABASE
+ )[sorted_quantity]
# Obtain the depth for stories in current story chunk, below it, and above it.
current_depth = extract_depth(section_size[i])
lower_depth = extract_depth(section_size[i - 1])
@@ -409,15 +421,26 @@ def constructability_helper(section_size, identical_size_per_story, total_story,
# Current story chunk has higher depth than the stories below and above
# current Ix or Zx is less than stories below.
# Stories below current chunk have the same or greater depth than the stories above current chunk.
- if (current_depth > lower_depth and current_depth > upper_depth and lower_depth >= upper_depth and
- current_property < lower_property and current_property > upper_property):
+ if (
+ current_depth > lower_depth
+ and current_depth > upper_depth
+ and lower_depth >= upper_depth
+ and current_property < lower_property
+ and current_property > upper_property
+ ):
# For this case, re-assign the size such that the current chunk has the same depth
# with stories below.
# Meanwhile, its Ix or Zx is comparable to the old itself.
- candidates = find_section_candidate('W' + str(lower_depth), SECTION_DATABASE)
+ candidates = find_section_candidate(
+ 'W' + str(lower_depth), SECTION_DATABASE
+ )
for k in range(i, j):
- section_size[k] = search_member_size(sorted_quantity, temp_property[k - i], candidates,
- SECTION_DATABASE)
+ section_size[k] = search_member_size(
+ sorted_quantity,
+ temp_property[k - i],
+ candidates,
+ SECTION_DATABASE,
+ )
# Update current index to j
i = j
@@ -426,21 +449,25 @@ def constructability_helper(section_size, identical_size_per_story, total_story,
# It is better to trace the story from top to bottom of the building.
starting_index = total_story - 1
ending_index = variation_story[-1]
- while (starting_index > 0):
+ while starting_index > 0: # noqa: PLR1702
# For stories within "identical story block"
for indx in range(starting_index, ending_index, -1):
# Only revise those size that are not identical
if section_size[indx - 1] != section_size[indx]:
# Obtain Ix or Zx for current story and story below.
- current_property = search_section_property(section_size[indx], SECTION_DATABASE)[sorted_quantity]
- lower_property = search_section_property(section_size[indx - 1], SECTION_DATABASE)[sorted_quantity]
+ current_property = search_section_property(
+ section_size[indx], SECTION_DATABASE
+ )[sorted_quantity]
+ lower_property = search_section_property(
+ section_size[indx - 1], SECTION_DATABASE
+ )[sorted_quantity]
# Obtain depth for current story and story below.
current_depth = extract_depth(section_size[indx])
lower_depth = extract_depth(section_size[indx - 1])
# Case 1: two depths are the same or lower depth is greater
- if (current_depth <= lower_depth):
+ if current_depth <= lower_depth:
# Sub-case 1.1: lower member has smaller Ix or Zx ==> change lower size to be equal to current size
- if (current_property > lower_property):
+ if current_property > lower_property:
section_size[indx - 1] = section_size[indx]
# Sub-case 1.2: lower member has larger Ix or Zx ==> change current size to be equal to lower size
else:
@@ -451,52 +478,62 @@ def constructability_helper(section_size, identical_size_per_story, total_story,
for k in range(indx, starting_index + 1, 1):
section_size[k] = section_size[indx]
# Case 2: lower depth is smaller
+ elif current_property > lower_property:
+ section_size[indx - 1] = section_size[indx]
+ # Sub-case 2.2: lower member has larger Zx
else:
- # Sub-case 2.1: lower member has smaller Zx
- if (current_property > lower_property):
- section_size[indx - 1] = section_size[indx]
- # Sub-case 2.2: lower member has larger Zx
- else:
- # We need to change the lower member size such that it has the same depth with current story
- # and comparable Ix or Zx with old itself.
- candidates = find_section_candidate('W' + str(current_depth), SECTION_DATABASE)
- section_size[indx - 1] = search_member_size(sorted_quantity, lower_property, candidates,
- SECTION_DATABASE)
- section_size[indx] = section_size[indx - 1]
- # Don't forget to trace back because you just change the current story size.
- # If the story above the current story is still within "identical story block".
- # Then we need to revise the story above too.
- for k in range(indx, starting_index + 1, 1):
- section_size[k] = section_size[indx]
+ # We need to change the lower member size such that it has the same depth with current story
+ # and comparable Ix or Zx with old itself.
+ candidates = find_section_candidate(
+ 'W' + str(current_depth), SECTION_DATABASE
+ )
+ section_size[indx - 1] = search_member_size(
+ sorted_quantity,
+ lower_property,
+ candidates,
+ SECTION_DATABASE,
+ )
+ section_size[indx] = section_size[indx - 1]
+ # Don't forget to trace back because you just changed the current story size.
+ # If the story above the current story is still within "identical story block".
+ # Then we need to revise the story above too.
+ for k in range(indx, starting_index + 1, 1):
+ section_size[k] = section_size[indx]
# For stories at the boundary between "identical story block"
indx = variation_story[-1]
if indx == 0:
break
# We need to make sure the lower block has heavier sections
# Compute current and lower member property Ix or Zx
- current_property = search_section_property(section_size[indx], SECTION_DATABASE)[sorted_quantity]
- lower_property = search_section_property(section_size[indx - 1], SECTION_DATABASE)[sorted_quantity]
+ current_property = search_section_property(
+ section_size[indx], SECTION_DATABASE
+ )[sorted_quantity]
+ lower_property = search_section_property(
+ section_size[indx - 1], SECTION_DATABASE
+ )[sorted_quantity]
# Compute the current and lower member depth
current_depth = extract_depth(section_size[indx])
lower_depth = extract_depth(section_size[indx - 1])
# Case 1: lower member has same depth
- if (lower_depth == current_depth):
+ if lower_depth == current_depth:
# Case 1.1: lower member less Ix or Zx ==> change the lower size to be equal to current
- if (lower_property < current_property):
+ if lower_property < current_property:
section_size[indx - 1] = section_size[indx]
# Case 2: lower member has smaller depth
- elif (lower_depth < current_depth):
+ elif lower_depth < current_depth:
# Change the lower member such that it has same depth with current and comparable Ix or Zx to old itself.
- candidates = find_section_candidate('W' + str(current_depth), SECTION_DATABASE)
- section_size[indx - 1] = search_member_size(sorted_quantity, lower_property, candidates, SECTION_DATABASE)
+ candidates = find_section_candidate(
+ 'W' + str(current_depth), SECTION_DATABASE
+ )
+ section_size[indx - 1] = search_member_size(
+ sorted_quantity, lower_property, candidates, SECTION_DATABASE
+ )
# Case 3: lower member has larger depth
+ elif lower_property < current_property:
+ section_size[indx - 1] = section_size[indx]
+ # Sub-case 3.2: lower member has larger Ix or Zx
else:
- # Sub-case 3.1: lower member has smaller Ix or Zx
- if (lower_property < current_property):
- section_size[indx - 1] = section_size[indx]
- # Sub-case 3.2: lower member has larger Ix or Zx
- else:
- pass
+ pass
# Update the stating index for next "identical story block"
starting_index = variation_story[-1] - 1
if starting_index < 0:
@@ -549,15 +586,14 @@ def constructability_helper(section_size, identical_size_per_story, total_story,
def increase_member_size(candidate, current_size):
- """
- This function is used to increase the member size one step upward
+ """This function is used to increase the member size one step upward
:param candidate: a list of strings which defines the possible sizes
:param current_size: a string which denotes current member size
:return: a string which denotes the member size after one step upward
- """
+ """ # noqa: D205, D400, D401, D404
# Find the index of current section size in candidate pool and move it to previous one
candidate_pool_index = candidate.index(current_size)
- if (candidate_pool_index - 1 < 0): # Make sure the index does not exceed the bound
+ if candidate_pool_index - 1 < 0: # Make sure the index does not exceed the bound
# This means the largest candidate still fails to satisfy the requirement
sys.stderr.write('The upper bound for depth initialization is too small!\n')
return candidate[candidate_pool_index - 1]
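
To make the section-search helpers above concrete, a small self-contained sketch with a three-row stand-in for SMF_Section_Property.csv (the real database has many more rows and columns; the values here are only placeholders):

import pandas as pd

db = pd.DataFrame({
    'index': [0, 1, 2],
    'section size': ['W14X730', 'W14X550', 'W21X55'],
    'Ix': [14300.0, 9430.0, 1140.0],
})

candidates = find_section_candidate('W14', db)   # every size whose label matches 'W14'
print(list(candidates))                          # ['W14X730', 'W14X550']
print(search_section_property('W14X550', db))    # {'index': 1, 'section size': 'W14X550', 'Ix': 9430.0}
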
diff --git a/modules/createSAM/AutoSDA/main_design.py b/modules/createSAM/AutoSDA/main_design.py
index dec69258f..d296c6c20 100644
--- a/modules/createSAM/AutoSDA/main_design.py
+++ b/modules/createSAM/AutoSDA/main_design.py
@@ -1,4 +1,4 @@
-# This file is the main file that calls function to perform seismic design
+# This file is the main file that calls function to perform seismic design # noqa: CPY001, D100, INP001
# Users need to specify the system argument in this file.
# Users also need to specify the variables in "global_variables.py"
@@ -15,11 +15,10 @@
# Load Necessary Packages #
##########################################################################
-import sys
import time
-from seismic_design import seismic_design
from global_variables import baseDirectory
+from seismic_design import seismic_design
# Count the starting time of the main program
start_time = time.time()
@@ -36,13 +35,13 @@
# ********************* Single Building Case Ends Here *******************
IDs = [11]
-for id in IDs:
+for id in IDs: # noqa: A001
building_id = 'Test' + str(id)
- print("Design for Building ID = ", building_id)
+ print('Design for Building ID = ', building_id) # noqa: T201
seismic_design(building_id, baseDirectory)
# ********************* Single Building Case Ends Here *******************
end_time = time.time()
-print("Running time is: %s seconds" % round(end_time - start_time, 2))
\ No newline at end of file
+print('Running time is: %s seconds' % round(end_time - start_time, 2)) # noqa: T201, UP031
diff --git a/modules/createSAM/AutoSDA/main_generation.py b/modules/createSAM/AutoSDA/main_generation.py
index 556e2d1c6..e14b6087d 100644
--- a/modules/createSAM/AutoSDA/main_generation.py
+++ b/modules/createSAM/AutoSDA/main_generation.py
@@ -1,4 +1,4 @@
-# This file is the main file that calls functions to generate the nonlinear
+# This file is the main file that calls functions to generate the nonlinear # noqa: CPY001, D100, INP001
# OpenSees models
# Users need to specify the system argument in this file.
# Users also need to specify the variables in "global_variables.py"
@@ -16,15 +16,8 @@
# Load Necessary Packages #
##########################################################################
-import os
-import pickle
-import time
-
from global_variables import baseDirectory
-from global_variables import SECTION_DATABASE
-from global_variables import COLUMN_DATABASE
-from global_variables import BEAM_DATABASE
from model_generation import model_generation
##########################################################################
@@ -32,9 +25,9 @@
##########################################################################
IDs = [11]
-for id in IDs:
+for id in IDs: # noqa: A001
building_id = 'Test' + str(id)
- print(building_id)
+ print(building_id) # noqa: T201
model_generation(building_id, baseDirectory)
##########################################################################
@@ -65,4 +58,4 @@
# # Display which model you are analyzing
# print('Current model is: ', building_id)
# # Run OpenSees.exe file
-# os.system('OpenSees Model.tcl')
\ No newline at end of file
+# os.system('OpenSees Model.tcl')
diff --git a/modules/createSAM/AutoSDA/main_program.py b/modules/createSAM/AutoSDA/main_program.py
index 11ecbac36..321924ef9 100644
--- a/modules/createSAM/AutoSDA/main_program.py
+++ b/modules/createSAM/AutoSDA/main_program.py
@@ -1,4 +1,4 @@
-# Modified by: Stevan Gavrilovic @ SimCenter, UC Berkeley
+# Modified by: Stevan Gavrilovic @ SimCenter, UC Berkeley # noqa: CPY001, D100, INP001
# Last revision: 09/2020
##########################################################################
@@ -11,62 +11,60 @@
# nonlinear structural model construction and analysis of steel moment
# resisting frames.” Engineering Structures. (Under Review)
-import argparse, posixpath, ntpath, json
-
-import sys
-import time
+import argparse
+import json
import os
-import sys
import shutil
-import pathlib
-import subprocess
-import pickle
+import sys
+import time
-from global_variables import baseDirectory
-from seismic_design import seismic_design
-from global_variables import SECTION_DATABASE, COLUMN_DATABASE, BEAM_DATABASE, RV_ARRAY
+from global_variables import (
+ RV_ARRAY,
+ baseDirectory,
+)
from model_generation import model_generation
+from seismic_design import seismic_design
-def main(BIM_file, EVENT_file, SAM_file, model_file, filePath, getRV):
+def main(BIM_file, EVENT_file, SAM_file, model_file, filePath, getRV): # noqa: ARG001, C901, N803, D103
start_time = time.time()
# Get the current directory
- workingDirectory = os.getcwd()
+ workingDirectory = os.getcwd() # noqa: PTH109, N806
- rootSIM = {}
+ rootSIM = {} # noqa: N806
# Try to open the BIM json
- with open(BIM_file, 'r', encoding='utf-8') as f:
- rootBIM = json.load(f)
+ with open(BIM_file, encoding='utf-8') as f: # noqa: PTH123
+ rootBIM = json.load(f) # noqa: N806
try:
- rootSIM = rootBIM['Modeling']
- except:
- raise ValueError("AutoSDA - structural information missing")
+ rootSIM = rootBIM['Modeling'] # noqa: N806
+ except: # noqa: E722
+ raise ValueError('AutoSDA - structural information missing') # noqa: B904, EM101, TRY003
# Extract the path for the directory containing the folder with the building data .csv files
# pathDataFolder = rootSIM['pathDataFolder']
- pathDataFolder = os.path.join(os.getcwd(), rootSIM['folderName'])
+ pathDataFolder = os.path.join(os.getcwd(), rootSIM['folderName']) # noqa: PTH109, PTH118, N806
# pathDataFolder = workingDirectory + "/" + rootSIM['folderName']
# Get the random variables from the input file
try:
- rootRV = rootBIM['randomVariables']
- except:
- raise ValueError("AutoSDA - randomVariables section missing")
+ rootRV = rootBIM['randomVariables'] # noqa: N806
+ except: # noqa: E722
+ raise ValueError('AutoSDA - randomVariables section missing') # noqa: B904, EM101, TRY003
# Populate the RV array with name/value pairs.
# If a random variable is used here, the RV array will contain its current value
for rv in rootRV:
# Try to get the name and value of the random variable
- rvName = rv['name']
- curVal = rv['value']
+ rvName = rv['name'] # noqa: N806
+ curVal = rv['value'] # noqa: N806
# Check if the current value a realization of a RV, i.e., is not a RV label
# If so, then set the current value as the mean
- if "RV" in str(curVal) :
- curVal = float(rv['mean'])
+ if 'RV' in str(curVal):
+ curVal = float(rv['mean']) # noqa: N806
RV_ARRAY[rvName] = curVal
@@ -75,13 +73,13 @@ def main(BIM_file, EVENT_file, SAM_file, model_file, filePath, getRV):
if getRV is False:
# *********************** Design Starts Here *************************
- print("Starting seismic design")
+ print('Starting seismic design') # noqa: T201
seismic_design(baseDirectory, pathDataFolder, workingDirectory)
- print("Seismic design complete")
+ print('Seismic design complete') # noqa: T201
# ******************* Nonlinear Model Generation Starts Here ******
# Nonlinear .tcl models are generated for EigenValue, Pushover, and Dynamic Analysis
- print("Generating nonlinear model")
+ print('Generating nonlinear model') # noqa: T201
model_generation(baseDirectory, pathDataFolder, workingDirectory)
# ******************* Perform Eigen Value Analysis ****************
@@ -98,23 +96,23 @@ def main(BIM_file, EVENT_file, SAM_file, model_file, filePath, getRV):
# os.chdir(target_model)
# subprocess.Popen("OpenSees Model.tcl", shell=True).wait()
- print("The design and model construction has been accomplished.")
+ print('The design and model construction has been accomplished.') # noqa: T201
end_time = time.time()
- print("Running time is: %s seconds" % round(end_time - start_time, 2))
+ print('Running time is: %s seconds' % round(end_time - start_time, 2)) # noqa: T201, UP031
# Now create the SAM file for export
- root_SAM = {}
+ root_SAM = {} # noqa: N806
root_SAM['mainScript'] = 'Model.tcl'
root_SAM['type'] = 'OpenSeesInput'
root_SAM['units'] = {
- "force": "kips",
- "length": "in",
- "temperature": "C",
- "time": "sec"
- }
-
+ 'force': 'kips',
+ 'length': 'in',
+ 'temperature': 'C',
+ 'time': 'sec',
+ }
+
# Number of dimensions (KZ & AZ: changed to integer)
root_SAM['ndm'] = 2
@@ -122,7 +120,7 @@ def main(BIM_file, EVENT_file, SAM_file, model_file, filePath, getRV):
root_SAM['ndf'] = 3
# Get the number of stories
- numStories = rootSIM['numStories']
+ numStories = rootSIM['numStories'] # noqa: N806
node_map = []
# Using nodes on column #1 to calculate story drift
@@ -130,30 +128,27 @@ def main(BIM_file, EVENT_file, SAM_file, model_file, filePath, getRV):
# (1, i, 1, 1) # Node at bottom of current story
# (1, i + 1, 1, 1) # Node at top of current story
for i in range(1, numStories + 2):
-
- nodeTagBot = 0
+ nodeTagBot = 0 # noqa: N806
if i == 1:
# Node tag at ground floor is different from those on upper stories (1, i, 1, 0)
- nodeTagBot = 1010 + 100*i
+ nodeTagBot = 1010 + 100 * i # noqa: N806
+ elif i > 9: # noqa: PLR2004
+ nodeTagBot = 10011 + 100 * i # noqa: N806
else:
- # KZ & AZ: minor patch for story numbers greater than 10
- if i > 9:
- nodeTagBot = 10011 + 100*i
- else:
- nodeTagBot = 1011 + 100*i
+ nodeTagBot = 1011 + 100 * i # noqa: N806
# Create the node and add it to the node mapping array
node_entry = {}
node_entry['node'] = nodeTagBot
node_entry['cline'] = 'response'
- node_entry['floor'] = '{}'.format(i-1)
+ node_entry['floor'] = f'{i - 1}'
node_map.append(node_entry)
- ## KZ & AZ: Add centroid for roof drift
+ # KZ & AZ: Add centroid for roof drift
node_entry_c = {}
node_entry_c['node'] = nodeTagBot
node_entry_c['cline'] = 'centroid'
- node_entry_c['floor'] = '{}'.format(i-1)
+ node_entry_c['floor'] = f'{i - 1}'
node_map.append(node_entry_c)
root_SAM['NodeMapping'] = node_map
@@ -162,19 +157,21 @@ def main(BIM_file, EVENT_file, SAM_file, model_file, filePath, getRV):
# Go back to the current directory before saving the SAM file
os.chdir(workingDirectory)
- with open(SAM_file, 'w') as f:
+ with open(SAM_file, 'w') as f: # noqa: PLW1514, PTH123
json.dump(root_SAM, f, indent=2)
# Copy over the .tcl files of the building model into the working directory
if getRV is False:
- pathToMainScriptFolder = workingDirectory + "/BuildingNonlinearModels/DynamicAnalysis/"
+ pathToMainScriptFolder = ( # noqa: N806
+ workingDirectory + '/BuildingNonlinearModels/DynamicAnalysis/'
+ )
- if os.path.isdir(pathToMainScriptFolder) :
- print(pathToMainScriptFolder)
+ if os.path.isdir(pathToMainScriptFolder): # noqa: PTH112
+ print(pathToMainScriptFolder) # noqa: T201
src_files = os.listdir(pathToMainScriptFolder)
for file_name in src_files:
- full_file_name = os.path.join(pathToMainScriptFolder, file_name)
- if os.path.isfile(full_file_name):
+ full_file_name = os.path.join(pathToMainScriptFolder, file_name) # noqa: PTH118
+ if os.path.isfile(full_file_name): # noqa: PTH113
shutil.copy(full_file_name, workingDirectory)
@@ -188,5 +185,13 @@ def main(BIM_file, EVENT_file, SAM_file, model_file, filePath, getRV):
parser.add_argument('--getRV', nargs='?', const=True, default=False)
args = parser.parse_args()
- sys.exit(main(args.filenameAIM, args.filenameEVENT, args.filenameSAM,
- args.fileName, args.filePath, args.getRV))
+ sys.exit(
+ main(
+ args.filenameAIM,
+ args.filenameEVENT,
+ args.filenameSAM,
+ args.fileName,
+ args.filePath,
+ args.getRV,
+ )
+ )
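
The node-tag rule used for the drift recorder mapping above (column line 1, with the KZ & AZ patch for levels above 9) can be spelled out with a short sketch; an 11-story frame is an arbitrary example:

num_stories = 11   # hypothetical building height
for i in range(1, num_stories + 2):
    if i == 1:
        tag = 1010 + 100 * i    # ground-floor node (1, i, 1, 0)
    elif i > 9:
        tag = 10011 + 100 * i   # patch for level numbers above 9
    else:
        tag = 1011 + 100 * i    # typical upper-floor node (1, i, 1, 1)
    print(f'floor {i - 1}: node {tag}')
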
diff --git a/modules/createSAM/AutoSDA/model_generation.py b/modules/createSAM/AutoSDA/model_generation.py
index 936d82848..da293ef35 100644
--- a/modules/createSAM/AutoSDA/model_generation.py
+++ b/modules/createSAM/AutoSDA/model_generation.py
@@ -1,30 +1,30 @@
-# This file creates a function that is called by "main_generation.py" to perform nonlinear model generation
+# This file creates a function that is called by "main_generation.py" to perform nonlinear model generation # noqa: CPY001, D100, INP001
# Modified by: Stevan Gavrilovic @ SimCenter, UC Berkeley
# Last revision: 09/2020
import os
-import pickle
+import pickle # noqa: S403
from nonlinear_analysis import NonlinearAnalysis
-def model_generation(base_directory, pathDataFolder, workingDirectory):
+def model_generation(base_directory, pathDataFolder, workingDirectory): # noqa: N803, D103
##########################################################################
# Load Building Design Result #
##########################################################################
# Change the directory to the folder where the design results are stored
- os.chdir(workingDirectory + "/BuildingDesignResults/")
+ os.chdir(workingDirectory + '/BuildingDesignResults/')
# Load all design results (stored as .pkl files)
- with open('construction_building.pkl', 'rb') as file:
- building = pickle.load(file)
- with open('construction_column_set.pkl', 'rb') as file:
- column_set = pickle.load((file))
- with open('construction_beam_set.pkl', 'rb') as file:
- beam_set = pickle.load(file)
- with open('construction_connection_set.pkl', 'rb') as file:
- connection_set = pickle.load(file)
+ with open('construction_building.pkl', 'rb') as file: # noqa: PTH123
+ building = pickle.load(file) # noqa: S301
+ with open('construction_column_set.pkl', 'rb') as file: # noqa: PTH123
+ column_set = pickle.load(file) # noqa: S301
+ with open('construction_beam_set.pkl', 'rb') as file: # noqa: PTH123
+ beam_set = pickle.load(file) # noqa: S301
+ with open('construction_connection_set.pkl', 'rb') as file: # noqa: PTH123
+ connection_set = pickle.load(file) # noqa: S301
##########################################################################
# Update the Building Directory #
@@ -34,15 +34,23 @@ def model_generation(base_directory, pathDataFolder, workingDirectory):
# As a result, the directory stored in construction_building.pkl might not work
# in this PC
# Define path to folder where the baseline .tcl files for elastic analysis are saved
- building.directory['baseline files elastic'] = base_directory + "/BaselineTclFiles/ElasticAnalysis"
+ building.directory['baseline files elastic'] = (
+ base_directory + '/BaselineTclFiles/ElasticAnalysis'
+ )
# Define path to folder where the baseline .tcl files for nonlinear analysis are stored
- building.directory['baseline files nonlinear'] = base_directory + "/BaselineTclFiles/NonlinearAnalysis"
+ building.directory['baseline files nonlinear'] = (
+ base_directory + '/BaselineTclFiles/NonlinearAnalysis'
+ )
# Define path to folder where the building data (.csv) are saved
building.directory['building data'] = pathDataFolder
# Define path to folder where the generated elastic analysis OpenSees model is saved
- building.directory['building elastic model'] = workingDirectory + "/BuildingElasticModels"
+ building.directory['building elastic model'] = (
+ workingDirectory + '/BuildingElasticModels'
+ )
# Define path to folder where the generated nonlinear analysis OpenSees model is saved
- building.directory['building nonlinear model'] = workingDirectory + "/BuildingNonlinearModels"
+ building.directory['building nonlinear model'] = (
+ workingDirectory + '/BuildingNonlinearModels'
+ )
##########################################################################
# Generate Nonlinear Analysis Model #
@@ -50,4 +58,6 @@ def model_generation(base_directory, pathDataFolder, workingDirectory):
analysis_list = ['EigenValueAnalysis', 'PushoverAnalysis', 'DynamicAnalysis']
for analysis_type in analysis_list:
- model = NonlinearAnalysis(building, column_set, beam_set, connection_set, analysis_type)
+ model = NonlinearAnalysis( # noqa: F841
+ building, column_set, beam_set, connection_set, analysis_type
+ )
diff --git a/modules/createSAM/AutoSDA/nonlinear_analysis.py b/modules/createSAM/AutoSDA/nonlinear_analysis.py
index 1e3e38b78..afe6b617b 100644
--- a/modules/createSAM/AutoSDA/nonlinear_analysis.py
+++ b/modules/createSAM/AutoSDA/nonlinear_analysis.py
@@ -1,24 +1,24 @@
-# This file is used to include all user defined classes and functions
+# This file is used to include all user defined classes and functions # noqa: CPY001, D100, INP001
# Developed by GUAN, XINGQUAN @ UCLA in Dec 2018
# Modified by: Stevan Gavrilovic @ SimCenter, UC Berkeley
# Last revision: 09/2020
-import numpy as np
import os
-import subprocess
-import sys
import shutil
+import subprocess # noqa: S404
+import sys
from pathlib import Path
+import numpy as np
+
# #########################################################################
# Generate Nonlinear OpenSees model (write .tcl files) #
# #########################################################################
-class NonlinearAnalysis(object):
- """
- This class generates the .tcl files required for nonlinear analysis. It includes the following methods:
+class NonlinearAnalysis: # noqa: PLR0904
+ """This class generates the .tcl files required for nonlinear analysis. It includes the following methods:
(1) OpenSees nodes
(2) boundary condition
(3) floor constraint
@@ -35,11 +35,17 @@ class NonlinearAnalysis(object):
(14) copy baseline files and revise if necessary
(15) define various recorders for output
(16) define pushover loading pattern
- """
-
- def __init__(self, building, column_set, beam_set, connection_set, analysis_type):
- """
- This function is used to call all methods to write .tcl files required for nonlinear analysis OpenSees model
+ """ # noqa: D205, D400, D404
+
+ def __init__(
+ self,
+ building,
+ column_set,
+ beam_set,
+ connection_set,
+ analysis_type,
+ ):
+ """This function is used to call all methods to write .tcl files required for nonlinear analysis OpenSees model
:param building: a class defined in "building_information.py" file
:param column_set: a two-dimensional list[x][y] and each element is a column object defined in "column_component
x: from 0 to (story number-1)
@@ -53,22 +59,29 @@ def __init__(self, building, column_set, beam_set, connection_set, analysis_type
y: from 0 to (bay number+1)
:param analysis_type: a string specifies which analysis type the current model is for
options: 'EigenValueAnalysis', 'PushoverAnalysis', 'DynamicAnalysis'
- """
+ """ # noqa: D205, D400, D401, D404
# User-hints: if wrong analysis_type is input
- if analysis_type != 'EigenValueAnalysis' and analysis_type != 'PushoverAnalysis' \
- and analysis_type != 'DynamicAnalysis':
- sys.stderr.write('Wrong analysis type input. Please input one of the followings:\n')
+ if (
+ analysis_type != 'EigenValueAnalysis' # noqa: PLR1714
+ and analysis_type != 'PushoverAnalysis'
+ and analysis_type != 'DynamicAnalysis'
+ ):
+ sys.stderr.write(
+ 'Wrong analysis type input. Please input one of the following:\n'
+ )
sys.stderr.write('EigenValueAnalysis, PushoverAnalysis, DynamicAnalysis')
sys.exit(99)
# Change the working directory to the target building folder
- if not os.path.exists(building.directory['building nonlinear model']):
- os.makedirs(building.directory['building nonlinear model'])
+ if not os.path.exists(building.directory['building nonlinear model']): # noqa: PTH110
+ os.makedirs(building.directory['building nonlinear model']) # noqa: PTH103
os.chdir(building.directory['building nonlinear model'])
# Change the working directory to the desired folder (EigenValueAnalysis, PushoverAnalysis, or DynamicAnalysis)
- target_folder = building.directory['building nonlinear model'] + "/" + analysis_type
- if not os.path.exists(target_folder):
- os.makedirs(target_folder)
+ target_folder = (
+ building.directory['building nonlinear model'] + '/' + analysis_type
+ )
+ if not os.path.exists(target_folder): # noqa: PTH110
+ os.makedirs(target_folder) # noqa: PTH103
os.chdir(target_folder)
# Call methods to write .tcl files for the building
@@ -102,792 +115,1292 @@ def __init__(self, building, column_set, beam_set, connection_set, analysis_type
self.write_damping(building)
self.write_dynamic_analysis_parameters(building)
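+        # Illustrative usage (a sketch, not part of this diff's behavior): the class
+        # is driven by the design objects built elsewhere in the tool, e.g.
+        #   model = NonlinearAnalysis(building, column_set, beam_set,
+        #                             connection_set, 'PushoverAnalysis')
+        # which writes every .tcl file listed above into
+        #   building.directory['building nonlinear model']/PushoverAnalysis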
- def write_nodes(self, building, column_set, beam_set):
- """
- Create a .tcl file to write node tags and coordinates for nonlinear analysis model
+ def write_nodes(self, building, column_set, beam_set): # noqa: C901, PLR6301
+ """Create a .tcl file to write node tags and coordinates for nonlinear analysis model
:param building: a class defined in "building_information.py"
:param column_set: a list[x][y] and each element is a class defined in "column_component.py"
:param beam_set: a list[x][z] and each element is a class defined in "beam_component.py"
:return: a .tcl file
- """
- with open('DefineNodes2DModel.tcl', 'w') as tclfile:
- tclfile.write("# This file will be used to define all nodes \n") # Introduce the file usage
- tclfile.write("# Units: inch\n\n\n") # Explain the units
-
- tclfile.write("# Set bay width and story height\n")
- tclfile.write("set\tBayWidth\t[expr %.2f*12]; \n" % (building.geometry['X bay width']))
- tclfile.write("set\tFirstStory\t[expr %.2f*12]; \n" % (building.geometry['first story height']))
- tclfile.write("set\tTypicalStory\t[expr %.2f*12]; \n\n" % (building.geometry['typical story height']))
+ """ # noqa: D205, D400
+ with open('DefineNodes2DModel.tcl', 'w') as tclfile: # noqa: PLW1514, PTH123
+ tclfile.write(
+ '# This file will be used to define all nodes \n'
+ ) # Introduce the file usage
+ tclfile.write('# Units: inch\n\n\n') # Explain the units
+
+ tclfile.write('# Set bay width and story height\n')
+ tclfile.write(
+ 'set\tBayWidth\t[expr %.2f*12]; \n' # noqa: UP031
+ % (building.geometry['X bay width'])
+ )
+ tclfile.write(
+ 'set\tFirstStory\t[expr %.2f*12]; \n' # noqa: UP031
+ % (building.geometry['first story height'])
+ )
+ tclfile.write(
+ 'set\tTypicalStory\t[expr %.2f*12]; \n\n' # noqa: UP031
+ % (building.geometry['typical story height'])
+ )
# Define the panel sizes before building the node coordinates
- tclfile.write("# Set panel zone size as column depth and beam depth\n")
- for i in range(1, building.geometry['number of story']+2): # i is floor level number (1 for ground floor)
- tclfile.write("# Level %i \n" % i)
- for j in range(1, building.geometry['number of X bay']+2): # j is column number (1 for leftmost)
- if i == 1:
- tclfile.write("set\tPanelSizeLevel%iColumn%i\t[list %i %i];"
- "# No panel zone on ground floor so using [0, 0] is okay\n"
- % (i, j, 0, 0))
- else:
- # Note that beam size is identical in one floor level.
- # Therefore second index for beam_set doesn't need to be changed.
- tclfile.write("set\tPanelSizeLevel%iColumn%i\t[list %.1f %.1f];\n"
- % (i, j, column_set[i-2][j-1].section['d'], beam_set[i-2][0].section['d']))
- tclfile.write("\n")
+ tclfile.write('# Set panel zone size as column depth and beam depth\n')
+ for i in range(
+ 1, building.geometry['number of story'] + 2
+ ): # i is floor level number (1 for ground floor)
+ tclfile.write('# Level %i \n' % i)
+ for j in range(
+ 1, building.geometry['number of X bay'] + 2
+ ): # j is column number (1 for leftmost)
+ if i == 1:
+ tclfile.write(
+ 'set\tPanelSizeLevel%iColumn%i\t[list %i %i];'
+ '# No panel zone on ground floor so using [0, 0] is okay\n'
+ % (i, j, 0, 0)
+ )
+ else:
+                        # Note that the beam size is identical within one floor level.
+                        # Therefore the second index for beam_set doesn't need to be changed.
+ tclfile.write(
+ 'set\tPanelSizeLevel%iColumn%i\t[list %.1f %.1f];\n'
+ % (
+ i,
+ j,
+ column_set[i - 2][j - 1].section['d'],
+ beam_set[i - 2][0].section['d'],
+ )
+ )
+ tclfile.write('\n')
# Write nodes for frame using pre-defined tcl proc "NodesAroundPanelZone".
- tclfile.write("# Set max number of columns (excluding leaning column) and floors (counting 1 for ground)\n")
- tclfile.write("set\tMaximumFloor\t%i; \n" % (building.geometry['number of story']+1))
- tclfile.write("set\tMaximumCol\t%i; \n\n" % (building.geometry['number of X bay']+1))
-
- tclfile.write("# Define nodes for the frame \n")
- for i in range(1, building.geometry['number of story']+2): # i is floor level number
- tclfile.write("# Level %i \n" % i)
- for j in range(1, building.geometry['number of X bay']+2): # j is column label
- tclfile.write("NodesAroundPanelZone\t%i\t%i\t[expr %i*$BayWidth]"
- % (j, i, j-1))
- if i <= 2:
- tclfile.write("\t[expr %i*$FirstStory+0*$TypicalStory]" % (i-1))
+ tclfile.write(
+ '# Set max number of columns (excluding leaning column) and floors (counting 1 for ground)\n'
+ )
+ tclfile.write(
+ 'set\tMaximumFloor\t%i; \n'
+ % (building.geometry['number of story'] + 1)
+ )
+ tclfile.write(
+ 'set\tMaximumCol\t%i; \n\n'
+ % (building.geometry['number of X bay'] + 1)
+ )
+
+ tclfile.write('# Define nodes for the frame \n')
+ for i in range(
+ 1, building.geometry['number of story'] + 2
+ ): # i is floor level number
+ tclfile.write('# Level %i \n' % i)
+ for j in range(
+ 1, building.geometry['number of X bay'] + 2
+ ): # j is column label
+ tclfile.write(
+ 'NodesAroundPanelZone\t%i\t%i\t[expr %i*$BayWidth]'
+ % (j, i, j - 1)
+ )
+ if i <= 2: # noqa: PLR2004
+ tclfile.write(
+ '\t[expr %i*$FirstStory+0*$TypicalStory]' % (i - 1)
+ )
else:
- tclfile.write("\t[expr 1*$FirstStory+%i*$TypicalStory]" % (i-2))
- tclfile.write("\t$PanelSizeLevel%iColumn%i" % (i, j))
- tclfile.write("\t$MaximumFloor\t$MaximumCol; \n")
- tclfile.write("\n")
- tclfile.write("puts \"Nodes for frame defined\" \n\n")
+ tclfile.write(
+ '\t[expr 1*$FirstStory+%i*$TypicalStory]' % (i - 2)
+ )
+ tclfile.write('\t$PanelSizeLevel%iColumn%i' % (i, j))
+ tclfile.write('\t$MaximumFloor\t$MaximumCol; \n')
+ tclfile.write('\n')
+ tclfile.write('puts "Nodes for frame defined" \n\n')
# Write the nodes for leaning column
- tclfile.write("# Define nodes for leaning column \n")
- for i in range(1, building.geometry['number of story']+2):
- tclfile.write("node\t %i%i" % (building.geometry['number of X bay']+2, i)) # Node label
- tclfile.write("\t[expr %i*$BayWidth]" % (building.geometry['number of X bay']+1)) # X coordinate
- if i <= 2:
- tclfile.write("\t[expr %i*$FirstStory+0*$TypicalStory];" % (i-1)) # Y coordinate
- tclfile.write("\t#Level %i\n" % i) # Comments to explain floor level
+ tclfile.write('# Define nodes for leaning column \n')
+ for i in range(1, building.geometry['number of story'] + 2):
+ tclfile.write(
+ 'node\t %i%i' % (building.geometry['number of X bay'] + 2, i)
+ ) # Node label
+ tclfile.write(
+ '\t[expr %i*$BayWidth]'
+ % (building.geometry['number of X bay'] + 1)
+ ) # X coordinate
+ if i <= 2: # noqa: PLR2004
+ tclfile.write(
+ '\t[expr %i*$FirstStory+0*$TypicalStory];' % (i - 1)
+ ) # Y coordinate
+ tclfile.write(
+ '\t#Level %i\n' % i
+ ) # Comments to explain floor level
else:
- tclfile.write("\t[expr 1*$FirstStory+%i*$TypicalStory];" % (i-2))
- tclfile.write("\t# Level %i\n" % i)
- tclfile.write("\n")
- tclfile.write("puts \"Nodes for leaning column defined\" \n\n")
+ tclfile.write(
+ '\t[expr 1*$FirstStory+%i*$TypicalStory];' % (i - 2)
+ )
+ tclfile.write('\t# Level %i\n' % i)
+ tclfile.write('\n')
+ tclfile.write('puts "Nodes for leaning column defined" \n\n')
# Write extra nodes for leaning column springs
- tclfile.write("# Define extra nodes needed to define leaning column springs \n")
- for i in range(2, building.geometry['number of story']+2):
+ tclfile.write(
+ '# Define extra nodes needed to define leaning column springs \n'
+ )
+ for i in range(2, building.geometry['number of story'] + 2):
# The node below floor level
- tclfile.write("node\t%i%i%i" % (building.geometry['number of X bay']+2, i, 2)) # Node label
- tclfile.write("\t[expr %i*$BayWidth]" % (building.geometry['number of X bay'] + 1)) # X coordinate
- tclfile.write("\t[expr 1*$FirstStory+%i*$TypicalStory];" % (i-2)) # Y coordinate
- tclfile.write("\t# Node below floor level %i\n" % i)
+ tclfile.write(
+ 'node\t%i%i%i' % (building.geometry['number of X bay'] + 2, i, 2)
+ ) # Node label
+ tclfile.write(
+ '\t[expr %i*$BayWidth]'
+ % (building.geometry['number of X bay'] + 1)
+ ) # X coordinate
+ tclfile.write(
+ '\t[expr 1*$FirstStory+%i*$TypicalStory];' % (i - 2)
+ ) # Y coordinate
+ tclfile.write('\t# Node below floor level %i\n' % i)
# If it's top story, node above roof is not needed
# because no leaning column above roof
- if i < building.geometry['number of story']+1:
+ if i < building.geometry['number of story'] + 1:
# The node above floor level
- tclfile.write("node\t%i%i%i" % (building.geometry['number of X bay']+2, i, 4)) # Nodel label
- tclfile.write("\t[expr %i*$BayWidth]" % (building.geometry['number of X bay']+1)) # X coordinate
- tclfile.write("\t[expr 1*$FirstStory+%i*$TypicalStory];" % (i-2)) # Y coordinate
- tclfile.write("\t# Node above floor level %i\n" % i)
+ tclfile.write(
+ 'node\t%i%i%i'
+ % (building.geometry['number of X bay'] + 2, i, 4)
+                    ) # Node label
+ tclfile.write(
+ '\t[expr %i*$BayWidth]'
+ % (building.geometry['number of X bay'] + 1)
+ ) # X coordinate
+ tclfile.write(
+ '\t[expr 1*$FirstStory+%i*$TypicalStory];' % (i - 2)
+ ) # Y coordinate
+ tclfile.write('\t# Node above floor level %i\n' % i)
else:
pass
- tclfile.write("\n")
- tclfile.write("puts \"Extra nodes for leaning column springs defined\"\n")
+ tclfile.write('\n')
+ tclfile.write('puts "Extra nodes for leaning column springs defined"\n')
- def write_fixities(self, building):
- """
- Create a .tcl file to write boundary for the model
+ def write_fixities(self, building): # noqa: PLR6301
+ """Create a .tcl file to write boundary for the model
:param building: a class defined in "building_information.py"
:return: a .tcl file
- """
- with open('DefineFixities2DModel.tcl', 'w') as tclfile:
- tclfile.write("# This file will be used to define the fixity at all column bases \n\n\n")
- tclfile.write("# Defining fixity at column base \n")
- for j in range(1, building.geometry['number of X bay']+2):
- tclfile.write("fix\t%i%i%i%i\t1\t1\t1; \n" % (j, 1, 1, 0))
+ """ # noqa: D205, D400
+ with open('DefineFixities2DModel.tcl', 'w') as tclfile: # noqa: PLW1514, PTH123
+ tclfile.write(
+ '# This file will be used to define the fixity at all column bases \n\n\n'
+ )
+ tclfile.write('# Defining fixity at column base \n')
+ for j in range(1, building.geometry['number of X bay'] + 2):
+ tclfile.write('fix\t%i%i%i%i\t1\t1\t1; \n' % (j, 1, 1, 0))
# Leaning column base
- tclfile.write("fix\t%i%i\t1\t1\t0; \n\n" % (building.geometry['number of X bay']+2, 1))
- tclfile.write("puts \"All column base fixities have been defined\"")
-
- def write_floor_constraint(self, building):
- """
- Create a .tcl file to write floor constraint, i.e., equal DOF
+ tclfile.write(
+ 'fix\t%i%i\t1\t1\t0; \n\n'
+ % (building.geometry['number of X bay'] + 2, 1)
+ )
+ tclfile.write('puts "All column base fixities have been defined"')
+
+ def write_floor_constraint(self, building): # noqa: PLR6301
+ """Create a .tcl file to write floor constraint, i.e., equal DOF
:param building: a class defined in "building_information.py"
:return: a .tcl file
- """
+ """ # noqa: D205, D400
# Create a .tcl file to write floor constraint, i.e., equal DOF
- with open('DefineFloorConstraint2DModel.tcl', 'w') as tclfile:
- tclfile.write("# This file will be used to define floor constraint \n")
- tclfile.write("# Nodes at same floor level have identical lateral displacement\n")
- tclfile.write("# Select mid right node of each panel zone as the constrained node\n\n")
- tclfile.write("set\tConstrainDOF\t1; # X-direction\n\n")
+ with open('DefineFloorConstraint2DModel.tcl', 'w') as tclfile: # noqa: PLW1514, PTH123
+ tclfile.write('# This file will be used to define floor constraint \n')
+ tclfile.write(
+ '# Nodes at same floor level have identical lateral displacement\n'
+ )
+ tclfile.write(
+ '# Select mid right node of each panel zone as the constrained node\n\n'
+ )
+ tclfile.write('set\tConstrainDOF\t1; # X-direction\n\n')
# Constraint starts from level 2
- for i in range(2, building.geometry['number of story']+2): # i is floor level
- tclfile.write("# Level %i \n" % i)
- for j in range(2, building.geometry['number of X bay']+2): # j is bay number
- tclfile.write("equalDOF\t%i%i11\t%i%i11\t$ConstrainDOF;" % (1, i, j, i))
- tclfile.write("\t# Pier 1 to Pier %i\n" % j)
+ for i in range(
+ 2, building.geometry['number of story'] + 2
+ ): # i is floor level
+ tclfile.write('# Level %i \n' % i)
+ for j in range(
+ 2, building.geometry['number of X bay'] + 2
+ ): # j is bay number
+ tclfile.write(
+ 'equalDOF\t%i%i11\t%i%i11\t$ConstrainDOF;' % (1, i, j, i)
+ )
+ tclfile.write('\t# Pier 1 to Pier %i\n' % j)
# Include the leaning column nodes to floor constraint
- tclfile.write("equalDOF\t%i%i%i%i\t%i%i\t$ConstrainDOF;"
- % (1, i, 1, 1,
- building.geometry['number of X bay']+2, i))
- tclfile.write("\t#Pier 1 to Leaning column\n\n")
- tclfile.write("puts \"Floor constraint defined\"")
-
- def write_beam_hinge_material(self, building, beam_set):
- """
- Create a .tcl file to define all beam plastic hinge materials using Modified IMK material model
+ tclfile.write(
+ 'equalDOF\t%i%i%i%i\t%i%i\t$ConstrainDOF;'
+ % (1, i, 1, 1, building.geometry['number of X bay'] + 2, i)
+ )
+ tclfile.write('\t#Pier 1 to Leaning column\n\n')
+ tclfile.write('puts "Floor constraint defined"')
+
+ def write_beam_hinge_material(self, building, beam_set): # noqa: PLR6301
+ """Create a .tcl file to define all beam plastic hinge materials using Modified IMK material model
:param building: a class defined in "building_information.py"
:param beam_set: a list[x][z] and each element is a class defined in "beam_component.py"
:return: a .tcl file
- """
+ """ # noqa: D205, D400
material_tag = 70001
- with open('DefineBeamHingeMaterials2DModel.tcl', 'w') as tclfile:
- tclfile.write("# This file will be used to define beam hinge material models\n\n\n")
- for i in range(2, building.geometry['number of story']+2): # i is floor level number (no beam on ground)
- for j in range(1, building.geometry['number of X bay']+1): # j is bay number (1 for leftmost bay)
- tclfile.write("# Level%iBay%i\n" % (i, j))
- tclfile.write("set\tBeamHingeMaterialLevel%iBay%iTag\t%i;\n" % (i, j, material_tag))
- tclfile.write("set\tBeamHingeMaterialLevel%iBay%iK0\t%.4f;\n"
- % (i, j, (beam_set[i-2][j-1].plastic_hinge['K0'])))
- tclfile.write("set\tBeamHingeMaterialLevel%iBay%iAs\t%.4f;\n"
- % (i, j, (beam_set[i-2][j-1].plastic_hinge['as'])))
- tclfile.write("set\tBeamHingeMaterialLevel%iBay%iMy\t%.4f;\n"
- % (i, j, (beam_set[i-2][j-1].plastic_hinge['My'])))
- tclfile.write("set\tBeamHingeMaterialLevel%iBay%iLambda\t%.4f;\n"
- % (i, j, (beam_set[i-2][j-1].plastic_hinge['Lambda'])))
- tclfile.write("set\tBeamHingeMaterialLevel%iBay%iThetaP\t%.4f;\n"
- % (i, j, (beam_set[i-2][j-1].plastic_hinge['theta_p'])))
- tclfile.write("set\tBeamHingeMaterialLevel%iBay%iThetaPc\t%.4f;\n"
- % (i, j, (beam_set[i-2][j-1].plastic_hinge['theta_pc'])))
- tclfile.write("set\tBeamHingeMaterialLevel%iBay%iResidual\t%.4f;\n"
- % (i, j, (beam_set[i-2][j-1].plastic_hinge['residual'])))
- tclfile.write("set\tBeamHingeMaterialLevel%iBay%iThetaU\t%.4f;\n"
- % (i, j, (beam_set[i-2][j-1].plastic_hinge['theta_u'])))
- tclfile.write("CreateIMKMaterial\t$BeamHingeMaterialLevel%iBay%iTag" % (i, j))
- tclfile.write("\t$BeamHingeMaterialLevel%iBay%iK0" % (i, j))
- tclfile.write("\t$n")
- tclfile.write("\t$BeamHingeMaterialLevel%iBay%iAs" % (i, j))
- tclfile.write("\t$BeamHingeMaterialLevel%iBay%iMy" % (i, j))
- tclfile.write("\t$BeamHingeMaterialLevel%iBay%iLambda" % (i, j))
- tclfile.write("\t$BeamHingeMaterialLevel%iBay%iThetaP" % (i, j))
- tclfile.write("\t$BeamHingeMaterialLevel%iBay%iThetaPc" % (i, j))
- tclfile.write("\t$BeamHingeMaterialLevel%iBay%iResidual" % (i, j))
- tclfile.write("\t$BeamHingeMaterialLevel%iBay%iThetaU;\n\n" % (i, j))
+ with open('DefineBeamHingeMaterials2DModel.tcl', 'w') as tclfile: # noqa: PLW1514, PTH123
+ tclfile.write(
+ '# This file will be used to define beam hinge material models\n\n\n'
+ )
+ for i in range(
+ 2, building.geometry['number of story'] + 2
+ ): # i is floor level number (no beam on ground)
+ for j in range(
+ 1, building.geometry['number of X bay'] + 1
+ ): # j is bay number (1 for leftmost bay)
+ tclfile.write('# Level%iBay%i\n' % (i, j))
+ tclfile.write(
+ 'set\tBeamHingeMaterialLevel%iBay%iTag\t%i;\n'
+ % (i, j, material_tag)
+ )
+ tclfile.write(
+ 'set\tBeamHingeMaterialLevel%iBay%iK0\t%.4f;\n'
+ % (i, j, (beam_set[i - 2][j - 1].plastic_hinge['K0']))
+ )
+ tclfile.write(
+ 'set\tBeamHingeMaterialLevel%iBay%iAs\t%.4f;\n'
+ % (i, j, (beam_set[i - 2][j - 1].plastic_hinge['as']))
+ )
+ tclfile.write(
+ 'set\tBeamHingeMaterialLevel%iBay%iMy\t%.4f;\n'
+ % (i, j, (beam_set[i - 2][j - 1].plastic_hinge['My']))
+ )
+ tclfile.write(
+ 'set\tBeamHingeMaterialLevel%iBay%iLambda\t%.4f;\n'
+ % (i, j, (beam_set[i - 2][j - 1].plastic_hinge['Lambda']))
+ )
+ tclfile.write(
+ 'set\tBeamHingeMaterialLevel%iBay%iThetaP\t%.4f;\n'
+ % (i, j, (beam_set[i - 2][j - 1].plastic_hinge['theta_p']))
+ )
+ tclfile.write(
+ 'set\tBeamHingeMaterialLevel%iBay%iThetaPc\t%.4f;\n'
+ % (i, j, (beam_set[i - 2][j - 1].plastic_hinge['theta_pc']))
+ )
+ tclfile.write(
+ 'set\tBeamHingeMaterialLevel%iBay%iResidual\t%.4f;\n'
+ % (i, j, (beam_set[i - 2][j - 1].plastic_hinge['residual']))
+ )
+ tclfile.write(
+ 'set\tBeamHingeMaterialLevel%iBay%iThetaU\t%.4f;\n'
+ % (i, j, (beam_set[i - 2][j - 1].plastic_hinge['theta_u']))
+ )
+ tclfile.write(
+ 'CreateIMKMaterial\t$BeamHingeMaterialLevel%iBay%iTag'
+ % (i, j)
+ )
+ tclfile.write('\t$BeamHingeMaterialLevel%iBay%iK0' % (i, j))
+ tclfile.write('\t$n')
+ tclfile.write('\t$BeamHingeMaterialLevel%iBay%iAs' % (i, j))
+ tclfile.write('\t$BeamHingeMaterialLevel%iBay%iMy' % (i, j))
+ tclfile.write('\t$BeamHingeMaterialLevel%iBay%iLambda' % (i, j))
+ tclfile.write('\t$BeamHingeMaterialLevel%iBay%iThetaP' % (i, j))
+ tclfile.write('\t$BeamHingeMaterialLevel%iBay%iThetaPc' % (i, j))
+ tclfile.write(
+ '\t$BeamHingeMaterialLevel%iBay%iResidual' % (i, j)
+ )
+ tclfile.write(
+ '\t$BeamHingeMaterialLevel%iBay%iThetaU;\n\n' % (i, j)
+ )
material_tag += 1
- tclfile.write("\n\nputs \"Beam hinge materials defined\"")
+ tclfile.write('\n\nputs "Beam hinge materials defined"')
- def write_column_hinge_material(self, building, column_set):
- """
- Create a .tcl file to define all column plastic hinge materials using modified IMK material model
+ def write_column_hinge_material(self, building, column_set): # noqa: PLR6301
+ """Create a .tcl file to define all column plastic hinge materials using modified IMK material model
:param building: a class defined in "building_information.py"
:param column_set: a list[x][y] and each element is a class defined in "column_component.py" file
:return: a .tcl file
- """
+ """ # noqa: D205, D400
material_tag = 60001
- with open('DefineColumnHingeMaterials2DModel.tcl', 'w') as tclfile:
- tclfile.write("# This file will be used to define column hinge material models\n\n\n")
- for i in range(1, building.geometry['number of story']+1): # i is story number (from 1)
- for j in range(1, building.geometry['number of X bay']+2): # j is pier number (1 for leftmost pier)
- tclfile.write("# Story%iPier%i\n" % (i, j))
- tclfile.write("set\tColumnHingeMaterialStory%iPier%iTag\t%i;\n" % (i, j, material_tag))
- tclfile.write("set\tColumnHingeMaterialStory%iPier%iK0\t%.4f;\n"
- % (i, j, (column_set[i-1][j-1].plastic_hinge['K0'])))
- tclfile.write("set\tColumnHingeMaterialStory%iPier%iAs\t%.4f;\n"
- % (i, j, (column_set[i-1][j-1].plastic_hinge['as'])))
- tclfile.write("set\tColumnHingeMaterialStory%iPier%iMy\t%.4f;\n"
- % (i, j, (column_set[i-1][j-1].plastic_hinge['My'])))
- tclfile.write("set\tColumnHingeMaterialStory%iPier%iLambda\t%.4f;\n"
- % (i, j, (column_set[i-1][j-1].plastic_hinge['Lambda'])))
- tclfile.write("set\tColumnHingeMaterialStory%iPier%iThetaP\t%.4f;\n"
- % (i, j, (column_set[i-1][j-1].plastic_hinge['theta_p'])))
- tclfile.write("set\tColumnHingeMaterialStory%iPier%iThetaPc\t%.4f;\n"
- % (i, j, (column_set[i-1][j-1].plastic_hinge['theta_pc'])))
- tclfile.write("set\tColumnHingeMaterialStory%iPier%iResidual\t%.4f;\n"
- % (i, j, (column_set[i-1][j-1].plastic_hinge['residual'])))
- tclfile.write("set\tColumnHingeMaterialStory%iPier%iThetaU\t%.4f;\n"
- % (i, j, (column_set[i-1][j-1].plastic_hinge['theta_u'])))
- tclfile.write("CreateIMKMaterial\t$ColumnHingeMaterialStory%iPier%iTag" % (i, j))
- tclfile.write("\t$ColumnHingeMaterialStory%iPier%iK0" % (i, j))
- tclfile.write("\t$n")
- tclfile.write("\t$ColumnHingeMaterialStory%iPier%iAs" % (i, j))
- tclfile.write("\t$ColumnHingeMaterialStory%iPier%iMy" % (i, j))
- tclfile.write("\t$ColumnHingeMaterialStory%iPier%iLambda" % (i, j))
- tclfile.write("\t$ColumnHingeMaterialStory%iPier%iThetaP" % (i, j))
- tclfile.write("\t$ColumnHingeMaterialStory%iPier%iThetaPc" % (i, j))
- tclfile.write("\t$ColumnHingeMaterialStory%iPier%iResidual" % (i, j))
- tclfile.write("\t$ColumnHingeMaterialStory%iPier%iThetaU;\n\n" % (i, j))
+ with open('DefineColumnHingeMaterials2DModel.tcl', 'w') as tclfile: # noqa: PLW1514, PTH123
+ tclfile.write(
+ '# This file will be used to define column hinge material models\n\n\n'
+ )
+ for i in range(
+ 1, building.geometry['number of story'] + 1
+ ): # i is story number (from 1)
+ for j in range(
+ 1, building.geometry['number of X bay'] + 2
+ ): # j is pier number (1 for leftmost pier)
+ tclfile.write('# Story%iPier%i\n' % (i, j))
+ tclfile.write(
+ 'set\tColumnHingeMaterialStory%iPier%iTag\t%i;\n'
+ % (i, j, material_tag)
+ )
+ tclfile.write(
+ 'set\tColumnHingeMaterialStory%iPier%iK0\t%.4f;\n'
+ % (i, j, (column_set[i - 1][j - 1].plastic_hinge['K0']))
+ )
+ tclfile.write(
+ 'set\tColumnHingeMaterialStory%iPier%iAs\t%.4f;\n'
+ % (i, j, (column_set[i - 1][j - 1].plastic_hinge['as']))
+ )
+ tclfile.write(
+ 'set\tColumnHingeMaterialStory%iPier%iMy\t%.4f;\n'
+ % (i, j, (column_set[i - 1][j - 1].plastic_hinge['My']))
+ )
+ tclfile.write(
+ 'set\tColumnHingeMaterialStory%iPier%iLambda\t%.4f;\n'
+ % (i, j, (column_set[i - 1][j - 1].plastic_hinge['Lambda']))
+ )
+ tclfile.write(
+ 'set\tColumnHingeMaterialStory%iPier%iThetaP\t%.4f;\n'
+ % (i, j, (column_set[i - 1][j - 1].plastic_hinge['theta_p']))
+ )
+ tclfile.write(
+ 'set\tColumnHingeMaterialStory%iPier%iThetaPc\t%.4f;\n'
+ % (
+ i,
+ j,
+ (column_set[i - 1][j - 1].plastic_hinge['theta_pc']),
+ )
+ )
+ tclfile.write(
+ 'set\tColumnHingeMaterialStory%iPier%iResidual\t%.4f;\n'
+ % (
+ i,
+ j,
+ (column_set[i - 1][j - 1].plastic_hinge['residual']),
+ )
+ )
+ tclfile.write(
+ 'set\tColumnHingeMaterialStory%iPier%iThetaU\t%.4f;\n'
+ % (i, j, (column_set[i - 1][j - 1].plastic_hinge['theta_u']))
+ )
+ tclfile.write(
+ 'CreateIMKMaterial\t$ColumnHingeMaterialStory%iPier%iTag'
+ % (i, j)
+ )
+ tclfile.write('\t$ColumnHingeMaterialStory%iPier%iK0' % (i, j))
+ tclfile.write('\t$n')
+ tclfile.write('\t$ColumnHingeMaterialStory%iPier%iAs' % (i, j))
+ tclfile.write('\t$ColumnHingeMaterialStory%iPier%iMy' % (i, j))
+ tclfile.write(
+ '\t$ColumnHingeMaterialStory%iPier%iLambda' % (i, j)
+ )
+ tclfile.write(
+ '\t$ColumnHingeMaterialStory%iPier%iThetaP' % (i, j)
+ )
+ tclfile.write(
+ '\t$ColumnHingeMaterialStory%iPier%iThetaPc' % (i, j)
+ )
+ tclfile.write(
+ '\t$ColumnHingeMaterialStory%iPier%iResidual' % (i, j)
+ )
+ tclfile.write(
+ '\t$ColumnHingeMaterialStory%iPier%iThetaU;\n\n' % (i, j)
+ )
material_tag += 1
- tclfile.write("\n\nputs \"Column hinge materials defined\"")
+ tclfile.write('\n\nputs "Column hinge materials defined"')
- def write_beam(self, building):
- """
- Create a .tcl file to define the beam element
+ def write_beam(self, building): # noqa: PLR6301
+ """Create a .tcl file to define the beam element
:param building: a class defined in "building_information.py" file
:return: a .tcl file
- """
- with open('DefineBeams2DModel.tcl', 'w') as tclfile:
- tclfile.write("# This file will be used to define beam elements \n\n\n")
- tclfile.write("# Define beam section sizes \n")
- for i in range(2, building.geometry['number of story']+2): # i is the floor level (from 2)
- tclfile.write("set\tBeamLevel%i\t[SectionProperty %s];\n" % (i, building.member_size['beam'][i-2]))
- tclfile.write("\n\n# Define beams \n")
- for i in range(2, building.geometry['number of story']+2): # i is the floor level (from 2)
- tclfile.write("# Level%i\n" % i)
+ """ # noqa: D205, D400
+ with open('DefineBeams2DModel.tcl', 'w') as tclfile: # noqa: PLW1514, PTH123
+ tclfile.write('# This file will be used to define beam elements \n\n\n')
+ tclfile.write('# Define beam section sizes \n')
+ for i in range(
+ 2, building.geometry['number of story'] + 2
+ ): # i is the floor level (from 2)
+ tclfile.write(
+ 'set\tBeamLevel%i\t[SectionProperty %s];\n'
+ % (i, building.member_size['beam'][i - 2])
+ )
+ tclfile.write('\n\n# Define beams \n')
+ for i in range(
+ 2, building.geometry['number of story'] + 2
+ ): # i is the floor level (from 2)
+ tclfile.write('# Level%i\n' % i)
# Beam elements in frame
- for j in range(1, building.geometry['number of X bay']+1): # j is the bay number
- tclfile.write("element\telasticBeamColumn") # elastic beam-column command
- tclfile.write("\t%i%i%i%i%i%i%i" % (2, j, i, 1, j+1, i, 1)) # Beam element tag
- tclfile.write("\t%i%i%i%i" % (j, i, 1, 5)) # Starting node
- tclfile.write("\t%i%i%i%i" % (j+1, i, 1, 3)) # Ending node
- tclfile.write("\t[lindex $BeamLevel%i 2]" % i) # Area of beam section
- tclfile.write("\t$Es") # Young's modulus of steel material
- tclfile.write("\t[expr ($n+1.0)/$n*[lindex $BeamLevel%i 6]]" % i) # Modified moment of inertia
- tclfile.write("\t$LinearTransf; \n") # Geometric transformation
+ for j in range(
+ 1, building.geometry['number of X bay'] + 1
+ ): # j is the bay number
+ tclfile.write(
+ 'element\telasticBeamColumn'
+ ) # elastic beam-column command
+ tclfile.write(
+ '\t%i%i%i%i%i%i%i' % (2, j, i, 1, j + 1, i, 1)
+ ) # Beam element tag
+ tclfile.write('\t%i%i%i%i' % (j, i, 1, 5)) # Starting node
+ tclfile.write('\t%i%i%i%i' % (j + 1, i, 1, 3)) # Ending node
+ tclfile.write(
+ '\t[lindex $BeamLevel%i 2]' % i
+ ) # Area of beam section
+ tclfile.write('\t$Es') # Young's modulus of steel material
+ tclfile.write(
+ '\t[expr ($n+1.0)/$n*[lindex $BeamLevel%i 6]]' % i
+ ) # Modified moment of inertia
+ tclfile.write('\t$LinearTransf; \n') # Geometric transformation
# Truss elements connecting frame and leaning column
- tclfile.write("element\ttruss") # elastic beam-column command
- tclfile.write("\t%i%i%i%i%i%i" % (2, building.geometry['number of X bay']+1, i, 1,
- building.geometry['number of X bay']+2, i))
- tclfile.write("\t%i%i%i%i" % (building.geometry['number of X bay']+1, i, 1, 1)) # Start node in frame
- tclfile.write("\t%i%i" % (building.geometry['number of X bay']+2, i)) # Ending node in leaning column
- tclfile.write("\t$AreaRigid\t$TrussMatID; \n") # Large area and truss element material
- tclfile.write("\n")
- tclfile.write("puts \"Beams defined\"")
-
- def write_column(self, building):
- """
- Create a .tcl file to define column element
+ tclfile.write('element\ttruss') # elastic beam-column command
+ tclfile.write(
+ '\t%i%i%i%i%i%i'
+ % (
+ 2,
+ building.geometry['number of X bay'] + 1,
+ i,
+ 1,
+ building.geometry['number of X bay'] + 2,
+ i,
+ )
+ )
+ tclfile.write(
+ '\t%i%i%i%i'
+ % (building.geometry['number of X bay'] + 1, i, 1, 1)
+ ) # Start node in frame
+ tclfile.write(
+ '\t%i%i' % (building.geometry['number of X bay'] + 2, i)
+ ) # Ending node in leaning column
+ tclfile.write(
+ '\t$AreaRigid\t$TrussMatID; \n'
+ ) # Large area and truss element material
+ tclfile.write('\n')
+ tclfile.write('puts "Beams defined"')
+
+ def write_column(self, building): # noqa: PLR6301
+ """Create a .tcl file to define column element
:param building: a class defined in "building_information.py" file
:return: a .tcl file
- """
- with open('DefineColumns2DModel.tcl', 'w') as tclfile:
- tclfile.write("# This file will be used to define columns \n\n\n")
+ """ # noqa: D205, D400
+ with open('DefineColumns2DModel.tcl', 'w') as tclfile: # noqa: PLW1514, PTH123
+ tclfile.write('# This file will be used to define columns \n\n\n')
# Define exterior column sizes
- tclfile.write("# Define exterior column section sizes \n")
- for i in range(1, building.geometry['number of story']+1): # i is story number
- tclfile.write("set\tExteriorColumnStory%i\t[SectionProperty %s];\n"
- % (i, building.member_size['exterior column'][i-1]))
- tclfile.write("\n\n")
+ tclfile.write('# Define exterior column section sizes \n')
+ for i in range(
+ 1, building.geometry['number of story'] + 1
+ ): # i is story number
+ tclfile.write(
+ 'set\tExteriorColumnStory%i\t[SectionProperty %s];\n'
+ % (i, building.member_size['exterior column'][i - 1])
+ )
+ tclfile.write('\n\n')
# Define interior column sizes
- tclfile.write("# Define interior column section sizes \n")
- for i in range(1, building.geometry['number of story']+1): # i is story number
- tclfile.write("set\tInteriorColumnStory%i\t[SectionProperty %s];\n"
- % (i, building.member_size['interior column'][i-1]))
-
- tclfile.write("\n\n# Define columns\n")
- for i in range(1, building.geometry['number of story']+1): # i is story number
- tclfile.write("# Story %i \n" % i)
+ tclfile.write('# Define interior column section sizes \n')
+ for i in range(
+ 1, building.geometry['number of story'] + 1
+ ): # i is story number
+ tclfile.write(
+ 'set\tInteriorColumnStory%i\t[SectionProperty %s];\n'
+ % (i, building.member_size['interior column'][i - 1])
+ )
+
+ tclfile.write('\n\n# Define columns\n')
+ for i in range(
+ 1, building.geometry['number of story'] + 1
+ ): # i is story number
+ tclfile.write('# Story %i \n' % i)
# Columns in frame
- for j in range(1, building.geometry['number of X bay']+2): # j is bay number
- tclfile.write("element\telasticBeamColumn") # element command
- tclfile.write("\t%i%i%i%i%i%i%i" % (3, j, i, 1, j, i+1, 1)) # element tag
- tclfile.write("\t%i%i%i%i" % (j, i, 1, 4)) # starting node
- tclfile.write("\t%i%i%i%i" % (j, i+1, 1, 6)) # ending node
+ for j in range(
+ 1, building.geometry['number of X bay'] + 2
+ ): # j is bay number
+ tclfile.write('element\telasticBeamColumn') # element command
+ tclfile.write(
+ '\t%i%i%i%i%i%i%i' % (3, j, i, 1, j, i + 1, 1)
+ ) # element tag
+ tclfile.write('\t%i%i%i%i' % (j, i, 1, 4)) # starting node
+ tclfile.write('\t%i%i%i%i' % (j, i + 1, 1, 6)) # ending node
# Determine whether the column is interior or exterior column
# this would affect the column section size
- if 1 < j < building.geometry['number of X bay']+1:
- tclfile.write("\t[lindex $InteriorColumnStory%i 2]" % i) # Area of section
- tclfile.write("\t$Es") # Young's modulus of steel material
- tclfile.write("\t[expr ($n+1.0)/$n*[lindex $InteriorColumnStory%i 6]]" % i) # Modified inertia
+ if 1 < j < building.geometry['number of X bay'] + 1:
+ tclfile.write(
+ '\t[lindex $InteriorColumnStory%i 2]' % i
+ ) # Area of section
+ tclfile.write('\t$Es') # Young's modulus of steel material
+ tclfile.write(
+ '\t[expr ($n+1.0)/$n*[lindex $InteriorColumnStory%i 6]]'
+ % i
+ ) # Modified inertia
else:
- tclfile.write("\t[lindex $ExteriorColumnStory%i 2]" % i) # Area of section
- tclfile.write("\t$Es") # Young's modulus of steel material
- tclfile.write("\t[expr ($n+1.0)/$n*[lindex $ExteriorColumnStory%i 6]]" % i) # Modified inertia
- tclfile.write("\t$PDeltaTransf; \n") # Geometric transformation
+ tclfile.write(
+ '\t[lindex $ExteriorColumnStory%i 2]' % i
+ ) # Area of section
+ tclfile.write('\t$Es') # Young's modulus of steel material
+ tclfile.write(
+ '\t[expr ($n+1.0)/$n*[lindex $ExteriorColumnStory%i 6]]'
+ % i
+ ) # Modified inertia
+ tclfile.write('\t$PDeltaTransf; \n') # Geometric transformation
# Leaning column elements
- tclfile.write("element\telasticBeamColumn") # element command
+ tclfile.write('element\telasticBeamColumn') # element command
if i == 1:
- tclfile.write("\t%i%i%i%i%i%i" % (3, building.geometry['number of X bay']+2, i,
- building.geometry['number of X bay']+2, i+1, 2))
- tclfile.write("\t%i%i" % (building.geometry['number of X bay']+2, i))
- tclfile.write("\t%i%i%i" % (building.geometry['number of X bay']+2, i+1, 2))
+ tclfile.write(
+ '\t%i%i%i%i%i%i'
+ % (
+ 3,
+ building.geometry['number of X bay'] + 2,
+ i,
+ building.geometry['number of X bay'] + 2,
+ i + 1,
+ 2,
+ )
+ )
+ tclfile.write(
+ '\t%i%i' % (building.geometry['number of X bay'] + 2, i)
+ )
+ tclfile.write(
+ '\t%i%i%i'
+ % (building.geometry['number of X bay'] + 2, i + 1, 2)
+ )
else:
- tclfile.write("\t%i%i%i%i%i%i%i" % (3, building.geometry['number of X bay']+2, i, 4,
- building.geometry['number of X bay']+2, i+1, 2))
- tclfile.write("\t%i%i%i" % (building.geometry['number of X bay']+2, i, 4))
- tclfile.write("\t%i%i%i" % (building.geometry['number of X bay']+2, i+1, 2))
- tclfile.write("\t$AreaRigid\t$Es\t$IRigid\t$PDeltaTransf; \n\n")
- tclfile.write("puts \"Columns defined\"")
-
- def write_beam_hinge(self, building):
- """
- Create a .tcl file to define beam hinge element (rotational spring)
+ tclfile.write(
+ '\t%i%i%i%i%i%i%i'
+ % (
+ 3,
+ building.geometry['number of X bay'] + 2,
+ i,
+ 4,
+ building.geometry['number of X bay'] + 2,
+ i + 1,
+ 2,
+ )
+ )
+ tclfile.write(
+ '\t%i%i%i' % (building.geometry['number of X bay'] + 2, i, 4)
+ )
+ tclfile.write(
+ '\t%i%i%i'
+ % (building.geometry['number of X bay'] + 2, i + 1, 2)
+ )
+ tclfile.write('\t$AreaRigid\t$Es\t$IRigid\t$PDeltaTransf; \n\n')
+ tclfile.write('puts "Columns defined"')
+
+ def write_beam_hinge(self, building): # noqa: PLR6301
+ """Create a .tcl file to define beam hinge element (rotational spring)
:param building: a class defined in "building_information.py" file
:return: a .tcl file
- """
- with open('DefineBeamHinges2DModel.tcl', 'w') as tclfile:
- tclfile.write("# This file will be used to define beam hinges \n\n\n")
-
- tclfile.write("# Define beam hinges using rotational spring with modified IMK material\n")
- for i in range(2, building.geometry['number of story']+2): # i is the floor level (from 2)
- tclfile.write("# Level%i\n" % i)
- for j in range(1, building.geometry['number of X bay'] + 1): # j is the bay number
- tclfile.write("rotBeamSpring\t%i%i%i%i%i%i%i" % (7, j, i, 1, 1, 1, 5)) # element ID
- tclfile.write("\t%i%i%i%i" % (j, i, 1, 1)) # node on mid right of panel zone
- tclfile.write("\t%i%i%i%i" % (j, i, 1, 5)) # node on left end of beam element
- tclfile.write("\t$BeamHingeMaterialLevel%iBay%iTag" % (i, j)) # associated modified IMK material
- tclfile.write("\t$StiffMatID;\n") # stiff material ID
-
- tclfile.write("rotBeamSpring\t%i%i%i%i%i%i%i" % (7, j+1, i, 0, 9, 1, 3)) # element ID
- tclfile.write("\t%i%i%i%i" % (j+1, i, 0, 9)) # node on mid left of panel zone
- tclfile.write("\t%i%i%i%i" % (j+1, i, 1, 3)) # node on right end of beam element
- tclfile.write("\t$BeamHingeMaterialLevel%iBay%iTag" % (i, j)) # associated modified IMK material
- tclfile.write("\t$StiffMatID;\n\n") # stiff material ID
-
- tclfile.write("puts \"Beam hinges defined\"")
-
- def write_column_hinge(self, building):
- """
- Create a .tcl file to define column hinge element (rotational spring)
+ """ # noqa: D205, D400
+ with open('DefineBeamHinges2DModel.tcl', 'w') as tclfile: # noqa: PLW1514, PTH123
+ tclfile.write('# This file will be used to define beam hinges \n\n\n')
+
+ tclfile.write(
+ '# Define beam hinges using rotational spring with modified IMK material\n'
+ )
+ for i in range(
+ 2, building.geometry['number of story'] + 2
+ ): # i is the floor level (from 2)
+ tclfile.write('# Level%i\n' % i)
+ for j in range(
+ 1, building.geometry['number of X bay'] + 1
+ ): # j is the bay number
+ tclfile.write(
+ 'rotBeamSpring\t%i%i%i%i%i%i%i' % (7, j, i, 1, 1, 1, 5)
+ ) # element ID
+ tclfile.write(
+ '\t%i%i%i%i' % (j, i, 1, 1)
+ ) # node on mid right of panel zone
+ tclfile.write(
+ '\t%i%i%i%i' % (j, i, 1, 5)
+ ) # node on left end of beam element
+ tclfile.write(
+ '\t$BeamHingeMaterialLevel%iBay%iTag' % (i, j)
+ ) # associated modified IMK material
+ tclfile.write('\t$StiffMatID;\n') # stiff material ID
+
+ tclfile.write(
+ 'rotBeamSpring\t%i%i%i%i%i%i%i' % (7, j + 1, i, 0, 9, 1, 3)
+ ) # element ID
+ tclfile.write(
+ '\t%i%i%i%i' % (j + 1, i, 0, 9)
+ ) # node on mid left of panel zone
+ tclfile.write(
+ '\t%i%i%i%i' % (j + 1, i, 1, 3)
+ ) # node on right end of beam element
+ tclfile.write(
+ '\t$BeamHingeMaterialLevel%iBay%iTag' % (i, j)
+ ) # associated modified IMK material
+ tclfile.write('\t$StiffMatID;\n\n') # stiff material ID
+
+ tclfile.write('puts "Beam hinges defined"')
+
+ def write_column_hinge(self, building): # noqa: PLR6301
+ """Create a .tcl file to define column hinge element (rotational spring)
:param building: a class defined in "building_information.py" file
:return: a .tcl file
- """
- with open('DefineColumnHinges2DModel.tcl', 'w') as tclfile:
- tclfile.write("# This file wil be used to define column hinges\n\n\n")
- for i in range(1, building.geometry['number of story']+1): # i refers the story number
- tclfile.write("# Column hinges at bottom of story%i\n" % i)
- for j in range(1, building.geometry['number of X bay']+2): # j refers the column number
- tclfile.write("rotColumnSpring\t%i%i%i%i%i%i%i" % (6, j, i, 1, 0, 1, 4)) # element ID
- tclfile.write("\t%i%i%i%i" % (j, i, 1, 0)) # Node on the ground
- tclfile.write("\t%i%i%i%i" % (j, i, 1, 4)) # Node at the bottom of column element
- tclfile.write("\t$ColumnHingeMaterialStory%iPier%iTag" % (i, j)) # associated modified IMK
- tclfile.write("\t$StiffMatID;\n") # stiff material
- tclfile.write("\n")
- tclfile.write("# Column hinges at top of story%i\n" % i)
- for j in range(1, building.geometry['number of X bay']+2): # j refers the column number
- tclfile.write("rotColumnSpring\t%i%i%i%i%i%i%i" % (6, j, i+1, 1, 2, 1, 6)) # element ID
- tclfile.write("\t%i%i%i%i" % (j, i+1, 1, 2)) # Node on the ground
- tclfile.write("\t%i%i%i%i" % (j, i+1, 1, 6)) # Node at the bottom of column element
- tclfile.write("\t$ColumnHingeMaterialStory%iPier%iTag" % (i, j)) # associated modified IMK
- tclfile.write("\t$StiffMatID;\n") # stiff material
- tclfile.write("\n")
- tclfile.write("# Rotational springs for leaning column\n")
- for i in range(2, building.geometry['number of story']+2): # i refers to the floor level number
+ """ # noqa: D205, D400
+ with open('DefineColumnHinges2DModel.tcl', 'w') as tclfile: # noqa: PLW1514, PTH123
+ tclfile.write('# This file will be used to define column hinges\n\n\n')
+ for i in range(
+ 1, building.geometry['number of story'] + 1
+ ): # i refers the story number
+ tclfile.write('# Column hinges at bottom of story%i\n' % i)
+ for j in range(
+ 1, building.geometry['number of X bay'] + 2
+ ): # j refers the column number
+ tclfile.write(
+ 'rotColumnSpring\t%i%i%i%i%i%i%i' % (6, j, i, 1, 0, 1, 4)
+ ) # element ID
+ tclfile.write('\t%i%i%i%i' % (j, i, 1, 0)) # Node on the ground
+ tclfile.write(
+ '\t%i%i%i%i' % (j, i, 1, 4)
+ ) # Node at the bottom of column element
+ tclfile.write(
+ '\t$ColumnHingeMaterialStory%iPier%iTag' % (i, j)
+ ) # associated modified IMK
+ tclfile.write('\t$StiffMatID;\n') # stiff material
+ tclfile.write('\n')
+ tclfile.write('# Column hinges at top of story%i\n' % i)
+ for j in range(
+ 1, building.geometry['number of X bay'] + 2
+ ): # j refers the column number
+ tclfile.write(
+ 'rotColumnSpring\t%i%i%i%i%i%i%i' % (6, j, i + 1, 1, 2, 1, 6)
+ ) # element ID
+ tclfile.write(
+ '\t%i%i%i%i' % (j, i + 1, 1, 2)
+                    ) # Node at floor level above
+ tclfile.write(
+ '\t%i%i%i%i' % (j, i + 1, 1, 6)
+                    ) # Node at the top of column element
+ tclfile.write(
+ '\t$ColumnHingeMaterialStory%iPier%iTag' % (i, j)
+ ) # associated modified IMK
+ tclfile.write('\t$StiffMatID;\n') # stiff material
+ tclfile.write('\n')
+ tclfile.write('# Rotational springs for leaning column\n')
+ for i in range(
+ 2, building.geometry['number of story'] + 2
+ ): # i refers to the floor level number
# Write the springs below floor level i
- tclfile.write("rotLeaningCol") # procedure command to create rotational spring of leaning column
- tclfile.write("\t%i%i%i%i%i%i" % (6, building.geometry['number of X bay']+2, i,
- building.geometry['number of X bay']+2, i, 2)) # spring ID
- tclfile.write("\t%i%i" % (building.geometry['number of X bay']+2, i)) # node at floor level
- tclfile.write("\t%i%i%i\t$StiffMatID;"
- % (building.geometry['number of X bay']+2, i, 2)) # node below floor level
- tclfile.write("\t# Spring below floor level %i \n" % i) # comment to explain the location of the sprubg
+ tclfile.write(
+ 'rotLeaningCol'
+ ) # procedure command to create rotational spring of leaning column
+ tclfile.write(
+ '\t%i%i%i%i%i%i'
+ % (
+ 6,
+ building.geometry['number of X bay'] + 2,
+ i,
+ building.geometry['number of X bay'] + 2,
+ i,
+ 2,
+ )
+ ) # spring ID
+ tclfile.write(
+ '\t%i%i' % (building.geometry['number of X bay'] + 2, i)
+ ) # node at floor level
+ tclfile.write(
+ '\t%i%i%i\t$StiffMatID;'
+ % (building.geometry['number of X bay'] + 2, i, 2)
+ ) # node below floor level
+ tclfile.write(
+ '\t# Spring below floor level %i \n' % i
+                ) # comment to explain the location of the spring
# Write the springs above floor level i
# If it is roof, no springs above the roof
- if i < building.geometry['number of story']+1:
- tclfile.write("rotLeaningCol") # rotLeaningCol is user-defined process in OpenSees
- tclfile.write("\t%i%i%i%i%i%i" % (6, building.geometry['number of X bay']+2, i,
- building.geometry['number of X bay'], i, 4)) # Spring tag
- tclfile.write("\t%i%i" % (building.geometry['number of X bay']+2, i)) # Node at floor level
+ if i < building.geometry['number of story'] + 1:
+ tclfile.write(
+ 'rotLeaningCol'
+ ) # rotLeaningCol is user-defined process in OpenSees
+ tclfile.write(
+ '\t%i%i%i%i%i%i'
+ % (
+ 6,
+ building.geometry['number of X bay'] + 2,
+ i,
+ building.geometry['number of X bay'],
+ i,
+ 4,
+ )
+ ) # Spring tag
+ tclfile.write(
+ '\t%i%i' % (building.geometry['number of X bay'] + 2, i)
+ ) # Node at floor level
# Node above floor level
- tclfile.write("\t%i%i%i\t$StiffMatID;" % (building.geometry['number of X bay']+2, i, 4))
- tclfile.write("\t# Spring above floor level %i \n" % i)
+ tclfile.write(
+ '\t%i%i%i\t$StiffMatID;'
+ % (building.geometry['number of X bay'] + 2, i, 4)
+ )
+ tclfile.write('\t# Spring above floor level %i \n' % i)
else:
pass
- tclfile.write("\n")
- tclfile.write("puts \"Column hinge defined\"")
+ tclfile.write('\n')
+ tclfile.write('puts "Column hinge defined"')
- def write_mass(self, building):
- """
- Create a .tcl file which defines the mass of each floor at each node
+ def write_mass(self, building): # noqa: PLR6301
+ """Create a .tcl file which defines the mass of each floor at each node
:param building: a class defined in "building_information.py" file
:return: a .tcl file
- """
- with open('DefineMasses2DModel.tcl', 'w') as tclfile:
- tclfile.write("# This file will be used to define all nodal masses \n\n")
+ """ # noqa: D205, D400
+ with open('DefineMasses2DModel.tcl', 'w') as tclfile: # noqa: PLW1514, PTH123
+ tclfile.write('# This file will be used to define all nodal masses \n\n')
# Write values for floor weights, tributary mass ratio, and nodal mass
- tclfile.write("# Define floor weights and each nodal mass \n")
- for i in range(2, building.geometry['number of story']+2):
- tclfile.write("set\tFloor%iWeight\t%.2f; \n" % (i, building.gravity_loads['floor weight'][i-2]))
- tclfile.write("set\tFrameTributaryMassRatio\t%s; \n" % (1.0 / building.geometry['number of X LFRS']))
- tclfile.write("set\tTotalNodesPerFloor\t%i; \n" % (building.geometry['number of X bay'] + 2))
+ tclfile.write('# Define floor weights and each nodal mass \n')
+ for i in range(2, building.geometry['number of story'] + 2):
+ tclfile.write(
+ 'set\tFloor%iWeight\t%.2f; \n'
+ % (i, building.gravity_loads['floor weight'][i - 2])
+ )
+ tclfile.write(
+ 'set\tFrameTributaryMassRatio\t%s; \n'
+ % (1.0 / building.geometry['number of X LFRS'])
+ )
+ tclfile.write(
+ 'set\tTotalNodesPerFloor\t%i; \n'
+ % (building.geometry['number of X bay'] + 2)
+ )
for i in range(2, building.geometry['number of story'] + 2):
- tclfile.write("set\tNodalMassFloor%i" % i)
- tclfile.write("\t[expr $Floor%iWeight*$FrameTributaryMassRatio/$TotalNodesPerFloor/$g]; \n" % i)
- tclfile.write("\n\n")
+ tclfile.write('set\tNodalMassFloor%i' % i)
+ tclfile.write(
+ '\t[expr $Floor%iWeight*$FrameTributaryMassRatio/$TotalNodesPerFloor/$g]; \n'
+ % i
+ )
+ tclfile.write('\n\n')
# Write nodal masses for each floor level
- for i in range(2, building.geometry['number of story']+2):
- tclfile.write("# Level%i \n" % i)
- for j in range(1, building.geometry['number of X bay']+3):
- if j < building.geometry['number of X bay']+2:
+ for i in range(2, building.geometry['number of story'] + 2):
+ tclfile.write('# Level%i \n' % i)
+ for j in range(1, building.geometry['number of X bay'] + 3):
+ if j < building.geometry['number of X bay'] + 2:
# Write mass for nodes in structural columns
- tclfile.write("mass\t%i%i%i%i" % (j, i, 1, 1)) # Nodal mass command and node tag
+ tclfile.write(
+ 'mass\t%i%i%i%i' % (j, i, 1, 1)
+ ) # Nodal mass command and node tag
else:
# Write mass for nodes in leaning column
- tclfile.write("mass\t%i%i" % (j, i)) # Nodal mass command (leaning column)
- tclfile.write("\t$NodalMassFloor%i" % i) # Mass along X direction
- tclfile.write("\t$Negligible\t$Negligible \n") # Mass along Y and RotZ doesn't matter
- tclfile.write("\n")
- tclfile.write("puts \"Nodal mass defined\"")
-
- def write_panel_zone_elements(self, building):
- """
- Create a .tcl file that defines the elements in panel zone
+ tclfile.write(
+ 'mass\t%i%i' % (j, i)
+ ) # Nodal mass command (leaning column)
+ tclfile.write(
+ '\t$NodalMassFloor%i' % i
+ ) # Mass along X direction
+ tclfile.write(
+ '\t$Negligible\t$Negligible \n'
+ ) # Mass along Y and RotZ doesn't matter
+ tclfile.write('\n')
+ tclfile.write('puts "Nodal mass defined"')
+
+ def write_panel_zone_elements(self, building): # noqa: PLR6301
+ """Create a .tcl file that defines the elements in panel zone
:param building: a class defined in "building_information.py" file
:return: a .tcl file
- """
- with open('DefinePanelZoneElements.tcl', 'w') as tclfile:
- tclfile.write("# This file will be used to define elements in panel zones \n\n")
- tclfile.write("# Procedure used to produce panel zone elements:\n")
- for i in range(2, building.geometry['number of story']+2): # i refers to the floor level number
- tclfile.write("# Level%i \n" % i)
- for j in range(1, building.geometry['number of X bay']+2): # j refers to the column number
- tclfile.write("elemPanelZone2D\t%i%i%i%i%i%i" % (8, 0, 0, j, i, 1)) # panel zone starting element tag
- tclfile.write("\t%i%i%i%i" % (j, i, 0, 1)) # first node in panel zone (top left corner)
- tclfile.write("\t$Es\t$PDeltaTransf\t$LinearTransf; \n")
- tclfile.write("\n")
- tclfile.write("puts \"Panel zone elements defined\"")
-
- def write_panel_zone_springs(self, building, column_set, beam_set, connection_set):
+ """ # noqa: D205, D400
+ with open('DefinePanelZoneElements.tcl', 'w') as tclfile: # noqa: PLW1514, PTH123
+ tclfile.write(
+ '# This file will be used to define elements in panel zones \n\n'
+ )
+ tclfile.write('# Procedure used to produce panel zone elements:\n')
+ for i in range(
+ 2, building.geometry['number of story'] + 2
+ ): # i refers to the floor level number
+ tclfile.write('# Level%i \n' % i)
+ for j in range(
+ 1, building.geometry['number of X bay'] + 2
+ ): # j refers to the column number
+ tclfile.write(
+ 'elemPanelZone2D\t%i%i%i%i%i%i' % (8, 0, 0, j, i, 1)
+ ) # panel zone starting element tag
+ tclfile.write(
+ '\t%i%i%i%i' % (j, i, 0, 1)
+ ) # first node in panel zone (top left corner)
+ tclfile.write('\t$Es\t$PDeltaTransf\t$LinearTransf; \n')
+ tclfile.write('\n')
+ tclfile.write('puts "Panel zone elements defined"')
+
+ def write_panel_zone_springs( # noqa: D102, PLR6301
+ self,
+ building,
+ column_set,
+ beam_set,
+ connection_set,
+ ):
# Create a .tcl file that defines the springs involved in panel zones
- with open('DefinePanelZoneSprings.tcl', 'w') as tclfile:
- tclfile.write("# This file will be used to define springs in panel zone \n\n")
- tclfile.write("# Procedure command:\n")
- tclfile.write("# rotPanelZone2D\teleID\tnodeR\tnodeC\tE\tFy\tdc\tbf_c\ttf_c\ttp\tdb\tRy\tas\n\n")
- for i in range(2, building.geometry['number of story']+2): # i refers to the floor level number
- tclfile.write("# Level%i\n" % i)
- for j in range(1, building.geometry['number of X bay']+2): # j refers to the column number
- tclfile.write("rotPanelZone2D\t%i%i%i%i%i%i" % (9, j, i, 1, 0, 0)) # panel zone spring tag
- tclfile.write("\t%i%i%i%i" % (j, i, 0, 3)) # node tag at top right corner of panel zone
- tclfile.write("\t%i%i%i%i" % (j, i, 0, 4)) # node tag at top right corner of panel zone
- tclfile.write("\t$Es\t$Fy") # Young's modulus and Yielding stress
- tclfile.write("\t%.2f" % column_set[i-2][j-1].section['d']) # column depth
- tclfile.write("\t%.2f" % column_set[i-2][j-1].section['bf']) # column flange width
- tclfile.write("\t%.2f" % column_set[i-2][j-1].section['tf']) # column flange thickness
+ with open('DefinePanelZoneSprings.tcl', 'w') as tclfile: # noqa: PLW1514, PTH123
+ tclfile.write(
+ '# This file will be used to define springs in panel zone \n\n'
+ )
+ tclfile.write('# Procedure command:\n')
+ tclfile.write(
+ '# rotPanelZone2D\teleID\tnodeR\tnodeC\tE\tFy\tdc\tbf_c\ttf_c\ttp\tdb\tRy\tas\n\n'
+ )
+ for i in range(
+ 2, building.geometry['number of story'] + 2
+ ): # i refers to the floor level number
+ tclfile.write('# Level%i\n' % i)
+ for j in range(
+ 1, building.geometry['number of X bay'] + 2
+ ): # j refers to the column number
+ tclfile.write(
+ 'rotPanelZone2D\t%i%i%i%i%i%i' % (9, j, i, 1, 0, 0)
+ ) # panel zone spring tag
+ tclfile.write(
+ '\t%i%i%i%i' % (j, i, 0, 3)
+ ) # node tag at top right corner of panel zone
+ tclfile.write(
+ '\t%i%i%i%i' % (j, i, 0, 4)
+ ) # node tag at top right corner of panel zone
+ tclfile.write(
+ '\t$Es\t$Fy'
+ ) # Young's modulus and Yielding stress
+ tclfile.write(
+ '\t%.2f' % column_set[i - 2][j - 1].section['d'] # noqa: UP031
+ ) # column depth
+ tclfile.write(
+ '\t%.2f' % column_set[i - 2][j - 1].section['bf'] # noqa: UP031
+ ) # column flange width
+ tclfile.write(
+ '\t%.2f' % column_set[i - 2][j - 1].section['tf'] # noqa: UP031
+ ) # column flange thickness
# Use actual panel zone thickness rather than the assumed column web thickness
- tclfile.write("\t%.2f" % (column_set[i-2][j-1].section['tw']
- + connection_set[i-2][j-1].doubler_plate_thickness)) # panel zone thickness
- if j != building.geometry['number of X bay']+1:
+ tclfile.write(
+ '\t%.2f'
+ % (
+ column_set[i - 2][j - 1].section['tw']
+ + connection_set[i - 2][j - 1].doubler_plate_thickness
+ )
+ ) # panel zone thickness
+ if j != building.geometry['number of X bay'] + 1:
                        # note that j is the column number;
                        # the number of beams at each floor level is one less than the number of columns
- tclfile.write("\t%.2f" % beam_set[i-2][j-1].section['d']) # beam depth
+ tclfile.write(
+ '\t%.2f' % beam_set[i - 2][j - 1].section['d'] # noqa: UP031
+ ) # beam depth
else:
- tclfile.write("\t%.2f" % beam_set[i-2][-1].section['d']) # beam depth
- tclfile.write("\t1.1\t0.03; \n") # Ry value and as value (both of them are constant)
- tclfile.write("\n")
- tclfile.write("puts \"Panel zone springs defined\"")
-
- def write_gravity_load(self, building):
+ tclfile.write(
+ '\t%.2f' % beam_set[i - 2][-1].section['d'] # noqa: UP031
+ ) # beam depth
+ tclfile.write(
+ '\t1.1\t0.03; \n'
+ ) # Ry value and as value (both of them are constant)
+ tclfile.write('\n')
+ tclfile.write('puts "Panel zone springs defined"')
+
+ def write_gravity_load(self, building): # noqa: D102, PLR6301
# Create a .tcl file to write gravity load: 1.00 DL + 0.25 LL
- with open('DefineGravityLoads2DModel.tcl', 'w') as tclfile:
- tclfile.write("# Define expected gravity loads\n\n\n")
+ with open('DefineGravityLoads2DModel.tcl', 'w') as tclfile: # noqa: PLW1514, PTH123
+ tclfile.write('# Define expected gravity loads\n\n\n')
# Assign the beam dead load values
- tclfile.write("# Assign uniform beam dead load values (kip/inch)\n")
- for i in range(2, building.geometry['number of story']+2):
+ tclfile.write('# Assign uniform beam dead load values (kip/inch)\n')
+ for i in range(2, building.geometry['number of story'] + 2):
# Be cautious: convert the unit from lb/ft to kip/inch
- tclfile.write("set\tBeamDeadLoadFloor%i\t%f; \n"
- % (i, building.gravity_loads['beam dead load'][i-2]*0.001/12))
- tclfile.write("\n")
+ tclfile.write(
+ 'set\tBeamDeadLoadFloor%i\t%f; \n'
+ % (
+ i,
+ building.gravity_loads['beam dead load'][i - 2] * 0.001 / 12,
+ )
+ )
+ tclfile.write('\n')
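+            # Illustration of the conversion above (hypothetical value):
+            # 75 lb/ft * 0.001 kip/lb / 12 in/ft = 0.00625 kip/inch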
# Assign the beam live load values
- tclfile.write("# Assign uniform beam live load values (kip/inch)\n")
- for i in range(2, building.geometry['number of story']+2):
+ tclfile.write('# Assign uniform beam live load values (kip/inch)\n')
+ for i in range(2, building.geometry['number of story'] + 2):
# Be cautious: convert the unit from lb/ft to kip/inch
- tclfile.write("set\tBeamLiveLoadFloor%i\t%f; \n"
- % (i, building.gravity_loads['beam live load'][i-2]*0.001/12))
- tclfile.write("\n")
+ tclfile.write(
+ 'set\tBeamLiveLoadFloor%i\t%f; \n'
+ % (
+ i,
+ building.gravity_loads['beam live load'][i - 2] * 0.001 / 12,
+ )
+ )
+ tclfile.write('\n')
# Assign the point dead load acting on leaning column
- tclfile.write("# Assign point dead load values on leaning column: kip\n")
- for i in range(2, building.geometry['number of story']+2):
- tclfile.write("set\tLeaningColumnDeadLoadFloor%i\t%f; \n"
- % (i, building.gravity_loads['leaning column dead load'][i-2]))
- tclfile.write("\n")
+ tclfile.write('# Assign point dead load values on leaning column: kip\n')
+ for i in range(2, building.geometry['number of story'] + 2):
+ tclfile.write(
+ 'set\tLeaningColumnDeadLoadFloor%i\t%f; \n'
+ % (i, building.gravity_loads['leaning column dead load'][i - 2])
+ )
+ tclfile.write('\n')
# Assign the point live load acting on leaning column
- tclfile.write("# Assign point live load values on leaning column: kip\n")
- for i in range(2, building.geometry['number of story']+2):
- tclfile.write("set\tLeaningColumnLiveLoadFloor%i\t%f; \n"
- % (i, building.gravity_loads['leaning column live load'][i-2]))
- tclfile.write("\n")
+ tclfile.write('# Assign point live load values on leaning column: kip\n')
+ for i in range(2, building.geometry['number of story'] + 2):
+ tclfile.write(
+ 'set\tLeaningColumnLiveLoadFloor%i\t%f; \n'
+ % (i, building.gravity_loads['leaning column live load'][i - 2])
+ )
+ tclfile.write('\n')
# Define the load pattern in OpenSees
- tclfile.write("# Define uniform loads on beams\n")
- tclfile.write("# Load combinations:\n")
- tclfile.write("# 104 Expected gravity loads: 1.05 DL + 0.25 LL\n")
- tclfile.write("pattern\tPlain\t104\tConstant\t{\n\n")
+ tclfile.write('# Define uniform loads on beams\n')
+ tclfile.write('# Load combinations:\n')
+ tclfile.write('# 104 Expected gravity loads: 1.05 DL + 0.25 LL\n')
+ tclfile.write('pattern\tPlain\t104\tConstant\t{\n\n')
# Dead loads on beam
- for i in range(2, building.geometry['number of story']+2):
- tclfile.write("# Level%i\n" % i)
- for j in range(1, building.geometry['number of X bay']+1):
- tclfile.write("eleLoad\t-ele")
- tclfile.write("\t%i%i%i%i%i%i%i" % (2, j, i, 1, j + 1, i, 1)) # Beam element tag
- tclfile.write("\t-type\t-beamUniform")
- tclfile.write("\t[expr -1.05*$BeamDeadLoadFloor%i - 0.25*$BeamLiveLoadFloor%i];\n"
- % (i, i))
- tclfile.write("\n")
- tclfile.write("\n")
+ for i in range(2, building.geometry['number of story'] + 2):
+ tclfile.write('# Level%i\n' % i)
+ for j in range(1, building.geometry['number of X bay'] + 1):
+ tclfile.write('eleLoad\t-ele')
+ tclfile.write(
+ '\t%i%i%i%i%i%i%i' % (2, j, i, 1, j + 1, i, 1)
+ ) # Beam element tag
+ tclfile.write('\t-type\t-beamUniform')
+ tclfile.write(
+ '\t[expr -1.05*$BeamDeadLoadFloor%i - 0.25*$BeamLiveLoadFloor%i];\n'
+ % (i, i)
+ )
+ tclfile.write('\n')
+ tclfile.write('\n')
# Gravity load on leaning column
- tclfile.write("# Define point loads on leaning column\n")
- for i in range(2, building.geometry['number of story']+2):
- tclfile.write("load\t%i%i\t0\t[expr -1*$LeaningColumnDeadLoadFloor%i - "
- "0.25*$LeaningColumnLiveLoadFloor%i]\t0;\n"
- % (building.geometry['number of X bay']+2, i, i, i))
- tclfile.write("\n}\n")
+ tclfile.write('# Define point loads on leaning column\n')
+ for i in range(2, building.geometry['number of story'] + 2):
+ tclfile.write(
+ 'load\t%i%i\t0\t[expr -1*$LeaningColumnDeadLoadFloor%i - '
+ '0.25*$LeaningColumnLiveLoadFloor%i]\t0;\n'
+ % (building.geometry['number of X bay'] + 2, i, i, i)
+ )
+ tclfile.write('\n}\n')
- tclfile.write("puts \"Expected gravity loads defined\"")
+ tclfile.write('puts "Expected gravity loads defined"')
- def write_pushover_loading(self, building):
+ def write_pushover_loading(self, building): # noqa: D102, PLR6301
# Create a .tcl file to write lateral pushover loading
- with open('DefinePushoverLoading2DModel.tcl', 'w') as tclfile:
- tclfile.write("# Define pushover loading\n\n\n")
- tclfile.write("pattern\tPlain\t200\tLinear\t{\n\n")
- tclfile.write("# Pushover pattern\n")
- for i in range(2, building.geometry['number of story']+2): # Floor level
- tclfile.write("# Level%i\n" % i)
- for j in range(1, building.geometry['number of X bay']+2): # Column number
- load = building.seismic_force_for_strength['lateral story force'][i-2]\
- / np.sum(building.seismic_force_for_strength['lateral story force'])
- tclfile.write("load\t%i%i%i%i\t%.3f\t0\t0;\n" % (j, i, 1, 1, load))
- tclfile.write("\n")
- tclfile.write("}")
-
- def write_base_reaction_recorder(self, building):
+ with open('DefinePushoverLoading2DModel.tcl', 'w') as tclfile: # noqa: PLW1514, PTH123
+ tclfile.write('# Define pushover loading\n\n\n')
+ tclfile.write('pattern\tPlain\t200\tLinear\t{\n\n')
+ tclfile.write('# Pushover pattern\n')
+ for i in range(
+ 2, building.geometry['number of story'] + 2
+ ): # Floor level
+ tclfile.write('# Level%i\n' % i)
+ for j in range(
+ 1, building.geometry['number of X bay'] + 2
+ ): # Column number
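+                    # Lateral pushover load: the story force at this floor normalized
+                    # by the sum of all story forces, applied at every column line.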
+ load = building.seismic_force_for_strength[
+ 'lateral story force'
+ ][i - 2] / np.sum(
+ building.seismic_force_for_strength['lateral story force']
+ )
+ tclfile.write(
+ 'load\t%i%i%i%i\t%.3f\t0\t0;\n' % (j, i, 1, 1, load)
+ )
+ tclfile.write('\n')
+ tclfile.write('}')
+
+ def write_base_reaction_recorder(self, building): # noqa: D102, PLR6301
# Create a .tcl file to write the recorders for base reactions
- with open('DefineBaseReactionRecorders2DModel.tcl', 'w') as tclfile:
- tclfile.write("# Define base node reaction recorders\n\n\n")
- tclfile.write("cd\t$baseDir/$dataDir/BaseReactions\n\n")
+ with open('DefineBaseReactionRecorders2DModel.tcl', 'w') as tclfile: # noqa: PLW1514, PTH123
+ tclfile.write('# Define base node reaction recorders\n\n\n')
+ tclfile.write('cd\t$baseDir/$dataDir/BaseReactions\n\n')
# Record vertical reactions
- tclfile.write("# Vertical reactions\n")
- tclfile.write("recorder\tNode\t-file\tVerticalReactions.out\t-time\t-node")
- for j in range(1, building.geometry['number of X bay']+2):
- tclfile.write("\t%i%i%i%i" % (j, 1, 1, 0))
- tclfile.write("\t%i%i" % (building.geometry['number of X bay']+2, 1))
- tclfile.write("\t-dof\t2\treaction;\n\n")
+ tclfile.write('# Vertical reactions\n')
+ tclfile.write(
+ 'recorder\tNode\t-file\tVerticalReactions.out\t-time\t-node'
+ )
+ for j in range(1, building.geometry['number of X bay'] + 2):
+ tclfile.write('\t%i%i%i%i' % (j, 1, 1, 0))
+ tclfile.write('\t%i%i' % (building.geometry['number of X bay'] + 2, 1))
+ tclfile.write('\t-dof\t2\treaction;\n\n')
# Record horizontal reactions
- tclfile.write("# X-Direction reactions\n")
- tclfile.write("recorder\tNode\t-file\tXReactions.out\t-time\t-node")
- for j in range(1, building.geometry['number of X bay']+2):
- tclfile.write("\t%i%i%i%i" % (j, 1, 1, 0))
- tclfile.write("\t%i%i" % (building.geometry['number of X bay']+2, 1))
- tclfile.write("\t-dof\t1\treaction;\n\n")
-
- def write_beam_hinge_recorder(self, building):
+ tclfile.write('# X-Direction reactions\n')
+ tclfile.write('recorder\tNode\t-file\tXReactions.out\t-time\t-node')
+ for j in range(1, building.geometry['number of X bay'] + 2):
+ tclfile.write('\t%i%i%i%i' % (j, 1, 1, 0))
+ tclfile.write('\t%i%i' % (building.geometry['number of X bay'] + 2, 1))
+ tclfile.write('\t-dof\t1\treaction;\n\n')
+
+ def write_beam_hinge_recorder(self, building): # noqa: D102, PLR6301
# Create a .tcl file to record beam hinge forces and deformation
- with open('DefineBeamHingeRecorders2DModel.tcl', 'w') as tclfile:
- tclfile.write("# Define beam hinge force-deformation recorders\n\n\n")
- tclfile.write("cd\t$baseDir/$dataDir/BeamHingeMoment\n\n")
+ with open('DefineBeamHingeRecorders2DModel.tcl', 'w') as tclfile: # noqa: PLW1514, PTH123
+ tclfile.write('# Define beam hinge force-deformation recorders\n\n\n')
+ tclfile.write('cd\t$baseDir/$dataDir/BeamHingeMoment\n\n')
# Define output files to record beam hinge element forces
- tclfile.write("# X-Direction beam hinge element force recorders\n")
- for i in range(2, building.geometry['number of story']+2):
- tclfile.write("recorder\tElement\t-file\tBeamHingeForcesLevel%i.out\t-time\t-ele" % i)
- for j in range(1, building.geometry['number of X bay']+1):
- tclfile.write("\t%i%i%i%i%i%i%i" % (7, j, i, 1, 1, 1, 5))
- tclfile.write("\t%i%i%i%i%i%i%i" % (7, j+1, i, 0, 9, 1, 3))
- tclfile.write("\tforce;\n")
- tclfile.write("\n")
-
- tclfile.write("cd\t$baseDir/$dataDir/BeamHingeDeformations\n\n")
+ tclfile.write('# X-Direction beam hinge element force recorders\n')
+ for i in range(2, building.geometry['number of story'] + 2):
+ tclfile.write(
+ 'recorder\tElement\t-file\tBeamHingeForcesLevel%i.out\t-time\t-ele'
+ % i
+ )
+ for j in range(1, building.geometry['number of X bay'] + 1):
+ tclfile.write('\t%i%i%i%i%i%i%i' % (7, j, i, 1, 1, 1, 5))
+ tclfile.write('\t%i%i%i%i%i%i%i' % (7, j + 1, i, 0, 9, 1, 3))
+ tclfile.write('\tforce;\n')
+ tclfile.write('\n')
+
+ tclfile.write('cd\t$baseDir/$dataDir/BeamHingeDeformations\n\n')
# Define output files to record beam hinge element deformations
- tclfile.write("# X-Direction beam hinge deformation recorders\n")
- for i in range(2, building.geometry['number of story']+2):
- tclfile.write("recorder\tElement\t-file\tBeamHingeForcesLevel%i.out\t-time\t-ele" % i)
- for j in range(1, building.geometry['number of X bay']+1):
- tclfile.write("\t%i%i%i%i%i%i%i" % (7, j, i, 1, 1, 1, 5))
- tclfile.write("\t%i%i%i%i%i%i%i" % (7, j+1, i, 0, 9, 1, 3))
- tclfile.write("\tdeformation;\n")
- tclfile.write("\n")
-
- def write_column_hinge_recorder(self, building):
+ tclfile.write('# X-Direction beam hinge deformation recorders\n')
+ for i in range(2, building.geometry['number of story'] + 2):
+ tclfile.write(
+ 'recorder\tElement\t-file\tBeamHingeForcesLevel%i.out\t-time\t-ele'
+ % i
+ )
+ for j in range(1, building.geometry['number of X bay'] + 1):
+ tclfile.write('\t%i%i%i%i%i%i%i' % (7, j, i, 1, 1, 1, 5))
+ tclfile.write('\t%i%i%i%i%i%i%i' % (7, j + 1, i, 0, 9, 1, 3))
+ tclfile.write('\tdeformation;\n')
+ tclfile.write('\n')
+
+ def write_column_hinge_recorder(self, building): # noqa: D102, PLR6301
# Create a .tcl file to record column hinge forces and deformations
- with open('DefineColumnHingeRecorders2DModel.tcl', 'w') as tclfile:
- tclfile.write("# Define column hinge force-deformation recorders\n\n\n")
- tclfile.write("cd\t$baseDir/$dataDir/ColumnHingeMoment\n\n")
+ with open('DefineColumnHingeRecorders2DModel.tcl', 'w') as tclfile: # noqa: PLW1514, PTH123
+ tclfile.write('# Define column hinge force-deformation recorders\n\n\n')
+ tclfile.write('cd\t$baseDir/$dataDir/ColumnHingeMoment\n\n')
# Define output files to record column hinge forces
- tclfile.write("# Column hinge element force recorders\n")
- for i in range(1, building.geometry['number of story']+1):
- tclfile.write("recorder\tElement\t-file\tColumnHingeForcesStory%i.out\t-time\t-ele" % i)
- for j in range(1, building.geometry['number of X bay']+2):
- tclfile.write("\t%i%i%i%i%i%i%i" % (6, j, i, 1, 0, 1, 4))
- tclfile.write("\t%i%i%i%i%i%i%i" % (6, j, i+1, 1, 2, 1, 6))
- tclfile.write("\tforce;\n")
- tclfile.write("\n")
+ tclfile.write('# Column hinge element force recorders\n')
+ for i in range(1, building.geometry['number of story'] + 1):
+ tclfile.write(
+ 'recorder\tElement\t-file\tColumnHingeForcesStory%i.out\t-time\t-ele'
+ % i
+ )
+ for j in range(1, building.geometry['number of X bay'] + 2):
+ tclfile.write('\t%i%i%i%i%i%i%i' % (6, j, i, 1, 0, 1, 4))
+ tclfile.write('\t%i%i%i%i%i%i%i' % (6, j, i + 1, 1, 2, 1, 6))
+ tclfile.write('\tforce;\n')
+ tclfile.write('\n')
# Define output files to record column hinge deformations
- tclfile.write("cd\t$baseDir/$dataDir/ColumnHingeDeformations\n\n")
- tclfile.write("# Column hinge element deformation recorders\n")
- for i in range(1, building.geometry['number of story']+1):
- tclfile.write("recorder\tElement\t-file\tColumnHingeForcesStory%i.out\t-time\t-ele" % i)
- for j in range(1, building.geometry['number of X bay']+2):
- tclfile.write("\t%i%i%i%i%i%i%i" % (6, j, i, 1, 0, 1, 4))
- tclfile.write("\t%i%i%i%i%i%i%i" % (6, j, i+1, 1, 2, 1, 6))
- tclfile.write("\tdeformation;")
- tclfile.write("\n")
-
- def write_beam_force_recorder(self, building):
+ tclfile.write('cd\t$baseDir/$dataDir/ColumnHingeDeformations\n\n')
+ tclfile.write('# Column hinge element deformation recorders\n')
+ for i in range(1, building.geometry['number of story'] + 1):
+ tclfile.write(
+ 'recorder\tElement\t-file\tColumnHingeForcesStory%i.out\t-time\t-ele'
+ % i
+ )
+ for j in range(1, building.geometry['number of X bay'] + 2):
+ tclfile.write('\t%i%i%i%i%i%i%i' % (6, j, i, 1, 0, 1, 4))
+ tclfile.write('\t%i%i%i%i%i%i%i' % (6, j, i + 1, 1, 2, 1, 6))
+ tclfile.write('\tdeformation;')
+ tclfile.write('\n')
+
+ def write_beam_force_recorder(self, building): # noqa: D102, PLR6301
# Create a .tcl file to write beam element forces recorder for output
- with open('DefineGlobalBeamForceRecorders2DModel.tcl', 'w') as tclfile:
- tclfile.write("# Define global beam force recorders\n\n\n")
- tclfile.write("cd\t$baseDir/$dataDir/GlobalBeamForces\n\n")
- tclfile.write("# X-Direction beam element global force recorders\n")
- for i in range(2, building.geometry['number of story']+2):
- tclfile.write("recorder\tElement\t-file\tGlobalXBeamForcesLevel%i.out\t-time\t-ele" % i)
- for j in range(1, building.geometry['number of X bay']+1):
- tclfile.write("\t%i%i%i%i%i%i%i" % (2, j, i, 1, j+1, i, 1))
- tclfile.write("\tforce\n")
-
- def write_column_force_recorder(self, building):
+ with open('DefineGlobalBeamForceRecorders2DModel.tcl', 'w') as tclfile: # noqa: PLW1514, PTH123
+ tclfile.write('# Define global beam force recorders\n\n\n')
+ tclfile.write('cd\t$baseDir/$dataDir/GlobalBeamForces\n\n')
+ tclfile.write('# X-Direction beam element global force recorders\n')
+ for i in range(2, building.geometry['number of story'] + 2):
+ tclfile.write(
+ 'recorder\tElement\t-file\tGlobalXBeamForcesLevel%i.out\t-time\t-ele'
+ % i
+ )
+ for j in range(1, building.geometry['number of X bay'] + 1):
+ tclfile.write('\t%i%i%i%i%i%i%i' % (2, j, i, 1, j + 1, i, 1))
+ tclfile.write('\tforce\n')
+
+ def write_column_force_recorder(self, building): # noqa: D102, PLR6301
# Create a .tcl file to write column element forces recorder for output
- with open('DefineGlobalColumnForceRecorders2DModel.tcl', 'w') as tclfile:
- tclfile.write("# Define global column force recorders\n\n\n")
- tclfile.write("cd\t$baseDir/$dataDir/GlobalBeamForces\n\n")
- tclfile.write("# Column element global force recorders\n")
- for i in range(1, building.geometry['number of story']+1): # i is story number
- tclfile.write("recorder\tElement\t-file\tGlobalColumnForcesStory%i.out\t-time\t-ele" % i)
- for j in range(1, building.geometry['number of X bay']+2):
- tclfile.write("\t%i%i%i%i%i%i%i" % (3, j, i, 1, j, i+1, 1))
- tclfile.write("\tforce;\n")
- tclfile.write("\n")
-
- def write_node_displacement_recorder(self, building):
+ with open('DefineGlobalColumnForceRecorders2DModel.tcl', 'w') as tclfile: # noqa: PLW1514, PTH123
+ tclfile.write('# Define global column force recorders\n\n\n')
+ tclfile.write('cd\t$baseDir/$dataDir/GlobalBeamForces\n\n')
+ tclfile.write('# Column element global force recorders\n')
+ for i in range(
+ 1, building.geometry['number of story'] + 1
+ ): # i is story number
+ tclfile.write(
+ 'recorder\tElement\t-file\tGlobalColumnForcesStory%i.out\t-time\t-ele'
+ % i
+ )
+ for j in range(1, building.geometry['number of X bay'] + 2):
+ tclfile.write('\t%i%i%i%i%i%i%i' % (3, j, i, 1, j, i + 1, 1))
+ tclfile.write('\tforce;\n')
+ tclfile.write('\n')
+
+ def write_node_displacement_recorder(self, building): # noqa: D102, PLR6301
# Create a .tcl file to write the node displacements recorder for output
- with open('DefineNodeDisplacementRecorders2DModel.tcl', 'w') as tclfile:
- tclfile.write("# Define node displacement recorders\n\n\n")
- tclfile.write("cd\t$baseDir/$dataDir/NodeDisplacements\n\n")
- for i in range(1, building.geometry['number of story']+2):
- tclfile.write("recorder\tNode\t-file\tNodeDispLevel%i.out\t-time\t-node" % i)
- for j in range(1, building.geometry['number of X bay']+2):
+ with open('DefineNodeDisplacementRecorders2DModel.tcl', 'w') as tclfile: # noqa: PLW1514, PTH123
+ tclfile.write('# Define node displacement recorders\n\n\n')
+ tclfile.write('cd\t$baseDir/$dataDir/NodeDisplacements\n\n')
+ for i in range(1, building.geometry['number of story'] + 2):
+ tclfile.write(
+ 'recorder\tNode\t-file\tNodeDispLevel%i.out\t-time\t-node' % i
+ )
+ for j in range(1, building.geometry['number of X bay'] + 2):
if i == 1:
- tclfile.write("\t%i%i%i%i" % (j, i, 1, 0))
+ tclfile.write('\t%i%i%i%i' % (j, i, 1, 0))
else:
- tclfile.write("\t%i%i%i%i" % (j, i, 1, 1))
- tclfile.write("\t-dof\t1\tdisp;\n")
+ tclfile.write('\t%i%i%i%i' % (j, i, 1, 1))
+ tclfile.write('\t-dof\t1\tdisp;\n')
- def write_story_drift_recorder(self, building, analysis_type):
+ def write_story_drift_recorder(self, building, analysis_type): # noqa: D102, PLR6301
# Create a .tcl file to write story drift recorder for output
- with open('DefineStoryDriftRecorders2DModel.tcl', 'w') as tclfile:
- tclfile.write("# Define story drift recorders\n\n\n")
+ with open('DefineStoryDriftRecorders2DModel.tcl', 'w') as tclfile: # noqa: PLW1514, PTH123
+ tclfile.write('# Define story drift recorders\n\n\n')
if analysis_type == 'PushoverAnalysis':
- tclfile.write("cd\t$baseDir/$dataDir/StoryDrifts\n\n")
+ tclfile.write('cd\t$baseDir/$dataDir/StoryDrifts\n\n')
if analysis_type == 'DynamicAnalysis':
- tclfile.write("cd\t$baseDir/$dataDir/EQ_$eqNumber/Scale_$scale/StoryDrifts\n\n")
+ tclfile.write(
+ 'cd\t$baseDir/$dataDir/EQ_$eqNumber/Scale_$scale/StoryDrifts\n\n'
+ )
# Write the story drift recorder for each story
- for i in range(1, building.geometry['number of story']+1):
- tclfile.write("recorder\tDrift\t-file")
+ for i in range(1, building.geometry['number of story'] + 1):
+ tclfile.write('recorder\tDrift\t-file')
if analysis_type == 'PushoverAnalysis':
- tclfile.write("\t$baseDir/$dataDir/StoryDrifts/Story%i.out" % i)
+ tclfile.write('\t$baseDir/$dataDir/StoryDrifts/Story%i.out' % i)
if analysis_type == 'DynamicAnalysis':
- tclfile.write("\t$baseDir/$dataDir/EQ_$eqNumber/Scale_$scale/StoryDrifts/Story%i.out" % i)
+ tclfile.write(
+ '\t$baseDir/$dataDir/EQ_$eqNumber/Scale_$scale/StoryDrifts/Story%i.out'
+ % i
+ )
# Always use nodes on column #1 to calculate story drift
if i == 1:
# Node tag at ground floor is different from those on upper stories
- tclfile.write("\t-time\t-iNode\t%i%i%i%i" % (1, i, 1, 0)) # Node at bottom of current story
+ tclfile.write(
+ '\t-time\t-iNode\t%i%i%i%i' % (1, i, 1, 0)
+ ) # Node at bottom of current story
else:
- tclfile.write("\t-time\t-iNode\t%i%i%i%i" % (1, i, 1, 1)) # Node at bottom of current story
- tclfile.write("\t-time\t-jNode\t%i%i%i%i" % (1, i+1, 1, 1)) # Node at top of current story
- tclfile.write("\t-dof\t1\t-perpDirn\t2; \n")
+ tclfile.write(
+ '\t-time\t-iNode\t%i%i%i%i' % (1, i, 1, 1)
+ ) # Node at bottom of current story
+ tclfile.write(
+ '\t-time\t-jNode\t%i%i%i%i' % (1, i + 1, 1, 1)
+ ) # Node at top of current story
+ tclfile.write('\t-dof\t1\t-perpDirn\t2; \n')
# Write the story drift recorder for roof
- tclfile.write("recorder\tDrift\t-file")
+ tclfile.write('recorder\tDrift\t-file')
if analysis_type == 'PushoverAnalysis':
- tclfile.write("\t$baseDir/$dataDir/StoryDrifts/Roof.out")
+ tclfile.write('\t$baseDir/$dataDir/StoryDrifts/Roof.out')
if analysis_type == 'DynamicAnalysis':
- tclfile.write("\t$baseDir/$dataDir/EQ_$eqNumber/Scale_$scale/StoryDrifts/Roof.out")
- tclfile.write("\t-time\t-iNode\t%i%i%i%i" % (1, 1, 1, 0))
- tclfile.write("\t-jNode\t%i%i%i%i" % (1, building.geometry['number of story']+1, 1, 1))
- tclfile.write("\t-dof\t1\t-perpDirn\t2; \n")
-
- def write_node_acceleration_recorder(self, building):
+ tclfile.write(
+ '\t$baseDir/$dataDir/EQ_$eqNumber/Scale_$scale/StoryDrifts/Roof.out'
+ )
+ tclfile.write('\t-time\t-iNode\t%i%i%i%i' % (1, 1, 1, 0))
+ tclfile.write(
+ '\t-jNode\t%i%i%i%i'
+ % (1, building.geometry['number of story'] + 1, 1, 1)
+ )
+ tclfile.write('\t-dof\t1\t-perpDirn\t2; \n')
+
+ def write_node_acceleration_recorder(self, building): # noqa: D102, PLR6301
# Create a .tcl file to record absolute node acceleration
- with open('DefineNodeAccelerationRecorders2DModel.tcl', 'w') as tclfile:
- tclfile.write("# Define node acceleration recorders\n\n\n")
- tclfile.write("cd $baseDir/$dataDir/EQ_$eqNumber/Scale_$scale/NodeAccelerations\n\n")
- for i in range(1, building.geometry['number of story']+2):
- tclfile.write("recorder\tNode\t-file\tNodeAccLevel%i.out\t-timeSeries\t2\t-time\t-node" % i)
- for j in range(1, building.geometry['number of X bay']+2):
+ with open('DefineNodeAccelerationRecorders2DModel.tcl', 'w') as tclfile: # noqa: PLW1514, PTH123
+ tclfile.write('# Define node acceleration recorders\n\n\n')
+ tclfile.write(
+ 'cd $baseDir/$dataDir/EQ_$eqNumber/Scale_$scale/NodeAccelerations\n\n'
+ )
+ for i in range(1, building.geometry['number of story'] + 2):
+ tclfile.write(
+ 'recorder\tNode\t-file\tNodeAccLevel%i.out\t-timeSeries\t2\t-time\t-node'
+ % i
+ )
+ for j in range(1, building.geometry['number of X bay'] + 2):
if i == 1:
- tclfile.write("\t%i%i%i%i" % (j, i, 1, 0))
+ tclfile.write('\t%i%i%i%i' % (j, i, 1, 0))
else:
- tclfile.write("\t%i%i%i%i" % (j, i, 1, 1))
- tclfile.write("\t-dof\t1\taccel;\n")
+ tclfile.write('\t%i%i%i%i' % (j, i, 1, 1))
+ tclfile.write('\t-dof\t1\taccel;\n')
- def write_damping(self, building):
+ def write_damping(self, building): # noqa: D102, PLR6301
# Create a .tcl file to define damping for dynamic analysis
- with open('DefineDamping2DModel.tcl', 'w') as tclfile:
- tclfile.write("# This file will be used to define damping\n\n")
-
- tclfile.write("# A damping ratio of 2% is used for steel buildings\n")
- tclfile.write("set\tdampingRatio\t0.02;\n")
-
- tclfile.write("# Define the value for pi\n")
- tclfile.write("set\tpi\t[expr 2.0*asin(1.0)];\n\n")
-
- tclfile.write("# Defining damping parameters\n")
- tclfile.write("set\tomegaI\t[expr (2.0*$pi) / $periodForRayleighDamping_1];\n")
- tclfile.write("set\tomegaJ\t[expr (2.0*$pi) / $periodForRayleighDamping_2];\n")
- tclfile.write("set\talpha0\t[expr ($dampingRatio*2.0*$omegaI*$omegaJ) / ($omegaI+$omegaJ)];\n")
- tclfile.write("set\talpha1\t[expr ($dampingRatio*2.0) / ($omegaI+$omegaJ) * ($n+1.0) / $n];")
- tclfile.write("\t # (n+1.0)/n factor is because stiffness for elastic elements have been modified\n\n")
-
- tclfile.write("# Assign damping to beam elements\n")
- tclfile.write("region\t1\t-ele")
- for i in range(2, building.geometry['number of story']+2): # i is the floor level (from 2)
- for j in range(1, building.geometry['number of X bay']+1): # j is the bay number
- tclfile.write("\t%i%i%i%i%i%i%i" % (2, j, i, 1, j+1, i, 1)) # Beam element tag
- tclfile.write("\t-rayleigh\t0.0\t0.0\t$alpha1\t0.0;\n")
-
- tclfile.write("# Assign damping to column elements\n")
- tclfile.write("region\t2\t-ele")
- for i in range(1, building.geometry['number of story']+1): # i is story number
- for j in range(1, building.geometry['number of X bay']+2): # j is bay number
- tclfile.write("\t%i%i%i%i%i%i%i" % (3, j, i, 1, j, i+1, 1)) # element tag
- tclfile.write("\t-rayleigh\t0.0\t0.0\t$alpha1\t0.0;\n")
-
- tclfile.write("# Assign damping to nodes\n")
- tclfile.write("region\t3\t-node")
- for i in range(2, building.geometry['number of story']+2):
- for j in range(1, building.geometry['number of X bay']+3):
- if j == building.geometry['number of X bay']+2:
- tclfile.write("\t%i%i" % (j, i))
+ with open('DefineDamping2DModel.tcl', 'w') as tclfile: # noqa: PLW1514, PTH123
+ tclfile.write('# This file will be used to define damping\n\n')
+
+ tclfile.write('# A damping ratio of 2% is used for steel buildings\n')
+ tclfile.write('set\tdampingRatio\t0.02;\n')
+
+ tclfile.write('# Define the value for pi\n')
+ tclfile.write('set\tpi\t[expr 2.0*asin(1.0)];\n\n')
+
+ tclfile.write('# Defining damping parameters\n')
+ tclfile.write(
+ 'set\tomegaI\t[expr (2.0*$pi) / $periodForRayleighDamping_1];\n'
+ )
+ tclfile.write(
+ 'set\tomegaJ\t[expr (2.0*$pi) / $periodForRayleighDamping_2];\n'
+ )
+ tclfile.write(
+ 'set\talpha0\t[expr ($dampingRatio*2.0*$omegaI*$omegaJ) / ($omegaI+$omegaJ)];\n'
+ )
+ tclfile.write(
+ 'set\talpha1\t[expr ($dampingRatio*2.0) / ($omegaI+$omegaJ) * ($n+1.0) / $n];'
+ )
+ tclfile.write(
+                '\t # (n+1.0)/n factor is because the stiffness of elastic elements has been modified\n\n'
+ )
+
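+            # Background (standard two-mode Rayleigh damping): for a target damping
+            # ratio zeta anchored at circular frequencies omegaI and omegaJ,
+            #   alpha0 = 2*zeta*omegaI*omegaJ / (omegaI + omegaJ)   (mass-proportional term)
+            #   alpha1 = 2*zeta / (omegaI + omegaJ)                 (stiffness-proportional term)
+            # The extra (n+1)/n factor on alpha1 compensates for the modified
+            # elastic-element stiffness noted in the generated comment above.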
+ tclfile.write('# Assign damping to beam elements\n')
+ tclfile.write('region\t1\t-ele')
+ for i in range(
+ 2, building.geometry['number of story'] + 2
+ ): # i is the floor level (from 2)
+ for j in range(
+ 1, building.geometry['number of X bay'] + 1
+ ): # j is the bay number
+ tclfile.write(
+ '\t%i%i%i%i%i%i%i' % (2, j, i, 1, j + 1, i, 1)
+ ) # Beam element tag
+ tclfile.write('\t-rayleigh\t0.0\t0.0\t$alpha1\t0.0;\n')
+
+ tclfile.write('# Assign damping to column elements\n')
+ tclfile.write('region\t2\t-ele')
+ for i in range(
+ 1, building.geometry['number of story'] + 1
+ ): # i is story number
+ for j in range(
+ 1, building.geometry['number of X bay'] + 2
+ ): # j is bay number
+ tclfile.write(
+ '\t%i%i%i%i%i%i%i' % (3, j, i, 1, j, i + 1, 1)
+ ) # element tag
+ tclfile.write('\t-rayleigh\t0.0\t0.0\t$alpha1\t0.0;\n')
+
+ tclfile.write('# Assign damping to nodes\n')
+ tclfile.write('region\t3\t-node')
+ for i in range(2, building.geometry['number of story'] + 2):
+ for j in range(1, building.geometry['number of X bay'] + 3):
+ if j == building.geometry['number of X bay'] + 2:
+ tclfile.write('\t%i%i' % (j, i))
else:
- tclfile.write("\t%i%i%i%i" % (j, i, 1, 1))
- tclfile.write("\t-rayleigh\t$alpha0\t0.0\t0.0\t0.0;\n\n")
- tclfile.write("puts \"Rayleigh damping defined\"")
+ tclfile.write('\t%i%i%i%i' % (j, i, 1, 1))
+ tclfile.write('\t-rayleigh\t$alpha0\t0.0\t0.0\t0.0;\n\n')
+ tclfile.write('puts "Rayleigh damping defined"')
- def write_dynamic_analysis_parameters(self, building):
+ def write_dynamic_analysis_parameters(self, building): # noqa: D102, PLR6301
# Create a .tcl file to define all parameters pertinent to dynamic analysis solver
- with open('DefineDynamicAnalysisParameters2DModel.tcl', 'w') as tclfile:
- tclfile.write("# This file will be used to define analysis parameters relevant to dynamic solver\n\n\n")
- tclfile.write("set\tNStories\t%i; \n" % building.geometry['number of story'])
+ with open('DefineDynamicAnalysisParameters2DModel.tcl', 'w') as tclfile: # noqa: PLW1514, PTH123
+ tclfile.write(
+ '# This file will be used to define analysis parameters relevant to dynamic solver\n\n\n'
+ )
+ tclfile.write(
+ 'set\tNStories\t%i; \n' % building.geometry['number of story']
+ )
# The height shall be converted from ft to inch
- tclfile.write("set\tHTypicalStory\t%.2f; \n" % (building.geometry['typical story height']*12.0))
- tclfile.write("set\tHFirstStory\t%.2f; \n" % (building.geometry['first story height']*12.0))
- tclfile.write("set\tFloorNodes\t[list")
- for i in range(1, building.geometry['number of story']+2):
+ tclfile.write(
+ 'set\tHTypicalStory\t%.2f; \n'
+ % (building.geometry['typical story height'] * 12.0)
+ )
+ tclfile.write(
+ 'set\tHFirstStory\t%.2f; \n'
+ % (building.geometry['first story height'] * 12.0)
+ )
+ tclfile.write('set\tFloorNodes\t[list')
+ for i in range(1, building.geometry['number of story'] + 2):
if i == 1:
- tclfile.write("\t1110")
+ tclfile.write('\t1110')
else:
- tclfile.write("\t%i%i%i%i" % (1, i, 1, 1))
- tclfile.write("];\n\n")
- tclfile.write("puts \"Dynamic analysis parameters defined\"")
+ tclfile.write('\t%i%i%i%i' % (1, i, 1, 1))
+ tclfile.write('];\n\n')
+ tclfile.write('puts "Dynamic analysis parameters defined"')
- def copy_baseline_eigen_files(self, building, analysis_type):
- """
- Some .tcl files are fixed, i.e., no need to change for different OpenSees models.
+ def copy_baseline_eigen_files(self, building, analysis_type): # noqa: PLR6301
+ """Some .tcl files are fixed, i.e., no need to change for different OpenSees models.
Therefore, just copy those .tcl files from the baseline folder
:param building: a class defined in "building_information.py"
:param analysis_type: a string specifies the analysis type that the current nonlinear model is for
options: 'EigenValueAnalysis', 'PushoverAnalysis', 'DynamicAnalysis'
:return:
- """
+ """ # noqa: D205, D400, D401
# Change the working directory to the folder where baseline .tcl files are stored
- source_dir = building.directory['baseline files nonlinear'] + "/" + analysis_type
+ source_dir = (
+ building.directory['baseline files nonlinear'] + '/' + analysis_type
+ )
os.chdir(source_dir)
# Copy all baseline .tcl files to building model directory
for _, _, files in os.walk(source_dir):
for file in files:
- target_file = building.directory['building nonlinear model'] + "/" + analysis_type + "/" + file
+ target_file = (
+ building.directory['building nonlinear model']
+ + '/'
+ + analysis_type
+ + '/'
+ + file
+ )
shutil.copy(file, target_file)
# Remember to change the working directory to building model directory
- os.chdir(building.directory['building nonlinear model'] + "/" + analysis_type)
+ os.chdir(
+ building.directory['building nonlinear model'] + '/' + analysis_type
+ )
# Update necessary information in .tcl files for different analysis
if analysis_type == 'EigenValueAnalysis':
@@ -896,43 +1409,54 @@ def copy_baseline_eigen_files(self, building, analysis_type):
# Revise the baseline file: EigenValueAnalysis.tcl if building has less than four stories.
# Default EigenValueAnalysis.tcl file analyzes four modes.
# For buildings which have three stories or below, they might only have 1st mode, 2nd mode, and 3rd mode.
- if building.geometry['number of story'] <= 3:
+ if building.geometry['number of story'] <= 3: # noqa: PLR2004
# This is to change the number of desired mode
new_mode = 'set nEigenL 3'
                # Release the equal DOF constraints for buildings with three stories or fewer
- with open('Model.tcl', 'r') as file:
+ with open('Model.tcl') as file: # noqa: FURB101, PLW1514, PTH123
content = file.read()
- new_content = content.replace('source DefineFloorConstraint2DModel.tcl',
- '# source DefineFloorConstraint2DModel.tcl')
- with open('Model.tcl', 'w') as file:
+ new_content = content.replace(
+ 'source DefineFloorConstraint2DModel.tcl',
+ '# source DefineFloorConstraint2DModel.tcl',
+ )
+ with open('Model.tcl', 'w') as file: # noqa: FURB103, PLW1514, PTH123
file.write(new_content)
# This is to change the node tag to record eigen vector
old_string = '**EIGENVECTOR_NODE**'
new_string = '1110'
- for floor in range(1, building.geometry['number of story']+1):
- new_string += (' %i%i%i%i' % (1, floor+1, 1, 1))
- with open('EigenValueAnalysis.tcl', 'r') as file:
+ for floor in range(1, building.geometry['number of story'] + 1):
+ new_string += ' %i%i%i%i' % (1, floor + 1, 1, 1)
+ with open('EigenValueAnalysis.tcl') as file: # noqa: FURB101, PLW1514, PTH123
content = file.read()
new_content = content.replace(old_mode, new_mode)
new_content = new_content.replace(old_string, new_string)
- with open('EigenValueAnalysis.tcl', 'w') as file:
+ with open('EigenValueAnalysis.tcl', 'w') as file: # noqa: FURB103, PLW1514, PTH123
file.write(new_content)
            # Perform eigenvalue analysis to obtain the periods needed for Rayleigh damping in the dynamic analysis
- cmd = "OpenSees Model.tcl"
- subprocess.Popen(cmd, shell=True).wait()
+ cmd = 'OpenSees Model.tcl'
+ subprocess.Popen(cmd, shell=True).wait() # noqa: S602
        # Update pushover parameters contained in Model.tcl when performing pushover analysis
elif analysis_type == 'PushoverAnalysis':
# This is to update the pushover analysis parameters
- old_string = ['**ControlNode**', '**ControlDOF**', '**DisplacementIncrement**', '**DisplacementMaximum**']
- new_string = ['%i%i%i%i' % (1, building.geometry['number of story']+1, 1, 1), '%i' % 1, '0.01',
- '%.2f' % (0.1*building.geometry['floor height'][-1]*12)] # DisplamentMaximum should be in inch.
- with open('Model.tcl', 'r') as file:
+ old_string = [
+ '**ControlNode**',
+ '**ControlDOF**',
+ '**DisplacementIncrement**',
+ '**DisplacementMaximum**',
+ ]
+ new_string = [
+ '%i%i%i%i' % (1, building.geometry['number of story'] + 1, 1, 1),
+ '%i' % 1,
+ '0.01',
+ '%.2f' % (0.1 * building.geometry['floor height'][-1] * 12),
+            ]  # DisplacementMaximum should be in inches.
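+            # The last entry above, 0.1 * roof elevation (ft) * 12, targets a maximum
+            # roof displacement of roughly 10% of the building height, in inches.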
+ with open('Model.tcl') as file: # noqa: FURB101, PLW1514, PTH123
content = file.read()
for indx in range(len(old_string)):
content = content.replace(old_string[indx], new_string[indx])
- with open('Model.tcl', 'w') as file:
+ with open('Model.tcl', 'w') as file: # noqa: FURB103, PLW1514, PTH123
file.write(content)
# Update Model.tcl and RunIDA2DModel.tcl files for dynamic analysis
@@ -940,28 +1464,41 @@ def copy_baseline_eigen_files(self, building, analysis_type):
# This is to update periods for rayleigh damping
old_periods = ['**firstModePeriod**', '**thirdModePeriod**']
# The path to Eigen value analysis results
- periods_dir = building.directory['building nonlinear model'] + "/EigenValueAnalysis/EigenAnalysisOutput"
+ periods_dir = (
+ building.directory['building nonlinear model']
+ + '/EigenValueAnalysis/EigenAnalysisOutput'
+ )
# Create path to Eigen value analysis results if it does not exist
Path(periods_dir).mkdir(parents=True, exist_ok=True)
# Read the periods from .out files generated by Eigen value analysis
os.chdir(periods_dir)
periods = np.loadtxt('Periods.out')
# Update period variables in Model.tcl
- os.chdir(building.directory['building nonlinear model'] + "/" + analysis_type)
- with open('Model.tcl', 'r') as file:
+ os.chdir(
+ building.directory['building nonlinear model'] + '/' + analysis_type
+ )
+ with open('Model.tcl') as file: # noqa: FURB101, PLW1514, PTH123
content = file.read()
- content = content.replace(old_periods[0], str(periods[0])) # First-mode period
- content = content.replace(old_periods[1], str(periods[2])) # Third-mode period
+ content = content.replace(
+ old_periods[0], str(periods[0])
+ ) # First-mode period
+ content = content.replace(
+ old_periods[1], str(periods[2])
+ ) # Third-mode period
# Write the updated content into Model.tcl
- with open('Model.tcl', 'w') as file:
+ with open('Model.tcl', 'w') as file: # noqa: FURB103, PLW1514, PTH123
file.write(content)
# Update dynamic parameters in RunIDA2DModel.tcl
- with open('RunIDA2DModel.tcl', 'r') as file:
+ with open('RunIDA2DModel.tcl') as file: # noqa: FURB101, PLW1514, PTH123
content = file.read()
- old_string = ['**NumberOfGroundMotions**', '**IntensityScales**', '**MCEScaleFactor**']
+ old_string = [
+ '**NumberOfGroundMotions**',
+ '**IntensityScales**',
+ '**MCEScaleFactor**',
+ ]
new_string = [240, 100, 1.0]
for indx in range(len(old_string)):
content = content.replace(old_string[indx], str(new_string[indx]))
# Write the new content back into RunIDA2DModel.tcl
- with open('RunIDA2DModel.tcl', 'w') as file:
+ with open('RunIDA2DModel.tcl', 'w') as file: # noqa: FURB103, PLW1514, PTH123
file.write(content)
diff --git a/modules/createSAM/AutoSDA/seismic_design.py b/modules/createSAM/AutoSDA/seismic_design.py
index b29c0fe1a..e72757b6e 100644
--- a/modules/createSAM/AutoSDA/seismic_design.py
+++ b/modules/createSAM/AutoSDA/seismic_design.py
@@ -1,12 +1,11 @@
-"""
-This file creates a function that is called by "main_design.py" to perform seismic design
+"""This file creates a function that is called by "main_design.py" to perform seismic design
Developed by GUAN, XINGQUAN @ UCLA, March 29 2018
Revised in Feb. 2019
# Modified by: Stevan Gavrilovic @ SimCenter, UC Berkeley
# Last revision: 09/2020
-"""
+""" # noqa: CPY001, D400, D404, INP001
##########################################################################
# Load Built-in Packages #
@@ -14,31 +13,32 @@
# Please add all the imported modules in the part below
import copy
-import numpy as np
import os
-import pandas as pd
-import pickle
+import pickle # noqa: S403
import sys
-
from pathlib import Path
+
+import numpy as np
+import pandas as pd
+from beam_component import Beam
from building_information import Building
-from elastic_analysis import ElasticAnalysis
-from elastic_output import ElasticOutput
from column_component import Column
-from beam_component import Beam
from connection_part import Connection
-
-from global_variables import steel
-from global_variables import BEAM_TO_COLUMN_RATIO
-from global_variables import UPPER_LOWER_COLUMN_Zx
-from global_variables import RBS_STIFFNESS_FACTOR
-
+from elastic_analysis import ElasticAnalysis
+from elastic_output import ElasticOutput
+from global_variables import (
+ BEAM_TO_COLUMN_RATIO,
+ RBS_STIFFNESS_FACTOR,
+ UPPER_LOWER_COLUMN_Zx,
+ steel,
+)
##########################################################################
# Function Implementation #
##########################################################################
-def seismic_design(base_directory, pathDataFolder, workingDirectory):
+
+def seismic_design(base_directory, pathDataFolder, workingDirectory): # noqa: C901, D103, N803, PLR0912, PLR0914, PLR0915
# **************** Debug using only **********************************
# building_id = 'Test3'
# from global_variables import base_directory
@@ -85,7 +85,9 @@ def seismic_design(base_directory, pathDataFolder, workingDirectory):
building_1.compute_seismic_force()
# Create an elastic analysis model for building instance above using "ElasticAnalysis" class
- model_1 = ElasticAnalysis(building_1, for_drift_only=False, for_period_only=False)
+ model_1 = ElasticAnalysis(
+ building_1, for_drift_only=False, for_period_only=False
+ )
# Read elastic analysis drift
building_1.read_story_drift()
@@ -97,37 +99,49 @@ def seismic_design(base_directory, pathDataFolder, workingDirectory):
iteration = 0
# Perform the optimization process
last_member = copy.deepcopy(building_1.member_size)
- while np.max(building_1.elastic_response['story drift']) * 5.5 * RBS_STIFFNESS_FACTOR \
- <= 0.025/building_1.elf_parameters['rho']:
- print("Member size after optimization %i" % iteration)
- print("Exterior column:", building_1.member_size['exterior column'])
- print("Interior column:", building_1.member_size['interior column'])
- print("Beam:", building_1.member_size['beam'])
- print("Current story drifts: (%)")
- print(building_1.elastic_response['story drift'] * 5.5 * RBS_STIFFNESS_FACTOR * 100)
+ while (
+ np.max(building_1.elastic_response['story drift'])
+ * 5.5
+ * RBS_STIFFNESS_FACTOR
+ <= 0.025 / building_1.elf_parameters['rho']
+ ):
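+        # The drift check above amplifies elastic drifts by 5.5 (presumably the ASCE 7
+        # deflection amplification factor Cd for steel special moment frames) and by
+        # RBS_STIFFNESS_FACTOR, then compares the result with 0.025 / rho from the
+        # ELF parameters.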
+ print('Member size after optimization %i' % iteration) # noqa: T201
+ print('Exterior column:', building_1.member_size['exterior column']) # noqa: T201
+ print('Interior column:', building_1.member_size['interior column']) # noqa: T201
+ print('Beam:', building_1.member_size['beam']) # noqa: T201
+ print('Current story drifts: (%)') # noqa: T201
+ print( # noqa: T201
+ building_1.elastic_response['story drift']
+ * 5.5
+ * RBS_STIFFNESS_FACTOR
+ * 100
+ )
# Before optimization, record the size in the last step.
last_member = copy.deepcopy(building_1.member_size)
# Perform optimization
building_1.optimize_member_for_drift()
# Update the design period and thus the design seismic forces
- model_1 = ElasticAnalysis(building_1, for_drift_only=False, for_period_only=True)
+ model_1 = ElasticAnalysis(
+ building_1, for_drift_only=False, for_period_only=True
+ )
building_1.read_modal_period()
building_1.compute_seismic_force()
# Update the design story drifts
- model_1 = ElasticAnalysis(building_1, for_drift_only=True, for_period_only=False)
+ model_1 = ElasticAnalysis(
+ building_1, for_drift_only=True, for_period_only=False
+ )
building_1.read_story_drift()
- iteration = iteration + 1
+ iteration = iteration + 1 # noqa: PLR6104
# Assign the last member size to building instance
building_1.member_size = copy.deepcopy(last_member)
# Add a check here: if the program does not go into previous while loop,
# probably the initial size is not strong enough ==> not necessary to go into following codes
if iteration == 0:
- sys.stderr.write("Initial section size is not strong enough!")
- sys.stderr.write("Please increase initial depth!")
+ sys.stderr.write('Initial section size is not strong enough!')
+ sys.stderr.write('Please increase initial depth!')
sys.exit(99)
-
# *******************************************************************
# ///////////////// Check Column Strength ///////////////////////////
# *******************************************************************
@@ -136,97 +150,160 @@ def seismic_design(base_directory, pathDataFolder, workingDirectory):
building_1.read_modal_period()
building_1.compute_seismic_force()
# Obtain the story drift using the last member size
- model_1 = ElasticAnalysis(building_1, for_drift_only=False, for_period_only=False)
+ model_1 = ElasticAnalysis(
+ building_1, for_drift_only=False, for_period_only=False
+ )
building_1.read_story_drift()
# Extract the load output from elastic analysis and perform load combination
elastic_demand = ElasticOutput(building_1)
# Check all columns to see whether they have enough strengths
# Initialize a list to store all column instances
- column_set = [[0] * (building_1.geometry['number of X bay'] + 1)
- for story in range(building_1.geometry['number of story'])]
+ column_set = [
+ [0] * (building_1.geometry['number of X bay'] + 1)
+ for story in range(building_1.geometry['number of story'])
+ ]
not_feasible_column = [] # Used to record which column [story][column_no] is not feasible
for story in range(building_1.geometry['number of story']):
for column_no in range(building_1.geometry['number of X bay'] + 1):
- axial_demand = abs(elastic_demand.dominate_load['column axial'][story, 2 * column_no])
- shear_demand = abs(elastic_demand.dominate_load['column shear'][story, 2 * column_no])
- moment_bottom = elastic_demand.dominate_load['column moment'][story, 2 * column_no]
- moment_top = elastic_demand.dominate_load['column moment'][story, 2 * column_no + 1]
+ axial_demand = abs(
+ elastic_demand.dominate_load['column axial'][story, 2 * column_no]
+ )
+ shear_demand = abs(
+ elastic_demand.dominate_load['column shear'][story, 2 * column_no]
+ )
+ moment_bottom = elastic_demand.dominate_load['column moment'][
+ story, 2 * column_no
+ ]
+ moment_top = elastic_demand.dominate_load['column moment'][
+ story, 2 * column_no + 1
+ ]
if column_no == 0 or column_no == building_1.geometry['number of X bay']:
column_type = 'exterior column'
else:
column_type = 'interior column'
length = np.ndarray.item(
- building_1.geometry['floor height'][story + 1] - building_1.geometry['floor height'][story])
+ building_1.geometry['floor height'][story + 1]
+ - building_1.geometry['floor height'][story]
+ )
# Build instance for each column member
- column_set[story][column_no] = Column(building_1.member_size[column_type][story],
- axial_demand, shear_demand, moment_bottom, moment_top,
- length, length, steel)
+ column_set[story][column_no] = Column(
+ building_1.member_size[column_type][story],
+ axial_demand,
+ shear_demand,
+ moment_bottom,
+ moment_top,
+ length,
+ length,
+ steel,
+ )
# Check the flag of each column
if not column_set[story][column_no].check_flag():
- sys.stderr.write('column_%s%s is not feasible!!!\n' % (story, column_no))
+ sys.stderr.write(
+ 'column_%s%s is not feasible!!!\n' % (story, column_no) # noqa: UP031
+ )
not_feasible_column.append([story, column_no])
# sys.exit(1)
-
# *******************************************************************
# ///////// Revise Column to Satisfy Strength Requirement ///////////
# *******************************************************************
for [target_story_index, target_column_no] in not_feasible_column:
while not column_set[target_story_index][target_column_no].check_flag():
# Upscale the unsatisfied column
- if target_column_no == 0 or target_column_no == building_1.geometry['number of X bay']:
+ if (
+ target_column_no == 0
+ or target_column_no == building_1.geometry['number of X bay']
+ ):
type_column = 'exterior column'
else:
type_column = 'interior column'
building_1.upscale_column(target_story_index, type_column)
# Update the modal period and seismic forces
- model_1 = ElasticAnalysis(building_1, for_drift_only=False, for_period_only=True)
+ model_1 = ElasticAnalysis(
+ building_1, for_drift_only=False, for_period_only=True
+ )
building_1.read_modal_period()
building_1.compute_seismic_force()
# Re-perform the elastic analysis to obtain the updated demand
- model_1 = ElasticAnalysis(building_1, for_drift_only=False, for_period_only=False)
+ model_1 = ElasticAnalysis(
+ building_1, for_drift_only=False, for_period_only=False
+ )
building_1.read_story_drift()
elastic_demand = ElasticOutput(building_1)
# Re-construct the column objects (only those revised columns)
- for column_no in range(building_1.geometry['number of X bay']+1):
- axial_demand = abs(elastic_demand.dominate_load['column axial'][target_story_index, 2 * column_no])
- shear_demand = abs(elastic_demand.dominate_load['column shear'][target_story_index, 2 * column_no])
- moment_bottom = elastic_demand.dominate_load['column moment'][target_story_index, 2 * column_no]
- moment_top = elastic_demand.dominate_load['column moment'][target_story_index, 2 * column_no + 1]
- if column_no == 0 or column_no == building_1.geometry['number of X bay']:
+ for column_no in range(building_1.geometry['number of X bay'] + 1):
+ axial_demand = abs(
+ elastic_demand.dominate_load['column axial'][
+ target_story_index, 2 * column_no
+ ]
+ )
+ shear_demand = abs(
+ elastic_demand.dominate_load['column shear'][
+ target_story_index, 2 * column_no
+ ]
+ )
+ moment_bottom = elastic_demand.dominate_load['column moment'][
+ target_story_index, 2 * column_no
+ ]
+ moment_top = elastic_demand.dominate_load['column moment'][
+ target_story_index, 2 * column_no + 1
+ ]
+ if (
+ column_no == 0
+ or column_no == building_1.geometry['number of X bay']
+ ):
column_type = 'exterior column'
else:
column_type = 'interior column'
length = np.ndarray.item(
building_1.geometry['floor height'][target_story_index + 1]
- - building_1.geometry['floor height'][target_story_index])
+ - building_1.geometry['floor height'][target_story_index]
+ )
# Build instance for each column member
- column_set[target_story_index][column_no] = Column(building_1.member_size[column_type][target_story_index],
- axial_demand, shear_demand, moment_bottom, moment_top,
- length, length, steel)
-
+ column_set[target_story_index][column_no] = Column(
+ building_1.member_size[column_type][target_story_index],
+ axial_demand,
+ shear_demand,
+ moment_bottom,
+ moment_top,
+ length,
+ length,
+ steel,
+ )
# *******************************************************************
# ///////////////// Check Beam Strength /////////////////////////////
# *******************************************************************
# Initialize a list to store all beam instances
- beam_set = [[0] * building_1.geometry['number of X bay'] for story in range(building_1.geometry['number of story'])]
+ beam_set = [
+ [0] * building_1.geometry['number of X bay']
+ for story in range(building_1.geometry['number of story'])
+ ]
not_feasible_beam = [] # Used to record which beam [story, bay] does not have enough strength.
for story in range(building_1.geometry['number of story']):
for bay in range(building_1.geometry['number of X bay']):
length = building_1.geometry['X bay width']
- shear_demand = abs(elastic_demand.dominate_load['beam shear'][story, 2 * bay])
+ shear_demand = abs(
+ elastic_demand.dominate_load['beam shear'][story, 2 * bay]
+ )
moment_left = elastic_demand.dominate_load['beam moment'][story, 2 * bay]
- moment_right = elastic_demand.dominate_load['beam moment'][story, 2 * bay + 1]
- beam_set[story][bay] = Beam(building_1.member_size['beam'][story], length,
- shear_demand, moment_left, moment_right, steel)
+ moment_right = elastic_demand.dominate_load['beam moment'][
+ story, 2 * bay + 1
+ ]
+ beam_set[story][bay] = Beam(
+ building_1.member_size['beam'][story],
+ length,
+ shear_demand,
+ moment_left,
+ moment_right,
+ steel,
+ )
# Check the flag of each beam
if not beam_set[story][bay].check_flag():
- sys.stderr.write('beam_%s%s is not feasible!!!\n' % (story, bay))
+ sys.stderr.write('beam_%s%s is not feasible!!!\n' % (story, bay)) # noqa: UP031
not_feasible_beam.append([story, bay])
# sys.exit(1)
-
# *******************************************************************
# ////////// Revise Beam to Satisfy Strength Requirement ////////////
# *******************************************************************
@@ -235,200 +312,306 @@ def seismic_design(base_directory, pathDataFolder, workingDirectory):
# Upscale the unsatisfied beam
building_1.upscale_beam(target_story_index)
# Update modal period and seismic forces
- model_1 = ElasticAnalysis(building_1, for_drift_only=False, for_period_only=True)
+ model_1 = ElasticAnalysis(
+ building_1, for_drift_only=False, for_period_only=True
+ )
building_1.read_modal_period()
building_1.compute_seismic_force()
# Re-perform the elastic analysis to obtain the updated demand
- model_1 = ElasticAnalysis(building_1, for_drift_only=False, for_period_only=False)
+ model_1 = ElasticAnalysis(
+ building_1, for_drift_only=False, for_period_only=False
+ )
building_1.read_story_drift()
elastic_demand = ElasticOutput(building_1)
# Re-construct the beam objects (only for those revised by previous upscale activity)
for bay in range(building_1.geometry['number of X bay']):
length = building_1.geometry['X bay width']
- shear_demand = abs(elastic_demand.dominate_load['beam shear'][target_story_index, 2 * bay])
- moment_left = elastic_demand.dominate_load['beam moment'][target_story_index, 2 * bay]
- moment_right = elastic_demand.dominate_load['beam moment'][target_story_index, 2 * bay + 1]
- beam_set[target_story_index][bay] = Beam(building_1.member_size['beam'][target_story_index], length,
- shear_demand, moment_left, moment_right, steel)
-
+ shear_demand = abs(
+ elastic_demand.dominate_load['beam shear'][
+ target_story_index, 2 * bay
+ ]
+ )
+ moment_left = elastic_demand.dominate_load['beam moment'][
+ target_story_index, 2 * bay
+ ]
+ moment_right = elastic_demand.dominate_load['beam moment'][
+ target_story_index, 2 * bay + 1
+ ]
+ beam_set[target_story_index][bay] = Beam(
+ building_1.member_size['beam'][target_story_index],
+ length,
+ shear_demand,
+ moment_left,
+ moment_right,
+ steel,
+ )
# ********************************************************************
# ///////////////// Check Beam-Column Connection /////////////////////
# ********************************************************************
# Check each beam-column connection to see if they satisfy the AISC/ANSI
# Initialize a list to store all connection instances
- connection_set = [[0] * (building_1.geometry['number of X bay'] + 1)
- for story in range(building_1.geometry['number of story'])]
+ connection_set = [
+ [0] * (building_1.geometry['number of X bay'] + 1)
+ for story in range(building_1.geometry['number of story'])
+ ]
# Record which connection [story#, column#] is not feasible.
not_feasible_connection = []
for story in range(building_1.geometry['number of story']):
- for connection_no in range(building_1.geometry['number of X bay']+1):
- dead_load = building_1.gravity_loads['beam dead load'][story] # Unit: lb/ft
- live_load = building_1.gravity_loads['beam live load'][story] # Unit: lb/ft
+ for connection_no in range(building_1.geometry['number of X bay'] + 1):
+ dead_load = building_1.gravity_loads['beam dead load'][
+ story
+ ] # Unit: lb/ft
+ live_load = building_1.gravity_loads['beam live load'][
+ story
+ ] # Unit: lb/ft
span = building_1.geometry['X bay width'] # Unit: ft
if story != (building_1.geometry['number of story'] - 1):
# The connection is not on roof
if connection_no == 0:
# The connection is an exterior joint
- connection_set[story][connection_no] = Connection('typical exterior',
- steel, dead_load, live_load, span,
- left_beam=beam_set[story][connection_no],
- right_beam=None,
- top_column=column_set[story + 1][connection_no],
- bottom_column=column_set[story][connection_no])
+ connection_set[story][connection_no] = Connection(
+ 'typical exterior',
+ steel,
+ dead_load,
+ live_load,
+ span,
+ left_beam=beam_set[story][connection_no],
+ right_beam=None,
+ top_column=column_set[story + 1][connection_no],
+ bottom_column=column_set[story][connection_no],
+ )
elif connection_no == building_1.geometry['number of X bay']:
# The connection is an exterior joint
- connection_set[story][connection_no] = Connection('typical exterior',
- steel, dead_load, live_load, span,
- left_beam=beam_set[story][connection_no-1],
- right_beam=None,
- top_column=column_set[story + 1][connection_no],
- bottom_column=column_set[story][connection_no])
+ connection_set[story][connection_no] = Connection(
+ 'typical exterior',
+ steel,
+ dead_load,
+ live_load,
+ span,
+ left_beam=beam_set[story][connection_no - 1],
+ right_beam=None,
+ top_column=column_set[story + 1][connection_no],
+ bottom_column=column_set[story][connection_no],
+ )
else:
# The connection is an interior joint
- connection_set[story][connection_no] = Connection('typical interior',
- steel, dead_load, live_load, span,
- left_beam=beam_set[story][connection_no - 1],
- right_beam=beam_set[story][connection_no],
- top_column=column_set[story + 1][connection_no],
- bottom_column=column_set[story][connection_no])
- else:
- # The connection is not on roof
- if connection_no == 0:
- # The connection is an left top exterior joint
- connection_set[story][connection_no] = Connection('top exterior',
- steel, dead_load, live_load, span,
- left_beam=beam_set[story][connection_no],
- right_beam=None,
- top_column=None,
- bottom_column=column_set[story][connection_no])
- elif connection_no == building_1.geometry['number of X bay']:
- # The connection is an right top exterior joint
- connection_set[story][connection_no] = Connection('top exterior',
- steel, dead_load, live_load, span,
- left_beam=beam_set[story][connection_no-1],
- right_beam=None,
- top_column=None,
- bottom_column=column_set[story][connection_no])
+ connection_set[story][connection_no] = Connection(
+ 'typical interior',
+ steel,
+ dead_load,
+ live_load,
+ span,
+ left_beam=beam_set[story][connection_no - 1],
+ right_beam=beam_set[story][connection_no],
+ top_column=column_set[story + 1][connection_no],
+ bottom_column=column_set[story][connection_no],
+ )
+ elif connection_no == 0:
+                # The connection is a left top exterior joint
+ connection_set[story][connection_no] = Connection(
+ 'top exterior',
+ steel,
+ dead_load,
+ live_load,
+ span,
+ left_beam=beam_set[story][connection_no],
+ right_beam=None,
+ top_column=None,
+ bottom_column=column_set[story][connection_no],
+ )
+ elif connection_no == building_1.geometry['number of X bay']:
+                # The connection is a right top exterior joint
+ connection_set[story][connection_no] = Connection(
+ 'top exterior',
+ steel,
+ dead_load,
+ live_load,
+ span,
+ left_beam=beam_set[story][connection_no - 1],
+ right_beam=None,
+ top_column=None,
+ bottom_column=column_set[story][connection_no],
+ )
- else:
- # The connection is an top interior joint
- connection_set[story][connection_no] = Connection('top interior',
- steel, dead_load, live_load, span,
- left_beam=beam_set[story][connection_no],
- right_beam=beam_set[story][connection_no],
- top_column=None,
- bottom_column=column_set[story][connection_no])
+ else:
+                # The connection is a top interior joint
+ connection_set[story][connection_no] = Connection(
+ 'top interior',
+ steel,
+ dead_load,
+ live_load,
+ span,
+ left_beam=beam_set[story][connection_no],
+ right_beam=beam_set[story][connection_no],
+ top_column=None,
+ bottom_column=column_set[story][connection_no],
+ )
if not connection_set[story][connection_no].check_flag():
- sys.stderr.write('connection_%s%s is not feasible!!!\n' % (story, connection_no))
+ sys.stderr.write(
+ 'connection_%s%s is not feasible!!!\n' % (story, connection_no) # noqa: UP031
+ )
not_feasible_connection.append([story, connection_no])
# sys.exit(1)
-
# ********************************************************************
# ///////// Revise Member to Satisfy Connection Requirement //////////
# ********************************************************************
- for [target_story_index, target_connection_no] in not_feasible_connection:
+ for [target_story_index, target_connection_no] in not_feasible_connection: # noqa: PLR1702
# For connection not satisfy the geometry limit
- while not connection_set[target_story_index][target_connection_no].is_feasible['geometry limits']:
+ while not connection_set[target_story_index][
+ target_connection_no
+ ].is_feasible['geometry limits']:
            # This should never happen because all beams and columns were selected from
            # a database from which non-prequalified sizes have already been removed.
pass
# For connection not satisfy the shear or flexural strength requirement -> upscale the beam
- while (not connection_set[target_story_index][target_connection_no].is_feasible['shear strength']) \
- or (not connection_set[target_story_index][target_connection_no].is_feasible['flexural strength']):
+ while (
+ not connection_set[target_story_index][target_connection_no].is_feasible[
+ 'shear strength'
+ ]
+ ) or (
+ not connection_set[target_story_index][target_connection_no].is_feasible[
+ 'flexural strength'
+ ]
+ ):
# Upscale the unsatisfied beam
building_1.upscale_beam(target_story_index)
# Update the modal period and seismic forces
- model_1 = ElasticAnalysis(building_1, for_drift_only=False, for_period_only=True)
+ model_1 = ElasticAnalysis(
+ building_1, for_drift_only=False, for_period_only=True
+ )
building_1.read_modal_period()
building_1.compute_seismic_force()
# Re-perform the elastic analysis to obtain the updated demand
- model_1 = ElasticAnalysis(building_1, for_drift_only=False, for_period_only=False)
+ model_1 = ElasticAnalysis(
+ building_1, for_drift_only=False, for_period_only=False
+ )
building_1.read_story_drift()
elastic_demand = ElasticOutput(building_1)
# Re-construct the beam objects (only for those revised by previous upscale activity)
for bay in range(building_1.geometry['number of X bay']):
length = building_1.geometry['X bay width']
- shear_demand = abs(elastic_demand.dominate_load['beam shear'][target_story_index, 2 * bay])
- moment_left = elastic_demand.dominate_load['beam moment'][target_story_index, 2 * bay]
- moment_right = elastic_demand.dominate_load['beam moment'][target_story_index, 2 * bay + 1]
- beam_set[target_story_index][bay] = Beam(building_1.member_size['beam'][target_story_index], length,
- shear_demand, moment_left, moment_right, steel)
+ shear_demand = abs(
+ elastic_demand.dominate_load['beam shear'][
+ target_story_index, 2 * bay
+ ]
+ )
+ moment_left = elastic_demand.dominate_load['beam moment'][
+ target_story_index, 2 * bay
+ ]
+ moment_right = elastic_demand.dominate_load['beam moment'][
+ target_story_index, 2 * bay + 1
+ ]
+ beam_set[target_story_index][bay] = Beam(
+ building_1.member_size['beam'][target_story_index],
+ length,
+ shear_demand,
+ moment_left,
+ moment_right,
+ steel,
+ )
# Re-construct the connection objects (only for those affected by updated beam object)
- for story in range(target_story_index, target_story_index+1):
- for connection_no in range(building_1.geometry['number of X bay'] + 1):
+ for story in range(target_story_index, target_story_index + 1):
+ for connection_no in range(
+ building_1.geometry['number of X bay'] + 1
+ ):
# for connection_no in range(1, 2):
- dead_load = building_1.gravity_loads['beam dead load'][story] # Unit: lb/ft
- live_load = building_1.gravity_loads['beam live load'][story] # Unit: lb/ft
+ dead_load = building_1.gravity_loads['beam dead load'][
+ story
+ ] # Unit: lb/ft
+ live_load = building_1.gravity_loads['beam live load'][
+ story
+ ] # Unit: lb/ft
span = building_1.geometry['X bay width'] # Unit: ft
if story != (building_1.geometry['number of story'] - 1):
# The connection is not on roof
if connection_no == 0:
# The connection is an exterior joint
- connection_set[story][connection_no] = Connection('typical exterior',
- steel, dead_load, live_load, span,
- left_beam=beam_set[story][connection_no],
- right_beam=None,
- top_column=column_set[story + 1][
- connection_no],
- bottom_column=column_set[story][
- connection_no])
+ connection_set[story][connection_no] = Connection(
+ 'typical exterior',
+ steel,
+ dead_load,
+ live_load,
+ span,
+ left_beam=beam_set[story][connection_no],
+ right_beam=None,
+ top_column=column_set[story + 1][connection_no],
+ bottom_column=column_set[story][connection_no],
+ )
elif connection_no == building_1.geometry['number of X bay']:
# The connection is an exterior joint
- connection_set[story][connection_no] = Connection('typical exterior',
- steel, dead_load, live_load, span,
- left_beam=beam_set[story][connection_no - 1],
- right_beam=None,
- top_column=column_set[story + 1][
- connection_no],
- bottom_column=column_set[story][
- connection_no])
+ connection_set[story][connection_no] = Connection(
+ 'typical exterior',
+ steel,
+ dead_load,
+ live_load,
+ span,
+ left_beam=beam_set[story][connection_no - 1],
+ right_beam=None,
+ top_column=column_set[story + 1][connection_no],
+ bottom_column=column_set[story][connection_no],
+ )
else:
# The connection is an interior joint
- connection_set[story][connection_no] = Connection('typical interior',
- steel, dead_load, live_load, span,
- left_beam=beam_set[story][connection_no - 1],
- right_beam=beam_set[story][connection_no],
- top_column=column_set[story + 1][
- connection_no],
- bottom_column=column_set[story][
- connection_no])
- else:
- # The connection is not on roof
- if connection_no == 0:
- # The connection is an left top exterior joint
- connection_set[story][connection_no] = Connection('top exterior',
- steel, dead_load, live_load, span,
- left_beam=beam_set[story][connection_no],
- right_beam=None,
- top_column=None,
- bottom_column=column_set[story][
- connection_no])
- elif connection_no == building_1.geometry['number of X bay']:
- # The connection is an right top exterior joint
- connection_set[story][connection_no] = Connection('top exterior',
- steel, dead_load, live_load, span,
- left_beam=beam_set[story][connection_no - 1],
- right_beam=None,
- top_column=None,
- bottom_column=column_set[story][
- connection_no])
+ connection_set[story][connection_no] = Connection(
+ 'typical interior',
+ steel,
+ dead_load,
+ live_load,
+ span,
+ left_beam=beam_set[story][connection_no - 1],
+ right_beam=beam_set[story][connection_no],
+ top_column=column_set[story + 1][connection_no],
+ bottom_column=column_set[story][connection_no],
+ )
+ elif connection_no == 0:
+ # The connection is a left top exterior joint
+ connection_set[story][connection_no] = Connection(
+ 'top exterior',
+ steel,
+ dead_load,
+ live_load,
+ span,
+ left_beam=beam_set[story][connection_no],
+ right_beam=None,
+ top_column=None,
+ bottom_column=column_set[story][connection_no],
+ )
+ elif connection_no == building_1.geometry['number of X bay']:
+ # The connection is a right top exterior joint
+ connection_set[story][connection_no] = Connection(
+ 'top exterior',
+ steel,
+ dead_load,
+ live_load,
+ span,
+ left_beam=beam_set[story][connection_no - 1],
+ right_beam=None,
+ top_column=None,
+ bottom_column=column_set[story][connection_no],
+ )
- else:
- # The connection is an top interior joint
- connection_set[story][connection_no] = Connection('top interior',
- steel, dead_load, live_load, span,
- left_beam=beam_set[story][connection_no],
- right_beam=beam_set[story][connection_no],
- top_column=None,
- bottom_column=column_set[story][
- connection_no])
+ else:
+ # The connection is a top interior joint
+ connection_set[story][connection_no] = Connection(
+ 'top interior',
+ steel,
+ dead_load,
+ live_load,
+ span,
+ left_beam=beam_set[story][connection_no],
+ right_beam=beam_set[story][connection_no],
+ top_column=None,
+ bottom_column=column_set[story][connection_no],
+ )
i = 0
# For connections that do not satisfy the strong-column-weak-beam criterion -> upscale the column
- while not connection_set[target_story_index][target_connection_no].is_feasible['SCWB']: # Not feasible connection -> go into loop
+ while not connection_set[target_story_index][
+ target_connection_no
+ ].is_feasible['SCWB']: # Not feasible connection -> go into loop
# Determine which story's column should be upscaled
# If it is a roof connection that does not satisfy SCWB, we can only upscale the top-story column
# because no column exists above the roof.
@@ -436,125 +619,181 @@ def seismic_design(base_directory, pathDataFolder, workingDirectory):
target_story = target_story_index
# If it is not a roof connection: check whether the upper column is significantly smaller than the lower column.
# If that is the case, pick the smaller upper column to upscale.
+ elif (
+ column_set[target_story_index + 1][target_connection_no].section[
+ 'Zx'
+ ]
+ < UPPER_LOWER_COLUMN_Zx
+ * column_set[target_story_index][target_connection_no].section['Zx']
+ ):
+ target_story = target_story_index + 1
else:
- if (column_set[target_story_index+1][target_connection_no].section['Zx']
- < UPPER_LOWER_COLUMN_Zx*column_set[target_story_index][target_connection_no].section['Zx']):
- target_story = target_story_index + 1
- else:
- target_story = target_story_index
+ target_story = target_story_index
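+ # The SCWB check is presumably a column-to-beam plastic moment ratio at the joint
+ # (compare the Mpc/Mpb values tabulated later in this script against
+ # BEAM_TO_COLUMN_RATIO). UPPER_LOWER_COLUMN_Zx is the threshold used above to decide
+ # whether the upper column is "significantly smaller" than the lower one, in which
+ # case the upper column is the one to upscale.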
# Upscale the deficient column on the determined story
- if target_connection_no == 0 or target_connection_no == building_1.geometry['number of X bay']:
+ if (
+ target_connection_no == 0
+ or target_connection_no == building_1.geometry['number of X bay']
+ ):
type_column = 'exterior column'
else:
type_column = 'interior column'
building_1.upscale_column(target_story, type_column)
# Update modal period and seismic forces
- model_1 = ElasticAnalysis(building_1, for_drift_only=False, for_period_only=True)
+ model_1 = ElasticAnalysis(
+ building_1, for_drift_only=False, for_period_only=True
+ )
building_1.read_modal_period()
building_1.compute_seismic_force()
# Re-perform the elastic analysis to obtain the updated demand
- model_1 = ElasticAnalysis(building_1, for_drift_only=False, for_period_only=False)
+ model_1 = ElasticAnalysis( # noqa: F841
+ building_1, for_drift_only=False, for_period_only=False
+ )
building_1.read_story_drift()
-
# **************************** Debugging Use Only ************************************
i += 1
- print("Optimal member size after upscale column%i" % i)
- print("Exterior column:", building_1.member_size['exterior column'])
- print("Interior column:", building_1.member_size['interior column'])
- print("Beam:", building_1.member_size['beam'])
- print('After upscale column, current story drift is: ')
- print(building_1.elastic_response['story drift'] * 5.5 * 1.1 * 100)
+ print('Optimal member size after upscale column%i' % i) # noqa: T201
+ print('Exterior column:', building_1.member_size['exterior column']) # noqa: T201
+ print('Interior column:', building_1.member_size['interior column']) # noqa: T201
+ print('Beam:', building_1.member_size['beam']) # noqa: T201
+ print('After upscale column, current story drift is: ') # noqa: T201
+ print(building_1.elastic_response['story drift'] * 5.5 * 1.1 * 100) # noqa: T201
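+ # The debug drift is scaled by 5.5 * 1.1 * 100: 5.5 is presumably the deflection
+ # amplification factor Cd for steel special moment frames, 1.1 an additional scale
+ # factor (assumed), and 100 converts the drift ratio to a percentage.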
# **************************** Debug Ends Here **************************************
-
elastic_demand = ElasticOutput(building_1)
# Re-construct the column objects in the target_story (only update those revised by the previous algorithm)
for column_no in range(building_1.geometry['number of X bay'] + 1):
- axial_demand = abs(elastic_demand.dominate_load['column axial'][target_story, 2 * column_no])
- shear_demand = abs(elastic_demand.dominate_load['column shear'][target_story, 2 * column_no])
- moment_bottom = elastic_demand.dominate_load['column moment'][target_story, 2 * column_no]
- moment_top = elastic_demand.dominate_load['column moment'][target_story, 2 * column_no + 1]
- if column_no == 0 or column_no == building_1.geometry['number of X bay']:
+ axial_demand = abs(
+ elastic_demand.dominate_load['column axial'][
+ target_story, 2 * column_no
+ ]
+ )
+ shear_demand = abs(
+ elastic_demand.dominate_load['column shear'][
+ target_story, 2 * column_no
+ ]
+ )
+ moment_bottom = elastic_demand.dominate_load['column moment'][
+ target_story, 2 * column_no
+ ]
+ moment_top = elastic_demand.dominate_load['column moment'][
+ target_story, 2 * column_no + 1
+ ]
+ if (
+ column_no == 0
+ or column_no == building_1.geometry['number of X bay']
+ ):
column_type = 'exterior column'
else:
column_type = 'interior column'
length = np.ndarray.item(
building_1.geometry['floor height'][target_story + 1]
- - building_1.geometry['floor height'][target_story])
+ - building_1.geometry['floor height'][target_story]
+ )
# Build instance for each column member
- column_set[target_story][column_no] = Column(building_1.member_size[column_type][target_story],
- axial_demand, shear_demand, moment_bottom, moment_top,
- length, length, steel)
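+ # Column arguments: section size, axial and shear demands, bottom/top end moments,
+ # the story height passed twice (presumably the unbraced lengths about both axes),
+ # and the steel material object.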
+ column_set[target_story][column_no] = Column(
+ building_1.member_size[column_type][target_story],
+ axial_demand,
+ shear_demand,
+ moment_bottom,
+ moment_top,
+ length,
+ length,
+ steel,
+ )
# Re-construct the connection objects (only update the joint connections attached to the revised column)
- for story in range(target_story-1 >= 0, target_story+1):
- for connection_no in range(building_1.geometry['number of X bay'] + 1):
- dead_load = building_1.gravity_loads['beam dead load'][story] # Unit: lb/ft
- live_load = building_1.gravity_loads['beam live load'][story] # Unit: lb/ft
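+ # Note: the range start below, `target_story - 1 >= 0`, evaluates to a bool (0 or 1),
+ # so the loop runs over stories 1..target_story whenever target_story >= 1; the
+ # intent is presumably max(target_story - 1, 0), i.e. the story below and the story
+ # at the upscaled column.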
+ for story in range(target_story - 1 >= 0, target_story + 1):
+ for connection_no in range(
+ building_1.geometry['number of X bay'] + 1
+ ):
+ dead_load = building_1.gravity_loads['beam dead load'][
+ story
+ ] # Unit: lb/ft
+ live_load = building_1.gravity_loads['beam live load'][
+ story
+ ] # Unit: lb/ft
span = building_1.geometry['X bay width'] # Unit: ft
if story != (building_1.geometry['number of story'] - 1):
# The connection is not on roof
if connection_no == 0:
# The connection is an exterior joint
- connection_set[story][connection_no] = Connection('typical exterior',
- steel, dead_load, live_load, span,
- left_beam=beam_set[story][connection_no],
- right_beam=None,
- top_column=column_set[story + 1][
- connection_no],
- bottom_column=column_set[story][
- connection_no])
+ connection_set[story][connection_no] = Connection(
+ 'typical exterior',
+ steel,
+ dead_load,
+ live_load,
+ span,
+ left_beam=beam_set[story][connection_no],
+ right_beam=None,
+ top_column=column_set[story + 1][connection_no],
+ bottom_column=column_set[story][connection_no],
+ )
elif connection_no == building_1.geometry['number of X bay']:
# The connection is an exterior joint
- connection_set[story][connection_no] = Connection('typical exterior',
- steel, dead_load, live_load, span,
- left_beam=beam_set[story][connection_no - 1],
- right_beam=None,
- top_column=column_set[story + 1][
- connection_no],
- bottom_column=column_set[story][
- connection_no])
+ connection_set[story][connection_no] = Connection(
+ 'typical exterior',
+ steel,
+ dead_load,
+ live_load,
+ span,
+ left_beam=beam_set[story][connection_no - 1],
+ right_beam=None,
+ top_column=column_set[story + 1][connection_no],
+ bottom_column=column_set[story][connection_no],
+ )
else:
# The connection is an interior joint
- connection_set[story][connection_no] = Connection('typical interior',
- steel, dead_load, live_load, span,
- left_beam=beam_set[story][connection_no - 1],
- right_beam=beam_set[story][connection_no],
- top_column=column_set[story + 1][
- connection_no],
- bottom_column=column_set[story][
- connection_no])
- else:
- # The connection is not on roof
- if connection_no == 0:
- # The connection is an left top exterior joint
- connection_set[story][connection_no] = Connection('top exterior',
- steel, dead_load, live_load, span,
- left_beam=beam_set[story][connection_no],
- right_beam=None,
- top_column=None,
- bottom_column=column_set[story][
- connection_no])
- elif connection_no == building_1.geometry['number of X bay']:
- # The connection is an right top exterior joint
- connection_set[story][connection_no] = Connection('top exterior',
- steel, dead_load, live_load, span,
- left_beam=beam_set[story][connection_no - 1],
- right_beam=None,
- top_column=None,
- bottom_column=column_set[story][
- connection_no])
-
- else:
- # The connection is an top interior joint
- connection_set[story][connection_no] = Connection('top interior',
- steel, dead_load, live_load, span,
- left_beam=beam_set[story][connection_no],
- right_beam=beam_set[story][connection_no],
- top_column=None,
- bottom_column=column_set[story][
- connection_no])
+ connection_set[story][connection_no] = Connection(
+ 'typical interior',
+ steel,
+ dead_load,
+ live_load,
+ span,
+ left_beam=beam_set[story][connection_no - 1],
+ right_beam=beam_set[story][connection_no],
+ top_column=column_set[story + 1][connection_no],
+ bottom_column=column_set[story][connection_no],
+ )
+ elif connection_no == 0:
+ # The connection is a left top exterior joint
+ connection_set[story][connection_no] = Connection(
+ 'top exterior',
+ steel,
+ dead_load,
+ live_load,
+ span,
+ left_beam=beam_set[story][connection_no],
+ right_beam=None,
+ top_column=None,
+ bottom_column=column_set[story][connection_no],
+ )
+ elif connection_no == building_1.geometry['number of X bay']:
+ # The connection is a right top exterior joint
+ connection_set[story][connection_no] = Connection(
+ 'top exterior',
+ steel,
+ dead_load,
+ live_load,
+ span,
+ left_beam=beam_set[story][connection_no - 1],
+ right_beam=None,
+ top_column=None,
+ bottom_column=column_set[story][connection_no],
+ )
+ else:
+ # The connection is a top interior joint
+ connection_set[story][connection_no] = Connection(
+ 'top interior',
+ steel,
+ dead_load,
+ live_load,
+ span,
+ left_beam=beam_set[story][connection_no],
+ right_beam=beam_set[story][connection_no],
+ top_column=None,
+ bottom_column=column_set[story][connection_no],
+ )
# ********************************************************************
# /////// Revise Beam Member to Consider Constructability ////////////
@@ -571,220 +810,402 @@ def seismic_design(base_directory, pathDataFolder, workingDirectory):
building_2.read_modal_period()
building_2.compute_seismic_force()
# Perform elastic analysis for member sizes after adjustment for constructability
- model_2 = ElasticAnalysis(building_2, for_drift_only=False, for_period_only=False)
+ model_2 = ElasticAnalysis(
+ building_2, for_drift_only=False, for_period_only=False
+ )
building_2.read_story_drift()
# Read elastic analysis results
elastic_demand_2 = ElasticOutput(building_2)
-
# Construct new beam objects after considering constructability
- construction_beam_set = [[0] * building_2.geometry['number of X bay']
- for story in range(building_2.geometry['number of story'])]
+ construction_beam_set = [
+ [0] * building_2.geometry['number of X bay']
+ for story in range(building_2.geometry['number of story'])
+ ]
for story in range(building_2.geometry['number of story']):
for bay in range(building_2.geometry['number of X bay']):
length = building_2.geometry['X bay width']
- shear_demand = abs(elastic_demand_2.dominate_load['beam shear'][story, 2 * bay])
- moment_left = elastic_demand_2.dominate_load['beam moment'][story, 2 * bay]
- moment_right = elastic_demand_2.dominate_load['beam moment'][story, 2 * bay + 1]
- construction_beam_set[story][bay] = Beam(building_2.member_size['beam'][story], length,
- shear_demand, moment_left, moment_right, steel)
+ shear_demand = abs(
+ elastic_demand_2.dominate_load['beam shear'][story, 2 * bay]
+ )
+ moment_left = elastic_demand_2.dominate_load['beam moment'][
+ story, 2 * bay
+ ]
+ moment_right = elastic_demand_2.dominate_load['beam moment'][
+ story, 2 * bay + 1
+ ]
+ construction_beam_set[story][bay] = Beam(
+ building_2.member_size['beam'][story],
+ length,
+ shear_demand,
+ moment_left,
+ moment_right,
+ steel,
+ )
# Check the flag of each beam (might not be necessary)
if not construction_beam_set[story][bay].check_flag():
- sys.stderr.write('Construction beam_%s%s is not feasible!!!\n' % (story, bay))
-
+ sys.stderr.write(
+ 'Construction beam_%s%s is not feasible!!!\n' % (story, bay) # noqa: UP031
+ )
# Construct new column objects after considering constructability
- construction_column_set = [[0] * (building_2.geometry['number of X bay'] + 1)
- for story in range(building_2.geometry['number of story'])]
+ construction_column_set = [
+ [0] * (building_2.geometry['number of X bay'] + 1)
+ for story in range(building_2.geometry['number of story'])
+ ]
for story in range(building_2.geometry['number of story']):
for column_no in range(building_2.geometry['number of X bay'] + 1):
- axial_demand = abs(elastic_demand_2.dominate_load['column axial'][story, 2 * column_no])
- shear_demand = abs(elastic_demand_2.dominate_load['column shear'][story, 2 * column_no])
- moment_bottom = elastic_demand_2.dominate_load['column moment'][story, 2 * column_no]
- moment_top = elastic_demand_2.dominate_load['column moment'][story, 2 * column_no + 1]
+ axial_demand = abs(
+ elastic_demand_2.dominate_load['column axial'][story, 2 * column_no]
+ )
+ shear_demand = abs(
+ elastic_demand_2.dominate_load['column shear'][story, 2 * column_no]
+ )
+ moment_bottom = elastic_demand_2.dominate_load['column moment'][
+ story, 2 * column_no
+ ]
+ moment_top = elastic_demand_2.dominate_load['column moment'][
+ story, 2 * column_no + 1
+ ]
if column_no == 0 or column_no == building_2.geometry['number of X bay']:
column_type = 'exterior column'
else:
column_type = 'interior column'
length = np.ndarray.item(
- building_2.geometry['floor height'][story + 1] - building_2.geometry['floor height'][story])
+ building_2.geometry['floor height'][story + 1]
+ - building_2.geometry['floor height'][story]
+ )
# Build instance for each column member
- construction_column_set[story][column_no] = Column(building_2.member_size[column_type][story],
- axial_demand, shear_demand, moment_bottom, moment_top,
- length, length, steel)
+ construction_column_set[story][column_no] = Column(
+ building_2.member_size[column_type][story],
+ axial_demand,
+ shear_demand,
+ moment_bottom,
+ moment_top,
+ length,
+ length,
+ steel,
+ )
# Check the flag of each column (May not be necessary)
if not construction_column_set[story][column_no].check_flag():
- sys.stderr.write('Construction column_%s%s is not feasible!!!\n' % (story, column_no))
-
+ sys.stderr.write(
+ 'Construction column_%s%s is not feasible!!!\n' # noqa: UP031
+ % (story, column_no)
+ )
# ********************************************************************
# /// Revise Column to Satisfy SCWB after Adjusting Beam Constructability ///
# ********************************************************************
# Construct new connection objects after considering constructability
- construction_connection_set = [[0] * (building_2.geometry['number of X bay'] + 1)
- for story in range(building_2.geometry['number of story'])]
+ construction_connection_set = [
+ [0] * (building_2.geometry['number of X bay'] + 1)
+ for story in range(building_2.geometry['number of story'])
+ ]
not_feasible_construction_connection = []
for story in range(building_2.geometry['number of story']):
- for connection_no in range(building_2.geometry['number of X bay']+1):
- dead_load = building_2.gravity_loads['beam dead load'][story] # Unit: lb/ft
- live_load = building_2.gravity_loads['beam live load'][story] # Unit: lb/ft
+ for connection_no in range(building_2.geometry['number of X bay'] + 1):
+ dead_load = building_2.gravity_loads['beam dead load'][
+ story
+ ] # Unit: lb/ft
+ live_load = building_2.gravity_loads['beam live load'][
+ story
+ ] # Unit: lb/ft
span = building_2.geometry['X bay width'] # Unit: ft
if story != (building_2.geometry['number of story'] - 1):
# The connection is not on roof
if connection_no == 0:
# The connection is an exterior joint
- construction_connection_set[story][connection_no] \
- = Connection('typical exterior', steel, dead_load, live_load, span,
- left_beam=construction_beam_set[story][connection_no],
- right_beam=None,
- top_column=construction_column_set[story + 1][connection_no],
- bottom_column=construction_column_set[story][connection_no])
+ construction_connection_set[story][connection_no] = Connection(
+ 'typical exterior',
+ steel,
+ dead_load,
+ live_load,
+ span,
+ left_beam=construction_beam_set[story][connection_no],
+ right_beam=None,
+ top_column=construction_column_set[story + 1][connection_no],
+ bottom_column=construction_column_set[story][connection_no],
+ )
elif connection_no == building_2.geometry['number of X bay']:
# The connection is an exterior joint
- construction_connection_set[story][connection_no] \
- = Connection('typical exterior',
- steel, dead_load, live_load, span,
- left_beam=construction_beam_set[story][connection_no-1],
- right_beam=None,
- top_column=construction_column_set[story + 1][connection_no],
- bottom_column=construction_column_set[story][connection_no])
+ construction_connection_set[story][connection_no] = Connection(
+ 'typical exterior',
+ steel,
+ dead_load,
+ live_load,
+ span,
+ left_beam=construction_beam_set[story][connection_no - 1],
+ right_beam=None,
+ top_column=construction_column_set[story + 1][connection_no],
+ bottom_column=construction_column_set[story][connection_no],
+ )
else:
# The connection is an interior joint
- construction_connection_set[story][connection_no] \
- = Connection('typical interior',
- steel, dead_load, live_load, span,
- left_beam=construction_beam_set[story][connection_no - 1],
- right_beam=construction_beam_set[story][connection_no],
- top_column=construction_column_set[story + 1][connection_no],
- bottom_column=construction_column_set[story][connection_no])
+ construction_connection_set[story][connection_no] = Connection(
+ 'typical interior',
+ steel,
+ dead_load,
+ live_load,
+ span,
+ left_beam=construction_beam_set[story][connection_no - 1],
+ right_beam=construction_beam_set[story][connection_no],
+ top_column=construction_column_set[story + 1][connection_no],
+ bottom_column=construction_column_set[story][connection_no],
+ )
+ elif connection_no == 0:
+ # The connection is a left top exterior joint
+ construction_connection_set[story][connection_no] = Connection(
+ 'top exterior',
+ steel,
+ dead_load,
+ live_load,
+ span,
+ left_beam=construction_beam_set[story][connection_no],
+ right_beam=None,
+ top_column=None,
+ bottom_column=construction_column_set[story][connection_no],
+ )
+ elif connection_no == building_2.geometry['number of X bay']:
+ # The connection is a right top exterior joint
+ construction_connection_set[story][connection_no] = Connection(
+ 'top exterior',
+ steel,
+ dead_load,
+ live_load,
+ span,
+ left_beam=construction_beam_set[story][connection_no - 1],
+ right_beam=None,
+ top_column=None,
+ bottom_column=construction_column_set[story][connection_no],
+ )
else:
- # The connection is not on roof
- if connection_no == 0:
- # The connection is an left top exterior joint
- construction_connection_set[story][connection_no] \
- = Connection('top exterior',
- steel, dead_load, live_load, span,
- left_beam=construction_beam_set[story][connection_no],
- right_beam=None,
- top_column=None,
- bottom_column=construction_column_set[story][connection_no])
- elif connection_no == building_2.geometry['number of X bay']:
- # The connection is an right top exterior joint
- construction_connection_set[story][connection_no] \
- = Connection('top exterior',
- steel, dead_load, live_load, span,
- left_beam=construction_beam_set[story][connection_no-1],
- right_beam=None,
- top_column=None,
- bottom_column=construction_column_set[story][connection_no])
- else:
- # The connection is an top interior joint
- construction_connection_set[story][connection_no] \
- = Connection('top interior',
- steel, dead_load, live_load, span,
- left_beam=construction_beam_set[story][connection_no],
- right_beam=construction_beam_set[story][connection_no],
- top_column=None,
- bottom_column=construction_column_set[story][connection_no])
- if not construction_connection_set[story][connection_no].check_flag(): # (Might not be necessary)
- sys.stderr.write('Construction connection_%s%s is not feasible!!!\n' % (story, connection_no))
+ # The connection is a top interior joint
+ construction_connection_set[story][connection_no] = Connection(
+ 'top interior',
+ steel,
+ dead_load,
+ live_load,
+ span,
+ left_beam=construction_beam_set[story][connection_no],
+ right_beam=construction_beam_set[story][connection_no],
+ top_column=None,
+ bottom_column=construction_column_set[story][connection_no],
+ )
+ if not construction_connection_set[story][
+ connection_no
+ ].check_flag(): # (Might not be necessary)
+ sys.stderr.write(
+ 'Construction connection_%s%s is not feasible!!!\n' # noqa: UP031
+ % (story, connection_no)
+ )
not_feasible_construction_connection.append([story, connection_no])
# Revise column sizes for new construction connection because of SCWB
- for [target_story_index, target_connection_no] in not_feasible_construction_connection:
+ for [ # noqa: PLR1702
+ target_story_index,
+ target_connection_no,
+ ] in not_feasible_construction_connection:
# For connections that do not satisfy the geometry limits
- while not construction_connection_set[target_story_index][target_connection_no].is_feasible['geometry limits']:
+ while not construction_connection_set[target_story_index][
+ target_connection_no
+ ].is_feasible['geometry limits']:
# This should never be reached because all beams and columns are selected from a database
# from which non-prequalified sizes have already been removed.
pass
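+ # Note: if the geometry-limits condition above were ever True, this loop would never
+ # terminate, since its body is a bare pass; it is effectively a placeholder.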
# For connections that do not satisfy the shear or flexural strength requirement -> upscale the beam
- while (not construction_connection_set[target_story_index][target_connection_no].is_feasible['shear strength']) \
- or (not construction_connection_set[target_story_index][target_connection_no].is_feasible['flexural strength']):
+ while (
+ not construction_connection_set[target_story_index][
+ target_connection_no
+ ].is_feasible['shear strength']
+ ) or (
+ not construction_connection_set[target_story_index][
+ target_connection_no
+ ].is_feasible['flexural strength']
+ ):
# Upscale the beam that does not satisfy the requirement
building_2.upscale_beam(target_story_index)
# Update modal period and seismic forces
- model_2 = ElasticAnalysis(building_2, for_drift_only=False, for_period_only=True)
+ model_2 = ElasticAnalysis(
+ building_2, for_drift_only=False, for_period_only=True
+ )
building_2.read_modal_period()
building_2.compute_seismic_force()
# Re-perform the elastic analysis to obtain the updated demand
- model_2 = ElasticAnalysis(building_2, for_drift_only=False, for_period_only=False)
+ model_2 = ElasticAnalysis(
+ building_2, for_drift_only=False, for_period_only=False
+ )
building_2.read_story_drift()
elastic_demand_2 = ElasticOutput(building_2)
# Re-construct the beam objects (only for those revised by previous upscale activity)
for bay in range(building_2.geometry['number of X bay']):
length = building_2.geometry['X bay width']
- shear_demand = abs(elastic_demand_2.dominate_load['beam shear'][target_story_index, 2 * bay])
- moment_left = elastic_demand_2.dominate_load['beam moment'][target_story_index, 2 * bay]
- moment_right = elastic_demand_2.dominate_load['beam moment'][target_story_index, 2 * bay + 1]
- construction_beam_set[target_story_index][bay] = Beam(building_1.member_size['beam'][target_story_index],
- length, shear_demand,
- moment_left, moment_right, steel)
+ shear_demand = abs(
+ elastic_demand_2.dominate_load['beam shear'][
+ target_story_index, 2 * bay
+ ]
+ )
+ moment_left = elastic_demand_2.dominate_load['beam moment'][
+ target_story_index, 2 * bay
+ ]
+ moment_right = elastic_demand_2.dominate_load['beam moment'][
+ target_story_index, 2 * bay + 1
+ ]
+ construction_beam_set[target_story_index][bay] = Beam(
+ building_1.member_size['beam'][target_story_index],
+ length,
+ shear_demand,
+ moment_left,
+ moment_right,
+ steel,
+ )
# Re-construct the connection objects (only for those affected by updated beam object)
- for story in range(target_story_index, target_story_index+1):
- for connection_no in range(building_2.geometry['number of X bay'] + 1):
+ for story in range(target_story_index, target_story_index + 1):
+ for connection_no in range(
+ building_2.geometry['number of X bay'] + 1
+ ):
# for connection_no in range(1, 2):
- dead_load = building_2.gravity_loads['beam dead load'][story] # Unit: lb/ft
- live_load = building_2.gravity_loads['beam live load'][story] # Unit: lb/ft
+ dead_load = building_2.gravity_loads['beam dead load'][
+ story
+ ] # Unit: lb/ft
+ live_load = building_2.gravity_loads['beam live load'][
+ story
+ ] # Unit: lb/ft
span = building_2.geometry['X bay width'] # Unit: ft
if story != (building_2.geometry['number of story'] - 1):
# The connection is not on roof
if connection_no == 0:
# The connection is an exterior joint
- construction_connection_set[story][connection_no] \
- = Connection('typical exterior', steel, dead_load, live_load, span,
- left_beam=construction_beam_set[story][connection_no], right_beam=None,
- top_column=construction_column_set[story + 1][connection_no],
- bottom_column=construction_column_set[story][connection_no])
+ construction_connection_set[story][connection_no] = (
+ Connection(
+ 'typical exterior',
+ steel,
+ dead_load,
+ live_load,
+ span,
+ left_beam=construction_beam_set[story][
+ connection_no
+ ],
+ right_beam=None,
+ top_column=construction_column_set[story + 1][
+ connection_no
+ ],
+ bottom_column=construction_column_set[story][
+ connection_no
+ ],
+ )
+ )
elif connection_no == building_2.geometry['number of X bay']:
# The connection is an exterior joint
- construction_connection_set[story][connection_no] \
- = Connection('typical exterior', steel, dead_load, live_load, span,
- left_beam=construction_beam_set[story][connection_no - 1],
- right_beam=None,
- top_column=construction_column_set[story + 1][connection_no],
- bottom_column=construction_column_set[story][connection_no])
+ construction_connection_set[story][connection_no] = (
+ Connection(
+ 'typical exterior',
+ steel,
+ dead_load,
+ live_load,
+ span,
+ left_beam=construction_beam_set[story][
+ connection_no - 1
+ ],
+ right_beam=None,
+ top_column=construction_column_set[story + 1][
+ connection_no
+ ],
+ bottom_column=construction_column_set[story][
+ connection_no
+ ],
+ )
+ )
else:
# The connection is an interior joint
- construction_connection_set[story][connection_no] \
- = Connection('typical interior', steel, dead_load, live_load, span,
- left_beam=construction_beam_set[story][connection_no - 1],
- right_beam=construction_beam_set[story][connection_no],
- top_column=construction_column_set[story + 1][connection_no],
- bottom_column=construction_column_set[story][connection_no])
- else:
- # The connection is not on roof
- if connection_no == 0:
- # The connection is an left top exterior joint
- construction_connection_set[story][connection_no] \
- = Connection('top exterior', steel, dead_load, live_load, span,
- left_beam=construction_beam_set[story][connection_no],
- right_beam=None,
- top_column=None,
- bottom_column=construction_column_set[story][connection_no])
- elif connection_no == building_2.geometry['number of X bay']:
- # The connection is an right top exterior joint
- construction_connection_set[story][connection_no] \
- = Connection('top exterior', steel, dead_load, live_load, span,
- left_beam=construction_beam_set[story][connection_no - 1],
- right_beam=None,
- top_column=None,
- bottom_column=construction_column_set[story][connection_no])
+ construction_connection_set[story][connection_no] = (
+ Connection(
+ 'typical interior',
+ steel,
+ dead_load,
+ live_load,
+ span,
+ left_beam=construction_beam_set[story][
+ connection_no - 1
+ ],
+ right_beam=construction_beam_set[story][
+ connection_no
+ ],
+ top_column=construction_column_set[story + 1][
+ connection_no
+ ],
+ bottom_column=construction_column_set[story][
+ connection_no
+ ],
+ )
+ )
+ elif connection_no == 0:
+ # The connection is a left top exterior joint
+ construction_connection_set[story][connection_no] = (
+ Connection(
+ 'top exterior',
+ steel,
+ dead_load,
+ live_load,
+ span,
+ left_beam=construction_beam_set[story][
+ connection_no
+ ],
+ right_beam=None,
+ top_column=None,
+ bottom_column=construction_column_set[story][
+ connection_no
+ ],
+ )
+ )
+ elif connection_no == building_2.geometry['number of X bay']:
+ # The connection is a right top exterior joint
+ construction_connection_set[story][connection_no] = (
+ Connection(
+ 'top exterior',
+ steel,
+ dead_load,
+ live_load,
+ span,
+ left_beam=construction_beam_set[story][
+ connection_no - 1
+ ],
+ right_beam=None,
+ top_column=None,
+ bottom_column=construction_column_set[story][
+ connection_no
+ ],
+ )
+ )
- else:
- # The connection is an top interior joint
- construction_connection_set[story][connection_no] \
- = Connection('top interior',
- steel, dead_load, live_load, span,
- left_beam=construction_beam_set[story][connection_no],
- right_beam=construction_beam_set[story][connection_no],
- top_column=None,
- bottom_column=construction_column_set[story][connection_no])
+ else:
+ # The connection is a top interior joint
+ construction_connection_set[story][connection_no] = (
+ Connection(
+ 'top interior',
+ steel,
+ dead_load,
+ live_load,
+ span,
+ left_beam=construction_beam_set[story][
+ connection_no
+ ],
+ right_beam=construction_beam_set[story][
+ connection_no
+ ],
+ top_column=None,
+ bottom_column=construction_column_set[story][
+ connection_no
+ ],
+ )
+ )
i = 0
# For connections that do not satisfy the strong-column-weak-beam criterion -> upscale the column
- while not construction_connection_set[target_story_index][target_connection_no].is_feasible['SCWB']: # Not feasible connection -> go into loop
+ while not construction_connection_set[target_story_index][
+ target_connection_no
+ ].is_feasible['SCWB']: # Not feasible connection -> go into loop
# Determine which story's column should be upscaled
# If it is a roof connection that does not satisfy SCWB, we can only upscale the top-story column
# because no column exists above the roof.
@@ -792,117 +1213,222 @@ def seismic_design(base_directory, pathDataFolder, workingDirectory):
target_story = target_story_index
# If it is not a roof connection: check whether the upper column is significantly smaller than the lower column.
# If that is the case, pick the smaller upper column to upscale.
+ elif (
+ column_set[target_story_index + 1][target_connection_no].section[
+ 'Zx'
+ ]
+ < UPPER_LOWER_COLUMN_Zx
+ * column_set[target_story_index][target_connection_no].section['Zx']
+ ):
+ target_story = target_story_index + 1
else:
- if (column_set[target_story_index+1][target_connection_no].section['Zx']
- < UPPER_LOWER_COLUMN_Zx*column_set[target_story_index][target_connection_no].section['Zx']):
- target_story = target_story_index + 1
- else:
- target_story = target_story_index
+ target_story = target_story_index
# Upscale the deficient column on the determined story
- if target_connection_no == 0 or target_connection_no == building_2.geometry['number of X bay']:
+ if (
+ target_connection_no == 0
+ or target_connection_no == building_2.geometry['number of X bay']
+ ):
type_column = 'exterior column'
else:
type_column = 'interior column'
building_2.upscale_column(target_story, type_column)
# Update modal period and seismic forces
- model_2 = ElasticAnalysis(building_2, for_drift_only=False, for_period_only=True)
+ model_2 = ElasticAnalysis(
+ building_2, for_drift_only=False, for_period_only=True
+ )
building_2.read_modal_period()
building_2.compute_seismic_force()
# Re-perform the elastic analysis to obtain the updated demand
- model_2 = ElasticAnalysis(building_2, for_drift_only=False, for_period_only=False)
+ model_2 = ElasticAnalysis( # noqa: F841
+ building_2, for_drift_only=False, for_period_only=False
+ )
building_2.read_story_drift()
-
# **************************** Debugging Use Only ************************************
i += 1
- print("Construction#1 member size after upscale column%i" % iteration)
- print("Exterior column:", building_1.member_size['exterior column'])
- print("Interior column:", building_1.member_size['interior column'])
- print("Beam:", building_1.member_size['beam'])
- print('After upscale column, current story drift is: ')
- print(building_1.elastic_response['story drift'] * 5.5 * 1.1 * 100)
+ print('Construction#1 member size after upscale column%i' % i) # noqa: T201
+ print('Exterior column:', building_2.member_size['exterior column']) # noqa: T201
+ print('Interior column:', building_2.member_size['interior column']) # noqa: T201
+ print('Beam:', building_2.member_size['beam']) # noqa: T201
+ print('After upscale column, current story drift is: ') # noqa: T201
+ print(building_2.elastic_response['story drift'] * 5.5 * 1.1 * 100) # noqa: T201
# **************************** Debug Ends Here **************************************
-
-
elastic_demand_2 = ElasticOutput(building_2)
# Re-construct the column objects in the target_story (only update those revised by the previous algorithm)
for column_no in range(building_2.geometry['number of X bay'] + 1):
- axial_demand = abs(elastic_demand_2.dominate_load['column axial'][target_story, 2 * column_no])
- shear_demand = abs(elastic_demand_2.dominate_load['column shear'][target_story, 2 * column_no])
- moment_bottom = elastic_demand_2.dominate_load['column moment'][target_story, 2 * column_no]
- moment_top = elastic_demand_2.dominate_load['column moment'][target_story, 2 * column_no + 1]
- if column_no == 0 or column_no == building_1.geometry['number of X bay']:
+ axial_demand = abs(
+ elastic_demand_2.dominate_load['column axial'][
+ target_story, 2 * column_no
+ ]
+ )
+ shear_demand = abs(
+ elastic_demand_2.dominate_load['column shear'][
+ target_story, 2 * column_no
+ ]
+ )
+ moment_bottom = elastic_demand_2.dominate_load['column moment'][
+ target_story, 2 * column_no
+ ]
+ moment_top = elastic_demand_2.dominate_load['column moment'][
+ target_story, 2 * column_no + 1
+ ]
+ if (
+ column_no == 0
+ or column_no == building_1.geometry['number of X bay']
+ ):
column_type = 'exterior column'
else:
column_type = 'interior column'
length = np.ndarray.item(
building_1.geometry['floor height'][target_story + 1]
- - building_1.geometry['floor height'][target_story])
+ - building_1.geometry['floor height'][target_story]
+ )
# Build instance for each column member
- construction_column_set[target_story][column_no] \
- = Column(building_2.member_size[column_type][target_story], axial_demand, shear_demand,
- moment_bottom, moment_top, length, length, steel)
+ construction_column_set[target_story][column_no] = Column(
+ building_2.member_size[column_type][target_story],
+ axial_demand,
+ shear_demand,
+ moment_bottom,
+ moment_top,
+ length,
+ length,
+ steel,
+ )
# Re-construct the connection objects (only update the joint connections attached to the revised column)
- for story in range(target_story-1 >= 0, target_story+1):
- for connection_no in range(building_2.geometry['number of X bay'] + 1):
- dead_load = building_2.gravity_loads['beam dead load'][story] # Unit: lb/ft
- live_load = building_2.gravity_loads['beam live load'][story] # Unit: lb/ft
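+ # Same caveat as the corresponding building_1 loop above: the range start below is a
+ # bool (0 or 1), presumably intended as max(target_story - 1, 0).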
+ for story in range(target_story - 1 >= 0, target_story + 1):
+ for connection_no in range(
+ building_2.geometry['number of X bay'] + 1
+ ):
+ dead_load = building_2.gravity_loads['beam dead load'][
+ story
+ ] # Unit: lb/ft
+ live_load = building_2.gravity_loads['beam live load'][
+ story
+ ] # Unit: lb/ft
span = building_2.geometry['X bay width'] # Unit: ft
if story != (building_2.geometry['number of story'] - 1):
# The connection is not on roof
if connection_no == 0:
# The connection is an exterior joint
- construction_connection_set[story][connection_no] \
- = Connection('typical exterior',
- steel, dead_load, live_load, span,
- left_beam=construction_beam_set[story][connection_no],
- right_beam=None,
- top_column=construction_column_set[story + 1][connection_no],
- bottom_column=construction_column_set[story][connection_no])
+ construction_connection_set[story][connection_no] = (
+ Connection(
+ 'typical exterior',
+ steel,
+ dead_load,
+ live_load,
+ span,
+ left_beam=construction_beam_set[story][
+ connection_no
+ ],
+ right_beam=None,
+ top_column=construction_column_set[story + 1][
+ connection_no
+ ],
+ bottom_column=construction_column_set[story][
+ connection_no
+ ],
+ )
+ )
elif connection_no == building_2.geometry['number of X bay']:
# The connection is an exterior joint
- construction_connection_set[story][connection_no] \
- = Connection('typical exterior', steel, dead_load, live_load, span,
- left_beam=construction_beam_set[story][connection_no - 1],
- right_beam=None,
- top_column=construction_column_set[story + 1][connection_no],
- bottom_column=construction_column_set[story][connection_no])
+ construction_connection_set[story][connection_no] = (
+ Connection(
+ 'typical exterior',
+ steel,
+ dead_load,
+ live_load,
+ span,
+ left_beam=construction_beam_set[story][
+ connection_no - 1
+ ],
+ right_beam=None,
+ top_column=construction_column_set[story + 1][
+ connection_no
+ ],
+ bottom_column=construction_column_set[story][
+ connection_no
+ ],
+ )
+ )
else:
# The connection is an interior joint
- construction_connection_set[story][connection_no] \
- = Connection('typical interior', steel, dead_load, live_load, span,
- left_beam=construction_beam_set[story][connection_no - 1],
- right_beam=construction_beam_set[story][connection_no],
- top_column=construction_column_set[story + 1][connection_no],
- bottom_column=construction_column_set[story][connection_no])
+ construction_connection_set[story][connection_no] = (
+ Connection(
+ 'typical interior',
+ steel,
+ dead_load,
+ live_load,
+ span,
+ left_beam=construction_beam_set[story][
+ connection_no - 1
+ ],
+ right_beam=construction_beam_set[story][
+ connection_no
+ ],
+ top_column=construction_column_set[story + 1][
+ connection_no
+ ],
+ bottom_column=construction_column_set[story][
+ connection_no
+ ],
+ )
+ )
+ elif connection_no == 0:
+ # The connection is a left top exterior joint
+ construction_connection_set[story][connection_no] = Connection(
+ 'top exterior',
+ steel,
+ dead_load,
+ live_load,
+ span,
+ left_beam=construction_beam_set[story][connection_no],
+ right_beam=None,
+ top_column=None,
+ bottom_column=construction_column_set[story][
+ connection_no
+ ],
+ )
+ elif connection_no == building_2.geometry['number of X bay']:
+ # The connection is a right top exterior joint
+ construction_connection_set[story][connection_no] = (
+ Connection(
+ 'top exterior',
+ steel,
+ dead_load,
+ live_load,
+ span,
+ left_beam=construction_beam_set[story][
+ connection_no - 1
+ ],
+ right_beam=None,
+ top_column=None,
+ bottom_column=construction_column_set[story][
+ connection_no
+ ],
+ )
+ )
else:
- # The connection is not on roof
- if connection_no == 0:
- # The connection is an left top exterior joint
- connection_set[story][connection_no] \
- = Connection('top exterior', steel, dead_load, live_load, span,
- left_beam=construction_beam_set[story][connection_no],
- right_beam=None,
- top_column=None,
- bottom_column=construction_column_set[story][connection_no])
- elif connection_no == building_2.geometry['number of X bay']:
- # The connection is an right top exterior joint
- construction_connection_set[story][connection_no] \
- = Connection('top exterior', steel, dead_load, live_load, span,
- left_beam=construction_beam_set[story][connection_no - 1],
- right_beam=None,
- top_column=None,
- bottom_column=construction_column_set[story][connection_no])
- else:
- # The connection is an top interior joint
- construction_connection_set[story][connection_no] \
- = Connection('top interior', steel, dead_load, live_load, span,
- left_beam=construction_beam_set[story][connection_no],
- right_beam=construction_beam_set[story][connection_no],
- top_column=None,
- bottom_column=construction_column_set[story][connection_no])
-
+ # The connection is a top interior joint
+ construction_connection_set[story][connection_no] = (
+ Connection(
+ 'top interior',
+ steel,
+ dead_load,
+ live_load,
+ span,
+ left_beam=construction_beam_set[story][
+ connection_no
+ ],
+ right_beam=construction_beam_set[story][
+ connection_no
+ ],
+ top_column=None,
+ bottom_column=construction_column_set[story][
+ connection_no
+ ],
+ )
+ )
# ********************************************************************
# ////////////// Revise Column for Constructability //////////////
@@ -926,7 +1452,9 @@ def seismic_design(base_directory, pathDataFolder, workingDirectory):
building_3.compute_seismic_force()
# Perform elastic analysis for construction sizes
- model_3 = ElasticAnalysis(building_3, for_drift_only=False, for_period_only=False)
+ model_3 = ElasticAnalysis( # noqa: F841
+ building_3, for_drift_only=False, for_period_only=False
+ )
building_3.read_story_drift()
# Obtain the elastic response
elastic_demand_3 = ElasticOutput(building_3)
@@ -934,54 +1462,95 @@ def seismic_design(base_directory, pathDataFolder, workingDirectory):
# Re-create column after adjusting the column
for story in range(building_3.geometry['number of story']):
for column_no in range(building_3.geometry['number of X bay'] + 1):
- axial_demand = abs(elastic_demand_3.dominate_load['column axial'][story, 2 * column_no])
- shear_demand = abs(elastic_demand_3.dominate_load['column shear'][story, 2 * column_no])
- moment_bottom = elastic_demand_3.dominate_load['column moment'][story, 2 * column_no]
- moment_top = elastic_demand_3.dominate_load['column moment'][story, 2 * column_no + 1]
+ axial_demand = abs(
+ elastic_demand_3.dominate_load['column axial'][story, 2 * column_no]
+ )
+ shear_demand = abs(
+ elastic_demand_3.dominate_load['column shear'][story, 2 * column_no]
+ )
+ moment_bottom = elastic_demand_3.dominate_load['column moment'][
+ story, 2 * column_no
+ ]
+ moment_top = elastic_demand_3.dominate_load['column moment'][
+ story, 2 * column_no + 1
+ ]
if column_no == 0 or column_no == building_3.geometry['number of X bay']:
column_type = 'exterior column'
else:
column_type = 'interior column'
length = np.ndarray.item(
- building_3.geometry['floor height'][story + 1] - building_3.geometry['floor height'][story])
+ building_3.geometry['floor height'][story + 1]
+ - building_3.geometry['floor height'][story]
+ )
# Build instance for each column member
- construction_column_set[story][column_no] = Column(building_3.member_size[column_type][story],
- axial_demand, shear_demand, moment_bottom, moment_top,
- length, length, steel)
+ construction_column_set[story][column_no] = Column(
+ building_3.member_size[column_type][story],
+ axial_demand,
+ shear_demand,
+ moment_bottom,
+ moment_top,
+ length,
+ length,
+ steel,
+ )
# Check the flag of each column (May not be necessary)
if not construction_column_set[story][column_no].check_flag():
- sys.stderr.write('Construction column_%s%s is not feasible!!!\n' % (story, column_no))
+ sys.stderr.write(
+ 'Construction column_%s%s is not feasible!!!\n' # noqa: UP031
+ % (story, column_no)
+ )
# Re-create connection objects after adjusting column
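+ # Note: despite the comment above, the loop below rebuilds the construction column
+ # objects again (its body matches the previous loop); the construction connection
+ # objects themselves are not re-created here.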
for story in range(building_3.geometry['number of story']):
for column_no in range(building_3.geometry['number of X bay'] + 1):
- axial_demand = abs(elastic_demand_3.dominate_load['column axial'][story, 2 * column_no])
- shear_demand = abs(elastic_demand_3.dominate_load['column shear'][story, 2 * column_no])
- moment_bottom = elastic_demand_3.dominate_load['column moment'][story, 2 * column_no]
- moment_top = elastic_demand_3.dominate_load['column moment'][story, 2 * column_no + 1]
+ axial_demand = abs(
+ elastic_demand_3.dominate_load['column axial'][story, 2 * column_no]
+ )
+ shear_demand = abs(
+ elastic_demand_3.dominate_load['column shear'][story, 2 * column_no]
+ )
+ moment_bottom = elastic_demand_3.dominate_load['column moment'][
+ story, 2 * column_no
+ ]
+ moment_top = elastic_demand_3.dominate_load['column moment'][
+ story, 2 * column_no + 1
+ ]
if column_no == 0 or column_no == building_3.geometry['number of X bay']:
column_type = 'exterior column'
else:
column_type = 'interior column'
length = np.ndarray.item(
- building_3.geometry['floor height'][story + 1] - building_3.geometry['floor height'][story])
+ building_3.geometry['floor height'][story + 1]
+ - building_3.geometry['floor height'][story]
+ )
# Build instance for each column member
- construction_column_set[story][column_no] = Column(building_3.member_size[column_type][story],
- axial_demand, shear_demand, moment_bottom, moment_top,
- length, length, steel)
+ construction_column_set[story][column_no] = Column(
+ building_3.member_size[column_type][story],
+ axial_demand,
+ shear_demand,
+ moment_bottom,
+ moment_top,
+ length,
+ length,
+ steel,
+ )
# Check the flag of each column (May not be necessary)
if not construction_column_set[story][column_no].check_flag():
- sys.stderr.write('Construction column_%s%s is not feasible!!!\n' % (story, column_no))
+ sys.stderr.write(
+ 'Construction column_%s%s is not feasible!!!\n' # noqa: UP031
+ % (story, column_no)
+ )
# ********************************************************************
# //////////// Check Column Width Greater than Beam //////////////////
# ********************************************************************
- for story in range(0, building_3.geometry['number of story']):
- for col_no in range(0, building_3.geometry['number of X bay']+1):
- if construction_column_set[story][col_no].section['bf'] \
- < construction_beam_set[story][0].section['bf']:
- print("Column width in Story %i is less than beam" % (story))
-
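+ # 'bf' is presumably the section flange width; this loop warns when a story's
+ # columns have narrower flanges than its beams, which would complicate the
+ # beam-to-column connection detailing.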
+ for story in range(building_3.geometry['number of story']):
+ for col_no in range(building_3.geometry['number of X bay'] + 1):
+ if (
+ construction_column_set[story][col_no].section['bf']
+ < construction_beam_set[story][0].section['bf']
+ ):
+ print('Column width in Story %i is less than beam' % (story)) # noqa: T201
# ********************************************************************
# ///////////////// Store Design Results /////////////////////////////
@@ -990,57 +1559,78 @@ def seismic_design(base_directory, pathDataFolder, workingDirectory):
# building_3: construction design results
# Create the directory for the design results if it does not already exist
- Path(building_1.directory['building design results']).mkdir(parents=True, exist_ok=True)
+ Path(building_1.directory['building design results']).mkdir(
+ parents=True, exist_ok=True
+ )
# Change the working directory for the design results
os.chdir(building_1.directory['building design results'])
# Nonlinear model generation may require information about the building, the beam/column hinges, and the panel zone thicknesses.
# Store the building class to "building.pkl"
- with open('optimal_building.pkl', 'wb') as output_file:
+ with open('optimal_building.pkl', 'wb') as output_file: # noqa: PTH123
pickle.dump(building_1, output_file)
- with open('construction_building.pkl', 'wb') as output_file:
+ with open('construction_building.pkl', 'wb') as output_file: # noqa: PTH123
pickle.dump(building_3, output_file)
# Store the beam set to "beam_set.pkl"
- with open('optimal_beam_set.pkl', 'wb') as output_file:
+ with open('optimal_beam_set.pkl', 'wb') as output_file: # noqa: PTH123
pickle.dump(beam_set, output_file)
# Store the column set to "column_set.pkl"
- with open('optimal_column_set.pkl', 'wb') as output_file:
+ with open('optimal_column_set.pkl', 'wb') as output_file: # noqa: PTH123
pickle.dump(column_set, output_file)
# Store the connection set to "connection_set.pkl"
- with open('optimal_connection_set.pkl', 'wb') as output_file:
+ with open('optimal_connection_set.pkl', 'wb') as output_file: # noqa: PTH123
pickle.dump(connection_set, output_file)
# Store the construction beam set
- with open('construction_beam_set.pkl', 'wb') as output_file:
+ with open('construction_beam_set.pkl', 'wb') as output_file: # noqa: PTH123
pickle.dump(construction_beam_set, output_file)
# Store the construction column set
- with open('construction_column_set.pkl', 'wb') as output_file:
+ with open('construction_column_set.pkl', 'wb') as output_file: # noqa: PTH123
pickle.dump(construction_column_set, output_file)
- with open('construction_connection_set.pkl', 'wb') as output_file:
+ with open('construction_connection_set.pkl', 'wb') as output_file: # noqa: PTH123
pickle.dump(construction_connection_set, output_file)
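+ # Sketch of the assumed downstream use: the nonlinear model generation step can
+ # presumably recover these objects with, e.g.,
+ #     with open('construction_building.pkl', 'rb') as f:
+ #         building_3 = pickle.load(f)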
-
# Store the member sizes and story drift into csv files.
- optimal_member_size = pd.DataFrame(columns=['exterior column', 'interior column', 'beam'])
- construction_size = pd.DataFrame(columns=['exterior column', 'interior column', 'beam'])
+ optimal_member_size = pd.DataFrame(
+ columns=['exterior column', 'interior column', 'beam']
+ )
+ construction_size = pd.DataFrame(
+ columns=['exterior column', 'interior column', 'beam']
+ )
optimal_drift = pd.DataFrame(columns=['story drift'])
construction_drift = pd.DataFrame(columns=['story drift'])
for story in range(building_1.geometry['number of story']):
- optimal_member_size.loc[story, 'exterior column'] = building_1.member_size['exterior column'][story]
- optimal_member_size.loc[story, 'interior column'] = building_1.member_size['interior column'][story]
- optimal_member_size.loc[story, 'beam'] = building_1.member_size['beam'][story]
- construction_size.loc[story, 'exterior column'] = building_3.construction_size['exterior column'][story]
- construction_size.loc[story, 'interior column'] = building_3.construction_size['interior column'][story]
- construction_size.loc[story, 'beam'] = building_3.construction_size['beam'][story]
- optimal_drift.loc[story, 'story drift'] = building_1.elastic_response['story drift'][story]
- construction_drift.loc[story, 'story drift'] = building_3.elastic_response['story drift'][story]
+ optimal_member_size.loc[story, 'exterior column'] = building_1.member_size[
+ 'exterior column'
+ ][story]
+ optimal_member_size.loc[story, 'interior column'] = building_1.member_size[
+ 'interior column'
+ ][story]
+ optimal_member_size.loc[story, 'beam'] = building_1.member_size['beam'][
+ story
+ ]
+ construction_size.loc[story, 'exterior column'] = (
+ building_3.construction_size['exterior column'][story]
+ )
+ construction_size.loc[story, 'interior column'] = (
+ building_3.construction_size['interior column'][story]
+ )
+ construction_size.loc[story, 'beam'] = building_3.construction_size['beam'][
+ story
+ ]
+ optimal_drift.loc[story, 'story drift'] = building_1.elastic_response[
+ 'story drift'
+ ][story]
+ construction_drift.loc[story, 'story drift'] = building_3.elastic_response[
+ 'story drift'
+ ][story]
optimal_member_size.to_csv('OptimalMemberSize.csv', sep=',', index=False)
construction_size.to_csv('ConstructionSize.csv', sep=',', index=False)
optimal_drift.to_csv('OptimalStoryDrift.csv', sep=',', index=False)
@@ -1048,24 +1638,30 @@ def seismic_design(base_directory, pathDataFolder, workingDirectory):
# Store the doubler plate thickness
header = []
- for bay in range(building_1.geometry['number of X bay']+1):
- header.append('connection %s' % bay)
+ for bay in range(building_1.geometry['number of X bay'] + 1):
+ header.append('connection %s' % bay) # noqa: PERF401, UP031
# Initialize the dataframe to store doubler plate thickness
optimal_doubler_plate = pd.DataFrame(columns=header)
construction_doubler_plate = pd.DataFrame(columns=header)
# Fill these two dataframes
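+ # doubler_plate_thickness is read from each Connection object, presumably sized by
+ # its panel-zone shear check.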
for row in range(building_1.geometry['number of story']):
- for col in range(building_1.geometry['number of X bay']+1):
+ for col in range(building_1.geometry['number of X bay'] + 1):
name = header[col]
- optimal_doubler_plate.loc[row, name] = connection_set[row][col].doubler_plate_thickness
- construction_doubler_plate.loc[row, name] = construction_connection_set[row][col].doubler_plate_thickness
+ optimal_doubler_plate.loc[row, name] = connection_set[row][
+ col
+ ].doubler_plate_thickness
+ construction_doubler_plate.loc[row, name] = construction_connection_set[
+ row
+ ][col].doubler_plate_thickness
optimal_doubler_plate.to_csv('OptimalDoublerPlate.csv', sep=',', index=False)
- construction_doubler_plate.to_csv('ConstructionDoublerPlate.csv', sep=',', index=False)
+ construction_doubler_plate.to_csv(
+ 'ConstructionDoublerPlate.csv', sep=',', index=False
+ )
# Store the strong column beam ratio
# Define the headers for the dataframe
header = []
- for bay in range(building_1.geometry['number of X bay']+1):
+ for bay in range(building_1.geometry['number of X bay'] + 1):
if bay == 0 or bay == building_1.geometry['number of X bay']:
header.append('exterior joint')
else:
@@ -1076,69 +1672,106 @@ def seismic_design(base_directory, pathDataFolder, workingDirectory):
construction_column_beam_ratio = pd.DataFrame(columns=header)
# Fill this dataframe
for row in range(building_1.geometry['number of story']):
- for col in range(building_1.geometry['number of X bay']+2):
+ for col in range(building_1.geometry['number of X bay'] + 2):
if col == 0 or col == building_1.geometry['number of X bay']:
name = 'exterior joint'
- elif col == building_1.geometry['number of X bay']+1:
+ elif col == building_1.geometry['number of X bay'] + 1:
name = 'design ratio'
else:
name = 'interior joint'
- if row == building_1.geometry['number of story']-1:
+ if row == building_1.geometry['number of story'] - 1:
optimal_column_beam_ratio.loc[row, name] = 'NA'
construction_column_beam_ratio.loc[row, name] = 'NA'
+ elif col != building_1.geometry['number of X bay'] + 1:
+ optimal_column_beam_ratio.loc[row, name] = (
+ connection_set[row][col].moment['Mpc']
+ / connection_set[row][col].moment['Mpb']
+ )
+ construction_column_beam_ratio.loc[row, name] = (
+ construction_connection_set[row][col].moment['Mpc']
+ / construction_connection_set[row][col].moment['Mpb']
+ )
else:
- if col != building_1.geometry['number of X bay']+1:
- optimal_column_beam_ratio.loc[row, name] = connection_set[row][col].moment['Mpc'] \
- / connection_set[row][col].moment['Mpb']
- construction_column_beam_ratio.loc[row, name] = construction_connection_set[row][col].moment['Mpc']\
- /construction_connection_set[row][col].moment['Mpb']
- else:
- optimal_column_beam_ratio.loc[row, name] = 1 / BEAM_TO_COLUMN_RATIO
- construction_column_beam_ratio.loc[row, name] = 1 / BEAM_TO_COLUMN_RATIO
- optimal_column_beam_ratio.to_csv('OptimalColumnBeamRatio.csv', sep=',', index=False)
- construction_column_beam_ratio.to_csv('ConstructionColumnBeamRatio.csv', sep=',', index=False)
-
+ optimal_column_beam_ratio.loc[row, name] = 1 / BEAM_TO_COLUMN_RATIO
+ construction_column_beam_ratio.loc[row, name] = (
+ 1 / BEAM_TO_COLUMN_RATIO
+ )
+ optimal_column_beam_ratio.to_csv(
+ 'OptimalColumnBeamRatio.csv', sep=',', index=False
+ )
+ construction_column_beam_ratio.to_csv(
+ 'ConstructionColumnBeamRatio.csv', sep=',', index=False
+ )
# Store the demand to capacity ratio for columns
# Define the headers for the columns DC ratio
header = []
- for bay in range(building_1.geometry['number of X bay']+1):
- header.extend(['column %s' % bay])
+ for bay in range(building_1.geometry['number of X bay'] + 1):
+ header.extend(['column %s' % bay]) # noqa: UP031
force_list = ['axial', 'shear', 'flexural']
for force in force_list:
- column_DC = [[0] * (building_1.geometry['number of X bay'] + 1)
- for story in range(building_1.geometry['number of story'])]
- construction_column_DC = [[0] * (building_1.geometry['number of X bay'] + 1)
- for story in range(building_1.geometry['number of story'])]
- for story in range(0, building_1.geometry['number of story']):
- for bay in range(0, building_1.geometry['number of X bay']+1):
- column_DC[story][bay] = column_set[story][bay].demand_capacity_ratio[force]
- construction_column_DC[story][bay] = construction_column_set[story][bay].demand_capacity_ratio[force]
+ column_DC = [ # noqa: N806
+ [0] * (building_1.geometry['number of X bay'] + 1)
+ for story in range(building_1.geometry['number of story'])
+ ]
+ construction_column_DC = [ # noqa: N806
+ [0] * (building_1.geometry['number of X bay'] + 1)
+ for story in range(building_1.geometry['number of story'])
+ ]
+ for story in range(building_1.geometry['number of story']):
+ for bay in range(building_1.geometry['number of X bay'] + 1):
+ column_DC[story][bay] = column_set[story][bay].demand_capacity_ratio[
+ force
+ ]
+ construction_column_DC[story][bay] = construction_column_set[story][
+ bay
+ ].demand_capacity_ratio[force]
file_name = 'OptimalColumn' + force[0].upper() + force[1:] + 'DCRatio.csv'
- (pd.DataFrame(columns=header, data=column_DC)).to_csv(file_name, sep=',', index=False)
- file_name = 'ConstructionColumn' + force[0].upper() + force[1:] + 'DCRatio.csv'
- (pd.DataFrame(columns=header, data=construction_column_DC).to_csv(file_name, sep=',', index=False))
-
+ (pd.DataFrame(columns=header, data=column_DC)).to_csv(
+ file_name, sep=',', index=False
+ )
+ file_name = (
+ 'ConstructionColumn' + force[0].upper() + force[1:] + 'DCRatio.csv'
+ )
+ (
+ pd.DataFrame(columns=header, data=construction_column_DC).to_csv(
+ file_name, sep=',', index=False
+ )
+ )
# Store the demand to capacity ratio for beams
# Define the headers for the beams DC ratio
header = []
for bay in range(building_1.geometry['number of X bay']):
- header.extend(['beam %s' % bay])
+ header.extend(['beam %s' % bay]) # noqa: UP031
force_list = ['shear', 'flexural']
for force in force_list:
- beam_DC = [[0] * (building_1.geometry['number of X bay'])
- for story in range(building_1.geometry['number of story'])]
- construction_beam_DC = [[0] * (building_1.geometry['number of X bay'])
- for story in range(building_1.geometry['number of story'])]
- for story in range(0, building_1.geometry['number of story']):
- for bay in range(0, building_1.geometry['number of X bay']):
- beam_DC[story][bay] = beam_set[story][bay].demand_capacity_ratio[force]
- construction_beam_DC[story][bay] = construction_beam_set[story][bay].demand_capacity_ratio[force]
- file_name = 'OptimalBeam' + force[0].upper() + force[1:] + 'DCRatio.csv'
- (pd.DataFrame(columns=header, data=beam_DC)).to_csv(file_name, sep=',', index=False)
+ beam_DC = [ # noqa: N806
+ [0] * (building_1.geometry['number of X bay'])
+ for story in range(building_1.geometry['number of story'])
+ ]
+ construction_beam_DC = [ # noqa: N806
+ [0] * (building_1.geometry['number of X bay'])
+ for story in range(building_1.geometry['number of story'])
+ ]
+ for story in range(building_1.geometry['number of story']):
+ for bay in range(building_1.geometry['number of X bay']):
+ beam_DC[story][bay] = beam_set[story][bay].demand_capacity_ratio[
+ force
+ ]
+ construction_beam_DC[story][bay] = construction_beam_set[story][
+ bay
+ ].demand_capacity_ratio[force]
+ file_name = 'OptimalBeam' + force[0].upper() + force[1:] + 'DCRatio.csv'
+ (pd.DataFrame(columns=header, data=beam_DC)).to_csv(
+ file_name, sep=',', index=False
+ )
file_name = 'ConstructionBeam' + force[0].upper() + force[1:] + 'DCRatio.csv'
- (pd.DataFrame(columns=header, data=construction_beam_DC).to_csv(file_name, sep=',', index=False))
+ (
+ pd.DataFrame(columns=header, data=construction_beam_DC).to_csv(
+ file_name, sep=',', index=False
+ )
+ )
# Go back to base directory
os.chdir(base_directory)
diff --git a/modules/createSAM/AutoSDA/steel_material.py b/modules/createSAM/AutoSDA/steel_material.py
index 81bce66a9..13bfabd55 100644
--- a/modules/createSAM/AutoSDA/steel_material.py
+++ b/modules/createSAM/AutoSDA/steel_material.py
@@ -1,4 +1,4 @@
-# This file is used to define the class of Building
+# This file is used to define the class of SteelMaterial # noqa: CPY001, D100, INP001
# Developed by GUAN, XINGQUAN @ UCLA in June 2018
# Updated in Sept. 2018
@@ -8,22 +8,26 @@
# #########################################################################
-class SteelMaterial(object):
- """
- This class is used to define the steel material.
+class SteelMaterial:
+ """This class is used to define the steel material.
It includes the following physical quantities:
(1) Yield stress (Fy)
(2) Ultimate stress (Fu)
(3) Young's modulus (E)
(4) Ry value
- """
+ """ # noqa: D205, D400, D404
- def __init__(self, yield_stress=50, ultimate_stress=65, elastic_modulus=29000, Ry_value=1.1):
- """
- :param yield_stress: Fy of steel material, default value is 50 ksi
+ def __init__(
+ self,
+ yield_stress=50,
+ ultimate_stress=65,
+ elastic_modulus=29000,
+ Ry_value=1.1, # noqa: N803
+ ):
+ """:param yield_stress: Fy of steel material, default value is 50 ksi
:param elastic_modulus: E of steel material, default value is 29000 ksi
- """
+ """ # noqa: D205
self.Fy = yield_stress
self.Fu = ultimate_stress
self.E = elastic_modulus
- self.Ry = Ry_value
\ No newline at end of file
+ self.Ry = Ry_value
diff --git a/modules/createSAM/MDOF-LU/Building.cpp b/modules/createSAM/MDOF-LU/Building.cpp
index 273b213d5..9d88a860d 100644
--- a/modules/createSAM/MDOF-LU/Building.cpp
+++ b/modules/createSAM/MDOF-LU/Building.cpp
@@ -201,7 +201,7 @@ Building::readBIM(const char *event, const char *bim, const char *sam)
const char *seismicZoneType = NULL;
if (NULL == zType)
{
- std::cout << "No seismic zone specificed, Assuming seismic zone 4.\n";
+ std::cout << "No seismic zone specified, assuming seismic zone 4.\n";
seismicZoneType = "Z4";
}
else
diff --git a/modules/createSAM/MDOF-LU/HazusSAM_Generator.cpp b/modules/createSAM/MDOF-LU/HazusSAM_Generator.cpp
index f0976f5a6..e7c19223c 100644
--- a/modules/createSAM/MDOF-LU/HazusSAM_Generator.cpp
+++ b/modules/createSAM/MDOF-LU/HazusSAM_Generator.cpp
@@ -217,7 +217,7 @@ double HazusSAM_Generator::GetAlpha1(int n)
double HazusSAM_Generator::GetAlpha2(int n)
{
- // calculate the hight coefficient for the first natural mode
+ // calculate the height coefficient for the first natural mode
// assuming shear deformation and the k and m are identical for each story.
if(n<=1)
return 1.0;
diff --git a/modules/createSAM/RCFIAP/RC_FIAP_main.py b/modules/createSAM/RCFIAP/RC_FIAP_main.py
index e6e522fc7..24a83b68b 100644
--- a/modules/createSAM/RCFIAP/RC_FIAP_main.py
+++ b/modules/createSAM/RCFIAP/RC_FIAP_main.py
@@ -1,59 +1,71 @@
-## ############################################################### ##
-## RC_FIAP (Reinforced Concrete Frame Inelastic Analysis Platform) ##
-## ##
-## Developed by: ##
-## Victor F. Ceballos (vceballos@uninorte.edu.co) ##
-## Carlos A. Arteta (carteta@uninorte.edu.co ##
-## RC_FIAP_main.py : this is the main script that calls ##
-## GUIFrameNonLinearACI.py : graphical environment ##
-## mplwidget.py : cript to help plot the plastic hinge projector ##
-## ############################################################### ##
+# ############################################################### ## # noqa: CPY001, D100, INP001
+# RC_FIAP (Reinforced Concrete Frame Inelastic Analysis Platform) ##
+# ##
+# Developed by: ##
+# Victor F. Ceballos (vceballos@uninorte.edu.co) ##
+# Carlos A. Arteta (carteta@uninorte.edu.co) ##
+# RC_FIAP_main.py : this is the main script that calls ##
+# GUIFrameNonLinearACI.py : graphical environment ##
+# mplwidget.py : script to help plot the plastic hinge projector ##
+# ############################################################### ##
# Modified by Dr. Stevan Gavrilovic @ SimCenter, UC Berkeley
-import sys, os
-import argparse, json
-
-from math import pi, sqrt, ceil, floor
-from scipy import interpolate
+import argparse
+import json
+import os
+import sys
+from math import ceil, floor, pi, sqrt
-import openseespy.opensees as op
import numpy as np # load the numpy module, calling it np
-
+import openseespy.opensees as op
import pandas as pd
-import os
-from mpl_toolkits.axes_grid1 import make_axes_locatable
+from scipy import interpolate
# Definition of units
-m = 1. # define basic units -- output units
-kN = 1. # define basic units -- output units
-sec = 1. # define basic units -- output units
-mm = m / 1000. # define engineering units
-cm = m / 100.
-N = kN / 1000.
-MPa = N / mm ** 2
+m = 1.0 # define basic units -- output units
+kN = 1.0 # define basic units -- output units # noqa: N816
+sec = 1.0 # define basic units -- output units
+mm = m / 1000.0 # define engineering units
+cm = m / 100.0
+N = kN / 1000.0
+MPa = N / mm**2
GPa = MPa * 1000
-m2 = m ** 2 # m^2
-m3 = m ** 3 # m^3
-m4 = m ** 4 # m^4
+m2 = m**2 # m^2
+m3 = m**3 # m^3
+m4 = m**4 # m^4
inch = cm * 2.54
-ft = 12. * inch
-g = 9.81 * m / sec ** 2 # gravitational acceleration
+ft = 12.0 * inch
+g = 9.81 * m / sec**2 # gravitational acceleration
kip = 4.448 * kN
-ksi = kip / inch ** 2
-psi = ksi / 1000.
-lbf = psi * inch ** 2 # pounds force
-pcf = lbf / ft ** 3 # pounds per cubic foot
-psf = lbf / ft ** 3 # pounds per square foot
-in2 = inch ** 2 # inch^2
-in4 = inch ** 4 # inch^4
-GConc = 24. * kN / m ** 3 # Specific gravity of concrete
+ksi = kip / inch**2
+psi = ksi / 1000.0
+lbf = psi * inch**2 # pounds force
+pcf = lbf / ft**3 # pounds per cubic foot
+psf = lbf / ft**2  # pounds per square foot
+in2 = inch**2 # inch^2
+in4 = inch**4 # inch^4
+GConc = 24.0 * kN / m**3  # Unit weight of concrete
cbar = False
np.set_printoptions(precision=6)
-class BeamElasticElement:
- def __init__(self, EleTag, Nod_ini, Nod_end, AEle, EcEle, IzEle, LEle, BEle, HEle, ElegTr, RZi, RZe):
+class BeamElasticElement: # noqa: D101
+ def __init__(
+ self,
+ EleTag, # noqa: N803
+ Nod_ini, # noqa: N803
+ Nod_end, # noqa: N803
+ AEle, # noqa: N803
+ EcEle, # noqa: N803
+ IzEle, # noqa: N803
+ LEle, # noqa: N803
+ BEle, # noqa: N803
+ HEle, # noqa: N803
+ ElegTr, # noqa: N803
+ RZi, # noqa: N803
+ RZe, # noqa: N803
+ ):
self.EleTag = EleTag
self.Nod_ini = Nod_ini
self.Nod_end = Nod_end
@@ -67,9 +79,36 @@ def __init__(self, EleTag, Nod_ini, Nod_end, AEle, EcEle, IzEle, LEle, BEle, HEl
self.RZi = RZi
self.RZe = RZe
-class BeamDesing:
- def __init__(self, EleTag, b, h, Ast1, dt1, Mn_n1, Asb1, db1, Mn_p1, ns1, ss1, Ast2, dt2, Mn_n2, Asb2, db2, Mn_p2,
- ns2, ss2, Nod_ini, Nod_end, db_t1, db_b1, db_t2, db_b2):
+
+class BeamDesing: # noqa: D101
+ def __init__( # noqa: PLR0913, PLR0917
+ self,
+ EleTag, # noqa: N803
+ b,
+ h,
+ Ast1, # noqa: N803
+ dt1,
+ Mn_n1, # noqa: N803
+ Asb1, # noqa: N803
+ db1,
+ Mn_p1, # noqa: N803
+ ns1,
+ ss1,
+ Ast2, # noqa: N803
+ dt2,
+ Mn_n2, # noqa: N803
+ Asb2, # noqa: N803
+ db2,
+ Mn_p2, # noqa: N803
+ ns2,
+ ss2,
+ Nod_ini, # noqa: N803
+ Nod_end, # noqa: N803
+ db_t1,
+ db_b1,
+ db_t2,
+ db_b2,
+ ):
self.EleTag = EleTag
self.b = b
self.h = h
@@ -97,9 +136,31 @@ def __init__(self, EleTag, b, h, Ast1, dt1, Mn_n1, Asb1, db1, Mn_p1, ns1, ss1, A
self.db_b2 = db_b2
-class ColDesing:
- def __init__(self, EleTag, b, h, nbH, nbB, db, As, Pu_v, Mu_v, fiPn, fiMn, Mn_i, d, dist, ro, Mu_i,
- sst, nsB, nsH, Nod_ini, Nod_end):
+class ColDesing: # noqa: D101
+ def __init__( # noqa: PLR0913, PLR0917
+ self,
+ EleTag, # noqa: N803
+ b,
+ h,
+ nbH, # noqa: N803
+ nbB, # noqa: N803
+ db,
+ As, # noqa: N803
+ Pu_v, # noqa: N803
+ Mu_v, # noqa: N803
+ fiPn, # noqa: N803
+ fiMn, # noqa: N803
+ Mn_i, # noqa: N803
+ d,
+ dist,
+ ro,
+ Mu_i, # noqa: N803
+ sst,
+ nsB, # noqa: N803
+ nsH, # noqa: N803
+ Nod_ini, # noqa: N803
+ Nod_end, # noqa: N803
+ ):
self.EleTag = EleTag
self.b = b
self.h = h
@@ -123,189 +184,191 @@ def __init__(self, EleTag, b, h, nbH, nbB, db, As, Pu_v, Mu_v, fiPn, fiMn, Mn_i,
self.Nod_end = Nod_end
-class DuctilityCurve:
- def __init__(self, xi, xe, yi, ye, CD_i, CD_e):
+class DuctilityCurve: # noqa: D101
+ def __init__(self, xi, xe, yi, ye, CD_i, CD_e): # noqa: N803
self.xi = xi
self.xe = xe
self.yi = yi
self.ye = ye
self.CD_i = CD_i
self.CD_e = CD_e
-
-class TclLogger:
+
+class TclLogger: # noqa: D101
def __init__(self):
- self.list_of_lines = ['# This is an autogenerated .tcl file from SimCenter workflow']
-
+ self.list_of_lines = [
+ '# This is an autogenerated .tcl file from SimCenter workflow'
+ ]
+
# Add a string line to the output file
- def add_line(self, line, addNewLine = True):
- if addNewLine == True:
- self.list_of_lines.append(line+'\n')
- else :
+ def add_line(self, line, addNewLine=True): # noqa: FBT002, N803, D102
+ if addNewLine == True: # noqa: E712
+ self.list_of_lines.append(line + '\n')
+ else:
self.list_of_lines.append(line)
-
+
# Convenience function to create a line from an array of inputs to openseespy function
- def add_array(self, line, addNewLine = True):
-
- outLine = ''
+ def add_array(self, line, addNewLine=True): # noqa: FBT002, N803, D102
+ outLine = '' # noqa: N806
for item in line:
- outLine += str(item) + ' '
-
-# # Remove the last space
-# outLine = outLine.rstrip()
-#
-# # Add the ; char to the end of the line
-# outLine += ';'
- self.add_line(outLine,addNewLine)
-
+ outLine += str(item) + ' ' # noqa: N806
+
+ # # Remove the last space
+ # outLine = outLine.rstrip()
+ #
+ # # Add the ; char to the end of the line
+ # outLine += ';'
+ self.add_line(outLine, addNewLine)
+
# Save the output file
- def save_as_file(self):
-
+ def save_as_file(self): # noqa: D102
# Get the current directory
- workingDirectory = os.getcwd()
-
- pathFile = os.path.join(workingDirectory, 'Model.tcl')
-
- if os.path.exists(pathFile):
- os.remove(pathFile)
-
- with open(pathFile, "a+") as file_object:
-
- appendEOL = False
+ workingDirectory = os.getcwd() # noqa: PTH109, N806
+
+ pathFile = os.path.join(workingDirectory, 'Model.tcl') # noqa: PTH118, N806
+
+ if os.path.exists(pathFile): # noqa: PTH110
+ os.remove(pathFile) # noqa: PTH107
+
+ with open(pathFile, 'a+') as file_object: # noqa: PLW1514, PTH123
+ appendEOL = False # noqa: N806
# Move read cursor to the start of file.
file_object.seek(0)
-
+
# Check if file is not empty
data = file_object.read(100)
-
+
if len(data) > 0:
- appendEOL = True
-
+ appendEOL = True # noqa: N806
+
# Iterate over each string in the list
for line in self.list_of_lines:
# If file is not empty then append '\n' before first line for
# other lines always append '\n' before appending line
- if appendEOL == True:
- file_object.write("\n")
+ if appendEOL == True: # noqa: E712
+ file_object.write('\n')
else:
- appendEOL = True
+ appendEOL = True # noqa: N806
# Append element at the end of file
file_object.write(line)
-
+
# print(self.list_of_lines)
-def runBuildingDesign(BIM_file, EVENT_file, SAM_file, getRV):
-
+def runBuildingDesign(BIM_file, EVENT_file, SAM_file, getRV): # noqa: ARG001, N802, N803, D103
# Get the current directory
- workingDirectory = os.getcwd()
+ workingDirectory = os.getcwd() # noqa: PTH109, N806, F841
- rootSIM = {}
+ rootSIM = {} # noqa: N806
# Try to open the BIM json
- with open(BIM_file, 'r', encoding='utf-8') as f:
- rootBIM = json.load(f)
+ with open(BIM_file, encoding='utf-8') as f: # noqa: PTH123
+ rootBIM = json.load(f) # noqa: N806
try:
- #rootSIM = rootBIM['StructuralInformation']
- rootSIM = rootBIM['Modeling']
+ # rootSIM = rootBIM['StructuralInformation']
+ rootSIM = rootBIM['Modeling'] # noqa: N806
# KZ: append simulation attribute
- rootSIM['Simulation'] = rootBIM.get('Simulation',None)
- except:
- raise ValueError("RC_FIAP - structural information missing")
-
+ rootSIM['Simulation'] = rootBIM.get('Simulation', None)
+ except: # noqa: E722
+ raise ValueError('RC_FIAP - structural information missing') # noqa: B904, EM101, TRY003
# Get the random variables from the input file
try:
- rootRV = rootBIM['randomVariables']
- except:
- raise ValueError("RC_FIAP - randomVariables section missing")
-
- RV_ARRAY = {}
+ rootRV = rootBIM['randomVariables'] # noqa: N806
+ except: # noqa: E722
+ raise ValueError('RC_FIAP - randomVariables section missing') # noqa: B904, EM101, TRY003
+
+ RV_ARRAY = {} # noqa: N806
# Populate the RV array with name/value pairs.
# If a random variable is used here, the RV array will contain its current value
for rv in rootRV:
# Try to get the name and value of the random variable
- rvName = rv['name']
- curVal = rv['value']
+ rvName = rv['name'] # noqa: N806
+ curVal = rv['value'] # noqa: N806
# Check if the current value a realization of a RV, i.e., is not a RV label
# If so, then set the current value as the mean
- if "RV" in str(curVal) :
- curVal = float(rv['mean'])
+ if 'RV' in str(curVal):
+ curVal = float(rv['mean']) # noqa: N806
RV_ARRAY[rvName] = curVal
-
# *********************** Design Starts Here *************************
- #if getRV == "False":
+ # if getRV == "False":
if getRV is False:
-
- print("Running seismic design in FIAP")
+ print('Running seismic design in FIAP') # noqa: T201
# Create the tcl output logger
- outputLogger = TclLogger()
-
- outputLogger.add_line('# Reinforced Concrete Frame Inelastic Analysis Platform (RCFIAP)',False)
- outputLogger.add_line('# Developed by Victor Ceballos & Carlos Arteta',False)
- outputLogger.add_line('# Modified by Stevan Gavrilovic - NHERI SimCenter for use in EE-UQ')
-
+ outputLogger = TclLogger() # noqa: N806
+
+ outputLogger.add_line(
+ '# Reinforced Concrete Frame Inelastic Analysis Platform (RCFIAP)',
+ False, # noqa: FBT003
+ )
+ outputLogger.add_line(
+ '# Developed by Victor Ceballos & Carlos Arteta',
+ False, # noqa: FBT003
+ )
+ outputLogger.add_line(
+ '# Modified by Stevan Gavrilovic - NHERI SimCenter for use in EE-UQ'
+ )
+
# Create a class object
- RCDes = RCFIAP()
-
- print("Starting seismic design")
-
+ RCDes = RCFIAP() # noqa: N806
+
+ print('Starting seismic design') # noqa: T201
+
# Run the building design
RCDes.Design(rootSIM)
-
- print("Creating nonlinear model")
+
+ print('Creating nonlinear model') # noqa: T201
# Run a pushover analysis - for testing to compare with original code
- doPushover = False
+ doPushover = False # noqa: N806
# Create the nonlinear model
- RCDes.CreateNLM(rootSIM,outputLogger,doPushover)
-
+ RCDes.CreateNLM(rootSIM, outputLogger, doPushover)
+
# Save the output file from the logger
outputLogger.save_as_file()
- if doPushover == True:
- print("Running pushover analysis")
+ if doPushover == True: # noqa: E712
+ print('Running pushover analysis') # noqa: T201
RCDes.Pushover(rootSIM)
-
# Now create the SAM file for export
- root_SAM = {}
+ root_SAM = {} # noqa: N806
root_SAM['mainScript'] = 'Model.tcl'
root_SAM['type'] = 'OpenSeesInput'
root_SAM['units'] = {
- "force": "kN",
- "length": "m",
- "temperature": "C",
- "time": "sec"
- }
-
+ 'force': 'kN',
+ 'length': 'm',
+ 'temperature': 'C',
+ 'time': 'sec',
+ }
+
# Number of dimensions
root_SAM['ndm'] = 2
# Number of degrees of freedom at each node
root_SAM['ndf'] = 3
-
+
# The number of stories
- vecHeights = rootSIM["VecStoryHeights"]
- vecHeights = vecHeights.split(',')
- vecHeights = np.array(vecHeights, dtype=float)
-
- numStories = len(vecHeights)
+ vecHeights = rootSIM['VecStoryHeights'] # noqa: N806
+ vecHeights = vecHeights.split(',') # noqa: N806
+ vecHeights = np.array(vecHeights, dtype=float) # noqa: N806
+
+ numStories = len(vecHeights) # noqa: N806
root_SAM['numStory'] = numStories
-
+
# The number of spans
- vecSpans = rootSIM["VecSpans"]
- vecSpans = vecSpans.split(',')
- vecSpans = np.array(vecSpans, dtype=float)
- numSpans = len(vecSpans)
-
+ vecSpans = rootSIM['VecSpans'] # noqa: N806
+ vecSpans = vecSpans.split(',') # noqa: N806
+ vecSpans = np.array(vecSpans, dtype=float) # noqa: N806
+ numSpans = len(vecSpans) # noqa: N806
+
# Get the node mapping
# Consider a structure with 3 stories and 2 spans
# Then the node numbering scheme is
@@ -320,343 +383,394 @@ def runBuildingDesign(BIM_file, EVENT_file, SAM_file, getRV):
# | | |
# #0 #1 #2
- clineOffset = 0
+ clineOffset = 0 # noqa: N806
if numSpans > 1:
- clineOffset = int(numSpans/2)
-
+ clineOffset = int(numSpans / 2) # noqa: N806
+
node_map = []
-
- # Using nodes on column #1 to calculate story drift
- for i in range(0, numStories+1):
- nodeTag = i*(numSpans+1)
+ # Using nodes on column #1 to calculate story drift
+ for i in range(numStories + 1):
+ nodeTag = i * (numSpans + 1) # noqa: N806
# Create the node and add it to the node mapping array
node_entry = {}
node_entry['node'] = nodeTag
node_entry['cline'] = 'response'
- node_entry['floor'] = '{}'.format(i)
+ node_entry['floor'] = f'{i}'
node_map.append(node_entry)
- ## KZ & AZ: Add centroid for roof drift
+ # KZ & AZ: Add centroid for roof drift
node_entry_c = {}
node_entry_c['node'] = nodeTag + clineOffset
node_entry_c['cline'] = 'centroid'
- node_entry_c['floor'] = '{}'.format(i)
+ node_entry_c['floor'] = f'{i}'
node_map.append(node_entry_c)
root_SAM['NodeMapping'] = node_map
- with open(SAM_file, 'w') as f:
+ with open(SAM_file, 'w') as f: # noqa: PLW1514, PTH123
json.dump(root_SAM, f, indent=2)
+
# Main functionality
-class RCFIAP:
- def Design(self, rootSIM):
- def __init__(rootSIM):
+class RCFIAP: # noqa: D101
+ def Design(self, rootSIM): # noqa: C901, D102, N802, N803, PLR0914, PLR0915
+ def __init__(rootSIM): # noqa: N803, N807
self.rootSIM = rootSIM
-
- global Loc_span, Loc_heigth, ListNodes, Elements, DataBeamDesing, DataColDesing, WDL, WLL, WDLS, Wtotal, cover
+
+ global Loc_span, Loc_heigth, ListNodes, Elements, DataBeamDesing, DataColDesing, WDL, WLL, WDLS, Wtotal, cover # noqa: PLW0603
# Function: Reads Beams design data from table that allows the user to modify the default design from TAB2 of GUI
def data_beams_table(self):
self.registros_beams = []
- for DB in DataBeamDesing:
+ for DB in DataBeamDesing: # noqa: N806
b = DB.b / cm
h = DB.h / cm
- L_As_top = DB.Ast1 / cm ** 2
- L_As_bot = DB.Asb1 / cm ** 2
- R_As_top = DB.Ast2 / cm ** 2
- R_As_bot = DB.Asb2 / cm ** 2
- L_Leg_n = DB.ns1
- R_Leg_n = DB.ns2
- L_Sstirrup = DB.ss1 / cm
- R_Sstirrup = DB.ss2 / cm
- registro = RegistroBeams(DB.EleTag, b, h, L_As_top, L_As_bot, L_Leg_n, L_Sstirrup, R_As_top, R_As_bot, R_Leg_n, R_Sstirrup)
+ L_As_top = DB.Ast1 / cm**2 # noqa: N806
+ L_As_bot = DB.Asb1 / cm**2 # noqa: N806
+ R_As_top = DB.Ast2 / cm**2 # noqa: N806
+ R_As_bot = DB.Asb2 / cm**2 # noqa: N806
+ L_Leg_n = DB.ns1 # noqa: N806
+ R_Leg_n = DB.ns2 # noqa: N806
+ L_Sstirrup = DB.ss1 / cm # noqa: N806
+ R_Sstirrup = DB.ss2 / cm # noqa: N806
+ registro = RegistroBeams( # noqa: F821
+ DB.EleTag,
+ b,
+ h,
+ L_As_top,
+ L_As_bot,
+ L_Leg_n,
+ L_Sstirrup,
+ R_As_top,
+ R_As_bot,
+ R_Leg_n,
+ R_Sstirrup,
+ )
self.registros_beams.append(registro)
# Function: Reads Columns design data from table that allows the user to modify the default design from TAB2 of GUI.
def data_columns_table(self):
self.registros_cols = []
-
- for DC in DataColDesing:
+
+ for DC in DataColDesing: # noqa: N806
b = DC.b / cm
h = DC.h / cm
db = DC.db / mm
- nbH = DC.nbH
- nbB = DC.nbB
- nsH = DC.nsH
- nsB = DC.nsB
+ nbH = DC.nbH # noqa: N806
+ nbB = DC.nbB # noqa: N806
+ nsH = DC.nsH # noqa: N806
+ nsB = DC.nsB # noqa: N806
sst = DC.sst / cm
- registro = RegistroColumns(DC.EleTag, b, h, db, nbH, nbB, nsH, nsB, sst)
+ registro = RegistroColumns( # noqa: F821
+ DC.EleTag, b, h, db, nbH, nbB, nsH, nsB, sst
+ )
self.registros_cols.append(registro)
# Compression block parameters beta as function f'c
def beta1(fc):
if fc <= 28 * MPa:
- Beta1 = 0.85
+ Beta1 = 0.85 # noqa: N806
else:
- Beta1 = max([0.85 - 0.05 * (fc - 28.) / 7., 0.65])
+ Beta1 = max([0.85 - 0.05 * (fc - 28.0) / 7.0, 0.65]) # noqa: N806
return Beta1
# Design load combinations
- def Combo_ACI(DL, LL, E):
- U1 = 1.2 * DL + 1.6 * LL
- U2 = 1.2 * DL + 1.0 * LL + 1.0 * E
- U3 = 1.2 * DL + 1.0 * LL - 1.0 * E
- U4 = 0.9 * DL + 1.0 * E
- U5 = 0.9 * DL - 1.0 * E
+ def Combo_ACI(DL, LL, E): # noqa: N802, N803
+ U1 = 1.2 * DL + 1.6 * LL # noqa: N806
+ U2 = 1.2 * DL + 1.0 * LL + 1.0 * E # noqa: N806
+ U3 = 1.2 * DL + 1.0 * LL - 1.0 * E # noqa: N806
+ U4 = 0.9 * DL + 1.0 * E # noqa: N806
+ U5 = 0.9 * DL - 1.0 * E # noqa: N806
return U1, U2, U3, U4, U5
# Flexural beams design
- def AsBeam(Mu, EleTag):
+ def AsBeam(Mu, EleTag): # noqa: N802, N803
b, h = BBeam, HBeam
- Mu = abs(Mu)
+ Mu = abs(Mu) # noqa: N806
db_v = np.array([4, 5, 6, 7, 8, 10])
for ndb in db_v:
- db = ndb / 8. * inch
+ db = ndb / 8.0 * inch
d = h - cover - dst - 0.5 * db
if Mu == 0.0:
ro_req = ro_min_b
else:
- ro_req = 0.85 * fcB / fy * (1. - sqrt(1. - 2. * (Mu / 0.9 / b / d ** 2) / 0.85 / fcB))
- if ro_req < ro_min_b:
- ro_req = ro_min_b
- As_req = ro_req * b * d
- Ab = pi * db ** 2 / 4.
- nb = max(2., ceil(As_req / Ab))
- As_con = nb * Ab
- slb = (b - 2 * cover - 2 * dst - nb * db) / (nb - 1.) # free clear bars
- if slb >= max(1. * inch, db):
+ ro_req = (
+ 0.85
+ * fcB
+ / fy
+ * (
+ 1.0
+ - sqrt(1.0 - 2.0 * (Mu / 0.9 / b / d**2) / 0.85 / fcB)
+ )
+ )
+ ro_req = max(ro_req, ro_min_b)
+ As_req = ro_req * b * d # noqa: N806
+ Ab = pi * db**2 / 4.0 # noqa: N806
+ nb = max(2.0, ceil(As_req / Ab))
+ As_con = nb * Ab # noqa: N806
+ slb = (b - 2 * cover - 2 * dst - nb * db) / (
+ nb - 1.0
+ ) # clear spacing between bars
+ if slb >= max(1.0 * inch, db):
break
if ro_req > ro_max_b:
- print("Steel percentage greater than the maximum in Beam " + str(EleTag))
- if slb < min(1. * inch, db):
- print("Bar separation is not ok in Beam " + str(EleTag))
+ print( # noqa: T201
+ 'Steel percentage greater than the maximum in Beam '
+ + str(EleTag)
+ )
+ if slb < min(1.0 * inch, db):
+ print('Bar separation is not ok in Beam ' + str(EleTag)) # noqa: T201
a = fy * As_con / 0.85 / fcB / b
- Mn = fy * As_con * (d - a / 2.)
+ Mn = fy * As_con * (d - a / 2.0) # noqa: N806
return As_con, d, Mn, db
# Shear beams design
- def AvBeam(Vu, db, d, EleTag):
- Vc = 0.17 * sqrt(fcB / 1000.) * MPa * BBeam * d
- Vs = (Vu - 0.75 * Vc) / 0.75
- if Vs > 4. * Vc:
- print("reshape by shear in Beam " + str(EleTag))
- se_1 = min(d / 4., 8. * db, 24. * dst, 300. * mm)
+ def AvBeam(Vu, db, d, EleTag): # noqa: N802, N803
+ Vc = 0.17 * sqrt(fcB / 1000.0) * MPa * BBeam * d # noqa: N806
+ Vs = (Vu - 0.75 * Vc) / 0.75 # noqa: N806
+ if Vs > 4.0 * Vc:
+ print('resize by shear in Beam ' + str(EleTag)) # noqa: T201
+ se_1 = min(d / 4.0, 8.0 * db, 24.0 * dst, 300.0 * mm)
 nr_v = np.array([2, 3, 4]) # vector of stirrup leg counts
- if Vs <= 0.:
+ if Vs <= 0.0:
se = se_1
- nra = 2.
+ nra = 2.0
else:
for nra in nr_v:
- Ave = Ast * nra # area transversal del estribo
+ Ave = Ast * nra # cross-sectional area of the stirrup # noqa: N806
se_2 = Ave * fy * d / Vs
se = min(se_1, se_2)
- if se >= 60. * mm:
+ if se >= 60.0 * mm:
break
se = floor(se / cm) * cm
- if se < 60. * mm:
- print("Stirrup spacing is less than 6 cm in beam " + str(EleTag))
+ if se < 60.0 * mm:
+ print('Stirrup spacing is less than 6 cm in beam ' + str(EleTag)) # noqa: T201
return nra, se
 # Columns P-M design
- def AsColumn():
+ def AsColumn(): # noqa: C901, N802
verif = False
- while verif == False:
+ while verif == False: # noqa: E712, PLR1702
for ndb in db_v:
- db = ndb / 8. * inch
- Ab = pi * db ** 2. / 4.
+ db = ndb / 8.0 * inch
+ Ab = pi * db**2.0 / 4.0 # noqa: N806
dp = cover + dst + 0.5 * db
d = h - dp
- for nbH in nbH_v:
- for nbB in nbB_v:
- nbT = 2. * (nbB + nbH - 2.) # numero total de barras
- Ast = nbT * Ab
+ for nbH in nbH_v: # noqa: N806
+ for nbB in nbB_v: # noqa: N806
+ # total number of bars
+ nbT = 2.0 * (nbB + nbH - 2.0) # noqa: N806
+ Ast = nbT * Ab # noqa: N806
ro = Ast / b / h
- As = np.hstack([nbB * Ab, np.ones(nbH - 2) * 2 * Ab, nbB * Ab])
+ As = np.hstack( # noqa: N806
+ [nbB * Ab, np.ones(nbH - 2) * 2 * Ab, nbB * Ab]
+ )
dist = np.linspace(dp, h - dp, nbH)
if ro >= ro_min:
- Pn_max = 0.80 * (0.85 * fcC * (b * h - Ast) + fy * Ast)
- Tn_max = -fy * Ast
+ Pn_max = 0.80 * ( # noqa: N806
+ 0.85 * fcC * (b * h - Ast) + fy * Ast
+ )
+ Tn_max = -fy * Ast # noqa: N806
c = np.linspace(1.1 * h / npts, 1.1 * h, npts)
a = Beta1C * c
- Pconc = 0.85 * fcC * a * b
- Mconc = Pconc * (h - a) / 2.
+ Pconc = 0.85 * fcC * a * b # noqa: N806
+ Mconc = Pconc * (h - a) / 2.0 # noqa: N806
et = ecu * (d - c) / c
fiv = np.copy(et)
- fiv = np.where(fiv >= 0.005, 0.9, fiv)
- fiv = np.where(fiv <= 0.002, 0.65, fiv)
- fiv = np.where((fiv > 0.002) & (fiv < 0.005), (0.65 + 0.25 * (fiv - 0.002) / 0.003),
- fiv)
+ fiv = np.where(fiv >= 0.005, 0.9, fiv) # noqa: PLR2004
+ fiv = np.where(fiv <= 0.002, 0.65, fiv) # noqa: PLR2004
+ fiv = np.where(
+ (fiv > 0.002) & (fiv < 0.005), # noqa: PLR2004
+ (0.65 + 0.25 * (fiv - 0.002) / 0.003),
+ fiv,
+ )
c = c[:, np.newaxis]
es = ecu * (c - dist) / c
fs = Es * es
fs = np.where(fs > fy, fy, fs)
fs = np.where(fs < -fy, -fy, fs)
- Pacer = np.sum(fs * As, axis=1)
- Macer = np.sum(fs * As * (h / 2. - dist), axis=1)
- Pn = np.hstack(
- [Tn_max, np.where(Pconc + Pacer > Pn_max, Pn_max, Pconc + Pacer), Pn_max])
- Mn = np.hstack([0, Mconc + Macer, 0])
+ Pacer = np.sum(fs * As, axis=1) # noqa: N806
+ Macer = np.sum(fs * As * (h / 2.0 - dist), axis=1) # noqa: N806
+ Pn = np.hstack( # noqa: N806
+ [
+ Tn_max,
+ np.where(
+ Pconc + Pacer > Pn_max,
+ Pn_max,
+ Pconc + Pacer,
+ ),
+ Pn_max,
+ ]
+ )
+ Mn = np.hstack([0, Mconc + Macer, 0]) # noqa: N806
fiv = np.hstack([0.9, fiv, 0.65])
- fiPn = fiv * Pn
- fiMn = fiv * Mn
+ fiPn = fiv * Pn # noqa: N806
+ fiMn = fiv * Mn # noqa: N806
if np.all((Pu_v >= min(fiPn)) & (Pu_v <= max(fiPn))):
- Mu_i = np.interp(Pu_v, fiPn, fiMn)
- Mn_i = np.interp(Pu_v, Pn, Mn)
- if np.all(Mu_i >= Mu_v) == True:
+ Mu_i = np.interp(Pu_v, fiPn, fiMn) # noqa: N806
+ Mn_i = np.interp(Pu_v, Pn, Mn) # noqa: N806
+ if np.all(Mu_i >= Mu_v) == True: # noqa: E712
verif = True
break
- if verif == True:
+ if verif == True: # noqa: E712
break
- if verif == True:
+ if verif == True: # noqa: E712
break
if ndb == db_v[-1] and ro > ro_max:
- print('column ' + str(EleTag) + 'needs to be resized by reinforcement ratio')
+ print( # noqa: T201
+ 'column '
+ + str(EleTag)
+ + ' needs to be resized by reinforcement ratio'
+ )
break
return nbH, nbB, db, As, fiPn, fiMn, Mn_i, d, dist, ro, Mu_i
# Shear columns design
- def AvColumn():
+ def AvColumn(): # noqa: N802
fiv = 0.75
- Ag = b * h
- se_1 = min(8. * db, b / 2., h / 2., 200. * mm) # separacion minima c.18.4.3.3 ACI-19
+ Ag = b * h # noqa: N806
+ se_1 = min(
+ 8.0 * db, b / 2.0, h / 2.0, 200.0 * mm
+ ) # minimum spacing per clause 18.4.3.3 of ACI-19
dp = cover + dst + db / 2
d = h - dp
- neH = floor(nbH / 2) + 1
- neB = floor(nbB / 2) + 1
+ neH = floor(nbH / 2) + 1 # noqa: N806
+ neB = floor(nbB / 2) + 1 # noqa: N806
- Ash_H = neH * Ast
- Ash_B = neB * Ast
+ Ash_H = neH * Ast # noqa: N806, F841
+ Ash_B = neB * Ast # noqa: N806
- Vc = (0.17 * sqrt(fcC * MPa) + Nu_min / 6 / Ag) * b * d
- Vs = (Vu - fiv * Vc) / fiv
+ Vc = (0.17 * sqrt(fcC * MPa) + Nu_min / 6 / Ag) * b * d # noqa: N806
+ Vs = (Vu - fiv * Vc) / fiv # noqa: N806
if Vs <= 1 / 3 * sqrt(fcC * MPa) * b * d:
- se_1 = se_1
+ se_1 = se_1 # noqa: PLW0127
elif Vs >= 1 / 3 * sqrt(fcC * MPa) * b * d:
se_1 = min(se_1, h / 4)
if Vs > 0.66 * sqrt(fcC * MPa) * b * d:
- print('Resize the column' + str(EleTag) + ' by shear ')
+ print('Resize the column ' + str(EleTag) + ' by shear ') # noqa: T201
- if Vs <= 0.:
+ if Vs <= 0.0:
se = se_1
else:
- Ave = Ash_B # area transversal del estribo
+ Ave = Ash_B # cross-sectional area of the stirrup # noqa: N806
se_2 = Ave * fy * d / Vs
se = min([se_1, se_2])
- if se < 60. * mm:
- print('Minimum spacing of stirrups is not met in column ' + str(EleTag))
+ if se < 60.0 * mm:
+ print( # noqa: T201
+ 'Minimum spacing of stirrups is not met in column ' + str(EleTag)
+ )
return se, neB, neH
# Input geometric, materials and seismic design parameters from TAB1 of GUI
-
+
# Lafg = float(self.ui.Lafg.text())
- Lafg = float(rootSIM["TribLengthGravity"])
-
+ Lafg = float(rootSIM['TribLengthGravity']) # noqa: N806
+
# Lafs = float(self.ui.Lafs.text())
- Lafs = float(rootSIM["TribLengthSeismic"])
+ Lafs = float(rootSIM['TribLengthSeismic']) # noqa: N806
# DL = float(self.ui.DL.text())
- DL = float(rootSIM["DeadLoad"])
-
+ DL = float(rootSIM['DeadLoad']) # noqa: N806
+
# LL = float(self.ui.LL.text())
- LL = float(rootSIM["LiveLoad"])
-
+ LL = float(rootSIM['LiveLoad']) # noqa: N806
+
# HColi = float(self.ui.HColi.text()) # Column inside Depth
- HColi = float(rootSIM["IntColDepth"])
-
+ HColi = float(rootSIM['IntColDepth']) # noqa: N806
+
# BColi = float(self.ui.BColi.text()) # Column inside Width
- BColi = float(rootSIM["IntColWidth"])
+ BColi = float(rootSIM['IntColWidth']) # noqa: N806
# HCole = float(self.ui.HCole.text()) # Column outside Depth
- HCole = float(rootSIM["ExtColDepth"])
+ HCole = float(rootSIM['ExtColDepth']) # noqa: N806
# BCole = float(self.ui.BCole.text()) # Column outside Width
- BCole = float(rootSIM["ExtColWidth"])
+ BCole = float(rootSIM['ExtColWidth']) # noqa: N806
# HBeam = float(self.ui.HBeam.text())
- HBeam = float(rootSIM["BeamDepth"])
-
+ HBeam = float(rootSIM['BeamDepth']) # noqa: N806
+
# BBeam = float(self.ui.BBeam.text())
- BBeam = float(rootSIM["BeamWidth"])
+ BBeam = float(rootSIM['BeamWidth']) # noqa: N806
# IFC = float(self.ui.InertiaColumnsFactor.text())
- IFC = float(rootSIM["ColIg"])
-
+ IFC = float(rootSIM['ColIg']) # noqa: N806
+
# IFB = float(self.ui.InertiaBeamsFactor.text())
- IFB = float(rootSIM["BeamIg"])
-
+ IFB = float(rootSIM['BeamIg']) # noqa: N806
+
# heigth_v = self.ui.heigth_v.text()
- heigth_v = rootSIM["VecStoryHeights"]
+ heigth_v = rootSIM['VecStoryHeights']
heigth_v = heigth_v.split(',')
heigth_v = np.array(heigth_v, dtype=float)
-
+
# span_v = self.ui.span_v.text()
- span_v = rootSIM["VecSpans"]
+ span_v = rootSIM['VecSpans']
span_v = span_v.split(',')
span_v = np.array(span_v, dtype=float)
-
+
# fy = float(self.ui.fy.text()) * MPa
- fy = float(rootSIM["FySteel"]) * MPa
-
+ fy = float(rootSIM['FySteel']) * MPa
+
# fcB = float(self.ui.fcB.text()) * MPa
- fcB = float(rootSIM["BeamFpc"]) * MPa
+ fcB = float(rootSIM['BeamFpc']) * MPa # noqa: N806
# fcC = float(self.ui.fcC.text()) * MPa
- fcC = float(rootSIM["ColFpc"]) * MPa
+ fcC = float(rootSIM['ColFpc']) * MPa # noqa: N806
# R = float(self.ui.R.text())
- R = float(rootSIM["RParam"])
+ R = float(rootSIM['RParam']) # noqa: N806
# Cd = float(self.ui.Cd.text())
- Cd = float(rootSIM["CdParam"])
+ Cd = float(rootSIM['CdParam']) # noqa: N806, F841
# Omo = float(self.ui.Omo.text())
- Omo = float(rootSIM["OmegaParam"])
+ Omo = float(rootSIM['OmegaParam']) # noqa: N806
# Sds = float(self.ui.Sds.text())
- Sds = float(rootSIM["SDSParam"])
+ Sds = float(rootSIM['SDSParam']) # noqa: N806
# Sd1 = float(self.ui.Sd1.text())
- Sd1 = float(rootSIM["SD1Param"])
+ Sd1 = float(rootSIM['SD1Param']) # noqa: N806
# Tl = float(self.ui.Tl.text())
- Tl = float(rootSIM["TLParam"])
+ Tl = float(rootSIM['TLParam']) # noqa: N806
WDL = Lafg * DL
WDLS = Lafs * DL
WLL = Lafg * LL
-
-# print("heigth_v: ")
-# print(heigth_v)
-# print("span_v: ")
-# print(span_v)
-# print("Lafg: "+str(Lafg))
-# print("Lafs: "+str(Lafs))
-# print("DL: "+str(DL))
-# print("LL: "+str(LL))
-# print("HColi: "+str(HColi))
-# print("BColi: "+str(BColi))
-# print("HCole: "+str(HCole))
-# print("BCole: "+str(BCole))
-# print("HBeam: "+str(HBeam))
-# print("BBeam: "+str(BBeam))
-# print("IFC: "+str(IFC))
-# print("IFB: "+str(IFB))
- print("********************fy: ",fy)
-# print("fcB: "+str(fcB))
-# print("fcC: "+str(fcC))
-# print("R: "+str(R))
-# print("Cd: "+str(Cd))
-# print("Omo: "+str(Omo))
-# print("Sds: "+str(Sds))
-# print("Sd1: "+str(Sd1))
-# print("Tl: "+str(Tl))
+
+ # print("heigth_v: ")
+ # print(heigth_v)
+ # print("span_v: ")
+ # print(span_v)
+ # print("Lafg: "+str(Lafg))
+ # print("Lafs: "+str(Lafs))
+ # print("DL: "+str(DL))
+ # print("LL: "+str(LL))
+ # print("HColi: "+str(HColi))
+ # print("BColi: "+str(BColi))
+ # print("HCole: "+str(HCole))
+ # print("BCole: "+str(BCole))
+ # print("HBeam: "+str(HBeam))
+ # print("BBeam: "+str(BBeam))
+ # print("IFC: "+str(IFC))
+ # print("IFB: "+str(IFB))
+ print('********************fy: ', fy) # noqa: T201
+ # print("fcB: "+str(fcB))
+ # print("fcC: "+str(fcC))
+ # print("R: "+str(R))
+ # print("Cd: "+str(Cd))
+ # print("Omo: "+str(Omo))
+ # print("Sds: "+str(Sds))
+ # print("Sd1: "+str(Sd1))
+ # print("Tl: "+str(Tl))
# plt.close('all')
op.wipe()
@@ -671,110 +785,173 @@ def AvColumn():
yn_vf = np.ravel(yn_v)
num_nodes = len(Loc_span) * len(Loc_heigth)
ListNodes = np.empty([num_nodes, 3])
- nodeTag = 0
- for (xn, yn) in zip(xn_vf, yn_vf):
+ nodeTag = 0 # noqa: N806
+ for xn, yn in zip(xn_vf, yn_vf):
ListNodes[nodeTag, :] = [nodeTag, xn, yn]
op.node(nodeTag, xn, yn)
- if yn == 0.:
+ if yn == 0.0:
op.fix(nodeTag, 1, 1, 1)
- nodeTag += 1
+ nodeTag += 1 # noqa: SIM113, N806
for node in ListNodes:
- if node[2] > 0. and node[1] == 0.:
- MasterNode = node[0]
- if node[2] > 0. and node[1] != 0.:
+ if node[2] > 0.0 and node[1] == 0.0:
+ MasterNode = node[0] # noqa: N806
+ if node[2] > 0.0 and node[1] != 0.0:
op.equalDOF(int(MasterNode), int(node[0]), 1)
- ListNodesDrift = ListNodes[np.where(ListNodes[:, 1] == 0.)]
- MassType = "-lMass" # -lMass, -cMass
+ ListNodesDrift = ListNodes[np.where(ListNodes[:, 1] == 0.0)] # noqa: N806
+ MassType = '-lMass' # -lMass, -cMass # noqa: N806
# Columns creation for elastic analysis
op.geomTransf('Linear', 1, '-jntOffset', 0, 0, 0, -HBeam / 2)
op.geomTransf('Linear', 2, '-jntOffset', 0, HBeam / 2, 0, -HBeam / 2)
- AColi = BColi * HColi # cross-sectional area
- ACole = BCole * HCole # cross-sectional area
- EcC = 4700 * sqrt(fcC * MPa)
- IzColi = 1. / 12. * BColi * HColi ** 3 # Column moment of inertia
- IzCole = 1. / 12. * BCole * HCole ** 3 # Column moment of inertia
- EleTag = 1
+ AColi = BColi * HColi # cross-sectional area # noqa: N806
+ ACole = BCole * HCole # cross-sectional area # noqa: N806
+ EcC = 4700 * sqrt(fcC * MPa) # noqa: N806
+ # Interior column moment of inertia
+ IzColi = 1.0 / 12.0 * BColi * HColi**3 # noqa: N806
+ # Exterior column moment of inertia
+ IzCole = 1.0 / 12.0 * BCole * HCole**3 # noqa: N806
+ EleTag = 1 # noqa: N806
Elements = []
- for Nod_ini in range(num_nodes):
+ for Nod_ini in range(num_nodes): # noqa: N806
if ListNodes[Nod_ini, 2] != Loc_heigth[-1]:
- Nod_end = Nod_ini + n_col_axes
- if ListNodes[Nod_ini, 2] == 0.:
- gTr = 1
- RZi = 0
- RZe = HBeam / 2
- LCol = ListNodes[Nod_end, 2] - ListNodes[Nod_ini, 2] - RZi - RZe
+ Nod_end = Nod_ini + n_col_axes # noqa: N806
+ if ListNodes[Nod_ini, 2] == 0.0:
+ gTr = 1 # noqa: N806
+ RZi = 0 # noqa: N806
+ RZe = HBeam / 2 # noqa: N806
+ LCol = ListNodes[Nod_end, 2] - ListNodes[Nod_ini, 2] - RZi - RZe # noqa: N806
else:
- gTr = 2
- RZi = HBeam / 2
- RZe = HBeam / 2
- LCol = ListNodes[Nod_end, 2] - ListNodes[Nod_ini, 2] - RZi - RZe
- if ListNodes[Nod_ini, 1] == 0. or ListNodes[Nod_ini, 1] == Loc_span[-1]:
- BCol, HCol = BCole, HCole
- ACol = ACole
- IzCol = IFC * IzCole
+ gTr = 2 # noqa: N806
+ RZi = HBeam / 2 # noqa: N806
+ RZe = HBeam / 2 # noqa: N806
+ LCol = ListNodes[Nod_end, 2] - ListNodes[Nod_ini, 2] - RZi - RZe # noqa: N806
+ if (
+ ListNodes[Nod_ini, 1] == 0.0
+ or ListNodes[Nod_ini, 1] == Loc_span[-1]
+ ):
+ BCol, HCol = BCole, HCole # noqa: N806
+ ACol = ACole # noqa: N806
+ IzCol = IFC * IzCole # noqa: N806
else:
- BCol, HCol = BColi, HColi
- ACol = AColi
- IzCol = IFC * IzColi
- MassDens = ACol * GConc / g
- Elements.append(BeamElasticElement(EleTag, Nod_ini, Nod_end, ACol, EcC, IzCol, LCol, BCol, HCol, gTr,
- RZi, RZe))
- op.element('elasticBeamColumn', EleTag, Nod_ini, Nod_end, ACol, EcC, IzCol, gTr, '-mass', MassDens,
- MassType)
- EleTag += 1
+ BCol, HCol = BColi, HColi # noqa: N806
+ ACol = AColi # noqa: N806
+ IzCol = IFC * IzColi # noqa: N806
+ MassDens = ACol * GConc / g # noqa: N806
+ Elements.append(
+ BeamElasticElement(
+ EleTag,
+ Nod_ini,
+ Nod_end,
+ ACol,
+ EcC,
+ IzCol,
+ LCol,
+ BCol,
+ HCol,
+ gTr,
+ RZi,
+ RZe,
+ )
+ )
+ op.element(
+ 'elasticBeamColumn',
+ EleTag,
+ Nod_ini,
+ Nod_end,
+ ACol,
+ EcC,
+ IzCol,
+ gTr,
+ '-mass',
+ MassDens,
+ MassType,
+ )
+ EleTag += 1 # noqa: N806
num_cols = EleTag
# Beams creation for elastic analysis
- op.geomTransf('Linear', 3, '-jntOffset', HColi / 2., 0, -HColi / 2., 0)
- op.geomTransf('Linear', 4, '-jntOffset', HCole / 2., 0, -HColi / 2., 0)
- op.geomTransf('Linear', 5, '-jntOffset', HColi / 2., 0, -HCole / 2., 0)
- ABeam = BBeam * HBeam
- EcB = 4700 * sqrt(fcB * MPa)
- IzBeam = IFB * BBeam * HBeam ** 3 / 12
- MassDens = ABeam * GConc / g + WDLS / g
- for Nod_ini in range(num_nodes):
- if ListNodes[Nod_ini, 1] != Loc_span[-1] and ListNodes[Nod_ini, 2] != 0.:
- Nod_end = Nod_ini + 1
- if ListNodes[Nod_ini, 1] == 0.:
- gTr = 4
- RZi = HCole / 2.
- RZe = HColi / 2.
- LBeam = ListNodes[Nod_end, 1] - ListNodes[Nod_ini, 1] - RZi - RZe
+ op.geomTransf('Linear', 3, '-jntOffset', HColi / 2.0, 0, -HColi / 2.0, 0)
+ op.geomTransf('Linear', 4, '-jntOffset', HCole / 2.0, 0, -HColi / 2.0, 0)
+ op.geomTransf('Linear', 5, '-jntOffset', HColi / 2.0, 0, -HCole / 2.0, 0)
+ ABeam = BBeam * HBeam # noqa: N806
+ EcB = 4700 * sqrt(fcB * MPa) # noqa: N806
+ IzBeam = IFB * BBeam * HBeam**3 / 12 # noqa: N806
+ MassDens = ABeam * GConc / g + WDLS / g # noqa: N806
+ for Nod_ini in range(num_nodes): # noqa: N806
+ if (
+ ListNodes[Nod_ini, 1] != Loc_span[-1]
+ and ListNodes[Nod_ini, 2] != 0.0
+ ):
+ Nod_end = Nod_ini + 1 # noqa: N806
+ if ListNodes[Nod_ini, 1] == 0.0:
+ gTr = 4 # noqa: N806
+ RZi = HCole / 2.0 # noqa: N806
+ RZe = HColi / 2.0 # noqa: N806
+ LBeam = ListNodes[Nod_end, 1] - ListNodes[Nod_ini, 1] - RZi - RZe # noqa: N806
elif ListNodes[Nod_ini, 1] == Loc_span[-2]:
- gTr = 5
- RZi = HColi / 2.
- RZe = HCole / 2.
- LBeam = ListNodes[Nod_end, 1] - ListNodes[Nod_ini, 1] - RZi - RZe
+ gTr = 5 # noqa: N806
+ RZi = HColi / 2.0 # noqa: N806
+ RZe = HCole / 2.0 # noqa: N806
+ LBeam = ListNodes[Nod_end, 1] - ListNodes[Nod_ini, 1] - RZi - RZe # noqa: N806
else:
- gTr = 3
- RZi = HColi / 2.
- RZe = HColi / 2.
- LBeam = ListNodes[Nod_end, 1] - ListNodes[Nod_ini, 1] - RZi - RZe
- Elements.append(BeamElasticElement(EleTag, Nod_ini, Nod_end, ABeam, EcB, IzBeam, LBeam, BBeam, HBeam,
- gTr, RZi, RZe))
- op.element('elasticBeamColumn', EleTag, Nod_ini, Nod_end, ABeam, EcB, IzBeam, gTr,
- '-mass', MassDens, MassType)
- EleTag += 1
+ gTr = 3 # noqa: N806
+ RZi = HColi / 2.0 # noqa: N806
+ RZe = HColi / 2.0 # noqa: N806
+ LBeam = ListNodes[Nod_end, 1] - ListNodes[Nod_ini, 1] - RZi - RZe # noqa: N806
+ Elements.append(
+ BeamElasticElement(
+ EleTag,
+ Nod_ini,
+ Nod_end,
+ ABeam,
+ EcB,
+ IzBeam,
+ LBeam,
+ BBeam,
+ HBeam,
+ gTr,
+ RZi,
+ RZe,
+ )
+ )
+ op.element(
+ 'elasticBeamColumn',
+ EleTag,
+ Nod_ini,
+ Nod_end,
+ ABeam,
+ EcB,
+ IzBeam,
+ gTr,
+ '-mass',
+ MassDens,
+ MassType,
+ )
+ EleTag += 1 # noqa: N806
num_elems = EleTag
- num_beams = num_elems - num_cols
+ num_beams = num_elems - num_cols # noqa: F841
# Create a Plain load pattern for gravity loading with a Linear TimeSeries
- Pvig = ABeam * GConc
- PColi = AColi * GConc
- PCole = ACole * GConc
+ Pvig = ABeam * GConc # noqa: N806
+ PColi = AColi * GConc # noqa: N806
+ PCole = ACole * GConc # noqa: N806
op.timeSeries('Linear', 1)
op.pattern('Plain', 1, 1)
- for Element in Elements:
+ for Element in Elements: # noqa: N806
if ListNodes[Element.Nod_ini, 1] == ListNodes[Element.Nod_end, 1]:
- if ListNodes[Element.Nod_ini, 1] == 0. or ListNodes[Element.Nod_ini, 1] == Loc_span[-1]:
- PCol = PCole
+ if (
+ ListNodes[Element.Nod_ini, 1] == 0.0
+ or ListNodes[Element.Nod_ini, 1] == Loc_span[-1]
+ ):
+ PCol = PCole # noqa: N806
else:
- PCol = PColi
+ PCol = PColi # noqa: N806
op.eleLoad('-ele', Element.EleTag, '-type', '-beamUniform', 0, -PCol)
if ListNodes[Element.Nod_ini, 2] == ListNodes[Element.Nod_end, 2]:
- op.eleLoad('-ele', Element.EleTag, '-type', '-beamUniform', -Pvig - WDL)
+ op.eleLoad(
+ '-ele', Element.EleTag, '-type', '-beamUniform', -Pvig - WDL
+ )
op.system('UmfPack')
op.numberer('Plain')
@@ -783,295 +960,395 @@ def AvColumn():
op.algorithm('Linear')
op.analysis('Static')
op.analyze(1)
- ElemnsForceD = []
- for Element in Elements:
- Forces = op.eleForce(Element.EleTag)
+ ElemnsForceD = [] # noqa: N806
+ for Element in Elements: # noqa: N806
+ Forces = op.eleForce(Element.EleTag) # noqa: N806
Forces.insert(0, Element.EleTag)
ElemnsForceD.append(Forces)
- ElemnsForceD = np.array(ElemnsForceD)
- Wtotal = np.sum(ElemnsForceD[:len(Loc_span), 2]) * Lafs / Lafg
+ ElemnsForceD = np.array(ElemnsForceD) # noqa: N806
+ Wtotal = np.sum(ElemnsForceD[: len(Loc_span), 2]) * Lafs / Lafg
op.loadConst('-time', 0.0)
op.timeSeries('Linear', 2)
op.pattern('Plain', 2, 1)
- for Element in Elements:
+ for Element in Elements: # noqa: N806
if ListNodes[Element.Nod_ini, 2] == ListNodes[Element.Nod_end, 2]:
op.eleLoad('-ele', Element.EleTag, '-type', '-beamUniform', -WLL)
op.analyze(1)
# Frame Geometry plot
-# self.ui.DataFrame.canvas.axes.clear()
-# self.ui.DataFrame.canvas.axes.plot(ListNodes[:, 1], ListNodes[:, 2], 'ks')
-#
-# self.ui.DataFrame.canvas.axes.axis('off')
-# for Ele in Elements:
-# xi = ListNodes[Ele.Nod_ini, 1]
-# yi = ListNodes[Ele.Nod_ini, 2]
-# xe = ListNodes[Ele.Nod_end, 1]
-# ye = ListNodes[Ele.Nod_end, 2]
-# self.ui.DataFrame.canvas.axes.plot([xi, xe], [yi, ye], 'k-', alpha=.3)
-# if xi == xe:
-# self.ui.DataFrame.canvas.axes.text(xi, (ye + yi) / 2, r'C{}'.format(Ele.EleTag), style='italic',
-# fontsize=8,
-# rotation='vertical', verticalalignment='center')
-# if yi == ye:
-# self.ui.DataFrame.canvas.axes.text((xe + xi) / 2, yi, r'B{}'.format(Ele.EleTag), style='italic',
-# fontsize=8,
-# horizontalalignment='center')
-# self.ui.DataFrame.canvas.axes.axis('equal')
-# self.ui.DataFrame.canvas.draw()
-# self.ui.DataFrame.canvas.show()
-
- ElemnsForceDL = []
- for Element in Elements:
- Forces = op.eleForce(Element.EleTag)
+ # self.ui.DataFrame.canvas.axes.clear()
+ # self.ui.DataFrame.canvas.axes.plot(ListNodes[:, 1], ListNodes[:, 2], 'ks')
+ #
+ # self.ui.DataFrame.canvas.axes.axis('off')
+ # for Ele in Elements:
+ # xi = ListNodes[Ele.Nod_ini, 1]
+ # yi = ListNodes[Ele.Nod_ini, 2]
+ # xe = ListNodes[Ele.Nod_end, 1]
+ # ye = ListNodes[Ele.Nod_end, 2]
+ # self.ui.DataFrame.canvas.axes.plot([xi, xe], [yi, ye], 'k-', alpha=.3)
+ # if xi == xe:
+ # self.ui.DataFrame.canvas.axes.text(xi, (ye + yi) / 2, r'C{}'.format(Ele.EleTag), style='italic',
+ # fontsize=8,
+ # rotation='vertical', verticalalignment='center')
+ # if yi == ye:
+ # self.ui.DataFrame.canvas.axes.text((xe + xi) / 2, yi, r'B{}'.format(Ele.EleTag), style='italic',
+ # fontsize=8,
+ # horizontalalignment='center')
+ # self.ui.DataFrame.canvas.axes.axis('equal')
+ # self.ui.DataFrame.canvas.draw()
+ # self.ui.DataFrame.canvas.show()
+
+ ElemnsForceDL = [] # noqa: N806
+ for Element in Elements: # noqa: N806
+ Forces = op.eleForce(Element.EleTag) # noqa: N806
Forces.insert(0, Element.EleTag)
ElemnsForceDL.append(Forces)
- ElemnsForceDL = np.array(ElemnsForceDL)
+ ElemnsForceDL = np.array(ElemnsForceDL) # noqa: N806
# Create a Plain load pattern for seismic loading with a Linear TimeSeries (LLEF)
op.loadConst('-time', 0.0)
- Htotal = Loc_heigth[-1]
- Ct = 0.0466
+ Htotal = Loc_heigth[-1] # noqa: N806
+ Ct = 0.0466 # noqa: N806
x = 0.9
- Ta = Ct * Htotal ** x
- print('Ta =', Ta)
- Ie = 1.0
- Ts = Sd1 / Sds
+ Ta = Ct * Htotal**x # noqa: N806
+ print('Ta =', Ta) # noqa: T201
+ Ie = 1.0 # noqa: N806
+ Ts = Sd1 / Sds # noqa: N806
if Ta <= Ts:
- Sa = max(Sds * Ie / R, 0.044 * Sds * Ie, 0.01)
+ Sa = max(Sds * Ie / R, 0.044 * Sds * Ie, 0.01) # noqa: N806
elif Ta <= Tl:
- Sa = max(Sd1 * Ie / Ta / R, 0.044 * Sds * Ie, 0.01)
+ Sa = max(Sd1 * Ie / Ta / R, 0.044 * Sds * Ie, 0.01) # noqa: N806
else:
- Sa = max(Sd1 * Tl * Ie / (Ta ** 2) / R, 0.044 * Sds * Ie, 0.01)
- if Ta <= 0.5:
- k = 1.
- elif Ta <= 2.5:
+ Sa = max(Sd1 * Tl * Ie / (Ta**2) / R, 0.044 * Sds * Ie, 0.01) # noqa: N806
+ if Ta <= 0.5: # noqa: PLR2004
+ k = 1.0
+ elif Ta <= 2.5: # noqa: PLR2004
k = 0.75 + 0.5 * Ta
else:
- k = 2.
- sumH = np.sum(np.power(Loc_heigth, k))
+ k = 2.0
+ sumH = np.sum(np.power(Loc_heigth, k)) # noqa: N806
op.timeSeries('Linear', 3)
op.pattern('Plain', 3, 1)
- print('Wtotal =', Wtotal)
- Fp = Sa * Wtotal * np.power(Loc_heigth, k) / sumH
- print('FSis =', Fp)
- for (fp, ind) in zip(Fp, range(len(Loc_heigth))):
+ print('Wtotal =', Wtotal) # noqa: T201
+ Fp = Sa * Wtotal * np.power(Loc_heigth, k) / sumH # noqa: N806
+ print('FSis =', Fp) # noqa: T201
+ for fp, ind in zip(Fp, range(len(Loc_heigth))):
op.load(int(ListNodesDrift[ind, 0]), fp, 0.0, 0.0)
- Vbasal = Sa * Wtotal
+ Vbasal = Sa * Wtotal # noqa: N806, F841
op.analyze(1)
- ElemnsForceDLE = []
- for Element in Elements:
- Forces = op.eleForce(Element.EleTag)
+ ElemnsForceDLE = [] # noqa: N806
+ for Element in Elements: # noqa: N806
+ Forces = op.eleForce(Element.EleTag) # noqa: N806
Forces.insert(0, Element.EleTag)
ElemnsForceDLE.append(Forces)
- ElemnsForceDLE = np.array(ElemnsForceDLE)
+ ElemnsForceDLE = np.array(ElemnsForceDLE) # noqa: N806
np.set_printoptions(precision=6)
np.set_printoptions(suppress=True)
- # Story drift caculations
- DriftMax = 0.02
- nodesDisp = []
- Id_Node_Drift = ListNodesDrift[:, 0]
- Id_Node_Drift = np.int64(Id_Node_Drift)
- Id_Node_Drift = Id_Node_Drift.tolist()
+ # Story drift calculations
+ DriftMax = 0.02 # noqa: N806
+ nodesDisp = [] # noqa: N806
+ Id_Node_Drift = ListNodesDrift[:, 0] # noqa: N806
+ Id_Node_Drift = np.int64(Id_Node_Drift) # noqa: N806
+ Id_Node_Drift = Id_Node_Drift.tolist() # noqa: N806
for nodo in Id_Node_Drift:
- nodesDisp.append([nodo, op.nodeDisp(nodo, 1)])
- nodesDisp = np.array(nodesDisp)
+ nodesDisp.append([nodo, op.nodeDisp(nodo, 1)]) # noqa: PERF401
+ nodesDisp = np.array(nodesDisp) # noqa: N806
drift = nodesDisp[1:, 1] - nodesDisp[:-1, 1]
drift_p = np.divide(drift, np.array(heigth_v))
ver_drift = np.where(drift_p < DriftMax, 'ok', 'not ok')
- Id_Floor = np.arange(1, len(Loc_heigth))
- drift_table = pd.DataFrame({"1.Floor": Id_Floor, "2.Drift": drift_p * 100, "3.": ver_drift})
- print(drift_table)
+ Id_Floor = np.arange(1, len(Loc_heigth)) # noqa: N806
+ drift_table = pd.DataFrame(
+ {'1.Floor': Id_Floor, '2.Drift': drift_p * 100, '3.': ver_drift}
+ )
+ print(drift_table) # noqa: T201
# Beams and columns design procedures
- Beta1B = beta1(fcB)
+ Beta1B = beta1(fcB) # noqa: N806
cover = 4 * cm
dst = 3 / 8 * inch
- Ast = pi * dst ** 2 / 4. # area de la barra del estribo
- ro_max_b = 0.85 * Beta1B * fcB * 3. / fy / 8. # maximun steel percentage
- ro_min_b = max(0.25 * sqrt(fcB / MPa) * MPa / fy, 1.4 * MPa / fy) # minimun steel percentage
+ Ast = pi * dst**2 / 4.0 # area of the stirrup bar # noqa: N806
+ ro_max_b = 0.85 * Beta1B * fcB * 3.0 / fy / 8.0 # maximum steel percentage
+ ro_min_b = max(
+ 0.25 * sqrt(fcB / MPa) * MPa / fy, 1.4 * MPa / fy
+ ) # minimum steel percentage
DataBeamDesing = []
- for (Ele, EleForceD, EleForceDL, EleForceDLE) in zip(Elements, ElemnsForceD, ElemnsForceDL, ElemnsForceDLE):
+ for Ele, EleForceD, EleForceDL, EleForceDLE in zip( # noqa: N806
+ Elements, ElemnsForceD, ElemnsForceDL, ElemnsForceDLE
+ ):
if ListNodes[Ele.Nod_ini, 2] == ListNodes[Ele.Nod_end, 2]:
- VID = EleForceD[2]
- VIL = EleForceDL[2] - VID
- VIE = EleForceDLE[2] - VID - VIL
- VED = abs(EleForceD[5])
- VEL = abs(EleForceDL[5]) - VED
- VEE = abs(EleForceDLE[5]) - VED - VEL
-
- MID = EleForceD[3] - EleForceD[2]*Ele.RZi
- MIL = EleForceDL[3] - EleForceDL[2]*Ele.RZi - MID
- MIE = EleForceDLE[3] - EleForceDLE[2]*Ele.RZi - MID - MIL
- MED = EleForceD[6] + EleForceD[5]*Ele.RZe
- MEL = EleForceDL[6] + EleForceDL[5]*Ele.RZe - MED
- MEE = EleForceDLE[6] + EleForceDLE[5]*Ele.RZe - MED - MEL
- MED, MEL, MEE = -MED, -MEL, -MEE
- print('MID ', MID, 'MED', MED, 'MIL ', MIL, 'MEL', MEL, 'MIE ', MIE, 'MEE', MEE)
- MI1, MI2, MI3, MI4, MI5 = Combo_ACI(MID, MIL, MIE)
- MNU1 = max([MI1, MI2, MI3, MI4, MI5, 0.]) # Momento negativo nudo inicial de diseño
- MPU1 = min([MI1, MI2, MI3, MI4, MI5, abs(MNU1) / 3]) # Momento positivo nudo inicial de diseño
- ME1, ME2, ME3, ME4, ME5 = Combo_ACI(MED, MEL, MEE)
- MNU2 = max([ME1, ME2, ME3, ME4, ME5, 0.]) # Momento negativo nudo final de diseño
- MPU2 = min([ME1, ME2, ME3, ME4, ME5, abs(MNU2) / 3]) # Momento positivo nudo final de diseño
- Mmax = max([MNU1, -MPU1, MNU2, -MPU2])
- MNU1 = max([MNU1, Mmax / 5])
- MPU1 = min([MPU1, -Mmax / 5])
- MNU2 = max([MNU2, Mmax / 5])
- MPU2 = min([MPU2, -Mmax / 5])
-
- Ast1, dt1, Mn_N1, db_t1 = AsBeam(MNU1, Ele.EleTag)
- Asb1, db1, Mn_P1, db_b1 = AsBeam(MPU1, Ele.EleTag)
- Ast2, dt2, Mn_N2, db_t2 = AsBeam(MNU2, Ele.EleTag)
- Asb2, db2, Mn_P2, db_b2 = AsBeam(MPU2, Ele.EleTag)
-
- VI1 = 1.2 * VID + 1.6 * VIL
- VI2 = 1.2 * VID + 1.0 * VIL - 1.0 * VIE
- VI3 = 0.9 * VID - 1.0 * VIE
- VI4 = (Mn_P1 + Mn_N2) / Ele.LEle + (1.2 * WDL + WLL) * Ele.LEle / 2.
- VI5 = (Mn_N1 + Mn_P2) / Ele.LEle + (1.2 * WDL + WLL) * Ele.LEle / 2.
- VI6 = 1.2 * VID + 1.0 * VIL - 2.0 * VIE
- VI7 = 0.9 * VID - 2.0 * VIE
-
- VU1a = max(VI1, VI2, VI3)
- VU1b = max(VI4, VI5)
- VU1c = max(VI6, VI7)
-
- VU1 = max(VU1a, min(VU1b, VU1c)) # Cortante negativo nudo inicial de diseño
-
- VE1 = 1.2 * VED + 1.6 * VEL
- VE2 = 1.2 * VED + 1.0 * VEL + 1.0 * VEE
- VE3 = 0.9 * VED + 1.0 * VEE
- VE4 = (Mn_P1 + Mn_N2) / Ele.LEle + (1.2 * WDL + WLL) * Ele.LEle / 2.
- VE5 = (Mn_N1 + Mn_P2) / Ele.LEle + (1.2 * WDL + WLL) * Ele.LEle / 2.
- VE6 = 1.2 * VED + 1.0 * VEL + 2.0 * VEE
- VE7 = 0.9 * VED + 2.0 * VEE
-
- VU2a = max(VE1, VE2, VE3)
- VU2b = max(VE4, VE5)
- VU2c = max(VE6, VE7)
-
- VU2 = max(VU2a, min(VU2b, VU2c)) # Cortante negativo nudo final de diseño
+ VID = EleForceD[2] # noqa: N806
+ VIL = EleForceDL[2] - VID # noqa: N806
+ VIE = EleForceDLE[2] - VID - VIL # noqa: N806
+ VED = abs(EleForceD[5]) # noqa: N806
+ VEL = abs(EleForceDL[5]) - VED # noqa: N806
+ VEE = abs(EleForceDLE[5]) - VED - VEL # noqa: N806
+
+ MID = EleForceD[3] - EleForceD[2] * Ele.RZi # noqa: N806
+ MIL = EleForceDL[3] - EleForceDL[2] * Ele.RZi - MID # noqa: N806
+ MIE = EleForceDLE[3] - EleForceDLE[2] * Ele.RZi - MID - MIL # noqa: N806
+ MED = EleForceD[6] + EleForceD[5] * Ele.RZe # noqa: N806
+ MEL = EleForceDL[6] + EleForceDL[5] * Ele.RZe - MED # noqa: N806
+ MEE = EleForceDLE[6] + EleForceDLE[5] * Ele.RZe - MED - MEL # noqa: N806
+ MED, MEL, MEE = -MED, -MEL, -MEE # noqa: N806
+ print( # noqa: T201
+ 'MID ',
+ MID,
+ 'MED',
+ MED,
+ 'MIL ',
+ MIL,
+ 'MEL',
+ MEL,
+ 'MIE ',
+ MIE,
+ 'MEE',
+ MEE,
+ )
+ MI1, MI2, MI3, MI4, MI5 = Combo_ACI(MID, MIL, MIE) # noqa: N806
+ MNU1 = max( # noqa: N806
+ [MI1, MI2, MI3, MI4, MI5, 0.0]
+ ) # Design negative moment at the initial joint
+ MPU1 = min( # noqa: N806
+ [MI1, MI2, MI3, MI4, MI5, abs(MNU1) / 3]
+ ) # Design positive moment at the initial joint
+ ME1, ME2, ME3, ME4, ME5 = Combo_ACI(MED, MEL, MEE) # noqa: N806
+ MNU2 = max( # noqa: N806
+ [ME1, ME2, ME3, ME4, ME5, 0.0]
+ ) # Negative design moment at the end joint
+ MPU2 = min( # noqa: N806
+ [ME1, ME2, ME3, ME4, ME5, abs(MNU2) / 3]
+ ) # Positive design moment at the end joint
+ Mmax = max([MNU1, -MPU1, MNU2, -MPU2]) # noqa: N806
+ MNU1 = max([MNU1, Mmax / 5]) # noqa: N806
+ MPU1 = min([MPU1, -Mmax / 5]) # noqa: N806
+ MNU2 = max([MNU2, Mmax / 5]) # noqa: N806
+ MPU2 = min([MPU2, -Mmax / 5]) # noqa: N806
+
+ Ast1, dt1, Mn_N1, db_t1 = AsBeam(MNU1, Ele.EleTag) # noqa: N806
+ Asb1, db1, Mn_P1, db_b1 = AsBeam(MPU1, Ele.EleTag) # noqa: N806
+ Ast2, dt2, Mn_N2, db_t2 = AsBeam(MNU2, Ele.EleTag) # noqa: N806
+ Asb2, db2, Mn_P2, db_b2 = AsBeam(MPU2, Ele.EleTag) # noqa: N806
+
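+ # Design shear at the initial joint: VI1-VI3 are factored gravity/seismic combinations,
+ # VI4-VI5 are capacity-design shears from the beam nominal moments plus the factored
+ # gravity load, and VI6-VI7 double the earthquake shear; VU1 takes the governing value.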
+ VI1 = 1.2 * VID + 1.6 * VIL # noqa: N806
+ VI2 = 1.2 * VID + 1.0 * VIL - 1.0 * VIE # noqa: N806
+ VI3 = 0.9 * VID - 1.0 * VIE # noqa: N806
+ VI4 = (Mn_P1 + Mn_N2) / Ele.LEle + (1.2 * WDL + WLL) * Ele.LEle / 2.0 # noqa: N806
+ VI5 = (Mn_N1 + Mn_P2) / Ele.LEle + (1.2 * WDL + WLL) * Ele.LEle / 2.0 # noqa: N806
+ VI6 = 1.2 * VID + 1.0 * VIL - 2.0 * VIE # noqa: N806
+ VI7 = 0.9 * VID - 2.0 * VIE # noqa: N806
+
+ VU1a = max(VI1, VI2, VI3) # noqa: N806
+ VU1b = max(VI4, VI5) # noqa: N806
+ VU1c = max(VI6, VI7) # noqa: N806
+
+ VU1 = max( # noqa: N806
+ VU1a, min(VU1b, VU1c)
+ ) # Negative design shear at the initial joint
+
+ VE1 = 1.2 * VED + 1.6 * VEL # noqa: N806
+ VE2 = 1.2 * VED + 1.0 * VEL + 1.0 * VEE # noqa: N806
+ VE3 = 0.9 * VED + 1.0 * VEE # noqa: N806
+ VE4 = (Mn_P1 + Mn_N2) / Ele.LEle + (1.2 * WDL + WLL) * Ele.LEle / 2.0 # noqa: N806
+ VE5 = (Mn_N1 + Mn_P2) / Ele.LEle + (1.2 * WDL + WLL) * Ele.LEle / 2.0 # noqa: N806
+ VE6 = 1.2 * VED + 1.0 * VEL + 2.0 * VEE # noqa: N806
+ VE7 = 0.9 * VED + 2.0 * VEE # noqa: N806
+
+ VU2a = max(VE1, VE2, VE3) # noqa: N806
+ VU2b = max(VE4, VE5) # noqa: N806
+ VU2c = max(VE6, VE7) # noqa: N806
+
+ VU2 = max( # noqa: N806
+ VU2a, min(VU2b, VU2c)
+ ) # Negative design shear at the end joint
nst1, sst1 = AvBeam(VU1, db_t1, dt1, Ele.EleTag)
nst2, sst2 = AvBeam(VU2, db_t2, dt2, Ele.EleTag)
- DataBeamDesing.append(BeamDesing(Ele.EleTag, BBeam, HBeam, Ast1, dt1, Mn_N1, Asb1, db1, Mn_P1, nst1,
- sst1, Ast2, dt2, Mn_N2, Asb2, db2, Mn_P2, nst2, sst2, Ele.Nod_ini,
- Ele.Nod_end, db_t1, db_b1, db_t2, db_b2))
- #self.ui.tbl_data_design_beams.setRowCount(0)
- #data_beams_table(self)
+ DataBeamDesing.append(
+ BeamDesing(
+ Ele.EleTag,
+ BBeam,
+ HBeam,
+ Ast1,
+ dt1,
+ Mn_N1,
+ Asb1,
+ db1,
+ Mn_P1,
+ nst1,
+ sst1,
+ Ast2,
+ dt2,
+ Mn_N2,
+ Asb2,
+ db2,
+ Mn_P2,
+ nst2,
+ sst2,
+ Ele.Nod_ini,
+ Ele.Nod_end,
+ db_t1,
+ db_b1,
+ db_t2,
+ db_b2,
+ )
+ )
+ # self.ui.tbl_data_design_beams.setRowCount(0)
+ # data_beams_table(self)
# Column design procedure
ro_min = 0.01
ro_max = 0.08
- Beta1C = beta1(fcC)
+ Beta1C = beta1(fcC) # noqa: N806
npts = 20
- ncom = 10
+ ncom = 10 # noqa: F841
ecu = 0.003
- Es = 200. * GPa
+ Es = 200.0 * GPa # noqa: N806
- db_v = np.array([5, 6, 7, 8, 9, 10, 11, 14, 18]) # vector de diametros de barras
+ db_v = np.array(
+ [5, 6, 7, 8, 9, 10, 11, 14, 18]
+ ) # vector of bar diameters
DataColDesing = []
- for (Ele, EleForceD, EleForceDL, EleForceDLE) in zip(Elements, ElemnsForceD, ElemnsForceDL, ElemnsForceDLE):
+ for Ele, EleForceD, EleForceDL, EleForceDLE in zip( # noqa: N806
+ Elements, ElemnsForceD, ElemnsForceDL, ElemnsForceDLE
+ ):
if ListNodes[Ele.Nod_ini, 1] == ListNodes[Ele.Nod_end, 1]:
- Mn_N_R, Mn_P_R, Mn_N_L, Mn_P_L = 0, 0, 0, 0
- for DB in DataBeamDesing:
+ Mn_N_R, Mn_P_R, Mn_N_L, Mn_P_L = 0, 0, 0, 0 # noqa: N806
+ for DB in DataBeamDesing: # noqa: N806
if Ele.Nod_end == DB.Nod_ini:
- Mn_N_R, Mn_P_R = DB.Mn_n1, DB.Mn_p1
+ Mn_N_R, Mn_P_R = DB.Mn_n1, DB.Mn_p1 # noqa: N806
if Ele.Nod_end == DB.Nod_end:
- Mn_N_L, Mn_P_L = DB.Mn_n2, DB.Mn_p2
- Sum_Mn_B = max(Mn_P_R + Mn_N_L, Mn_N_R + Mn_P_L)
+ Mn_N_L, Mn_P_L = DB.Mn_n2, DB.Mn_p2 # noqa: N806
+ Sum_Mn_B = max(Mn_P_R + Mn_N_L, Mn_N_R + Mn_P_L) # noqa: N806, F841
b, h = Ele.BEle, Ele.HEle
- nbB = ceil(b * 10) # bars numbers along B
- nbH = ceil(h * 10) # bars numbers along H
- D_c = 1.1 * h / npts
- nbH_v = np.array([nbH - 1, nbH, nbH + 1])
- nbB_v = np.array([nbB - 1, nbB, nbB + 1])
-
- MID = EleForceD[3]
- MIL = EleForceDL[3] - MID
- MIE = EleForceDLE[3] - MID - MIL
-
- PID = EleForceD[2]
- PIL = EleForceDL[2] - PID
- PIE = EleForceDLE[2] - PID - PIL
-
- MI1, MI2, MI3, MI4, MI5 = Combo_ACI(MID, MIL, MIE)
- PI1, PI2, PI3, PI4, PI5 = Combo_ACI(PID, PIL, PIE)
-
- MED = -EleForceD[6]
- MEL = -EleForceDL[6] - MED
- MEE = -EleForceDLE[6] - MED - MEL
- print('MID ', MID, 'MED', MED, 'MIL ', MIL, 'MEL', MEL, 'MIE ', MIE, 'MEE', MEE)
-
- PED = -EleForceD[5]
- PEL = -EleForceDL[5] - PED
- PEE = -EleForceDLE[5] - PED - PEL
-
- ME1, ME2, ME3, ME4, ME5 = Combo_ACI(MED, MEL, MEE)
- PE1, PE2, PE3, PE4, PE5 = Combo_ACI(PED, PEL, PEE)
-
- Nu_min = min([PI2, PI3, PI4, PI5, PE2, PE3, PE4, PE5])
-
- Pu_v = np.array([PI1, PI2, PI3, PI4, PI5, PE1, PE2, PE3, PE4, PE5])
- Mu_v = np.array([MI1, MI2, MI3, MI4, MI5, ME1, ME2, ME3, ME4, ME5])
- Mu_v = np.absolute(Mu_v)
-
- nbH, nbB, db, As, fiPn, fiMn, Mn_i, d, dist, ro, Mu_i = AsColumn()
-
- VID = EleForceD[1]
- VIL = EleForceDL[1] - VID
- VIE = EleForceDLE[1] - VID - VIL
- VID, VIL, VIE = abs(VID), abs(VIL), abs(VIE)
-
- Mu_is = Mu_i[[1, 2, 3, 4, 6, 7, 8, 9]]
- Mn_max = np.max(Mu_is) # Momento maximo de todas las combo sismicas
- VI1, VI2, VI3, VI4, VI5 = Combo_ACI(VID, VIL, VIE)
-
- VI6 = 2.0 * Mn_max / Ele.LEle
- VI7 = 1.2 * VID + 1.0 * VIL + Omo * VIE
- VI8 = 1.2 * VID + 1.0 * VIL - Omo * VIE
- VI9 = 0.9 * VID + Omo * VIE
- VI10 = 0.9 * VID - Omo * VIE
-
- VUa = max([VI1, VI2, VI3, VI4, VI5])
- VUb = VI6
- VUc = max([VI7, VI8, VI9, VI10])
-
- Vu = max([VUa, min([VUb, VUc])]) # Cortante maximo de diseño
- sst, nsB, nsH = AvColumn()
- DataColDesing.append(ColDesing(Ele.EleTag, b, h, nbH, nbB, db, As, Pu_v, Mu_v, fiPn, fiMn, Mn_i, d,
- dist, ro, Mu_i, sst, nsB, nsH, Ele.Nod_ini, Ele.Nod_end))
-
- #self.ui.tbl_data_design_columns.setRowCount(0)
- #data_columns_table(self)
- #self.ui.tabWidget.setCurrentIndex(1)
+ nbB = ceil(b * 10) # number of bars along B # noqa: N806
+ nbH = ceil(h * 10) # number of bars along H # noqa: N806
+ D_c = 1.1 * h / npts # noqa: N806, F841
+ nbH_v = np.array([nbH - 1, nbH, nbH + 1]) # noqa: N806
+ nbB_v = np.array([nbB - 1, nbB, nbB + 1]) # noqa: N806
+
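+ # Same D/L/E decomposition for the column: bending moments (M) and axial forces (P)
+ # are extracted at the initial (I) and end (E) sections before applying the ACI combinations.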
+ MID = EleForceD[3] # noqa: N806
+ MIL = EleForceDL[3] - MID # noqa: N806
+ MIE = EleForceDLE[3] - MID - MIL # noqa: N806
+
+ PID = EleForceD[2] # noqa: N806
+ PIL = EleForceDL[2] - PID # noqa: N806
+ PIE = EleForceDLE[2] - PID - PIL # noqa: N806
+
+ MI1, MI2, MI3, MI4, MI5 = Combo_ACI(MID, MIL, MIE) # noqa: N806
+ PI1, PI2, PI3, PI4, PI5 = Combo_ACI(PID, PIL, PIE) # noqa: N806
+
+ MED = -EleForceD[6] # noqa: N806
+ MEL = -EleForceDL[6] - MED # noqa: N806
+ MEE = -EleForceDLE[6] - MED - MEL # noqa: N806
+ print( # noqa: T201
+ 'MID ',
+ MID,
+ 'MED',
+ MED,
+ 'MIL ',
+ MIL,
+ 'MEL',
+ MEL,
+ 'MIE ',
+ MIE,
+ 'MEE',
+ MEE,
+ )
+
+ PED = -EleForceD[5] # noqa: N806
+ PEL = -EleForceDL[5] - PED # noqa: N806
+ PEE = -EleForceDLE[5] - PED - PEL # noqa: N806
+
+ ME1, ME2, ME3, ME4, ME5 = Combo_ACI(MED, MEL, MEE) # noqa: N806
+ PE1, PE2, PE3, PE4, PE5 = Combo_ACI(PED, PEL, PEE) # noqa: N806
+
+ Nu_min = min([PI2, PI3, PI4, PI5, PE2, PE3, PE4, PE5]) # noqa: N806
+
+ Pu_v = np.array([PI1, PI2, PI3, PI4, PI5, PE1, PE2, PE3, PE4, PE5]) # noqa: N806
+ Mu_v = np.array([MI1, MI2, MI3, MI4, MI5, ME1, ME2, ME3, ME4, ME5]) # noqa: N806
+ Mu_v = np.absolute(Mu_v) # noqa: N806
+
+ nbH, nbB, db, As, fiPn, fiMn, Mn_i, d, dist, ro, Mu_i = AsColumn() # noqa: N806
+
+ VID = EleForceD[1] # noqa: N806
+ VIL = EleForceDL[1] - VID # noqa: N806
+ VIE = EleForceDLE[1] - VID - VIL # noqa: N806
+ VID, VIL, VIE = abs(VID), abs(VIL), abs(VIE) # noqa: N806
+
+ Mu_is = Mu_i[[1, 2, 3, 4, 6, 7, 8, 9]] # noqa: N806
+ # Maximum moment over all seismic combinations
+ Mn_max = np.max(Mu_is) # noqa: N806
+ VI1, VI2, VI3, VI4, VI5 = Combo_ACI(VID, VIL, VIE) # noqa: N806
+
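+ # Column design shear: VI6 is the capacity shear from the largest seismic-combination
+ # moment, while VI7-VI10 amplify the earthquake shear by the overstrength factor Omo.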
+ VI6 = 2.0 * Mn_max / Ele.LEle # noqa: N806
+ VI7 = 1.2 * VID + 1.0 * VIL + Omo * VIE # noqa: N806
+ VI8 = 1.2 * VID + 1.0 * VIL - Omo * VIE # noqa: N806
+ VI9 = 0.9 * VID + Omo * VIE # noqa: N806
+ VI10 = 0.9 * VID - Omo * VIE # noqa: N806
+
+ VUa = max([VI1, VI2, VI3, VI4, VI5]) # noqa: N806
+ VUb = VI6 # noqa: N806
+ VUc = max([VI7, VI8, VI9, VI10]) # noqa: N806
+
+ # Maximum design shear
+ Vu = max([VUa, min([VUb, VUc])]) # noqa: N806
+ sst, nsB, nsH = AvColumn() # noqa: N806
+ DataColDesing.append(
+ ColDesing(
+ Ele.EleTag,
+ b,
+ h,
+ nbH,
+ nbB,
+ db,
+ As,
+ Pu_v,
+ Mu_v,
+ fiPn,
+ fiMn,
+ Mn_i,
+ d,
+ dist,
+ ro,
+ Mu_i,
+ sst,
+ nsB,
+ nsH,
+ Ele.Nod_ini,
+ Ele.Nod_end,
+ )
+ )
+
+ # self.ui.tbl_data_design_columns.setRowCount(0)
+ # data_columns_table(self)
+ # self.ui.tabWidget.setCurrentIndex(1)
# Creation of the nonlinear model
- def CreateNLM(self,rootSIM,outputLogger, preparePushover):
- def __init__(rootSIM):
+ def CreateNLM(self, rootSIM, outputLogger, preparePushover): # noqa: C901, D102, N802, N803, PLR0914, PLR0915
+ def __init__(rootSIM): # noqa: N803, N807
self.rootSIM = rootSIM
self.outputLogger = outputLogger
-
- global T1m, T2m, EleCol, EleBeam
- # Validation of beam and column design table data
- def validate_data(self):
- cover = 4*cm
- dst = 3/8*inch
+ global T1m, T2m, EleCol, EleBeam # noqa: PLW0603
+ # Validation of beam and column design table data
+ def validate_data(self): # noqa: ARG001
+ cover = 4 * cm
+ dst = 3 / 8 * inch
- for DC in DataColDesing:
+ for DC in DataColDesing: # noqa: N806
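+ # Rebuild the bar layout from the design table: nbH rows of bars between dp and h - dp;
+ # the outer rows carry nbB bars each and the interior rows two bars each.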
dp = cover + dst + 0.5 * DC.db
DC.dist = np.linspace(dp, DC.h - dp, DC.nbH)
- Ab = pi * DC.db ** 2. / 4.
- DC.As = np.hstack([DC.nbB * Ab, np.ones(DC.nbH - 2) * 2 * Ab, DC.nbB * Ab])
-
+ Ab = pi * DC.db**2.0 / 4.0 # noqa: N806
+ DC.As = np.hstack(
+ [DC.nbB * Ab, np.ones(DC.nbH - 2) * 2 * Ab, DC.nbB * Ab]
+ )
+
# print("DC.EleTag",DC.EleTag)
# print("DC.nbH",DC.nbH)
# print("DC.db",DC.db)
@@ -1085,232 +1362,528 @@ def validate_data(self):
# print("DC.As",DC.As)
# print("DC.dist",DC.dist)
-
# Function: Parameters of regularized unconfined concrete
def con_inconf_regu():
fpc = -fc
epsc0 = 2 * fpc / Ec
- Gfc = max(2.0 * (-fpc / MPa) * N / mm, 25.0 * N / mm)
+ Gfc = max(2.0 * (-fpc / MPa) * N / mm, 25.0 * N / mm) # noqa: N806
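+ # Gfc is the crushing energy used to regularize the ultimate strain epscu to the
+ # plastic-hinge length phl, keeping the post-peak response mesh-objective.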
epscu = Gfc / (0.6 * fpc * phl) - 0.8 * fpc / Ec + epsc0
fcu = 0.2 * fpc
- lambdaU = 0.10
+ lambdaU = 0.10 # noqa: N806
ft = 0.33 * sqrt(-fpc * MPa)
- Ets = ft / 0.002
+ Ets = ft / 0.002 # noqa: N806
return fpc, epsc0, fcu, epscu, lambdaU, ft, Ets
# Function: Parameters of regularized confined concrete
- def con_conf_regu(b, h, nsB, nsH, sst):
+ def con_conf_regu(b, h, nsB, nsH, sst): # noqa: N803
fpc = -fc
- bcx = h - 2. * cover - dst
- bcy = b - 2. * cover - dst
- Asx = nsB * Ast
- Asy = nsH * Ast
- Asvt = Asx + Asy
+ bcx = h - 2.0 * cover - dst
+ bcy = b - 2.0 * cover - dst
+ Asx = nsB * Ast # noqa: N806
+ Asy = nsH * Ast # noqa: N806
+ Asvt = Asx + Asy # noqa: N806
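+ # Effective lateral confinement: flx/fly are the confining pressures supplied by the
+ # transverse steel in each direction, reduced by the k2 efficiency factors below.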
flx = Asvt * fy / sst / bcx
fly = Asvt * fy / sst / bcy
slx = bcx / (nsB - 1)
sly = bcy / (nsH - 1)
- k2x = min(0.26 * sqrt((bcx / sst) * (bcx / slx) * (1000. / flx)), 1)
- k2y = min(0.26 * sqrt((bcy / sst) * (bcy / sly) * (1000. / fly)), 1)
- flex = k2x * flx
- fley = k2y * fly
- fle = (flex * bcx + fley * bcy) / (bcx + bcy)
- k1 = 6.7 * (fle / 1000.) ** (-0.17)
- fcc = fc + k1 * fle
+ k2x = min(0.26 * sqrt((bcx / sst) * (bcx / slx) * (1000.0 / flx)), 1)
+ k2y = min(0.26 * sqrt((bcy / sst) * (bcy / sly) * (1000.0 / fly)), 1)
+ fl_ex = k2x * flx
+ fl_ey = k2y * fly
+ fl_e = (fl_ex * bcx + fl_ey * bcy) / (bcx + bcy)
+ k1 = 6.7 * (fl_e / 1000.0) ** (-0.17)
+ fcc = fc + k1 * fl_e
fpcc = -fcc
- Ecc = Ec
- Gfc = max(2.0 * (-fpc / MPa) * N / mm, 25.0 * N / mm)
- K = k1 * fle / fc
- epscc0 = eo1 * (1. + 5. * K)
- Gfcc = 1.7 * Gfc
+ Ecc = Ec # noqa: N806
+ Gfc = max(2.0 * (-fpc / MPa) * N / mm, 25.0 * N / mm) # noqa: N806
+ K = k1 * fl_e / fc # noqa: N806
+ epscc0 = eo1 * (1.0 + 5.0 * K)
+ Gfcc = 1.7 * Gfc # noqa: N806
epsccu = Gfcc / (0.6 * fpcc * phl) - 0.8 * fpcc / Ecc + epscc0
fccu = 0.2 * fpcc
- lambdaC = 0.10
+ lambdaC = 0.10 # noqa: N806
ft = 0.33 * sqrt(-fpc * MPa)
- Ets = ft / 0.002
-
-# print("**** sst",sst)
-# print("**** fpc",fpc)
-# print("**** bcx",bcx)
-# print("**** bcy",bcy)
-# print("**** Asx",Asx)
-# print("**** Asy",Asy)
-# print("**** Asvt",Asvt)
-# print("**** flx",flx)
-# print("**** fly",fly)
-# print("**** slx",slx)
-# print("**** sly",sly)
-# print("**** k2x",k2x)
-# print("**** k2y",k2y)
-# print("**** flex",flex)
-# print("**** fley",fley)
-# print("**** fle",fle)
-# print("**** k1",k1)
-# print("**** fcc",fcc)
-# print("**** fpcc",fpcc)
-# print("**** Ecc",Ecc)
-# print("**** Gfc",Gfc)
-# print("**** K",K)
-# print("**** epscc0",epscc0)
-# print("**** Gfcc",Gfcc)
-# print("**** epsccu",epsccu)
-# print("**** fccu",fccu)
-# print("**** lambdaC",lambdaC)
-# print("**** ft",ft)
-# print("**** Ets",Ets)
+ Ets = ft / 0.002 # noqa: N806
+
+ # print("**** sst",sst)
+ # print("**** fpc",fpc)
+ # print("**** bcx",bcx)
+ # print("**** bcy",bcy)
+ # print("**** Asx",Asx)
+ # print("**** Asy",Asy)
+ # print("**** Asvt",Asvt)
+ # print("**** flx",flx)
+ # print("**** fly",fly)
+ # print("**** slx",slx)
+ # print("**** sly",sly)
+ # print("**** k2x",k2x)
+ # print("**** k2y",k2y)
+ # print("**** fl_ex",fl_ex)
+ # print("**** fl_ey",fl_ey)
+ # print("**** fl_e",fl_e)
+ # print("**** k1",k1)
+ # print("**** fcc",fcc)
+ # print("**** fpcc",fpcc)
+ # print("**** Ecc",Ecc)
+ # print("**** Gfc",Gfc)
+ # print("**** K",K)
+ # print("**** epscc0",epscc0)
+ # print("**** Gfcc",Gfcc)
+ # print("**** epsccu",epsccu)
+ # print("**** fccu",fccu)
+ # print("**** lambdaC",lambdaC)
+ # print("**** ft",ft)
+ # print("**** Ets",Ets)
return fpcc, epscc0, fccu, epsccu, lambdaC, ft, Ets
# Function: Regularized steel parameters
def steel_mat_regu():
- FyTestN4 = 490.0 * MPa
- FsuTestN4 = 630.0 * MPa
- epsuTestN4 = 0.10
- LgageTestN4 = 200.0 * mm
- Es = 200.0 * GPa
- FyPosN4 = FyTestN4
- epsyPosN4 = FyPosN4 / Es
- FyNegN4 = FyTestN4
- epsyNegN4 = FyNegN4 / Es
- FsuPosN4 = FsuTestN4
- epsuPosN4 = epsyPosN4 + LgageTestN4 / phl * (epsuTestN4 - epsyPosN4)
- bPosN4 = (FsuPosN4 - FyPosN4) / (Es * (epsuPosN4 - epsyPosN4))
- epsuNegN4 = min(-epsccu, epsuPosN4)
- bNegN4 = bPosN4
+ FyTestN4 = 490.0 * MPa # noqa: N806
+ FsuTestN4 = 630.0 * MPa # noqa: N806
+ epsuTestN4 = 0.10 # noqa: N806
+ LgageTestN4 = 200.0 * mm # noqa: N806
+ Es = 200.0 * GPa # noqa: N806
+ FyPosN4 = FyTestN4 # noqa: N806
+ epsyPosN4 = FyPosN4 / Es # noqa: N806
+ FyNegN4 = FyTestN4 # noqa: N806
+ epsyNegN4 = FyNegN4 / Es # noqa: N806
+ FsuPosN4 = FsuTestN4 # noqa: N806
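+ # Regularized ultimate strain: the coupon strain (gage length LgageTestN4) is scaled
+ # to the plastic-hinge length phl so the steel softening is regularized to the hinge.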
+ epsuPosN4 = epsyPosN4 + LgageTestN4 / phl * (epsuTestN4 - epsyPosN4) # noqa: N806
+ bPosN4 = (FsuPosN4 - FyPosN4) / (Es * (epsuPosN4 - epsyPosN4)) # noqa: N806
+ epsuNegN4 = min(-epsccu, epsuPosN4) # noqa: N806
+ bNegN4 = bPosN4 # noqa: N806
# FsuNegN4 = FsuTestN4
- FsuNegN4 = FyNegN4 + bNegN4 * (Es * (epsuNegN4 - epsyNegN4))
- FsrPosN4 = 0.2 * FyPosN4
- epsrPosN4 = (FsuPosN4 - FsrPosN4) / Es + 1.05 * epsuPosN4
- FsrNegN4 = 0.2 * FsuNegN4
- epsrNegN4 = (FsuNegN4 - FsrNegN4) / Es + 1.05 * epsuNegN4
- pinchX = 0.2
- pinchY = 0.8
- damage1 = 0.0
- damage2 = 0.0
- beta = 0.0
+ FsuNegN4 = FyNegN4 + bNegN4 * (Es * (epsuNegN4 - epsyNegN4)) # noqa: N806
+ FsrPosN4 = 0.2 * FyPosN4 # noqa: N806
+ epsrPosN4 = (FsuPosN4 - FsrPosN4) / Es + 1.05 * epsuPosN4 # noqa: N806, F841
+ FsrNegN4 = 0.2 * FsuNegN4 # noqa: N806
+ epsrNegN4 = (FsuNegN4 - FsrNegN4) / Es + 1.05 * epsuNegN4 # noqa: N806, F841
+ pinchX = 0.2 # noqa: N806, F841
+ pinchY = 0.8 # noqa: N806, F841
+ damage1 = 0.0 # noqa: F841
+ damage2 = 0.0 # noqa: F841
+ beta = 0.0 # noqa: F841
# op.uniaxialMaterial('Hysteretic', Ele.EleTag * 6 + 4 + pos, FyPosN4, epsyPosN4, FsuPosN4, epsuPosN4
# , FsrPosN4, epsrPosN4, -FyNegN4, -epsyNegN4, -FsuNegN4, -epsuNegN4, -FsrNegN4
# , -epsrNegN4, pinchX, pinchY, damage1, damage2, beta)
- SteelN4Mat = Ele.EleTag * 6 + 4 + pos
- SteelMPFTag = 1e6 * SteelN4Mat
- R0 = 20.0
- cR1 = 0.925
- cR2 = 0.15
+ SteelN4Mat = Ele.EleTag * 6 + 4 + pos # noqa: N806
+ SteelMPFTag = 1e6 * SteelN4Mat # noqa: N806
+ R0 = 20.0 # noqa: N806
+ cR1 = 0.925 # noqa: N806
+ cR2 = 0.15 # noqa: N806
a1 = 0.0
a2 = 1.0
a3 = 0.0
a4 = 0.0
- print('SteelMPF', int(SteelMPFTag), FyPosN4, FyNegN4, Es, bPosN4, bNegN4, R0, cR1, cR2, a1, a2, a3, a4)
- op.uniaxialMaterial('SteelMPF', SteelMPFTag, FyPosN4, FyNegN4, Es, bPosN4, bNegN4, R0, cR1, cR2, a1, a2, a3, a4)
- outputLogger.add_array(['uniaxialMaterial','SteelMPF', int(SteelMPFTag), FyPosN4, FyNegN4, Es, bPosN4, bNegN4, R0, cR1, cR2, a1, a2, a3, a4])
-
- print('MinMax', int(SteelN4Mat), int(SteelMPFTag), '-min', -epsuNegN4, '-max', epsuPosN4)
- op.uniaxialMaterial('MinMax', SteelN4Mat, SteelMPFTag, '-min', -epsuNegN4, '-max', epsuPosN4)
- outputLogger.add_array(['uniaxialMaterial','MinMax', int(SteelN4Mat), int(SteelMPFTag), '-min', -epsuNegN4, '-max', epsuPosN4])
+ print( # noqa: T201
+ 'SteelMPF',
+ int(SteelMPFTag),
+ FyPosN4,
+ FyNegN4,
+ Es,
+ bPosN4,
+ bNegN4,
+ R0,
+ cR1,
+ cR2,
+ a1,
+ a2,
+ a3,
+ a4,
+ )
+ op.uniaxialMaterial(
+ 'SteelMPF',
+ SteelMPFTag,
+ FyPosN4,
+ FyNegN4,
+ Es,
+ bPosN4,
+ bNegN4,
+ R0,
+ cR1,
+ cR2,
+ a1,
+ a2,
+ a3,
+ a4,
+ )
+ outputLogger.add_array(
+ [
+ 'uniaxialMaterial',
+ 'SteelMPF',
+ int(SteelMPFTag),
+ FyPosN4,
+ FyNegN4,
+ Es,
+ bPosN4,
+ bNegN4,
+ R0,
+ cR1,
+ cR2,
+ a1,
+ a2,
+ a3,
+ a4,
+ ]
+ )
+
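+ # Wrap the SteelMPF material in MinMax so the fiber loses strength once the strain
+ # exceeds the regularized ultimate values in tension or compression.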
+ print( # noqa: T201
+ 'MinMax',
+ int(SteelN4Mat),
+ int(SteelMPFTag),
+ '-min',
+ -epsuNegN4,
+ '-max',
+ epsuPosN4,
+ )
+ op.uniaxialMaterial(
+ 'MinMax',
+ SteelN4Mat,
+ SteelMPFTag,
+ '-min',
+ -epsuNegN4,
+ '-max',
+ epsuPosN4,
+ )
+ outputLogger.add_array(
+ [
+ 'uniaxialMaterial',
+ 'MinMax',
+ int(SteelN4Mat),
+ int(SteelMPFTag),
+ '-min',
+ -epsuNegN4,
+ '-max',
+ epsuPosN4,
+ ]
+ )
# Function: Parameters of non-regularized confined concrete
- def con_conf(b, h, nsB, nsH, sst):
+ def con_conf(b, h, nsB, nsH, sst): # noqa: N803
fpc = -fc
- bcx = h - 2. * cover - dst
- bcy = b - 2. * cover - dst
- Asx = nsB * Ast
- Asy = nsH * Ast
- Asvt = Asx + Asy
+ bcx = h - 2.0 * cover - dst
+ bcy = b - 2.0 * cover - dst
+ Asx = nsB * Ast # noqa: N806
+ Asy = nsH * Ast # noqa: N806
+ Asvt = Asx + Asy # noqa: N806
flx = Asvt * fy / sst / bcx
fly = Asvt * fy / sst / bcy
slx = bcx / (nsB - 1)
sly = bcy / (nsH - 1)
- k2x = min(0.26 * sqrt((bcx / sst) * (bcx / slx) * (1000. / flx)), 1)
- k2y = min(0.26 * sqrt((bcy / sst) * (bcy / sly) * (1000. / fly)), 1)
- flex = k2x * flx
- fley = k2y * fly
- fle = (flex * bcx + fley * bcy) / (bcx + bcy)
- k1 = 6.7 * (fle / 1000.) ** (-0.17)
- fcc = fc + k1 * fle
+ k2x = min(0.26 * sqrt((bcx / sst) * (bcx / slx) * (1000.0 / flx)), 1)
+ k2y = min(0.26 * sqrt((bcy / sst) * (bcy / sly) * (1000.0 / fly)), 1)
+ fl_ex = k2x * flx
+ fl_ey = k2y * fly
+ fl_e = (fl_ex * bcx + fl_ey * bcy) / (bcx + bcy)
+ k1 = 6.7 * (fl_e / 1000.0) ** (-0.17)
+ fcc = fc + k1 * fl_e
fpcc = -fcc
- K = k1 * fle / fc
- epscc0 = eo1 * (1. + 5. * K)
+ K = k1 * fl_e / fc # noqa: N806
+ epscc0 = eo1 * (1.0 + 5.0 * K)
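+ # Non-regularized descending branch: e85 is the strain at 85% of the confined strength,
+ # increasing with the volumetric transverse-steel ratio rov computed below.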
rov = Asvt / sst / (bcx + bcy)
e85 = 260 * rov * epscc0 + eo85
epsccu = (e85 - epscc0) * (0.2 * fcc - fcc) / (0.85 * fcc - fcc) + epscc0
fccu = 0.2 * fpcc
- lambdaC = 0.10
+ lambdaC = 0.10 # noqa: N806
ft = 0.33 * sqrt(-fpc * MPa)
- Ets = ft / 0.002
+ Ets = ft / 0.002 # noqa: N806
return fpcc, epscc0, fccu, epsccu, lambdaC, ft, Ets
# Function: Parameters of non-regularized steel
def steel_mat():
- FyTestN4 = 490.0 * MPa
- FsuTestN4 = 630.0 * MPa
- epsuTestN4 = 0.10
- LgageTestN4 = phl
- Es = 200.0 * GPa
- FyPosN4 = FyTestN4
- epsyPosN4 = FyPosN4 / Es
- FyNegN4 = FyTestN4
- epsyNegN4 = FyNegN4 / Es
- FsuPosN4 = FsuTestN4
- epsuPosN4 = epsyPosN4 + LgageTestN4 / phl * (epsuTestN4 - epsyPosN4)
- bPosN4 = (FsuPosN4 - FyPosN4) / (Es * (epsuPosN4 - epsyPosN4))
- epsuNegN4 = min(-epsccu, epsuPosN4)
- bNegN4 = bPosN4
+ FyTestN4 = 490.0 * MPa # noqa: N806
+ FsuTestN4 = 630.0 * MPa # noqa: N806
+ epsuTestN4 = 0.10 # noqa: N806
+ LgageTestN4 = phl # noqa: N806
+ Es = 200.0 * GPa # noqa: N806
+ FyPosN4 = FyTestN4 # noqa: N806
+ epsyPosN4 = FyPosN4 / Es # noqa: N806
+ FyNegN4 = FyTestN4 # noqa: N806
+ epsyNegN4 = FyNegN4 / Es # noqa: N806
+ FsuPosN4 = FsuTestN4 # noqa: N806
+ epsuPosN4 = epsyPosN4 + LgageTestN4 / phl * (epsuTestN4 - epsyPosN4) # noqa: N806
+ bPosN4 = (FsuPosN4 - FyPosN4) / (Es * (epsuPosN4 - epsyPosN4)) # noqa: N806
+ epsuNegN4 = min(-epsccu, epsuPosN4) # noqa: N806
+ bNegN4 = bPosN4 # noqa: N806
# FsuNegN4 = FsuTestN4
- FsuNegN4 = FyNegN4 + bNegN4 * (Es * (epsuNegN4 - epsyNegN4))
- FsrPosN4 = 0.2 * FyPosN4
- epsrPosN4 = (FsuPosN4 - FsrPosN4) / Es + 1.05 * epsuPosN4
- FsrNegN4 = 0.2 * FsuNegN4
- epsrNegN4 = (FsuNegN4 - FsrNegN4) / Es + 1.05 * epsuNegN4
- pinchX = 0.2
- pinchY = 0.8
- damage1 = 0.0
- damage2 = 0.0
- beta = 0.0
+ FsuNegN4 = FyNegN4 + bNegN4 * (Es * (epsuNegN4 - epsyNegN4)) # noqa: N806
+ FsrPosN4 = 0.2 * FyPosN4 # noqa: N806
+ epsrPosN4 = (FsuPosN4 - FsrPosN4) / Es + 1.05 * epsuPosN4 # noqa: N806, F841
+ FsrNegN4 = 0.2 * FsuNegN4 # noqa: N806
+ epsrNegN4 = (FsuNegN4 - FsrNegN4) / Es + 1.05 * epsuNegN4 # noqa: N806, F841
+ pinchX = 0.2 # noqa: N806, F841
+ pinchY = 0.8 # noqa: N806, F841
+ damage1 = 0.0 # noqa: F841
+ damage2 = 0.0 # noqa: F841
+ beta = 0.0 # noqa: F841
# op.uniaxialMaterial('Hysteretic', Ele.EleTag * 6 + 4 + pos, FyPosN4, epsyPosN4, FsuPosN4, epsuPosN4
# , FsrPosN4, epsrPosN4, -FyNegN4, -epsyNegN4, -FsuNegN4, -epsuNegN4, -FsrNegN4
# , -epsrNegN4, pinchX, pinchY, damage1, damage2, beta)
- SteelN4Mat = Ele.EleTag * 6 + 4 + pos
- SteelMPFTag = 1e6 * SteelN4Mat
- R0 = 20.0
- cR1 = 0.925
- cR2 = 0.15
+ SteelN4Mat = Ele.EleTag * 6 + 4 + pos # noqa: N806
+ SteelMPFTag = 1e6 * SteelN4Mat # noqa: N806
+ R0 = 20.0 # noqa: N806
+ cR1 = 0.925 # noqa: N806
+ cR2 = 0.15 # noqa: N806
a1 = 0.0
a2 = 1.0
a3 = 0.0
a4 = 0.0
- print('SteelMPF', int(SteelMPFTag), FyPosN4, FyNegN4, Es, bPosN4, bNegN4, R0, cR1, cR2, a1, a2, a3, a4)
- op.uniaxialMaterial('SteelMPF', SteelMPFTag, FyPosN4, FyNegN4, Es, bPosN4, bNegN4, R0, cR1, cR2, a1, a2, a3, a4)
- outputLogger.add_array(['uniaxialMaterial','SteelMPF', int(SteelMPFTag), FyPosN4, FyNegN4, Es, bPosN4, bNegN4, R0, cR1, cR2, a1, a2, a3, a4])
-
- print('MinMax', int(SteelN4Mat), int(SteelMPFTag), '-min', -epsuNegN4, '-max', epsuPosN4)
- op.uniaxialMaterial('MinMax', SteelN4Mat, SteelMPFTag, '-min', -epsuNegN4, '-max', epsuPosN4)
- outputLogger.add_array(['uniaxialMaterial','MinMax', int(SteelN4Mat), int(SteelMPFTag), '-min', -epsuNegN4, '-max', epsuPosN4])
+ print( # noqa: T201
+ 'SteelMPF',
+ int(SteelMPFTag),
+ FyPosN4,
+ FyNegN4,
+ Es,
+ bPosN4,
+ bNegN4,
+ R0,
+ cR1,
+ cR2,
+ a1,
+ a2,
+ a3,
+ a4,
+ )
+ op.uniaxialMaterial(
+ 'SteelMPF',
+ SteelMPFTag,
+ FyPosN4,
+ FyNegN4,
+ Es,
+ bPosN4,
+ bNegN4,
+ R0,
+ cR1,
+ cR2,
+ a1,
+ a2,
+ a3,
+ a4,
+ )
+ outputLogger.add_array(
+ [
+ 'uniaxialMaterial',
+ 'SteelMPF',
+ int(SteelMPFTag),
+ FyPosN4,
+ FyNegN4,
+ Es,
+ bPosN4,
+ bNegN4,
+ R0,
+ cR1,
+ cR2,
+ a1,
+ a2,
+ a3,
+ a4,
+ ]
+ )
+
+ print( # noqa: T201
+ 'MinMax',
+ int(SteelN4Mat),
+ int(SteelMPFTag),
+ '-min',
+ -epsuNegN4,
+ '-max',
+ epsuPosN4,
+ )
+ op.uniaxialMaterial(
+ 'MinMax',
+ SteelN4Mat,
+ SteelMPFTag,
+ '-min',
+ -epsuNegN4,
+ '-max',
+ epsuPosN4,
+ )
+ outputLogger.add_array(
+ [
+ 'uniaxialMaterial',
+ 'MinMax',
+ int(SteelN4Mat),
+ int(SteelMPFTag),
+ '-min',
+ -epsuNegN4,
+ '-max',
+ epsuPosN4,
+ ]
+ )
# Function: Creation of fibers in beams
- def fiber_beam(Ast, Asb, pos):
+ def fiber_beam(Ast, Asb, pos): # noqa: N803
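+ # Beam fiber section at hinge 'pos': rectangular concrete patches for the section
+ # plus straight steel layers for the top (Ast) and bottom (Asb) reinforcement.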
op.section('Fiber', Ele.EleTag * 2 + pos)
- op.patch('rect', Ele.EleTag * 6 + pos, 10, 1, -y2 + dp, -z2 + dp, y2 - dp, z2 - dp)
- op.patch('rect', Ele.EleTag * 6 + 2 + pos, 10, 1, -y2 + dp, z2 - dp, y2 - dp, z2)
- op.patch('rect', Ele.EleTag * 6 + 2 + pos, 10, 1, -y2 + dp, -z2, y2 - dp, -z2 + dp)
+ op.patch(
+ 'rect',
+ Ele.EleTag * 6 + pos,
+ 10,
+ 1,
+ -y2 + dp,
+ -z2 + dp,
+ y2 - dp,
+ z2 - dp,
+ )
+ op.patch(
+ 'rect',
+ Ele.EleTag * 6 + 2 + pos,
+ 10,
+ 1,
+ -y2 + dp,
+ z2 - dp,
+ y2 - dp,
+ z2,
+ )
+ op.patch(
+ 'rect',
+ Ele.EleTag * 6 + 2 + pos,
+ 10,
+ 1,
+ -y2 + dp,
+ -z2,
+ y2 - dp,
+ -z2 + dp,
+ )
op.patch('rect', Ele.EleTag * 6 + 2 + pos, 2, 1, -y2, -z2, -y2 + dp, z2)
op.patch('rect', Ele.EleTag * 6 + 2 + pos, 2, 1, y2 - dp, -z2, y2, z2)
- print('BeamL',Ele.EleTag * 6 + 4 + pos, 1, Ast, y2 - dp, z2 - dp, y2 - dp, -z2 + dp)
- op.layer('straight', Ele.EleTag * 6 + 4 + pos, 1, Ast, y2 - dp, z2 - dp, y2 - dp, -z2 + dp)
- print('BeamR',Ele.EleTag * 6 + 4 + pos, 1, Asb, -y2 + dp, z2 - dp, -y2 + dp, -z2 + dp)
- op.layer('straight', Ele.EleTag * 6 + 4 + pos, 1, Asb, -y2 + dp, z2 - dp, -y2 + dp, -z2 + dp)
-
+ print( # noqa: T201
+ 'BeamL',
+ Ele.EleTag * 6 + 4 + pos,
+ 1,
+ Ast,
+ y2 - dp,
+ z2 - dp,
+ y2 - dp,
+ -z2 + dp,
+ )
+ op.layer(
+ 'straight',
+ Ele.EleTag * 6 + 4 + pos,
+ 1,
+ Ast,
+ y2 - dp,
+ z2 - dp,
+ y2 - dp,
+ -z2 + dp,
+ )
+ print( # noqa: T201
+ 'BeamR',
+ Ele.EleTag * 6 + 4 + pos,
+ 1,
+ Asb,
+ -y2 + dp,
+ z2 - dp,
+ -y2 + dp,
+ -z2 + dp,
+ )
+ op.layer(
+ 'straight',
+ Ele.EleTag * 6 + 4 + pos,
+ 1,
+ Asb,
+ -y2 + dp,
+ z2 - dp,
+ -y2 + dp,
+ -z2 + dp,
+ )
+
outputLogger.add_line('# Creating fibres in beam')
-
- outputLogger.add_array(['section','Fiber', Ele.EleTag * 2 + pos,'{'])
- outputLogger.add_array(['patch','rect', Ele.EleTag * 6 + pos, 10, 1, -y2 + dp, -z2 + dp, y2 - dp, z2 - dp])
- outputLogger.add_array(['patch','rect', Ele.EleTag * 6 + 2 + pos, 10, 1, -y2 + dp, z2 - dp, y2 - dp, z2])
- outputLogger.add_array(['patch','rect', Ele.EleTag * 6 + 2 + pos, 10, 1, -y2 + dp, -z2, y2 - dp, -z2 + dp])
- outputLogger.add_array(['patch','rect', Ele.EleTag * 6 + 2 + pos, 2, 1, -y2, -z2, -y2 + dp, z2])
- outputLogger.add_array(['patch','rect', Ele.EleTag * 6 + 2 + pos, 2, 1, y2 - dp, -z2, y2, z2])
- outputLogger.add_array(['layer','straight', Ele.EleTag * 6 + 4 + pos, 1, Ast, y2 - dp, z2 - dp, y2 - dp, -z2 + dp])
- outputLogger.add_array(['layer','straight', Ele.EleTag * 6 + 4 + pos, 1, Asb, -y2 + dp, z2 - dp, -y2 + dp, -z2 + dp])
+
+ outputLogger.add_array(['section', 'Fiber', Ele.EleTag * 2 + pos, '{'])
+ outputLogger.add_array(
+ [
+ 'patch',
+ 'rect',
+ Ele.EleTag * 6 + pos,
+ 10,
+ 1,
+ -y2 + dp,
+ -z2 + dp,
+ y2 - dp,
+ z2 - dp,
+ ]
+ )
+ outputLogger.add_array(
+ [
+ 'patch',
+ 'rect',
+ Ele.EleTag * 6 + 2 + pos,
+ 10,
+ 1,
+ -y2 + dp,
+ z2 - dp,
+ y2 - dp,
+ z2,
+ ]
+ )
+ outputLogger.add_array(
+ [
+ 'patch',
+ 'rect',
+ Ele.EleTag * 6 + 2 + pos,
+ 10,
+ 1,
+ -y2 + dp,
+ -z2,
+ y2 - dp,
+ -z2 + dp,
+ ]
+ )
+ outputLogger.add_array(
+ [
+ 'patch',
+ 'rect',
+ Ele.EleTag * 6 + 2 + pos,
+ 2,
+ 1,
+ -y2,
+ -z2,
+ -y2 + dp,
+ z2,
+ ]
+ )
+ outputLogger.add_array(
+ [
+ 'patch',
+ 'rect',
+ Ele.EleTag * 6 + 2 + pos,
+ 2,
+ 1,
+ y2 - dp,
+ -z2,
+ y2,
+ z2,
+ ]
+ )
+ outputLogger.add_array(
+ [
+ 'layer',
+ 'straight',
+ Ele.EleTag * 6 + 4 + pos,
+ 1,
+ Ast,
+ y2 - dp,
+ z2 - dp,
+ y2 - dp,
+ -z2 + dp,
+ ]
+ )
+ outputLogger.add_array(
+ [
+ 'layer',
+ 'straight',
+ Ele.EleTag * 6 + 4 + pos,
+ 1,
+ Asb,
+ -y2 + dp,
+ z2 - dp,
+ -y2 + dp,
+ -z2 + dp,
+ ]
+ )
outputLogger.add_line('}')
validate_data(self)
@@ -1318,128 +1891,384 @@ def fiber_beam(Ast, Asb, pos):
outputLogger.add_line('wipe;')
op.model('Basic', '-ndm', 2, '-ndf', 3)
- outputLogger.add_array(['model','Basic', '-ndm', 2, '-ndf', 3])
+ outputLogger.add_array(['model', 'Basic', '-ndm', 2, '-ndf', 3])
outputLogger.add_line('# Create the nodes')
for node in ListNodes:
op.node(int(node[0]), int(node[1]), int(node[2]))
- outputLogger.add_array(['node',int(node[0]), int(node[1]), int(node[2])])
+ outputLogger.add_array(
+ ['node', int(node[0]), int(node[1]), int(node[2])]
+ )
- if node[2] == 0.:
+ if node[2] == 0.0:
op.fix(int(node[0]), 1, 1, 1)
- outputLogger.add_array(['fix',int(node[0]), 1, 1, 1])
+ outputLogger.add_array(['fix', int(node[0]), 1, 1, 1])
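+ # Rigid-diaphragm behavior: the horizontal DOF of every node on a floor is slaved
+ # to that floor's x = 0 (master) node.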
if node[2] > 0 and node[1] == 0:
- MasterNode = node[0]
+ MasterNode = node[0] # noqa: N806
if node[2] > 0 and node[1] != 0:
op.equalDOF(int(MasterNode), int(node[0]), 1)
- outputLogger.add_array(['equalDOF',int(MasterNode), int(node[0]), 1])
+ outputLogger.add_array(
+ ['equalDOF', int(MasterNode), int(node[0]), 1]
+ )
cover = 4 * cm
dst = 3 / 8 * inch
- Ast = pi * dst ** 2 / 4. # area de la barra del estribo
+ Ast = pi * dst**2 / 4.0 # stirrup bar area # noqa: N806
# creacion de columnas
# HBeam = float(self.ui.HBeam.text())
- HBeam = float(rootSIM["BeamDepth"])
-
+ HBeam = float(rootSIM['BeamDepth']) # noqa: N806
+
# HColi = float(self.ui.HColi.text()) # Column inside Depth
- HColi = float(rootSIM["IntColDepth"])
-
+ HColi = float(rootSIM['IntColDepth']) # noqa: N806
+
# HCole = float(self.ui.HCole.text()) # Column outside Depth
- HCole = float(rootSIM["ExtColDepth"])
+ HCole = float(rootSIM['ExtColDepth']) # noqa: N806
# fy = float(self.ui.fy.text()) * MPa
- fy = float(rootSIM["FySteel"]) * MPa
+ fy = float(rootSIM['FySteel']) * MPa
+
+ Es = 200.0 * GPa # noqa: N806, F841
- Es = 200.0 * GPa
-
# fcB = float(self.ui.fcB.text()) * MPa
- fcB = float(rootSIM["BeamFpc"]) * MPa
+ fcB = float(rootSIM['BeamFpc']) * MPa # noqa: N806
# fcC = float(self.ui.fcC.text()) * MPa
- fcC = float(rootSIM["ColFpc"]) * MPa
-
+ fcC = float(rootSIM['ColFpc']) * MPa # noqa: N806
+
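+ # Geometric transformations with joint offsets of half the beam/column depths to
+ # represent rigid end zones at the joints (P-Delta for columns, corotational for beams).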
op.geomTransf('PDelta', 1, '-jntOffset', 0, 0, 0, -HBeam / 2)
op.geomTransf('PDelta', 2, '-jntOffset', 0, HBeam / 2, 0, -HBeam / 2)
- op.geomTransf('Corotational', 3, '-jntOffset', HColi / 2., 0, -HColi / 2., 0)
- op.geomTransf('Corotational', 4, '-jntOffset', HCole / 2., 0, -HColi / 2., 0)
- op.geomTransf('Corotational', 5, '-jntOffset', HColi / 2., 0, -HCole / 2., 0)
-
+ op.geomTransf(
+ 'Corotational', 3, '-jntOffset', HColi / 2.0, 0, -HColi / 2.0, 0
+ )
+ op.geomTransf(
+ 'Corotational', 4, '-jntOffset', HCole / 2.0, 0, -HColi / 2.0, 0
+ )
+ op.geomTransf(
+ 'Corotational', 5, '-jntOffset', HColi / 2.0, 0, -HCole / 2.0, 0
+ )
+
outputLogger.add_line('# Define the geometric transformations')
- outputLogger.add_array(['geomTransf','PDelta', 1, '-jntOffset', 0, 0, 0, -HBeam / 2])
- outputLogger.add_array(['geomTransf','PDelta', 2, '-jntOffset', 0, HBeam / 2, 0, -HBeam / 2])
- outputLogger.add_array(['geomTransf','Corotational', 3, '-jntOffset', HColi / 2., 0, -HColi / 2., 0])
- outputLogger.add_array(['geomTransf','Corotational', 4, '-jntOffset', HCole / 2., 0, -HColi / 2., 0])
- outputLogger.add_array(['geomTransf','Corotational', 5, '-jntOffset', HColi / 2., 0, -HCole / 2., 0])
+ outputLogger.add_array(
+ ['geomTransf', 'PDelta', 1, '-jntOffset', 0, 0, 0, -HBeam / 2]
+ )
+ outputLogger.add_array(
+ ['geomTransf', 'PDelta', 2, '-jntOffset', 0, HBeam / 2, 0, -HBeam / 2]
+ )
+ outputLogger.add_array(
+ [
+ 'geomTransf',
+ 'Corotational',
+ 3,
+ '-jntOffset',
+ HColi / 2.0,
+ 0,
+ -HColi / 2.0,
+ 0,
+ ]
+ )
+ outputLogger.add_array(
+ [
+ 'geomTransf',
+ 'Corotational',
+ 4,
+ '-jntOffset',
+ HCole / 2.0,
+ 0,
+ -HColi / 2.0,
+ 0,
+ ]
+ )
+ outputLogger.add_array(
+ [
+ 'geomTransf',
+ 'Corotational',
+ 5,
+ '-jntOffset',
+ HColi / 2.0,
+ 0,
+ -HCole / 2.0,
+ 0,
+ ]
+ )
EleCol = []
EleBeam = []
- for Ele in Elements:
+ for Ele in Elements: # noqa: N806
if ListNodes[Ele.Nod_ini, 1] == ListNodes[Ele.Nod_end, 1]:
EleCol.append(Ele)
else:
EleBeam.append(Ele)
-
- platicHingeOpt = int(rootSIM["PlasticHingeOpt"])
- includeRegularization = bool(rootSIM["IncludeRegularization"])
-
+
+ platicHingeOpt = int(rootSIM['PlasticHingeOpt']) # noqa: N806
+ includeRegularization = bool(rootSIM['IncludeRegularization']) # noqa: N806
+
# print("platicHingeOpt",platicHingeOpt)
# print("includeRegularization",includeRegularization)
# Creation of non-linear elements (beams and columns)
- eo1, eo85, eo20, lambdaU = -0.002, -0.0038, -0.006, 0.1
- for (Ele, DC) in zip(EleCol, DataColDesing):
-
- outputLogger.add_line('# Creating materials and elements for column '+str(DC.EleTag))
-
- fc, Ec = fcC, Ele.EcEle
+ eo1, eo85, eo20, lambdaU = -0.002, -0.0038, -0.006, 0.1 # noqa: N806
+ for Ele, DC in zip(EleCol, DataColDesing): # noqa: N806
+ outputLogger.add_line(
+ '# Creating materials and elements for column ' + str(DC.EleTag)
+ )
+
+ fc, Ec = fcC, Ele.EcEle # noqa: N806
if platicHingeOpt == 1:
phl = 0.5 * DC.h
- elif platicHingeOpt == 2:
+ elif platicHingeOpt == 2: # noqa: PLR2004
phl = 0.08 * Ele.LEle + 0.022 * fy / MPa * DC.db / mm
- elif platicHingeOpt == 3:
+ elif platicHingeOpt == 3: # noqa: PLR2004
phl = 0.05 * Ele.LEle + 0.1 * fy / MPa * DC.db / mm / sqrt(fc * MPa)
-
- if includeRegularization == True:
- fpc, epsc0, fcu, epscu, lambdaU, ft, Ets = con_inconf_regu()
- print('Concrete02', Ele.EleTag * 6, fpc, epsc0, fcu, epscu, lambdaU, ft, Ets)
- op.uniaxialMaterial('Concrete02', Ele.EleTag * 6, fpc, epsc0, fcu, epscu, lambdaU, ft, Ets)
- op.uniaxialMaterial('Concrete02', Ele.EleTag * 6 + 1, fpc, epsc0, fcu, epscu, lambdaU, ft, Ets)
-
- outputLogger.add_array(['uniaxialMaterial','Concrete02', Ele.EleTag * 6, fpc, epsc0, fcu, epscu, lambdaU, ft, Ets])
- outputLogger.add_array(['uniaxialMaterial','Concrete02', Ele.EleTag * 6 + 1, fpc, epsc0, fcu, epscu, lambdaU, ft, Ets])
-
- fpcc, epscc0, fccu, epsccu, lambdaC, ft, Ets = con_conf_regu(DC.b, DC.h, DC.nsB, DC.nsH, DC.sst)
- op.uniaxialMaterial('Concrete02', Ele.EleTag * 6 + 2, fpcc, epscc0, fccu, epsccu, lambdaC, ft, Ets)
- op.uniaxialMaterial('Concrete02', Ele.EleTag * 6 + 3, fpcc, epscc0, fccu, epsccu, lambdaC, ft, Ets)
-
- outputLogger.add_array(['uniaxialMaterial','Concrete02', Ele.EleTag * 6 + 2, fpcc, epscc0, fccu, epsccu, lambdaC, ft, Ets])
- outputLogger.add_array(['uniaxialMaterial','Concrete02', Ele.EleTag * 6 + 3, fpcc, epscc0, fccu, epsccu, lambdaC, ft, Ets])
-
+
+ if includeRegularization == True: # noqa: E712
+ fpc, epsc0, fcu, epscu, lambdaU, ft, Ets = con_inconf_regu() # noqa: N806
+ print( # noqa: T201
+ 'Concrete02',
+ Ele.EleTag * 6,
+ fpc,
+ epsc0,
+ fcu,
+ epscu,
+ lambdaU,
+ ft,
+ Ets,
+ )
+ op.uniaxialMaterial(
+ 'Concrete02',
+ Ele.EleTag * 6,
+ fpc,
+ epsc0,
+ fcu,
+ epscu,
+ lambdaU,
+ ft,
+ Ets,
+ )
+ op.uniaxialMaterial(
+ 'Concrete02',
+ Ele.EleTag * 6 + 1,
+ fpc,
+ epsc0,
+ fcu,
+ epscu,
+ lambdaU,
+ ft,
+ Ets,
+ )
+
+ outputLogger.add_array(
+ [
+ 'uniaxialMaterial',
+ 'Concrete02',
+ Ele.EleTag * 6,
+ fpc,
+ epsc0,
+ fcu,
+ epscu,
+ lambdaU,
+ ft,
+ Ets,
+ ]
+ )
+ outputLogger.add_array(
+ [
+ 'uniaxialMaterial',
+ 'Concrete02',
+ Ele.EleTag * 6 + 1,
+ fpc,
+ epsc0,
+ fcu,
+ epscu,
+ lambdaU,
+ ft,
+ Ets,
+ ]
+ )
+
+ fpcc, epscc0, fccu, epsccu, lambdaC, ft, Ets = con_conf_regu( # noqa: N806
+ DC.b, DC.h, DC.nsB, DC.nsH, DC.sst
+ )
+ op.uniaxialMaterial(
+ 'Concrete02',
+ Ele.EleTag * 6 + 2,
+ fpcc,
+ epscc0,
+ fccu,
+ epsccu,
+ lambdaC,
+ ft,
+ Ets,
+ )
+ op.uniaxialMaterial(
+ 'Concrete02',
+ Ele.EleTag * 6 + 3,
+ fpcc,
+ epscc0,
+ fccu,
+ epsccu,
+ lambdaC,
+ ft,
+ Ets,
+ )
+
+ outputLogger.add_array(
+ [
+ 'uniaxialMaterial',
+ 'Concrete02',
+ Ele.EleTag * 6 + 2,
+ fpcc,
+ epscc0,
+ fccu,
+ epsccu,
+ lambdaC,
+ ft,
+ Ets,
+ ]
+ )
+ outputLogger.add_array(
+ [
+ 'uniaxialMaterial',
+ 'Concrete02',
+ Ele.EleTag * 6 + 3,
+ fpcc,
+ epscc0,
+ fccu,
+ epsccu,
+ lambdaC,
+ ft,
+ Ets,
+ ]
+ )
+
pos = 0
steel_mat_regu()
pos = 1
steel_mat_regu()
# No regularization
- else :
+ else:
ft = 0.33 * sqrt(fcC * MPa)
- Ets = ft / 0.002
- print('Concrete02', Ele.EleTag * 6, -fcC, eo1, -0.2 * fcC, eo20, lambdaU, ft, Ets)
- op.uniaxialMaterial('Concrete02', Ele.EleTag * 6, -fcC, eo1, -0.2 * fcC, eo20, lambdaU, ft, Ets)
- op.uniaxialMaterial('Concrete02', Ele.EleTag * 6 + 1, -fcC, eo1, -0.2 * fcC, eo20, lambdaU, ft, Ets)
-
- outputLogger.add_array(['uniaxialMaterial','Concrete02', Ele.EleTag * 6, -fcC, eo1, -0.2 * fcC, eo20, lambdaU, ft, Ets])
- outputLogger.add_array(['uniaxialMaterial','Concrete02', Ele.EleTag * 6 + 1, -fcC, eo1, -0.2 * fcC, eo20, lambdaU, ft, Ets])
-
- fpcc, epscc0, fccu, epsccu, lambdaC, ft, Ets = con_conf(DC.b, DC.h, DC.nsB, DC.nsH, DC.sst)
- op.uniaxialMaterial('Concrete02', Ele.EleTag * 6 + 2, fpcc, epscc0, fccu, epsccu, lambdaC, ft, Ets)
- op.uniaxialMaterial('Concrete02', Ele.EleTag * 6 + 3, fpcc, epscc0, fccu, epsccu, lambdaC, ft, Ets)
-
- outputLogger.add_array(['uniaxialMaterial','Concrete02', Ele.EleTag * 6 + 2, fpcc, epscc0, fccu, epsccu, lambdaC, ft, Ets])
- outputLogger.add_array(['uniaxialMaterial','Concrete02', Ele.EleTag * 6 + 3, fpcc, epscc0, fccu, epsccu, lambdaC, ft, Ets])
-
+ Ets = ft / 0.002 # noqa: N806
+ print( # noqa: T201
+ 'Concrete02',
+ Ele.EleTag * 6,
+ -fcC,
+ eo1,
+ -0.2 * fcC,
+ eo20,
+ lambdaU,
+ ft,
+ Ets,
+ )
+ op.uniaxialMaterial(
+ 'Concrete02',
+ Ele.EleTag * 6,
+ -fcC,
+ eo1,
+ -0.2 * fcC,
+ eo20,
+ lambdaU,
+ ft,
+ Ets,
+ )
+ op.uniaxialMaterial(
+ 'Concrete02',
+ Ele.EleTag * 6 + 1,
+ -fcC,
+ eo1,
+ -0.2 * fcC,
+ eo20,
+ lambdaU,
+ ft,
+ Ets,
+ )
+
+ outputLogger.add_array(
+ [
+ 'uniaxialMaterial',
+ 'Concrete02',
+ Ele.EleTag * 6,
+ -fcC,
+ eo1,
+ -0.2 * fcC,
+ eo20,
+ lambdaU,
+ ft,
+ Ets,
+ ]
+ )
+ outputLogger.add_array(
+ [
+ 'uniaxialMaterial',
+ 'Concrete02',
+ Ele.EleTag * 6 + 1,
+ -fcC,
+ eo1,
+ -0.2 * fcC,
+ eo20,
+ lambdaU,
+ ft,
+ Ets,
+ ]
+ )
+
+ fpcc, epscc0, fccu, epsccu, lambdaC, ft, Ets = con_conf( # noqa: N806
+ DC.b, DC.h, DC.nsB, DC.nsH, DC.sst
+ )
+ op.uniaxialMaterial(
+ 'Concrete02',
+ Ele.EleTag * 6 + 2,
+ fpcc,
+ epscc0,
+ fccu,
+ epsccu,
+ lambdaC,
+ ft,
+ Ets,
+ )
+ op.uniaxialMaterial(
+ 'Concrete02',
+ Ele.EleTag * 6 + 3,
+ fpcc,
+ epscc0,
+ fccu,
+ epsccu,
+ lambdaC,
+ ft,
+ Ets,
+ )
+
+ outputLogger.add_array(
+ [
+ 'uniaxialMaterial',
+ 'Concrete02',
+ Ele.EleTag * 6 + 2,
+ fpcc,
+ epscc0,
+ fccu,
+ epsccu,
+ lambdaC,
+ ft,
+ Ets,
+ ]
+ )
+ outputLogger.add_array(
+ [
+ 'uniaxialMaterial',
+ 'Concrete02',
+ Ele.EleTag * 6 + 3,
+ fpcc,
+ epscc0,
+ fccu,
+ epsccu,
+ lambdaC,
+ ft,
+ Ets,
+ ]
+ )
+
pos = 0
steel_mat()
pos = 1
@@ -1448,95 +2277,416 @@ def fiber_beam(Ast, Asb, pos):
dp = DC.dist[0]
y1 = DC.h / 2.0
z1 = DC.b / 2.0
-
- outputLogger.add_line('# Creating sections and fibres for element ' + str(Ele.EleTag))
-
+
+ outputLogger.add_line(
+ '# Creating sections and fibres for element ' + str(Ele.EleTag)
+ )
+
op.section('Fiber', Ele.EleTag)
- op.patch('rect', Ele.EleTag * 6 + 2, 10, 1, -y1 + dp, -z1 + dp, y1 - dp, z1 - dp)
+ op.patch(
+ 'rect',
+ Ele.EleTag * 6 + 2,
+ 10,
+ 1,
+ -y1 + dp,
+ -z1 + dp,
+ y1 - dp,
+ z1 - dp,
+ )
op.patch('rect', Ele.EleTag * 6, 10, 1, -y1 + dp, z1 - dp, y1 - dp, z1)
op.patch('rect', Ele.EleTag * 6, 10, 1, -y1 + dp, -z1, y1 - dp, -z1 + dp)
op.patch('rect', Ele.EleTag * 6, 2, 1, -y1, -z1, -y1 + dp, z1)
op.patch('rect', Ele.EleTag * 6, 2, 1, y1 - dp, -z1, y1, z1)
-
- outputLogger.add_array(['section','Fiber', int(Ele.EleTag),'{'])
- outputLogger.add_array(['patch','rect', Ele.EleTag * 6 + 2, 10, 1, -y1 + dp, -z1 + dp, y1 - dp, z1 - dp])
- outputLogger.add_array(['patch','rect', Ele.EleTag * 6, 10, 1, -y1 + dp, z1 - dp, y1 - dp, z1])
- outputLogger.add_array(['patch','rect', Ele.EleTag * 6, 10, 1, -y1 + dp, -z1, y1 - dp, -z1 + dp])
- outputLogger.add_array(['patch','rect', Ele.EleTag * 6, 2, 1, -y1, -z1, -y1 + dp, z1])
- outputLogger.add_array(['patch','rect', Ele.EleTag * 6, 2, 1, y1 - dp, -z1, y1, z1])
-
- for dist, As in zip(DC.dist, DC.As):
- print('Col ', Ele.EleTag * 6 + 4, 1, As, -y1 + dist, z1 - dp, -y1 + dist, -z1 + dp)
- op.layer('straight', Ele.EleTag * 6 + 4, 1, As, -y1 + dist, z1 - dp, -y1 + dist, -z1 + dp)
- outputLogger.add_array(['layer','straight', Ele.EleTag * 6 + 4, 1, As, -y1 + dist, z1 - dp, -y1 + dist, -z1 + dp])
+
+ outputLogger.add_array(['section', 'Fiber', int(Ele.EleTag), '{'])
+ outputLogger.add_array(
+ [
+ 'patch',
+ 'rect',
+ Ele.EleTag * 6 + 2,
+ 10,
+ 1,
+ -y1 + dp,
+ -z1 + dp,
+ y1 - dp,
+ z1 - dp,
+ ]
+ )
+ outputLogger.add_array(
+ [
+ 'patch',
+ 'rect',
+ Ele.EleTag * 6,
+ 10,
+ 1,
+ -y1 + dp,
+ z1 - dp,
+ y1 - dp,
+ z1,
+ ]
+ )
+ outputLogger.add_array(
+ [
+ 'patch',
+ 'rect',
+ Ele.EleTag * 6,
+ 10,
+ 1,
+ -y1 + dp,
+ -z1,
+ y1 - dp,
+ -z1 + dp,
+ ]
+ )
+ outputLogger.add_array(
+ ['patch', 'rect', Ele.EleTag * 6, 2, 1, -y1, -z1, -y1 + dp, z1]
+ )
+ outputLogger.add_array(
+ ['patch', 'rect', Ele.EleTag * 6, 2, 1, y1 - dp, -z1, y1, z1]
+ )
+
+ for dist, As in zip(DC.dist, DC.As): # noqa: N806
+ print( # noqa: T201
+ 'Col ',
+ Ele.EleTag * 6 + 4,
+ 1,
+ As,
+ -y1 + dist,
+ z1 - dp,
+ -y1 + dist,
+ -z1 + dp,
+ )
+ op.layer(
+ 'straight',
+ Ele.EleTag * 6 + 4,
+ 1,
+ As,
+ -y1 + dist,
+ z1 - dp,
+ -y1 + dist,
+ -z1 + dp,
+ )
+ outputLogger.add_array(
+ [
+ 'layer',
+ 'straight',
+ Ele.EleTag * 6 + 4,
+ 1,
+ As,
+ -y1 + dist,
+ z1 - dp,
+ -y1 + dist,
+ -z1 + dp,
+ ]
+ )
outputLogger.add_line('}')
- MassDens = Ele.AEle * GConc / g
- op.beamIntegration('HingeRadau', Ele.EleTag, Ele.EleTag, phl, Ele.EleTag, phl, Ele.EleTag)
-
+ MassDens = Ele.AEle * GConc / g # noqa: N806
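+ # Force-based column element with HingeRadau integration: plastic hinges of length
+ # phl at both ends, using the same fiber section for the hinges and the interior.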
+ op.beamIntegration(
+ 'HingeRadau',
+ Ele.EleTag,
+ Ele.EleTag,
+ phl,
+ Ele.EleTag,
+ phl,
+ Ele.EleTag,
+ )
+
# outputLogger.add_array(['beamIntegration','HingeRadau', Ele.EleTag, Ele.EleTag, phl, Ele.EleTag, phl, Ele.EleTag])
-
- op.element('forceBeamColumn', Ele.EleTag, Ele.Nod_ini, Ele.Nod_end, Ele.ElegTr, Ele.EleTag, '-mass', MassDens)
-
- intgrStr = '"HingeRadau' + ' ' + str(Ele.EleTag) + ' ' + str(phl) + ' ' +str(Ele.EleTag) + ' ' + str(phl) + ' ' + str(Ele.EleTag) + '"'
- outputLogger.add_array(['element', 'forceBeamColumn', Ele.EleTag, Ele.Nod_ini, Ele.Nod_end, Ele.ElegTr, intgrStr,'-mass', MassDens])
-
- for (Ele, DB) in zip(EleBeam, DataBeamDesing):
- fc, Ec, nsH = fcB, Ele.EcEle, 2
+
+ op.element(
+ 'forceBeamColumn',
+ Ele.EleTag,
+ Ele.Nod_ini,
+ Ele.Nod_end,
+ Ele.ElegTr,
+ Ele.EleTag,
+ '-mass',
+ MassDens,
+ )
+
+ intgrStr = ( # noqa: N806
+ '"HingeRadau' # noqa: ISC003
+ + ' '
+ + str(Ele.EleTag)
+ + ' '
+ + str(phl)
+ + ' '
+ + str(Ele.EleTag)
+ + ' '
+ + str(phl)
+ + ' '
+ + str(Ele.EleTag)
+ + '"'
+ )
+ outputLogger.add_array(
+ [
+ 'element',
+ 'forceBeamColumn',
+ Ele.EleTag,
+ Ele.Nod_ini,
+ Ele.Nod_end,
+ Ele.ElegTr,
+ intgrStr,
+ '-mass',
+ MassDens,
+ ]
+ )
+
+ for Ele, DB in zip(EleBeam, DataBeamDesing): # noqa: N806
+ fc, Ec, nsH = fcB, Ele.EcEle, 2 # noqa: N806
if platicHingeOpt == 1:
phl1 = 0.5 * DB.h
phl2 = 0.5 * DB.h
- elif platicHingeOpt == 2:
+ elif platicHingeOpt == 2: # noqa: PLR2004
phl1 = 0.08 * Ele.LEle + 0.022 * fy / MPa * DB.db_t1 / mm
phl2 = 0.08 * Ele.LEle + 0.022 * fy / MPa * DB.db_t2 / mm
- elif platicHingeOpt == 3:
- phl1 = 0.05 * Ele.LEle + 0.1 * fy / MPa * DB.db_t1 / mm / sqrt(fc * MPa)
- phl2 = 0.05 * Ele.LEle + 0.1 * fy / MPa * DB.db_t2 / mm / sqrt(fc * MPa)
-
- outputLogger.add_line('# Creating materials and elements for beam '+str(DB.EleTag))
-
- if includeRegularization == True:
+ elif platicHingeOpt == 3: # noqa: PLR2004
+ phl1 = 0.05 * Ele.LEle + 0.1 * fy / MPa * DB.db_t1 / mm / sqrt(
+ fc * MPa
+ )
+ phl2 = 0.05 * Ele.LEle + 0.1 * fy / MPa * DB.db_t2 / mm / sqrt(
+ fc * MPa
+ )
+
+ outputLogger.add_line(
+ '# Creating materials and elements for beam ' + str(DB.EleTag)
+ )
+
+ if includeRegularization == True: # noqa: E712
phl = phl1
- fpc, epsc0, fcu, epscu, lambdaU, ft, Ets = con_inconf_regu()
- op.uniaxialMaterial('Concrete02', Ele.EleTag * 6, fpc, epsc0, fcu, epscu, lambdaU, ft, Ets)
- outputLogger.add_array(['uniaxialMaterial', 'Concrete02', Ele.EleTag * 6, fpc, epsc0, fcu, epscu, lambdaU, ft, Ets])
+ fpc, epsc0, fcu, epscu, lambdaU, ft, Ets = con_inconf_regu() # noqa: N806
+ op.uniaxialMaterial(
+ 'Concrete02',
+ Ele.EleTag * 6,
+ fpc,
+ epsc0,
+ fcu,
+ epscu,
+ lambdaU,
+ ft,
+ Ets,
+ )
+ outputLogger.add_array(
+ [
+ 'uniaxialMaterial',
+ 'Concrete02',
+ Ele.EleTag * 6,
+ fpc,
+ epsc0,
+ fcu,
+ epscu,
+ lambdaU,
+ ft,
+ Ets,
+ ]
+ )
phl = phl2
- fpc, epsc0, fcu, epscu, lambdaU, ft, Ets = con_inconf_regu()
- op.uniaxialMaterial('Concrete02', Ele.EleTag * 6 + 1, fpc, epsc0, fcu, epscu, lambdaU, ft, Ets)
- outputLogger.add_array(['uniaxialMaterial','Concrete02', Ele.EleTag * 6 + 1, fpc, epsc0, fcu, epscu, lambdaU, ft, Ets])
+ fpc, epsc0, fcu, epscu, lambdaU, ft, Ets = con_inconf_regu() # noqa: N806
+ op.uniaxialMaterial(
+ 'Concrete02',
+ Ele.EleTag * 6 + 1,
+ fpc,
+ epsc0,
+ fcu,
+ epscu,
+ lambdaU,
+ ft,
+ Ets,
+ )
+ outputLogger.add_array(
+ [
+ 'uniaxialMaterial',
+ 'Concrete02',
+ Ele.EleTag * 6 + 1,
+ fpc,
+ epsc0,
+ fcu,
+ epscu,
+ lambdaU,
+ ft,
+ Ets,
+ ]
+ )
phl, pos = phl1, 0
- fpcc, epscc0, fccu, epsccu, lambdaC, ft, Ets = con_conf_regu(DB.b, DB.h, DB.ns1, nsH, DB.ss1)
- op.uniaxialMaterial('Concrete02', Ele.EleTag * 6 + 2, fpcc, epscc0, fccu, epsccu, lambdaC, ft, Ets)
- outputLogger.add_array(['uniaxialMaterial','Concrete02', Ele.EleTag * 6 + 2, fpcc, epscc0, fccu, epsccu, lambdaC, ft, Ets])
+ fpcc, epscc0, fccu, epsccu, lambdaC, ft, Ets = con_conf_regu( # noqa: N806
+ DB.b, DB.h, DB.ns1, nsH, DB.ss1
+ )
+ op.uniaxialMaterial(
+ 'Concrete02',
+ Ele.EleTag * 6 + 2,
+ fpcc,
+ epscc0,
+ fccu,
+ epsccu,
+ lambdaC,
+ ft,
+ Ets,
+ )
+ outputLogger.add_array(
+ [
+ 'uniaxialMaterial',
+ 'Concrete02',
+ Ele.EleTag * 6 + 2,
+ fpcc,
+ epscc0,
+ fccu,
+ epsccu,
+ lambdaC,
+ ft,
+ Ets,
+ ]
+ )
steel_mat_regu()
phl, pos = phl2, 1
- fpcc, epscc0, fccu, epsccu, lambdaC, ft, Ets = con_conf_regu(DB.b, DB.h, DB.ns2, nsH, DB.ss2)
- op.uniaxialMaterial('Concrete02', Ele.EleTag * 6 + 3, fpcc, epscc0, fccu, epsccu, lambdaC, ft, Ets)
- outputLogger.add_array(['uniaxialMaterial','Concrete02', Ele.EleTag * 6 + 3, fpcc, epscc0, fccu, epsccu, lambdaC, ft, Ets])
+ fpcc, epscc0, fccu, epsccu, lambdaC, ft, Ets = con_conf_regu( # noqa: N806
+ DB.b, DB.h, DB.ns2, nsH, DB.ss2
+ )
+ op.uniaxialMaterial(
+ 'Concrete02',
+ Ele.EleTag * 6 + 3,
+ fpcc,
+ epscc0,
+ fccu,
+ epsccu,
+ lambdaC,
+ ft,
+ Ets,
+ )
+ outputLogger.add_array(
+ [
+ 'uniaxialMaterial',
+ 'Concrete02',
+ Ele.EleTag * 6 + 3,
+ fpcc,
+ epscc0,
+ fccu,
+ epsccu,
+ lambdaC,
+ ft,
+ Ets,
+ ]
+ )
steel_mat_regu()
# No regularization
else:
ft = 0.33 * sqrt(fcB * MPa)
- Ets = ft / 0.002
- op.uniaxialMaterial('Concrete02', Ele.EleTag * 6, -fcB, eo1, -0.2 * fcB, eo20, lambdaU, ft, Ets)
- op.uniaxialMaterial('Concrete02', Ele.EleTag * 6 + 1, -fcB, eo1, -0.2 * fcB, eo20, lambdaU, ft, Ets)
-
- outputLogger.add_array(['uniaxialMaterial','Concrete02', Ele.EleTag * 6, -fcB, eo1, -0.2 * fcB, eo20, lambdaU, ft, Ets])
- outputLogger.add_array(['uniaxialMaterial','Concrete02', Ele.EleTag * 6 + 1, -fcB, eo1, -0.2 * fcB, eo20, lambdaU, ft, Ets])
-
- fpcc, epscc0, fccu, epsccu, lambdaC, ft, Ets = con_conf(DB.b, DB.h, DB.ns1, nsH, DB.ss1)
- op.uniaxialMaterial('Concrete02', Ele.EleTag * 6 + 2, fpcc, epscc0, fccu, epsccu, lambdaC, ft, Ets)
- outputLogger.add_array(['uniaxialMaterial','Concrete02', Ele.EleTag * 6 + 2, fpcc, epscc0, fccu, epsccu, lambdaC, ft, Ets])
-
- fpcc, epscc0, fccu, epsccu, lambdaC, ft, Ets = con_conf(DB.b, DB.h, DB.ns2, nsH, DB.ss2)
- op.uniaxialMaterial('Concrete02', Ele.EleTag * 6 + 3, fpcc, epscc0, fccu, epsccu, lambdaC, ft, Ets)
- outputLogger.add_array(['uniaxialMaterial','Concrete02', Ele.EleTag * 6 + 3, fpcc, epscc0, fccu, epsccu, lambdaC, ft, Ets])
-
+ Ets = ft / 0.002 # noqa: N806
+ op.uniaxialMaterial(
+ 'Concrete02',
+ Ele.EleTag * 6,
+ -fcB,
+ eo1,
+ -0.2 * fcB,
+ eo20,
+ lambdaU,
+ ft,
+ Ets,
+ )
+ op.uniaxialMaterial(
+ 'Concrete02',
+ Ele.EleTag * 6 + 1,
+ -fcB,
+ eo1,
+ -0.2 * fcB,
+ eo20,
+ lambdaU,
+ ft,
+ Ets,
+ )
+
+ outputLogger.add_array(
+ [
+ 'uniaxialMaterial',
+ 'Concrete02',
+ Ele.EleTag * 6,
+ -fcB,
+ eo1,
+ -0.2 * fcB,
+ eo20,
+ lambdaU,
+ ft,
+ Ets,
+ ]
+ )
+ outputLogger.add_array(
+ [
+ 'uniaxialMaterial',
+ 'Concrete02',
+ Ele.EleTag * 6 + 1,
+ -fcB,
+ eo1,
+ -0.2 * fcB,
+ eo20,
+ lambdaU,
+ ft,
+ Ets,
+ ]
+ )
+
+ fpcc, epscc0, fccu, epsccu, lambdaC, ft, Ets = con_conf( # noqa: N806
+ DB.b, DB.h, DB.ns1, nsH, DB.ss1
+ )
+ op.uniaxialMaterial(
+ 'Concrete02',
+ Ele.EleTag * 6 + 2,
+ fpcc,
+ epscc0,
+ fccu,
+ epsccu,
+ lambdaC,
+ ft,
+ Ets,
+ )
+ outputLogger.add_array(
+ [
+ 'uniaxialMaterial',
+ 'Concrete02',
+ Ele.EleTag * 6 + 2,
+ fpcc,
+ epscc0,
+ fccu,
+ epsccu,
+ lambdaC,
+ ft,
+ Ets,
+ ]
+ )
+
+ fpcc, epscc0, fccu, epsccu, lambdaC, ft, Ets = con_conf( # noqa: N806
+ DB.b, DB.h, DB.ns2, nsH, DB.ss2
+ )
+ op.uniaxialMaterial(
+ 'Concrete02',
+ Ele.EleTag * 6 + 3,
+ fpcc,
+ epscc0,
+ fccu,
+ epsccu,
+ lambdaC,
+ ft,
+ Ets,
+ )
+ outputLogger.add_array(
+ [
+ 'uniaxialMaterial',
+ 'Concrete02',
+ Ele.EleTag * 6 + 3,
+ fpcc,
+ epscc0,
+ fccu,
+ epsccu,
+ lambdaC,
+ ft,
+ Ets,
+ ]
+ )
+
pos = 0
steel_mat()
pos = 1
@@ -1545,34 +2695,84 @@ def fiber_beam(Ast, Asb, pos):
z2 = DB.b / 2.0
dp = DB.h - min(DB.db1, DB.dt1)
pos = 0
-
+
fiber_beam(DB.Ast1, DB.Asb1, pos)
dp = DB.h - min(DB.db2, DB.dt2)
pos = 1
fiber_beam(DB.Ast2, DB.Asb2, pos)
- MassDens = Ele.AEle * GConc / g + WDLS / g
- op.beamIntegration('HingeRadau', Ele.EleTag, Ele.EleTag * 2, phl1, Ele.EleTag * 2 + 1, phl2, Ele.EleTag * 2)
- #outputLogger.add_array(['beamIntegration','HingeRadau', Ele.EleTag, Ele.EleTag * 2, phl1, Ele.EleTag * 2 + 1, phl2, Ele.EleTag * 2])
-
- op.element('forceBeamColumn', Ele.EleTag, Ele.Nod_ini, Ele.Nod_end, Ele.ElegTr, Ele.EleTag, '-mass', MassDens)
-
- intgrStr = '"HingeRadau' + ' ' + str(Ele.EleTag * 2) + ' ' + str(phl1) + ' ' +str(Ele.EleTag * 2 + 1) + ' ' + str(phl2) + ' ' + str( Ele.EleTag * 2) + '"'
- outputLogger.add_array(['element','forceBeamColumn', Ele.EleTag, Ele.Nod_ini, Ele.Nod_end, Ele.ElegTr, intgrStr, '-mass', MassDens])
+ MassDens = Ele.AEle * GConc / g + WDLS / g # noqa: N806
+ op.beamIntegration(
+ 'HingeRadau',
+ Ele.EleTag,
+ Ele.EleTag * 2,
+ phl1,
+ Ele.EleTag * 2 + 1,
+ phl2,
+ Ele.EleTag * 2,
+ )
+ # outputLogger.add_array(['beamIntegration','HingeRadau', Ele.EleTag, Ele.EleTag * 2, phl1, Ele.EleTag * 2 + 1, phl2, Ele.EleTag * 2])
+
+ op.element(
+ 'forceBeamColumn',
+ Ele.EleTag,
+ Ele.Nod_ini,
+ Ele.Nod_end,
+ Ele.ElegTr,
+ Ele.EleTag,
+ '-mass',
+ MassDens,
+ )
+
+ intgrStr = ( # noqa: N806
+ '"HingeRadau' # noqa: ISC003
+ + ' '
+ + str(Ele.EleTag * 2)
+ + ' '
+ + str(phl1)
+ + ' '
+ + str(Ele.EleTag * 2 + 1)
+ + ' '
+ + str(phl2)
+ + ' '
+ + str(Ele.EleTag * 2)
+ + '"'
+ )
+ outputLogger.add_array(
+ [
+ 'element',
+ 'forceBeamColumn',
+ Ele.EleTag,
+ Ele.Nod_ini,
+ Ele.Nod_end,
+ Ele.ElegTr,
+ intgrStr,
+ '-mass',
+ MassDens,
+ ]
+ )
list_beams = [Ele.EleTag for Ele in EleBeam]
list_cols = [Ele.EleTag for Ele in EleCol]
- print('list_beams =', list_beams)
- print('list_cols =', list_cols)
-
- print("Model Nonlinear Built")
+ print('list_beams =', list_beams) # noqa: T201
+ print('list_cols =', list_cols) # noqa: T201
+
+ print('Model Nonlinear Built') # noqa: T201
# KZ: gravity analysis
outputLogger.add_array(['timeSeries Linear 1'])
outputLogger.add_array(['pattern Plain 1 Constant {'])
- for Ele in EleCol:
- outputLogger.add_array(['eleLoad -ele {} -type -beamUniform 0 {}'.format(Ele.EleTag, -Ele.AEle*GConc)])
- for Ele in EleBeam:
- outputLogger.add_array(['eleLoad -ele {} -type -beamUniform {}'.format(Ele.EleTag, -Ele.AEle*GConc - WDL)])
+ for Ele in EleCol: # noqa: N806
+ outputLogger.add_array(
+ [
+ f'eleLoad -ele {Ele.EleTag} -type -beamUniform 0 {-Ele.AEle * GConc}'
+ ]
+ )
+ for Ele in EleBeam: # noqa: N806
+ outputLogger.add_array(
+ [
+ f'eleLoad -ele {Ele.EleTag} -type -beamUniform {-Ele.AEle * GConc - WDL}'
+ ]
+ )
outputLogger.add_array(['}'])
outputLogger.add_array(['set Tol 1.0e-6'])
outputLogger.add_array(['constraints Plain'])
@@ -1586,17 +2786,17 @@ def fiber_beam(Ast, Asb, pos):
outputLogger.add_array(['loadConst -time 0.0'])
# KZ: user defined damping
- xi = rootSIM.get('dampingRatio',0.05)
+ xi = rootSIM.get('dampingRatio', 0.05)
# KZ: modes
- if rootSIM.get('Simulation',None) is not None:
+ if rootSIM.get('Simulation', None) is not None:
tmp = rootSIM.get('Simulation')
- mode1 = tmp.get('firstMode',1)
- mode2 = tmp.get('secnondMode',3)
+ mode1 = tmp.get('firstMode', 1)
+ mode2 = tmp.get('secnondMode', 3)
else:
mode1 = 1
mode2 = 3
- outputLogger.add_array(['set nEigenI {}'.format(mode1)])
- outputLogger.add_array(['set nEigenJ {}'.format(mode2)])
+ outputLogger.add_array([f'set nEigenI {mode1}'])
+ outputLogger.add_array([f'set nEigenJ {mode2}'])
outputLogger.add_array(['set lambdaN [eigen [expr $nEigenJ]]'])
outputLogger.add_array(['set lambdaI [lindex $lambdaN [expr $nEigenI-1]]'])
outputLogger.add_array(['set lambdaJ [lindex $lambdaN [expr $nEigenJ-1]]'])
@@ -1604,116 +2804,236 @@ def fiber_beam(Ast, Asb, pos):
outputLogger.add_array(['set lambda2 [expr pow($lambdaJ,0.5)]'])
outputLogger.add_array(['set T1 [expr 2.0*3.14/$lambda1]'])
outputLogger.add_array(['puts "T1 = $T1"'])
- outputLogger.add_array(['set a0 [expr {}*2.0*$lambda1*$lambda2/($lambda1+$lambda2)]'.format(xi)])
- outputLogger.add_array(['set a1 [expr {}*2.0/($lambda1+$lambda2)]'.format(xi)])
+ outputLogger.add_array(
+ [f'set a0 [expr {xi}*2.0*$lambda1*$lambda2/($lambda1+$lambda2)]']
+ )
+ outputLogger.add_array([f'set a1 [expr {xi}*2.0/($lambda1+$lambda2)]'])
outputLogger.add_array(['rayleigh $a0 0.0 $a1 0.0'])
- if preparePushover == False:
+ if preparePushover == False: # noqa: E712
return
- if not os.path.exists("Pushover"):
- os.mkdir("Pushover")
+ if not os.path.exists('Pushover'): # noqa: PTH110
+ os.mkdir('Pushover') # noqa: PTH102
# Recording of forces and deformations from nonlinear analysis
- op.recorder('Element', '-file', 'Pushover/beams_force_1.out',
- '-time', '-ele', *list_beams, 'section', 1, 'force')
- op.recorder('Element', '-file', 'Pushover/beams_def_1.out',
- '-time', '-ele', *list_beams, 'section', 1, 'deformation')
- op.recorder('Element', '-file', 'Pushover/beams_force_6.out',
- '-time', '-ele', *list_beams, 'section', 6, 'force')
- op.recorder('Element', '-file', 'Pushover/beams_def_6.out',
- '-time', '-ele', *list_beams, 'section', 6, 'deformation')
- op.recorder('Element', '-file', 'Pushover/cols_force_1.out',
- '-time', '-ele', *list_cols, 'section', 1, 'force')
- op.recorder('Element', '-file', 'Pushover/cols_def_1.out',
- '-time', '-ele', *list_cols, 'section', 1, 'deformation')
- op.recorder('Element', '-file', 'Pushover/cols_force_6.out',
- '-time', '-ele', *list_cols, 'section', 6, 'force')
- op.recorder('Element', '-file', 'Pushover/cols_def_6.out',
- '-time', '-ele', *list_cols, 'section', 6, 'deformation')
- op.recorder('Node', '-file', 'Pushover/HoriNodes.out',
- '-time', '-node', *ListNodes, '-dof', 1, 'disp')
- op.recorder('Node', '-file', 'Pushover/VertNodes.out',
- '-time', '-node', *ListNodes, '-dof', 2, 'disp')
+ op.recorder(
+ 'Element',
+ '-file',
+ 'Pushover/beams_force_1.out',
+ '-time',
+ '-ele',
+ *list_beams,
+ 'section',
+ 1,
+ 'force',
+ )
+ op.recorder(
+ 'Element',
+ '-file',
+ 'Pushover/beams_def_1.out',
+ '-time',
+ '-ele',
+ *list_beams,
+ 'section',
+ 1,
+ 'deformation',
+ )
+ op.recorder(
+ 'Element',
+ '-file',
+ 'Pushover/beams_force_6.out',
+ '-time',
+ '-ele',
+ *list_beams,
+ 'section',
+ 6,
+ 'force',
+ )
+ op.recorder(
+ 'Element',
+ '-file',
+ 'Pushover/beams_def_6.out',
+ '-time',
+ '-ele',
+ *list_beams,
+ 'section',
+ 6,
+ 'deformation',
+ )
+ op.recorder(
+ 'Element',
+ '-file',
+ 'Pushover/cols_force_1.out',
+ '-time',
+ '-ele',
+ *list_cols,
+ 'section',
+ 1,
+ 'force',
+ )
+ op.recorder(
+ 'Element',
+ '-file',
+ 'Pushover/cols_def_1.out',
+ '-time',
+ '-ele',
+ *list_cols,
+ 'section',
+ 1,
+ 'deformation',
+ )
+ op.recorder(
+ 'Element',
+ '-file',
+ 'Pushover/cols_force_6.out',
+ '-time',
+ '-ele',
+ *list_cols,
+ 'section',
+ 6,
+ 'force',
+ )
+ op.recorder(
+ 'Element',
+ '-file',
+ 'Pushover/cols_def_6.out',
+ '-time',
+ '-ele',
+ *list_cols,
+ 'section',
+ 6,
+ 'deformation',
+ )
+ op.recorder(
+ 'Node',
+ '-file',
+ 'Pushover/HoriNodes.out',
+ '-time',
+ '-node',
+ *ListNodes,
+ '-dof',
+ 1,
+ 'disp',
+ )
+ op.recorder(
+ 'Node',
+ '-file',
+ 'Pushover/VertNodes.out',
+ '-time',
+ '-node',
+ *ListNodes,
+ '-dof',
+ 2,
+ 'disp',
+ )
# Create a Plain load pattern for gravity loading with a Linear TimeSeries
op.timeSeries('Linear', 1)
op.pattern('Plain', 1, 1)
- for Ele in EleCol:
- op.eleLoad('-ele', Ele.EleTag, '-type', '-beamUniform', 0, -Ele.AEle * GConc)
- for Ele in EleBeam:
- op.eleLoad('-ele', Ele.EleTag, '-type', '-beamUniform', -Ele.AEle * GConc - WDL)
-
- Tol = 1.0e-6 # convergence tolerance for test
+ for Ele in EleCol: # noqa: N806
+ op.eleLoad(
+ '-ele', Ele.EleTag, '-type', '-beamUniform', 0, -Ele.AEle * GConc
+ )
+ for Ele in EleBeam: # noqa: N806
+ op.eleLoad(
+ '-ele', Ele.EleTag, '-type', '-beamUniform', -Ele.AEle * GConc - WDL
+ )
+
+ Tol = 1.0e-6 # convergence tolerance for test # noqa: N806
op.constraints('Plain') # how it handles boundary conditions
- op.numberer('Plain') # renumber dof to minimize band-width (optimization), if you want to
- op.system('BandGeneral') # how to store and solve the system of equations in the analysis
- op.test('NormDispIncr', Tol, 100) # determine if convergence has been achieved at the end of an iteration step
- op.algorithm('KrylovNewton') # use Newton solution algorithm: updates tangent stiffness at every iteration
- NstepGravity = 10 # apply gravity in 10 steps
- DGravity = 1. / NstepGravity # first load increment;
- op.integrator('LoadControl', DGravity) # determine the next time step for an analysis
+ op.numberer(
+ 'Plain'
+ ) # renumber dof to minimize band-width (optimization), if you want to
+ op.system(
+ 'BandGeneral'
+ ) # how to store and solve the system of equations in the analysis
+ op.test(
+ 'NormDispIncr', Tol, 100
+ ) # determine if convergence has been achieved at the end of an iteration step
+ op.algorithm(
+ 'KrylovNewton'
+ ) # use Newton solution algorithm: updates tangent stiffness at every iteration
+ NstepGravity = 10 # apply gravity in 10 steps # noqa: N806
+ DGravity = 1.0 / NstepGravity # first load increment; # noqa: N806
+ op.integrator(
+ 'LoadControl', DGravity
+ ) # determine the next time step for an analysis
op.analysis('Static') # define type of analysis static or transient
op.analyze(NstepGravity) # apply gravity
op.loadConst('-time', 0.0)
- #xi = 0.05 # damping ratio
- MpropSwitch = 1.0
- KcurrSwitch = 0.0
- KcommSwitch = 1.0
- KinitSwitch = 0.0
- nEigenI = 1 # mode 1
- nEigenI2 = 2 # mode 2
- nEigenJ = 3 # mode 3
- lambdaN = op.eigen(nEigenJ) # eigenvalue analysis for nEigenJ modes
- lambdaI = lambdaN[nEigenI - 1] # eigenvalue mode i
- lambdaI2 = lambdaN[nEigenI2 - 1] # eigenvalue mode i2
- lambdaJ = lambdaN[nEigenJ - 1] # eigenvalue mode j
- print('lambdaN ', lambdaN)
- omegaI = pow(lambdaI, 0.5)
- omegaI2 = pow(lambdaI2, 0.5)
- omegaJ = pow(lambdaJ, 0.5)
- T1m = 2. * pi / omegaI
- T2m = 2. * pi / omegaI2
-
- print('Ta1=', T1m, 'seg', ' Ta2=', T2m, ' seg')
- alphaM = MpropSwitch * xi * (2. * omegaI * omegaJ) / (omegaI + omegaJ) # M-prop. damping D = alphaM*M
- betaKcurr = KcurrSwitch * 2. * xi / (omegaI + omegaJ) # current-K +beatKcurr*KCurrent
- betaKcomm = KcommSwitch * 2. * xi / (omegaI + omegaJ) # last-committed K +betaKcomm*KlastCommitt
- betaKinit = KinitSwitch * 2. * xi / (omegaI + omegaJ) # initial-K +beatKinit*Kini
+ # xi = 0.05 # damping ratio
+ MpropSwitch = 1.0 # noqa: N806
+ KcurrSwitch = 0.0 # noqa: N806
+ KcommSwitch = 1.0 # noqa: N806
+ KinitSwitch = 0.0 # noqa: N806
+ nEigenI = 1 # mode 1 # noqa: N806
+ nEigenI2 = 2 # mode 2 # noqa: N806
+ nEigenJ = 3 # mode 3 # noqa: N806
+ # eigenvalue analysis for nEigenJ modes
+ lambdaN = op.eigen(nEigenJ) # noqa: N806
+ lambdaI = lambdaN[nEigenI - 1] # eigenvalue mode i # noqa: N806
+ lambdaI2 = lambdaN[nEigenI2 - 1] # eigenvalue mode i2 # noqa: N806
+ lambdaJ = lambdaN[nEigenJ - 1] # eigenvalue mode j # noqa: N806
+ print('lambdaN ', lambdaN) # noqa: T201
+ omegaI = pow(lambdaI, 0.5) # noqa: N806
+ omegaI2 = pow(lambdaI2, 0.5) # noqa: N806
+ omegaJ = pow(lambdaJ, 0.5) # noqa: N806
+ T1m = 2.0 * pi / omegaI
+ T2m = 2.0 * pi / omegaI2
+
+    print('Ta1=', T1m, 'sec', ' Ta2=', T2m, ' sec')  # noqa: T201
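+    # Two-mode Rayleigh damping: for damping ratio xi and modal circular
+    # frequencies omegaI and omegaJ, alphaM = xi*2*omegaI*omegaJ/(omegaI+omegaJ)
+    # scales the mass matrix and betaK = xi*2/(omegaI+omegaJ) scales the chosen
+    # stiffness matrix (current, initial, or last committed); the *Switch flags
+    # select which terms are active.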
+ alphaM = ( # noqa: N806
+ MpropSwitch * xi * (2.0 * omegaI * omegaJ) / (omegaI + omegaJ)
+ ) # M-prop. damping D = alphaM*M
+ betaKcurr = ( # noqa: N806
+ KcurrSwitch * 2.0 * xi / (omegaI + omegaJ)
+    )  # current-K +betaKcurr*KCurrent
+ betaKcomm = ( # noqa: N806
+ KcommSwitch * 2.0 * xi / (omegaI + omegaJ)
+ ) # last-committed K +betaKcomm*KlastCommitt
+ betaKinit = ( # noqa: N806
+ KinitSwitch * 2.0 * xi / (omegaI + omegaJ)
+    )  # initial-K +betaKinit*Kinit
op.rayleigh(alphaM, betaKcurr, betaKinit, betaKcomm) # RAYLEIGH damping
-
# Pushover function
- def Pushover(self, rootSIM):
- def __init__(rootSIM):
+ def Pushover(self, rootSIM): # noqa: C901, D102, N802, N803, PLR0914, PLR0915
+ def __init__(rootSIM): # noqa: N803, N807
self.rootSIM = rootSIM
-
- global cbar
- def singlePush1(dref, mu, ctrlNode, dispDir, nSteps):
- IOflag = 2
- testType = 'RelativeNormDispIncr'
+
+ global cbar # noqa: PLW0602
+
+ def singlePush1(dref, mu, ctrlNode, dispDir, nSteps): # noqa: C901, N802, N803
+ IOflag = 2 # noqa: N806
+ testType = 'RelativeNormDispIncr' # noqa: N806
             # set testType EnergyIncr; # Don't use with Penalty constraints
             # set testType RelativeNormUnbalance; # Don't use with Penalty constraints
             # set testType RelativeNormDispIncr; # Don't use with Lagrange constraints
             # set testType RelativeTotalNormDispIncr; # Don't use with Lagrange constraints
             # set testType RelativeEnergyIncr; # Don't use with Penalty constraints
- tolInit = 1.0e-6 # the initial Tolerance, so it can be referred back to
- iterInit = 50 # the initial Max Number of Iterations
- algorithmType = 'KrylovNewton' # the algorithm type
-
- op.test(testType, tolInit
- , iterInit) # determine if convergence has been achieved at the end of an iteration step
- op.algorithm(algorithmType) # use Newton solution algorithm: updates tangent stiffness at every iteration
+ tolInit = 1.0e-6 # the initial Tolerance, so it can be referred back to # noqa: N806
+ iterInit = 50 # the initial Max Number of Iterations # noqa: N806
+ algorithmType = 'KrylovNewton' # the algorithm type # noqa: N806
+
+ op.test(
+ testType, tolInit, iterInit
+ ) # determine if convergence has been achieved at the end of an iteration step
+ op.algorithm(
+ algorithmType
+ ) # use Newton solution algorithm: updates tangent stiffness at every iteration
disp = dref * mu
- dU = disp / (1.0 * nSteps)
- print('dref ', dref, 'mu ', mu, 'dU ', dU, 'disp ', disp)
- op.integrator('DisplacementControl', ctrlNode, dispDir, dU) # determine the next time step for an analysis
+ dU = disp / (1.0 * nSteps) # noqa: N806
+ print('dref ', dref, 'mu ', mu, 'dU ', dU, 'disp ', disp) # noqa: T201
+ op.integrator(
+ 'DisplacementControl', ctrlNode, dispDir, dU
+ ) # determine the next time step for an analysis
op.analysis('Static') # define type of analysis static or transient
# Print values
if IOflag >= 1:
- print('singlePush: Push ', ctrlNode, ' to ', mu)
+ print('singlePush: Push ', ctrlNode, ' to ', mu) # noqa: T201
# the initial values to start the while loop
ok = 0
@@ -1722,64 +3042,86 @@ def singlePush1(dref, mu, ctrlNode, dispDir, nSteps):
# This feature of disabling the possibility of having a negative loading has been included.
# This has been adapted from a similar script by Prof. Garbaggio
htot = op.nodeCoord(ctrlNode, 2)
- maxDriftPiso = 0.0
- VBasal_v = []
- DriftTecho_v = []
+ maxDriftPiso = 0.0 # noqa: N806
+ VBasal_v = [] # noqa: N806
+ DriftTecho_v = [] # noqa: N806
while step <= nSteps and ok == 0 and loadf > 0:
- #self.ui.progressBar.setValue(100 * step / nSteps)
+ # self.ui.progressBar.setValue(100 * step / nSteps)
ok = op.analyze(1)
loadf = op.getTime()
temp = op.nodeDisp(ctrlNode, dispDir)
# Print the current displacement
- if IOflag >= 2:
- print('Pushed ', ctrlNode, ' in ', dispDir, ' to ', temp, ' with ', loadf, 'step', step)
+ if IOflag >= 2: # noqa: PLR2004
+ print( # noqa: T201
+ 'Pushed ',
+ ctrlNode,
+ ' in ',
+ dispDir,
+ ' to ',
+ temp,
+ ' with ',
+ loadf,
+ 'step',
+ step,
+ )
# If the analysis fails, try the following changes to achieve convergence
# Analysis will be slower in here though...
if ok != 0:
- print('Trying relaxed convergence..')
- op.test(testType, tolInit * 0.01,
- iterInit * 50) # determine if convergence has been achieved at the end of an iteration step
+ print('Trying relaxed convergence..') # noqa: T201
+ op.test(
+ testType, tolInit * 0.01, iterInit * 50
+ ) # determine if convergence has been achieved at the end of an iteration step
ok = op.analyze(1)
- op.test(testType, tolInit,
- iterInit) # determine if convergence has been achieved at the end of an iteration step
+ op.test(
+ testType, tolInit, iterInit
+ ) # determine if convergence has been achieved at the end of an iteration step
if ok != 0:
- print('Trying Newton with initial then current .')
- op.test(testType, tolInit * 0.01,
- iterInit * 50) # determine if convergence has been achieved at the end of an iteration step
+ print('Trying Newton with initial then current .') # noqa: T201
+ op.test(
+ testType, tolInit * 0.01, iterInit * 50
+ ) # determine if convergence has been achieved at the end of an iteration step
op.algorithm('Newton', '-initialThenCurrent')
ok = op.analyze(1)
op.algorithm(algorithmType)
- op.test(testType, tolInit,
- iterInit) # determine if convergence has been achieved at the end of an iteration step
+ op.test(
+ testType, tolInit, iterInit
+ ) # determine if convergence has been achieved at the end of an iteration step
if ok != 0:
- print('Trying ModifiedNewton with initial ..')
- op.test(testType, tolInit * 0.01,
- iterInit * 50) # determine if convergence has been achieved at the end of an iteration step
+ print('Trying ModifiedNewton with initial ..') # noqa: T201
+ op.test(
+ testType, tolInit * 0.01, iterInit * 50
+ ) # determine if convergence has been achieved at the end of an iteration step
op.algorithm('ModifiedNewton', '-initial')
ok = op.analyze(1)
op.algorithm(algorithmType)
- op.test(testType, tolInit,
- iterInit) # determine if convergence has been achieved at the end of an iteration step
+ op.test(
+ testType, tolInit, iterInit
+ ) # determine if convergence has been achieved at the end of an iteration step
if ok != 0:
- print('Trying KrylovNewton ..')
- op.test(testType, tolInit * 0.01,
- iterInit * 50) # determine if convergence has been achieved at the end of an iteration step
+ print('Trying KrylovNewton ..') # noqa: T201
+ op.test(
+ testType, tolInit * 0.01, iterInit * 50
+ ) # determine if convergence has been achieved at the end of an iteration step
op.algorithm('KrylovNewton')
ok = op.analyze(1)
op.algorithm(algorithmType)
- op.test(testType, tolInit,
- iterInit) # determine if convergence has been achieved at the end of an iteration step
+ op.test(
+ testType, tolInit, iterInit
+ ) # determine if convergence has been achieved at the end of an iteration step
if ok != 0:
- print('Perform a Hail Mary ....')
- op.test('FixedNumIter',
- iterInit) # determine if convergence has been achieved at the end of an iteration step
+ print('Perform a Hail Mary ....') # noqa: T201
+ op.test(
+ 'FixedNumIter', iterInit
+ ) # determine if convergence has been achieved at the end of an iteration step
ok = op.analyze(1)
- for (nod_ini, nod_end) in zip(ListNodesDrift[:-1, 0], ListNodesDrift[1:, 0]):
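+                # Calculation of maximum drift between floors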
+ for nod_ini, nod_end in zip(
+ ListNodesDrift[:-1, 0], ListNodesDrift[1:, 0]
+ ):
# print('nod_ini ', nod_ini, 'nod_end', nod_end)
- nod_ini = int(nod_ini)
- nod_end = int(nod_end)
+ nod_ini = int(nod_ini) # noqa: PLW2901
+ nod_end = int(nod_end) # noqa: PLW2901
pos_i = op.nodeCoord(nod_ini, 2)
pos_s = op.nodeCoord(nod_end, 2)
hpiso = pos_s - pos_i
@@ -1787,35 +3129,40 @@ def singlePush1(dref, mu, ctrlNode, dispDir, nSteps):
desp_s = op.nodeDisp(nod_end, 1)
desp_piso = abs(desp_s - desp_i)
drift_piso = desp_piso / hpiso
- if drift_piso >= maxDriftPiso:
- maxDriftPiso = drift_piso
+ maxDriftPiso = max(maxDriftPiso, drift_piso) # noqa: N806
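+                # Base shear: sum of the horizontal reactions at the basal nodes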
- VBasal = 0.
+ VBasal = 0.0 # noqa: N806
op.reactions()
for node in ListNodesBasal:
# print('ind Basal ', node[0])
- VBasal = VBasal + op.nodeReaction(node[0], 1)
- VBasal_v = np.append(VBasal_v, VBasal)
- DriftTecho = op.nodeDisp(ctrlNode, dispDir) / htot
- DriftTecho_v = np.append(DriftTecho_v, DriftTecho)
+ VBasal = VBasal + op.nodeReaction(node[0], 1) # noqa: N806, PLR6104
+ VBasal_v = np.append(VBasal_v, VBasal) # noqa: N806
+ DriftTecho = op.nodeDisp(ctrlNode, dispDir) / htot # noqa: N806
+ DriftTecho_v = np.append(DriftTecho_v, DriftTecho) # noqa: N806
loadf = op.getTime()
step += 1
- maxDriftTecho = dU * step / htot
- maxDriftTecho2 = op.nodeDisp(ctrlNode, dispDir) / htot
+ maxDriftTecho = dU * step / htot # noqa: N806
+ maxDriftTecho2 = op.nodeDisp(ctrlNode, dispDir) / htot # noqa: N806
if ok != 0:
- print('DispControl Analysis FAILED')
+ print('DispControl Analysis FAILED') # noqa: T201
else:
- print('DispControl Analysis SUCCESSFUL')
+ print('DispControl Analysis SUCCESSFUL') # noqa: T201
if loadf <= 0:
- print('Stopped because of Load factor below zero: ', loadf)
+ print('Stopped because of Load factor below zero: ', loadf) # noqa: T201
# if PrintFlag == 0:
# os.remove("singlePush.txt")
# print singlePush.txt
- return maxDriftPiso, maxDriftTecho, maxDriftTecho2, VBasal_v, DriftTecho_v
+ return (
+ maxDriftPiso,
+ maxDriftTecho,
+ maxDriftTecho2,
+ VBasal_v,
+ DriftTecho_v,
+ )
# Pushover function varying tests and algorithms
- def singlePush(dref, mu, ctrlNode, dispDir, nSteps):
+ def singlePush(dref, mu, ctrlNode, dispDir, nSteps): # noqa: C901, N802, N803
# --------------------------------------------------
# Description of Parameters
# --------------------------------------------------
@@ -1826,23 +3173,36 @@ def singlePush(dref, mu, ctrlNode, dispDir, nSteps):
# nSteps: Number of steps.
# IOflag: Option to print details on screen. 2 for print of each step, 1 for basic info (default), 0 for off
# ---------------------------------------------------
- test = {1: 'NormDispIncr', 2: 'RelativeEnergyIncr', 3: 'EnergyIncr',
- 4: 'RelativeNormUnbalance', 5: 'RelativeNormDispIncr',
- 6: 'NormUnbalance', 7: 'FixedNumIter'}
- alg = {1: 'KrylovNewton', 2: 'SecantNewton', 3: 'ModifiedNewton',
- 4: 'RaphsonNewton', 5: 'PeriodicNewton', 6: 'BFGS',
- 7: 'Broyden', 8: 'NewtonLineSearch'}
+ test = {
+ 1: 'NormDispIncr',
+ 2: 'RelativeEnergyIncr',
+ 3: 'EnergyIncr',
+ 4: 'RelativeNormUnbalance',
+ 5: 'RelativeNormDispIncr',
+ 6: 'NormUnbalance',
+ 7: 'FixedNumIter',
+ }
+ alg = {
+ 1: 'KrylovNewton',
+ 2: 'SecantNewton',
+ 3: 'ModifiedNewton',
+ 4: 'RaphsonNewton',
+ 5: 'PeriodicNewton',
+ 6: 'BFGS',
+ 7: 'Broyden',
+ 8: 'NewtonLineSearch',
+ }
# test = {1:'NormDispIncr', 2: 'RelativeEnergyIncr', 3:'EnergyIncr'}
# alg = {1:'KrylovNewton', 2:'ModifiedNewton'}
- IOflag = 2
- PrintFlag = 0
- testType = 'RelativeNormDispIncr' # Dont use with Penalty constraints
+ IOflag = 2 # noqa: N806
+ PrintFlag = 0 # noqa: N806, F841
+            testType = 'RelativeNormDispIncr'  # Don't use with Penalty constraints  # noqa: N806
- tolInit = 1.0e-7 # the initial Tolerance, so it can be referred back to
- iterInit = 50 # the initial Max Number of Iterations
- algorithmType = 'KrylovNewton' # the algorithm type
+ tolInit = 1.0e-7 # the initial Tolerance, so it can be referred back to # noqa: N806
+ iterInit = 50 # the initial Max Number of Iterations # noqa: N806
+ algorithmType = 'KrylovNewton' # the algorithm type # noqa: N806
# algorithmType Newton; # the algorithm type
# algorithmType Newton; # the algorithm type
@@ -1850,18 +3210,25 @@ def singlePush(dref, mu, ctrlNode, dispDir, nSteps):
# op.numberer('RCM') # renumber dof to minimize band-width (optimization), if you want to
# op.system('BandGeneral') # how to store and solve the system of equations in the analysis
- op.test(testType, tolInit,
- iterInit) # determine if convergence has been achieved at the end of an iteration step
- op.algorithm(algorithmType) # use Newton solution algorithm: updates tangent stiffness at every iteration
+ op.test(
+ testType, tolInit, iterInit
+ ) # determine if convergence has been achieved at the end of an iteration step
+ op.algorithm(
+ algorithmType
+ ) # use Newton solution algorithm: updates tangent stiffness at every iteration
disp = dref * mu
- dU = disp / (1.0 * nSteps)
- print('dref ', dref, 'mu ', mu, 'dU ', dU, 'disp ', disp, 'nSteps ', nSteps)
- op.integrator('DisplacementControl', ctrlNode, dispDir, dU) # determine the next time step for an analysis
+ dU = disp / (1.0 * nSteps) # noqa: N806
+ print( # noqa: T201
+ 'dref ', dref, 'mu ', mu, 'dU ', dU, 'disp ', disp, 'nSteps ', nSteps
+ )
+ op.integrator(
+ 'DisplacementControl', ctrlNode, dispDir, dU
+ ) # determine the next time step for an analysis
             op.analysis('Static')  # define type of analysis static or transient
# Print values
if IOflag >= 1:
- print('singlePush: Push ', ctrlNode, ' to ', mu)
+ print('singlePush: Push ', ctrlNode, ' to ', mu) # noqa: T201
# the initial values to start the while loop
ok = 0
@@ -1869,35 +3236,46 @@ def singlePush(dref, mu, ctrlNode, dispDir, nSteps):
loadf = 1.0
# This feature of disabling the possibility of having a negative loading has been included.
# This has been adapted from a similar script by Prof. Garbaggio
- maxDriftPiso = 0.0
+ maxDriftPiso = 0.0 # noqa: N806
htot = op.nodeCoord(ctrlNode, 2)
- VBasal_v = []
- DriftTecho_v = []
+ VBasal_v = [] # noqa: N806
+ DriftTecho_v = [] # noqa: N806
# factor_v = np.array([1,0.75,0.5,0.25,0.1,2,3,5,10])
# fact_v = np.array([50,100,500])
# factor = 100
# fact = 1.
while step <= nSteps and ok == 0 and loadf > 0:
- #self.ui.progressBar.setValue(100 * step / nSteps)
+ # self.ui.progressBar.setValue(100 * step / nSteps)
ok = op.analyze(1)
loadf = op.getTime()
temp = op.nodeDisp(ctrlNode, dispDir)
- if IOflag >= 2:
- print('Pushed ', ctrlNode, ' in ', dispDir, ' to ', temp, ' with ', loadf, 'step ', step)
+ if IOflag >= 2: # noqa: PLR2004
+ print( # noqa: T201
+ 'Pushed ',
+ ctrlNode,
+ ' in ',
+ dispDir,
+ ' to ',
+ temp,
+ ' with ',
+ loadf,
+ 'step ',
+ step,
+ )
# for factor in factor_v:
# op.integrator('DisplacementControl',ctrlNode,dispDir,factor*dU) # determine the next time step for an analysis
# for fact in fact_v:
- for j in alg:
- for i in test:
+ for j in alg: # noqa: PLC0206
+ for i in test: # noqa: PLC0206
for fact in [1, 20, 50]:
- if ok != 0 and j >= 4 and i != 7:
+ if ok != 0 and j >= 4 and i != 7: # noqa: PLR2004
# print('Trying ',str(alg[j]))
- op.test(test[i], tolInit * .01, iterInit * fact)
+ op.test(test[i], tolInit * 0.01, iterInit * fact)
op.algorithm(alg[j])
ok = op.analyze(1)
op.algorithm(algorithmType)
op.test(testType, tolInit, iterInit)
- elif ok != 0 and j < 4 and i != 7:
+ elif ok != 0 and j < 4 and i != 7: # noqa: PLR2004
# print('Trying ',str(alg[j]))
op.test(test[i], tolInit, iterInit * fact)
op.algorithm(alg[j], '-initial')
@@ -1906,7 +3284,7 @@ def singlePush(dref, mu, ctrlNode, dispDir, nSteps):
op.test(testType, tolInit, iterInit)
if ok == 0:
break
- if ok != 0 and i == 7:
+ if ok != 0 and i == 7: # noqa: PLR2004
op.test(test[i], iterInit)
op.algorithm(alg[j])
ok = op.analyze(1)
@@ -1920,10 +3298,12 @@ def singlePush(dref, mu, ctrlNode, dispDir, nSteps):
# break
# op.integrator('DisplacementControl',ctrlNode,dispDir,dU) # determine the next time step for an analysis
# Calculation of maximum Drift between floors
- for (nod_ini, nod_end) in zip(ListNodesDrift[:-1, 0], ListNodesDrift[1:, 0]):
+ for nod_ini, nod_end in zip(
+ ListNodesDrift[:-1, 0], ListNodesDrift[1:, 0]
+ ):
# print('nod_ini ', nod_ini, 'nod_end', nod_end)
- nod_ini = int(nod_ini)
- nod_end = int(nod_end)
+ nod_ini = int(nod_ini) # noqa: PLW2901
+ nod_end = int(nod_end) # noqa: PLW2901
pos_i = op.nodeCoord(nod_ini, 2)
pos_s = op.nodeCoord(nod_end, 2)
hpiso = pos_s - pos_i
@@ -1931,80 +3311,88 @@ def singlePush(dref, mu, ctrlNode, dispDir, nSteps):
desp_s = op.nodeDisp(nod_end, 1)
desp_piso = abs(desp_s - desp_i)
drift_piso = desp_piso / hpiso
- if drift_piso >= maxDriftPiso:
- maxDriftPiso = drift_piso
+ maxDriftPiso = max(maxDriftPiso, drift_piso) # noqa: N806
- VBasal = 0.
+ VBasal = 0.0 # noqa: N806
op.reactions()
for node in ListNodesBasal:
# print('ind Basal ', node[0])
- VBasal = VBasal + op.nodeReaction(node[0], 1)
- VBasal_v = np.append(VBasal_v, VBasal)
- DriftTecho = op.nodeDisp(ctrlNode, dispDir) / htot
- DriftTecho_v = np.append(DriftTecho_v, DriftTecho)
+ VBasal = VBasal + op.nodeReaction(node[0], 1) # noqa: N806, PLR6104
+ VBasal_v = np.append(VBasal_v, VBasal) # noqa: N806
+ DriftTecho = op.nodeDisp(ctrlNode, dispDir) / htot # noqa: N806
+ DriftTecho_v = np.append(DriftTecho_v, DriftTecho) # noqa: N806
loadf = op.getTime()
step += 1
- maxDriftTecho = dU * step / htot
- maxDriftTecho2 = op.nodeDisp(ctrlNode, dispDir) / htot
+ maxDriftTecho = dU * step / htot # noqa: N806
+ maxDriftTecho2 = op.nodeDisp(ctrlNode, dispDir) / htot # noqa: N806
if ok != 0:
- print('DispControl Analysis FAILED')
+ print('DispControl Analysis FAILED') # noqa: T201
else:
- print('DispControl Analysis SUCCESSFUL')
+ print('DispControl Analysis SUCCESSFUL') # noqa: T201
if loadf <= 0:
- print('Stopped because of Load factor below zero: ', loadf)
+ print('Stopped because of Load factor below zero: ', loadf) # noqa: T201
# if PrintFlag == 0:
# os.remove("singlePush.txt")
# print singlePush.txt
- return maxDriftPiso, maxDriftTecho, maxDriftTecho2, VBasal_v, DriftTecho_v
-
- ListNodesDrift = ListNodes[np.where(ListNodes[:, 1] == 0.)]
- ListNodesBasal = ListNodes[np.where(ListNodes[:, 2] == 0.)]
- if T1m <= 0.5:
- k = 1.
- elif T1m <= 2.5:
+ return (
+ maxDriftPiso,
+ maxDriftTecho,
+ maxDriftTecho2,
+ VBasal_v,
+ DriftTecho_v,
+ )
+
+ ListNodesDrift = ListNodes[np.where(ListNodes[:, 1] == 0.0)] # noqa: N806
+ ListNodesBasal = ListNodes[np.where(ListNodes[:, 2] == 0.0)] # noqa: N806
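+        # Period-dependent exponent k of the lateral load distribution
+        # (k = 1 for T1 <= 0.5 s, k = 2 for T1 >= 2.5 s, linear in between)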
+ if T1m <= 0.5: # noqa: PLR2004
+ k = 1.0
+ elif T1m <= 2.5: # noqa: PLR2004
k = 0.75 + 0.5 * T1m
else:
- k = 2.
+ k = 2.0
- sumH = np.sum(np.power(Loc_heigth, k))
+ sumH = np.sum(np.power(Loc_heigth, k)) # noqa: N806
floors_num = len(Loc_heigth)
-
+
# Match default example
- triangForceDist = True
+ triangForceDist = True # noqa: N806
# Defining the pushover lateral distribution type
- if triangForceDist == True:
- Fp = np.power(Loc_heigth, k) / sumH
+ if triangForceDist == True: # noqa: E712
+ Fp = np.power(Loc_heigth, k) / sumH # noqa: N806
else:
- Fp = 1. / floors_num * np.ones(floors_num + 1)
- print('Fp =', Fp)
+ Fp = 1.0 / floors_num * np.ones(floors_num + 1) # noqa: N806
+ print('Fp =', Fp) # noqa: T201
op.loadConst('-time', 0.0)
op.timeSeries('Linear', 2)
op.pattern('Plain', 2, 1)
- for (node, fp, ind) in zip(ListNodesDrift, Fp, range(floors_num)):
+ for node, fp, ind in zip(ListNodesDrift, Fp, range(floors_num)): # noqa: B007
op.load(int(node[0]), fp, 0.0, 0.0)
- Htotal = Loc_heigth[-1]
-# Der_obj = float(self.ui.Der_obj.text())
- Der_obj = 0.04 # Match default example
- Des_obj = Der_obj * Htotal # Desplazamiento objetivo
-# nSteps = int(self.ui.nSteps.text())
- nSteps = 110 # Match default example
+ Htotal = Loc_heigth[-1] # noqa: N806
+ # Der_obj = float(self.ui.Der_obj.text())
+ Der_obj = 0.04 # Match default example # noqa: N806
+        Des_obj = Der_obj * Htotal  # Target displacement  # noqa: N806
+ # nSteps = int(self.ui.nSteps.text())
+ nSteps = 110 # Match default example # noqa: N806
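+        # dref * mu equals the target roof displacement Des_obj, applied in nSteps increments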
dref = Des_obj / nSteps
mu = nSteps
- IDctrlNode = int(ListNodesDrift[-1, 0]) # Node where displacement is read
- print('IDctrlNode =', IDctrlNode)
- IDctrlDOF = 1 # DOF x=1, y=2
- Tol = 1.0e-4 # Tolerance
-
- runFastPushover = True
- if runFastPushover == True:
- maxDriftPiso, maxDriftTecho, maxDriftTecho2, VBasal_v, DriftTecho_v = singlePush1(dref, mu, IDctrlNode,
- IDctrlDOF, nSteps)
+ # Node where displacement is read
+ IDctrlNode = int(ListNodesDrift[-1, 0]) # noqa: N806
+ print('IDctrlNode =', IDctrlNode) # noqa: T201
+ IDctrlDOF = 1 # DOF x=1, y=2 # noqa: N806
+ Tol = 1.0e-4 # Tolerance # noqa: N806, F841
+
+ runFastPushover = True # noqa: N806
+ if runFastPushover == True: # noqa: E712
+ maxDriftPiso, maxDriftTecho, maxDriftTecho2, VBasal_v, DriftTecho_v = ( # noqa: N806
+ singlePush1(dref, mu, IDctrlNode, IDctrlDOF, nSteps)
+ )
else:
- maxDriftPiso, maxDriftTecho, maxDriftTecho2, VBasal_v, DriftTecho_v = singlePush(dref, mu, IDctrlNode,
- IDctrlDOF, nSteps)
+ maxDriftPiso, maxDriftTecho, maxDriftTecho2, VBasal_v, DriftTecho_v = ( # noqa: F841, N806
+ singlePush(dref, mu, IDctrlNode, IDctrlDOF, nSteps)
+ )
op.wipe()
@@ -2017,126 +3405,146 @@ def singlePush(dref, mu, ctrlNode, dispDir, nSteps):
cols_def_1 = np.loadtxt('Pushover/cols_def_1.out')
cols_force_6 = np.loadtxt('Pushover/cols_force_6.out')
cols_def_6 = np.loadtxt('Pushover/cols_def_6.out')
- print('cols_def_1', cols_def_1)
+ print('cols_def_1', cols_def_1) # noqa: T201
# fy = float(self.ui.fy.text()) * MPa
- fy = float(rootSIM["FySteel"]) * MPa
- print("Fy",fy)
-
- Es = 200.0 * GPa
+ fy = float(rootSIM['FySteel']) * MPa
+ print('Fy', fy) # noqa: T201
+
+ Es = 200.0 * GPa # noqa: N806
ey = fy / Es
num_beams = len(EleBeam)
num_cols = len(EleCol)
- CD_Beams = np.zeros([num_beams, 2])
+ CD_Beams = np.zeros([num_beams, 2]) # noqa: N806
# Calculation of curvature ductility of beams and columns
- for (ind, DB) in zip(range(1, num_beams + 1), DataBeamDesing):
- ets_beam_1 = beams_def_1[:, 2 * ind - 1] + beams_def_1[:, 2 * ind] * (DB.dt1-DB.h/2)
- ebs_beam_1 = beams_def_1[:, 2 * ind - 1] + beams_def_1[:, 2 * ind] * (DB.h/2-DB.db1)
- ets_beam_6 = beams_def_6[:, 2 * ind - 1] + beams_def_6[:, 2 * ind] * (DB.dt2-DB.h/2)
- ebs_beam_6 = beams_def_6[:, 2 * ind - 1] + beams_def_6[:, 2 * ind] * (DB.h/2-DB.db1)
+ for ind, DB in zip(range(1, num_beams + 1), DataBeamDesing): # noqa: N806
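+            # Steel strain at the top/bottom reinforcement layers: section axial
+            # strain plus curvature times the layer's distance from mid-height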
+ ets_beam_1 = beams_def_1[:, 2 * ind - 1] + beams_def_1[:, 2 * ind] * (
+ DB.dt1 - DB.h / 2
+ )
+ ebs_beam_1 = beams_def_1[:, 2 * ind - 1] + beams_def_1[:, 2 * ind] * (
+ DB.h / 2 - DB.db1
+ )
+ ets_beam_6 = beams_def_6[:, 2 * ind - 1] + beams_def_6[:, 2 * ind] * (
+ DB.dt2 - DB.h / 2
+ )
+ ebs_beam_6 = beams_def_6[:, 2 * ind - 1] + beams_def_6[:, 2 * ind] * (
+ DB.h / 2 - DB.db1
+ )
es_beam_1 = np.maximum(np.absolute(ets_beam_1), np.absolute(ebs_beam_1))
es_beam_6 = np.maximum(np.absolute(ets_beam_6), np.absolute(ebs_beam_6))
- print('es_beam_1', es_beam_1, 'es_beam_6', es_beam_6)
+ print('es_beam_1', es_beam_1, 'es_beam_6', es_beam_6) # noqa: T201
if np.max(es_beam_1) <= ey:
- CD_1 = 0
+ CD_1 = 0 # noqa: N806
else:
fi_1 = np.absolute(beams_def_1[:, 2 * ind])
- M_beam_1 = np.absolute(beams_force_1[:, 2 * ind])
+ M_beam_1 = np.absolute(beams_force_1[:, 2 * ind]) # noqa: N806
f = interpolate.interp1d(es_beam_1, M_beam_1)
- My_1 = f(ey)
+ My_1 = f(ey) # noqa: N806
f = interpolate.interp1d(M_beam_1, fi_1)
fiy_1 = f(My_1)
- CD_1 = fi_1[-1] / fiy_1
+ CD_1 = fi_1[-1] / fiy_1 # noqa: N806
if np.max(es_beam_6) <= ey:
- CD_6 = 0
+ CD_6 = 0 # noqa: N806
else:
fi_6 = np.absolute(beams_def_6[:, 2 * ind])
- M_beam_6 = np.absolute(beams_force_6[:, 2 * ind])
+ M_beam_6 = np.absolute(beams_force_6[:, 2 * ind]) # noqa: N806
f = interpolate.interp1d(es_beam_6, M_beam_6)
- My_6 = f(ey)
+ My_6 = f(ey) # noqa: N806
f = interpolate.interp1d(M_beam_6, fi_6)
fiy_6 = f(My_6)
- CD_6 = fi_6[-1] / fiy_6
+ CD_6 = fi_6[-1] / fiy_6 # noqa: N806
CD_Beams[ind - 1, :] = [CD_1, CD_6]
- print('CD_Beams =', CD_Beams)
-
- CD_Cols = np.zeros([num_cols, 2])
- for (ind, DC) in zip(range(1, num_cols + 1), DataColDesing):
- ets_col_1 = cols_def_1[:, 2 * ind - 1] + cols_def_1[:, 2 * ind] * (DC.d-DC.h/2)
- ebs_col_1 = cols_def_1[:, 2 * ind - 1] + cols_def_1[:, 2 * ind] * (DC.h/2-DC.d)
- ets_col_6 = cols_def_6[:, 2 * ind - 1] + cols_def_6[:, 2 * ind] * (DC.d-DC.h/2)
- ebs_col_6 = cols_def_6[:, 2 * ind - 1] + cols_def_6[:, 2 * ind] * (DC.h/2-DC.d)
+ print('CD_Beams =', CD_Beams) # noqa: T201
+
+ CD_Cols = np.zeros([num_cols, 2]) # noqa: N806
+ for ind, DC in zip(range(1, num_cols + 1), DataColDesing): # noqa: N806
+ ets_col_1 = cols_def_1[:, 2 * ind - 1] + cols_def_1[:, 2 * ind] * (
+ DC.d - DC.h / 2
+ )
+ ebs_col_1 = cols_def_1[:, 2 * ind - 1] + cols_def_1[:, 2 * ind] * (
+ DC.h / 2 - DC.d
+ )
+ ets_col_6 = cols_def_6[:, 2 * ind - 1] + cols_def_6[:, 2 * ind] * (
+ DC.d - DC.h / 2
+ )
+ ebs_col_6 = cols_def_6[:, 2 * ind - 1] + cols_def_6[:, 2 * ind] * (
+ DC.h / 2 - DC.d
+ )
es_col_1 = np.maximum(np.absolute(ets_col_1), np.absolute(ebs_col_1))
es_col_6 = np.maximum(np.absolute(ets_col_6), np.absolute(ebs_col_6))
- print('es_col_1', es_col_1, 'es_col_6', es_col_6)
+ print('es_col_1', es_col_1, 'es_col_6', es_col_6) # noqa: T201
if np.max(es_col_1) <= ey:
- CD_1 = 0
+ CD_1 = 0 # noqa: N806
else:
fi_1 = np.absolute(cols_def_1[:, 2 * ind])
- M_col_1 = np.absolute(cols_force_1[:, 2 * ind])
+ M_col_1 = np.absolute(cols_force_1[:, 2 * ind]) # noqa: N806
f = interpolate.interp1d(es_col_1, M_col_1)
- Mfy_1 = f(ey)
+ Mfy_1 = f(ey) # noqa: N806
f = interpolate.interp1d(M_col_1, fi_1)
fify_1 = f(Mfy_1)
- My_1 = np.max(M_col_1)
+ My_1 = np.max(M_col_1) # noqa: N806
fiy_1 = My_1 / Mfy_1 * fify_1
- CD_1 = fi_1[-1] / fiy_1
+ CD_1 = fi_1[-1] / fiy_1 # noqa: N806
if np.max(es_col_6) <= ey:
- CD_6 = 0
+ CD_6 = 0 # noqa: N806
else:
fi_6 = np.absolute(cols_def_6[:, 2 * ind])
- M_col_6 = np.absolute(cols_force_6[:, 2 * ind])
+ M_col_6 = np.absolute(cols_force_6[:, 2 * ind]) # noqa: N806
f = interpolate.interp1d(es_col_6, M_col_6)
- Mfy_6 = f(ey)
+ Mfy_6 = f(ey) # noqa: N806
f = interpolate.interp1d(M_col_6, fi_6)
fify_6 = f(Mfy_6)
- My_6 = np.max(M_col_6)
+ My_6 = np.max(M_col_6) # noqa: N806
fiy_6 = My_6 / Mfy_6 * fify_6
- CD_6 = fi_6[-1] / fiy_6
+ CD_6 = fi_6[-1] / fiy_6 # noqa: N806
CD_Cols[ind - 1, :] = [CD_1, CD_6]
- print('CD_Cols =', CD_Cols)
- CD_Ele = np.concatenate((CD_Cols, CD_Beams), axis=0)
+ print('CD_Cols =', CD_Cols) # noqa: T201
+ CD_Ele = np.concatenate((CD_Cols, CD_Beams), axis=0) # noqa: N806
- Desp_x = np.loadtxt('Pushover/HoriNodes.out')
- Desp_y = np.loadtxt('Pushover/VertNodes.out')
- Nodes_desp_x = ListNodes[:, 1] + 3 * Desp_x[-1, 1:]
- Nodes_desp_y = ListNodes[:, 2] + 3 * Desp_y[-1, 1:]
+ Desp_x = np.loadtxt('Pushover/HoriNodes.out') # noqa: N806
+ Desp_y = np.loadtxt('Pushover/VertNodes.out') # noqa: N806
+ Nodes_desp_x = ListNodes[:, 1] + 3 * Desp_x[-1, 1:] # noqa: N806
+ Nodes_desp_y = ListNodes[:, 2] + 3 * Desp_y[-1, 1:] # noqa: N806
fpos = 0.1
fsize = 1
- DataDC = []
- for Ele in Elements:
+ DataDC = [] # noqa: N806
+ for Ele in Elements: # noqa: N806
xi = Nodes_desp_x[Ele.Nod_ini]
yi = Nodes_desp_y[Ele.Nod_ini]
xe = Nodes_desp_x[Ele.Nod_end]
ye = Nodes_desp_y[Ele.Nod_end]
- x = np.array([xi, xe])
- y = np.array([yi, ye])
- Delta_x = xe-xi
- Delta_y = ye-yi
- xi_CD = xi + fpos * Delta_x
- yi_CD = yi + fpos * Delta_y
- xe_CD = xe - fpos * Delta_x
- ye_CD = ye - fpos * Delta_y
- CD_i = CD_Ele[Ele.EleTag-1, 0]
- CD_e = CD_Ele[Ele.EleTag-1, 1]
- DataDC.append(DuctilityCurve(xi_CD, xe_CD, yi_CD, ye_CD, fsize*CD_i, fsize*CD_e))
- DC_x, DC_y, DC_size = [], [], []
- for DC in DataDC:
+ x = np.array([xi, xe]) # noqa: F841
+ y = np.array([yi, ye]) # noqa: F841
+ Delta_x = xe - xi # noqa: N806
+ Delta_y = ye - yi # noqa: N806
+ xi_CD = xi + fpos * Delta_x # noqa: N806
+ yi_CD = yi + fpos * Delta_y # noqa: N806
+ xe_CD = xe - fpos * Delta_x # noqa: N806
+ ye_CD = ye - fpos * Delta_y # noqa: N806
+ CD_i = CD_Ele[Ele.EleTag - 1, 0] # noqa: N806
+ CD_e = CD_Ele[Ele.EleTag - 1, 1] # noqa: N806
+ DataDC.append(
+ DuctilityCurve(
+ xi_CD, xe_CD, yi_CD, ye_CD, fsize * CD_i, fsize * CD_e
+ )
+ )
+ DC_x, DC_y, DC_size = [], [], [] # noqa: N806
+ for DC in DataDC: # noqa: N806
DC_x.append([DC.xi, DC.xe])
DC_y.append([DC.yi, DC.ye])
DC_size.append([DC.CD_i, DC.CD_e])
- DC_x = np.array(DC_x)
- DC_x = DC_x.flatten()
- DC_y = np.array(DC_y)
- DC_y = DC_y.flatten()
- DC_size = np.array(DC_size)
- DC_size = DC_size.flatten()
- print('DC_x= ', DC_x)
- print('DC_y= ', DC_y)
- print('DC_size= ', DC_size)
+ DC_x = np.array(DC_x) # noqa: N806
+ DC_x = DC_x.flatten() # noqa: N806
+ DC_y = np.array(DC_y) # noqa: N806
+ DC_y = DC_y.flatten() # noqa: N806
+ DC_size = np.array(DC_size) # noqa: N806
+ DC_size = DC_size.flatten() # noqa: N806
+ print('DC_x= ', DC_x) # noqa: T201
+ print('DC_y= ', DC_y) # noqa: T201
+ print('DC_size= ', DC_size) # noqa: T201
if __name__ == '__main__':
@@ -2146,5 +3554,9 @@ def singlePush(dref, mu, ctrlNode, dispDir, nSteps):
parser.add_argument('--filenameSAM')
parser.add_argument('--getRV', nargs='?', const=True, default=False)
args = parser.parse_args()
-
- sys.exit(runBuildingDesign(args.filenameAIM, args.filenameEVENT, args.filenameSAM, args.getRV))
+
+ sys.exit(
+ runBuildingDesign(
+ args.filenameAIM, args.filenameEVENT, args.filenameSAM, args.getRV
+ )
+ )
diff --git a/modules/createSAM/customPyInput/CustomPyInput.py b/modules/createSAM/customPyInput/CustomPyInput.py
index 76c09540b..45e0fa2e7 100644
--- a/modules/createSAM/customPyInput/CustomPyInput.py
+++ b/modules/createSAM/customPyInput/CustomPyInput.py
@@ -1,5 +1,4 @@
-# -*- coding: utf-8 -*-
-#
+# # noqa: INP001, D100
# Copyright (c) 2022 Leland Stanford Junior University
# Copyright (c) 2022 The Regents of the University of California
#
@@ -38,30 +37,41 @@
# Adam Zsarnóczay
#
-import sys, argparse,json
+import argparse
+import json
+import sys
-def create_SAM(AIM_file, EVENT_file, SAM_file,
- model_script, model_path, ndm, dof_map, column_line, getRV):
+def create_SAM( # noqa: N802, D103
+ AIM_file, # noqa: N803
+ EVENT_file, # noqa: ARG001, N803
+ SAM_file, # noqa: N803
+ model_script,
+ model_path,
+ ndm,
+ dof_map,
+ column_line,
+ getRV, # noqa: ARG001, N803
+):
# KZ: modifying BIM to AIM
- with open(AIM_file, 'r', encoding="utf-8") as f:
- root_AIM = json.load(f)
- root_GI = root_AIM['GeneralInformation']
+ with open(AIM_file, encoding='utf-8') as f: # noqa: PTH123
+ root_AIM = json.load(f) # noqa: N806
+ root_GI = root_AIM['GeneralInformation'] # noqa: N806
try:
stories = int(root_GI['NumberOfStories'])
- except:
- raise ValueError("number of stories information missing")
+ except: # noqa: E722
+ raise ValueError('number of stories information missing') # noqa: B904, EM101, TRY003
if column_line is None:
# KZ: looking into SAM
- root_SAM = root_AIM.get('Modeling', {})
+ root_SAM = root_AIM.get('Modeling', {}) # noqa: N806
nodes = root_SAM.get('centroidNodes', [])
if len(nodes) == 0:
- nodes = list(range(stories+1))
+ nodes = list(range(stories + 1))
else:
nodes = [int(node) for node in column_line.split(',')]
- nodes = nodes[:stories+1]
+ nodes = nodes[: stories + 1]
node_map = []
for floor, node in enumerate(nodes):
@@ -72,7 +82,7 @@ def create_SAM(AIM_file, EVENT_file, SAM_file,
node_entry['floor'] = f'{floor}'
node_map.append(node_entry)
- root_SAM = {
+ root_SAM = { # noqa: N806
'mainScript': model_script,
'modelPath': model_path,
'dofMap': dof_map,
@@ -82,36 +92,45 @@ def create_SAM(AIM_file, EVENT_file, SAM_file,
'numStory': stories,
         # KZ: correcting the ndm format --> this was causing standardEarthquakeEDP failure...
'ndm': int(ndm),
- # TODO: improve this if we want random vars in the structure
- 'randomVar': []
+ # TODO: improve this if we want random vars in the structure # noqa: TD002
+ 'randomVar': [],
}
# pass all other attributes in the AIM GI to SAM
- for cur_key in root_GI.keys():
+ for cur_key in root_GI.keys(): # noqa: SIM118
cur_item = root_GI.get(cur_key, None)
- if cur_key in root_SAM.keys():
+ if cur_key in root_SAM.keys(): # noqa: SIM118
pass
else:
root_SAM[cur_key] = cur_item
- with open(SAM_file, 'w', encoding="utf-8") as f:
+ with open(SAM_file, 'w', encoding='utf-8') as f: # noqa: PTH123
json.dump(root_SAM, f, indent=2)
-if __name__ == '__main__':
+if __name__ == '__main__':
parser = argparse.ArgumentParser()
parser.add_argument('--filenameAIM')
parser.add_argument('--filenameEVENT')
parser.add_argument('--filenameSAM')
parser.add_argument('--mainScript')
parser.add_argument('--modelPath', default='')
- parser.add_argument('--ndm', default="3")
- parser.add_argument('--dofMap', default="1, 2, 3")
+ parser.add_argument('--ndm', default='3')
+ parser.add_argument('--dofMap', default='1, 2, 3')
parser.add_argument('--columnLine', default=None)
parser.add_argument('--getRV', nargs='?', const=True, default=False)
args = parser.parse_args()
- sys.exit(create_SAM(
- args.filenameAIM, args.filenameEVENT, args.filenameSAM,
- args.mainScript, args.modelPath, args.ndm,
- args.dofMap, args.columnLine, args.getRV))
+ sys.exit(
+ create_SAM(
+ args.filenameAIM,
+ args.filenameEVENT,
+ args.filenameSAM,
+ args.mainScript,
+ args.modelPath,
+ args.ndm,
+ args.dofMap,
+ args.columnLine,
+ args.getRV,
+ )
+ )
diff --git a/modules/createSAM/openSeesPyInput/OpenSeesPyInput.py b/modules/createSAM/openSeesPyInput/OpenSeesPyInput.py
index fdbe1e74b..44eb8d89e 100644
--- a/modules/createSAM/openSeesPyInput/OpenSeesPyInput.py
+++ b/modules/createSAM/openSeesPyInput/OpenSeesPyInput.py
@@ -1,5 +1,4 @@
-# -*- coding: utf-8 -*-
-#
+# # noqa: INP001, D100
# Copyright (c) 2018 Leland Stanford Junior University
# Copyright (c) 2018 The Regents of the University of California
#
@@ -38,24 +37,35 @@
 # Adam Zsarnóczay
#
-import sys, argparse,json
+import argparse
+import json
+import sys
-def create_SAM(BIM_file, EVENT_file, SAM_file,
- model_script, model_path, ndm, dof_map, column_line, getRV):
- with open(BIM_file, 'r', encoding="utf-8") as f:
- root_BIM = json.load(f)['GeneralInformation']
+def create_SAM( # noqa: N802, D103
+ BIM_file, # noqa: N803
+ EVENT_file, # noqa: ARG001, N803
+ SAM_file, # noqa: N803
+ model_script,
+ model_path,
+ ndm,
+ dof_map,
+ column_line,
+ getRV, # noqa: ARG001, N803
+):
+ with open(BIM_file, encoding='utf-8') as f: # noqa: PTH123
+ root_BIM = json.load(f)['GeneralInformation'] # noqa: N806
try:
stories = root_BIM['NumberOfStories']
- except:
- raise ValueError("OpenSeesPyInput - structural information missing")
+ except: # noqa: E722
+ raise ValueError('OpenSeesPyInput - structural information missing') # noqa: B904, EM101, TRY003
if column_line is None:
- nodes = list(range(stories+1))
+ nodes = list(range(stories + 1))
else:
nodes = [int(node) for node in column_line.split(',')]
- nodes = nodes[:stories+1]
+ nodes = nodes[: stories + 1]
node_map = []
for floor, node in enumerate(nodes):
@@ -65,7 +75,7 @@ def create_SAM(BIM_file, EVENT_file, SAM_file,
node_entry['floor'] = f'{floor}'
node_map.append(node_entry)
- root_SAM = {
+ root_SAM = { # noqa: N806
'mainScript': model_script,
'modelPath': model_path,
'dofMap': dof_map,
@@ -74,28 +84,37 @@ def create_SAM(BIM_file, EVENT_file, SAM_file,
'NodeMapping': node_map,
'numStory': stories,
'ndm': ndm,
- # TODO: improve this if we want random vars in the structure
- 'randomVar': []
+ # TODO: improve this if we want random vars in the structure # noqa: TD002
+ 'randomVar': [],
}
- with open(SAM_file, 'w', encoding="utf-8") as f:
+ with open(SAM_file, 'w', encoding='utf-8') as f: # noqa: PTH123
json.dump(root_SAM, f, indent=2)
-if __name__ == '__main__':
+if __name__ == '__main__':
parser = argparse.ArgumentParser()
parser.add_argument('--filenameAIM')
parser.add_argument('--filenameEVENT')
parser.add_argument('--filenameSAM')
parser.add_argument('--mainScript')
parser.add_argument('--modelPath')
- parser.add_argument('--ndm', default="3")
- parser.add_argument('--dofMap', default="1, 2, 3")
+ parser.add_argument('--ndm', default='3')
+ parser.add_argument('--dofMap', default='1, 2, 3')
parser.add_argument('--columnLine', default=None)
parser.add_argument('--getRV', nargs='?', const=True, default=False)
args = parser.parse_args()
- sys.exit(create_SAM(
- args.filenameAIM, args.filenameEVENT, args.filenameSAM,
- args.mainScript, args.modelPath, args.ndm,
- args.dofMap, args.columnLine, args.getRV))
+ sys.exit(
+ create_SAM(
+ args.filenameAIM,
+ args.filenameEVENT,
+ args.filenameSAM,
+ args.mainScript,
+ args.modelPath,
+ args.ndm,
+ args.dofMap,
+ args.columnLine,
+ args.getRV,
+ )
+ )
diff --git a/modules/createSAM/surrogateGP/SurrogateGP.py b/modules/createSAM/surrogateGP/SurrogateGP.py
index b2f41e698..4fb69b788 100644
--- a/modules/createSAM/surrogateGP/SurrogateGP.py
+++ b/modules/createSAM/surrogateGP/SurrogateGP.py
@@ -1,5 +1,4 @@
-# -*- coding: utf-8 -*-
-#
+# # noqa: INP001, D100
# Copyright (c) 2022 Leland Stanford Junior University
# Copyright (c) 2022 The Regents of the University of California
#
@@ -43,60 +42,69 @@
# Input files: AIM.json, surrogate.json (user provided)
# Output files: SAM.json
-import sys, argparse,json, os
+import argparse
+import json
+import os
+import sys
-def create_SAM(AIM_file, SAM_file):
- #
+def create_SAM(AIM_file, SAM_file): # noqa: N802, N803, D103
+ #
# Find SAM.json info from surrogate model file
#
# load AIM
- with open(AIM_file, 'r') as f:
- root_AIM = json.load(f)
+ with open(AIM_file) as f: # noqa: PLW1514, PTH123
+ root_AIM = json.load(f) # noqa: N806
- print("General Information tab is ignored")
- root_SAM = root_AIM['Applications']['Modeling']
+ print('General Information tab is ignored') # noqa: T201
+ root_SAM = root_AIM['Applications']['Modeling'] # noqa: N806
# find and load surrogate json
# surrogate_path = os.path.join(root_SAM['ApplicationData']['MS_Path'],root_SAM['ApplicationData']['mainScript'])
- surrogate_path = os.path.join(os.getcwd(),root_SAM['ApplicationData']['mainScript'])
- print(surrogate_path)
+ surrogate_path = os.path.join( # noqa: PTH118
+ os.getcwd(), # noqa: PTH109
+ root_SAM['ApplicationData']['mainScript'],
+ )
+ print(surrogate_path) # noqa: T201
- with open(surrogate_path, 'r') as f:
- surrogate_model = json.load(f)
+ with open(surrogate_path) as f: # noqa: PLW1514, PTH123
+ surrogate_model = json.load(f)
# find SAM in surrogate json
- root_SAM = surrogate_model['SAM']
+ root_SAM = surrogate_model['SAM'] # noqa: N806
# sanity check
- if root_AIM["Applications"]["EDP"]["Application"] != "SurrogateEDP":
- with open("../workflow.err","w") as f:
- f.write("Please select [None] in the EDP tab.")
- exit(-1)
+ if root_AIM['Applications']['EDP']['Application'] != 'SurrogateEDP':
+ with open('../workflow.err', 'w') as f: # noqa: FURB103, PLW1514, PTH123
+ f.write('Please select [None] in the EDP tab.')
+ exit(-1) # noqa: PLR1722
- if root_AIM["Applications"]["Simulation"]["Application"] != "SurrogateSimulation":
- with open("../workflow.err","w") as f:
- f.write("Please select [None] in the FEM tab.")
- exit(-1)
+ if (
+ root_AIM['Applications']['Simulation']['Application']
+ != 'SurrogateSimulation'
+ ):
+ with open('../workflow.err', 'w') as f: # noqa: FURB103, PLW1514, PTH123
+ f.write('Please select [None] in the FEM tab.')
+ exit(-1) # noqa: PLR1722
# write SAM.json
- with open(SAM_file, 'w') as f:
+ with open(SAM_file, 'w') as f: # noqa: PLW1514, PTH123
json.dump(root_SAM, f, indent=2)
-if __name__ == '__main__':
+if __name__ == '__main__':
parser = argparse.ArgumentParser()
parser.add_argument('--filenameAIM')
- parser.add_argument('--filenameEVENT') # not used
+ parser.add_argument('--filenameEVENT') # not used
parser.add_argument('--filenameSAM')
parser.add_argument('--mainScript')
- parser.add_argument('--getRV', nargs='?', const=True, default=False) # Not used
+ parser.add_argument('--getRV', nargs='?', const=True, default=False) # Not used
args = parser.parse_args()
sys.exit(create_SAM(args.filenameAIM, args.filenameSAM))
diff --git a/modules/performDL/CBCities/CBCitiesMethods.py b/modules/performDL/CBCities/CBCitiesMethods.py
index 406973d43..ea7e3d5c1 100644
--- a/modules/performDL/CBCities/CBCitiesMethods.py
+++ b/modules/performDL/CBCities/CBCitiesMethods.py
@@ -1,5 +1,4 @@
-# -*- coding: utf-8 -*-
-#
+# # noqa: INP001, D100
# Copyright (c) 2019 The Regents of the University of California
# Copyright (c) 2019 Leland Stanford Junior University
#
@@ -39,27 +38,30 @@
# Dr. Stevan Gavrilovic
-from scipy.spatial import cKDTree
import itertools
-import os, sys, json, posixpath
-import pandas as pd
+import posixpath
+from operator import itemgetter
+
import numpy as np
+import pandas as pd
+from scipy.spatial import cKDTree
-from operator import itemgetter
-def ckdnearest(gdfA, gdfB, gdfB_cols=['pgv']):
- A = np.concatenate(
- [np.array(geom.coords) for geom in gdfA.geometry.to_list()])
- B = [np.array(geom.coords) for geom in gdfB.geometry.to_list()]
- B_ix = tuple(itertools.chain.from_iterable(
- [itertools.repeat(i, x) for i, x in enumerate(list(map(len, B)))]))
- B = np.concatenate(B)
+def ckdnearest(gdfA, gdfB, gdfB_cols=['pgv']): # noqa: B006, N803, D103
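+    # Nearest-neighbour join: for every geometry in gdfA, find the closest point
+    # among the gdfB geometries with a KD-tree and append the requested gdfB
+    # columns to gdfA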
+ A = np.concatenate([np.array(geom.coords) for geom in gdfA.geometry.to_list()]) # noqa: N806
+ B = [np.array(geom.coords) for geom in gdfB.geometry.to_list()] # noqa: N806
+ B_ix = tuple( # noqa: N806
+ itertools.chain.from_iterable(
+ list(itertools.starmap(itertools.repeat, enumerate(list(map(len, B)))))
+ )
+ )
+ B = np.concatenate(B) # noqa: N806
ckd_tree = cKDTree(B)
- dist, idx = ckd_tree.query(A, k=1)
+ dist, idx = ckd_tree.query(A, k=1) # noqa: F841
idx = itemgetter(*idx)(B_ix)
- gdf = pd.concat(
- [gdfA, gdfB.loc[idx, gdfB_cols].reset_index(drop=True)], axis=1)
- return gdf
+ gdf = pd.concat([gdfA, gdfB.loc[idx, gdfB_cols].reset_index(drop=True)], axis=1)
+ return gdf # noqa: RET504
+
# def pgv_node2pipe(pipe_info,node_info):
# pgvs = []
@@ -70,70 +72,70 @@ def ckdnearest(gdfA, gdfB, gdfB_cols=['pgv']):
# pgvs.append(pgv)
# return pgvs
-def pgv_node2pipe(pipe_info,node_info):
+
+def pgv_node2pipe(pipe_info, node_info): # noqa: D103
res = []
-
+
node_ids = np.array(node_info['node_id'])
pgvs = np.array(node_info['pgv'])
-
+
n1s = np.array(pipe_info['node1'])
n2s = np.array(pipe_info['node2'])
-
- for n1,n2 in zip(n1s,n2s):
- pgv = (pgvs[node_ids == n1]+pgvs[node_ids == n2])[0]/2
+
+ for n1, n2 in zip(n1s, n2s):
+ pgv = (pgvs[node_ids == n1] + pgvs[node_ids == n2])[0] / 2
res.append(pgv)
return res
-def get_prefix(file_path):
+
+def get_prefix(file_path): # noqa: D103
file_name = file_path.split('/')[-1]
prefix = file_name.split('.')[0]
- return prefix
+ return prefix # noqa: RET504
# Get the PGV value for the pipe
-def add_pgv2pipe(pipe):
-
+def add_pgv2pipe(pipe): # noqa: D103
reg_event = pipe['RegionalEvent']
events = pipe['Events'][0]
-
+
event_folder_path = events['EventFolderPath']
-
+
event_array = events['Events']
event_units = reg_event['units']
pgvs = np.array([])
-
- for eventFile, scaleFactor in event_array :
-
+
+ for eventFile, scaleFactor in event_array: # noqa: N806
# Discard the numbering at the end of the csv file name
- eventFile = eventFile[:len(eventFile)-8]
+ eventFile = eventFile[: len(eventFile) - 8] # noqa: N806, PLW2901
# Get the path to the event file
- path_Event_File = posixpath.join(event_folder_path,eventFile)
-
+ path_Event_File = posixpath.join(event_folder_path, eventFile) # noqa: N806
+
# Read in the event file IM List
- eventIMList = pd.read_csv(path_Event_File, header=0)
-
- PGVCol = eventIMList.loc[:,'PGV']
-
+ eventIMList = pd.read_csv(path_Event_File, header=0) # noqa: N806
+
+ PGVCol = eventIMList.loc[:, 'PGV'] # noqa: N806
+
pgv_unit = event_units['PGV']
-
+
# Scale the PGVs and account for units - fragility functions are in inch per second
- if pgv_unit == 'cmps' :
- PGVCol = PGVCol.apply(lambda x: cm2inch(x) * scaleFactor)
- elif pgv_unit == 'inps' :
+ if pgv_unit == 'cmps':
+ PGVCol = PGVCol.apply(lambda x: cm2inch(x) * scaleFactor) # noqa: B023, N806
+ elif pgv_unit == 'inps':
continue
- else :
- print("Error, only 'cmps' and 'inps' units are supported for PGV")
-
- pgvs = np.append(pgvs,PGVCol.values)
+ else:
+ print("Error, only 'cmps' and 'inps' units are supported for PGV") # noqa: T201
+
+ pgvs = np.append(pgvs, PGVCol.values)
-
pipe['pgv'] = pgvs
return pipe
+
# pgv_info = pd.read_csv(pgv_path)
# gd_pgv = gpd.GeoDataFrame(
# pgv_info, geometry=gpd.points_from_xy(pgv_info.lon, pgv_info.lat))
@@ -142,50 +144,54 @@ def add_pgv2pipe(pipe):
# pipe_info['pgv'] = pgvs
#
# return pipe_info
-
-
-k_dict = {'A':1,
- 'C': 1,
- 'D':0.5,
- 'F':1,
- 'H':1,
- 'K':1,
- 'N':1,
- None:1,
- 'T':1,
- 'R':1,
- 'L':1,
- 'S':0.6,
- 'W':1}
-
-def cm2inch(cm):
- return 39.3701*cm/100
-
-def calculate_fail_repairrate(k,pgv,l):
-
- rr = k*0.00187*pgv/1000
- failure_rate = 1- np.power(np.e,-rr*l)
-
- return failure_rate
-
-def get_pipe_failrate(pipe):
-
- pipe_GI = pipe['GeneralInformation']
-
- m,l,pgv = pipe_GI['material'],pipe_GI['length'],pipe['pgv']
-
- pipeRR = calculate_fail_repairrate(k_dict[m],l,pgv)
-
- return pipeRR
-
-def add_failrate2pipe(pipe):
+
+k_dict = {
+ 'A': 1,
+ 'C': 1,
+ 'D': 0.5,
+ 'F': 1,
+ 'H': 1,
+ 'K': 1,
+ 'N': 1,
+ None: 1,
+ 'T': 1,
+ 'R': 1,
+ 'L': 1,
+ 'S': 0.6,
+ 'W': 1,
+}
+
+
+def cm2inch(cm): # noqa: D103
+ return 39.3701 * cm / 100
+
+
+def calculate_fail_repairrate(k, pgv, l): # noqa: E741, D103
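+    # Empirical PGV-based repair rate per unit length, scaled by the material
+    # factor k; 1 - exp(-rr*l) is the probability of at least one failure over
+    # pipe length l under a Poisson model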
+ rr = k * 0.00187 * pgv / 1000
+ failure_rate = 1 - np.power(np.e, -rr * l)
+
+ return failure_rate # noqa: RET504
+
+
+def get_pipe_failrate(pipe): # noqa: D103
+ pipe_GI = pipe['GeneralInformation'] # noqa: N806
+
+ m, l, pgv = pipe_GI['material'], pipe_GI['length'], pipe['pgv'] # noqa: E741
+
+ pipeRR = calculate_fail_repairrate(k_dict[m], l, pgv) # noqa: N806
+
+ return pipeRR # noqa: RET504
+
+
+def add_failrate2pipe(pipe): # noqa: D103
pipe = add_pgv2pipe(pipe)
-
+
pipe['fail_prob'] = get_pipe_failrate(pipe)
-
+
return pipe
-
+
+
#
#
# pgv_prefix = get_prefix(pgv_path)
@@ -194,7 +200,6 @@ def add_failrate2pipe(pipe):
# print (f'saved to {save_path}')
-
# pipe_info['fail_prob'] = get_pipe_failrate(pipe_info)
#
#
@@ -203,52 +208,59 @@ def add_failrate2pipe(pipe):
# pipe_info.to_file(save_path, driver="GeoJSON")
# print (f'saved to {save_path}')
-def get_bar_ranges(space):
+
+def get_bar_ranges(space): # noqa: D103
ranges = []
- for i in range(1,len(space)):
- ranges.append((space[i-1],space[i]))
+ for i in range(1, len(space)):
+ ranges.append((space[i - 1], space[i])) # noqa: PERF401
return ranges
-def get_failure_groups(fail_probs,min_thre = 1e-3,num_groups = 10):
+
+def get_failure_groups(fail_probs, min_thre=1e-3, num_groups=10): # noqa: D103
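+    # Histogram the failure probabilities above min_thre into num_groups bins
+    # and return the (low, high) edges of each bin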
valid_fails = [fail_prob for fail_prob in fail_probs if fail_prob > min_thre]
- count, space = np.histogram(valid_fails, num_groups)
+ count, space = np.histogram(valid_fails, num_groups) # noqa: F841
ranges = get_bar_ranges(space)
- return ranges
+ return ranges # noqa: RET504
+
-def get_failed_pipes_mask(pipe_info,groups):
+def get_failed_pipes_mask(pipe_info, groups): # noqa: D103
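+    # For each probability bin, randomly mark a subset of the pipes in that bin
+    # as broken, sized by the bin's mean probability times its pipe count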
broken_pipes = np.zeros(len(pipe_info))
for r in groups:
- pipes_mask = list((pipe_info['fail_prob'] > r[0]) & (pipe_info['fail_prob'] < r[1]))
+ pipes_mask = list(
+ (pipe_info['fail_prob'] > r[0]) & (pipe_info['fail_prob'] < r[1])
+ )
valid_indices = np.nonzero(pipes_mask)[0]
num_fails = int(np.mean(r) * sum(pipes_mask))
-
- fail_indices = np.random.choice(valid_indices,num_fails,replace = False)
-
+
+ fail_indices = np.random.choice(valid_indices, num_fails, replace=False)
+
broken_pipes[fail_indices] = 1
-
+
return broken_pipes
-
-def generate_leak_diameter(pipe_diam,min_ratio = 0.05, max_ratio = 0.25):
- r = np.random.uniform(min_ratio,max_ratio)
- return pipe_diam*r
-
-def get_leak_sizes(pipe_info):
+
+
+def generate_leak_diameter(pipe_diam, min_ratio=0.05, max_ratio=0.25): # noqa: D103
+ r = np.random.uniform(min_ratio, max_ratio)
+ return pipe_diam * r
+
+
+def get_leak_sizes(pipe_info): # noqa: D103
leak_size = np.zeros(len(pipe_info))
for index, row in pipe_info.iterrows():
- d,repair = row['diameter'],row['repair']
+ d, repair = row['diameter'], row['repair']
if repair:
leak_d = generate_leak_diameter(d)
leak_size[index] = leak_d
return leak_size
-def fail_pipes_number(pipe_info):
-
+
+def fail_pipes_number(pipe_info): # noqa: D103
fail_probs = np.array(pipe_info['fail_prob'])
groups = get_failure_groups(fail_probs)
-
- failed_pipes_mask = get_failed_pipes_mask(pipe_info,groups)
+
+ failed_pipes_mask = get_failed_pipes_mask(pipe_info, groups)
num_failed_pipes = sum(failed_pipes_mask)
- print (f'number of failed pipes are : {num_failed_pipes}')
+    print(f'Number of failed pipes: {num_failed_pipes}')  # noqa: T201
return num_failed_pipes
diff --git a/modules/performDL/CBCities/CBCitiesPipeDamageAssessment.py b/modules/performDL/CBCities/CBCitiesPipeDamageAssessment.py
index fc3128339..510a6c606 100644
--- a/modules/performDL/CBCities/CBCitiesPipeDamageAssessment.py
+++ b/modules/performDL/CBCities/CBCitiesPipeDamageAssessment.py
@@ -1,5 +1,4 @@
-# -*- coding: utf-8 -*-
-#
+# # noqa: INP001, D100
# Copyright (c) 2019 The Regents of the University of California
# Copyright (c) 2019 Leland Stanford Junior University
#
@@ -38,113 +37,108 @@
# Modified 'cb-cities' code provided by the Soga Research Group UC Berkeley
# Dr. Stevan Gavrilovic
-import itertools
import argparse
-import os, sys, json, posixpath
-import pandas as pd
+import json
+import os
+import posixpath
+
import numpy as np
+import pandas as pd
+from CBCitiesMethods import * # noqa: F403
-from operator import itemgetter
-from CBCitiesMethods import *
+def main(node_info, pipe_info): # noqa: D103
+ # Load Data
-def main(node_info, pipe_info):
+ print('Loading the node json file...') # noqa: T201
+ with open(node_info) as f: # noqa: PLW1514, PTH123
+ node_data = json.load(f) # noqa: F841
- # Load Data
-
- print('Loading the node json file...')
-
- with open(node_info, 'r') as f:
- node_data = json.load(f)
-
- with open(pipe_info, 'r') as f:
+ with open(pipe_info) as f: # noqa: PLW1514, PTH123
pipe_data = json.load(f)
-
-
+
min_id = int(pipe_data[0]['id'])
max_id = int(pipe_data[0]['id'])
-
- allPipes = []
-
+
+ allPipes = [] # noqa: N806
+
for pipe in pipe_data:
-
- AIM_file = pipe['file']
-
+ AIM_file = pipe['file'] # noqa: N806
+
asst_id = pipe['id']
-
+
min_id = min(int(asst_id), min_id)
max_id = max(int(asst_id), max_id)
-
+
# Open the AIM file
- with open(AIM_file, 'r') as f:
- pipe = AIM_data = json.load(f)
-
+ with open(AIM_file) as f: # noqa: PLW1514, PTH123
+ pipe = AIM_data = json.load(f) # noqa: N806, F841, PLW2901
+
allPipes.append(pipe)
-
# read pgv for nodes
-# pgv_csv_files = glob('../data/rupture/rupture62_im/*.csv')
+ # pgv_csv_files = glob('../data/rupture/rupture62_im/*.csv')
# Mapping & Saving
- import multiprocessing as mp
+ import multiprocessing as mp # noqa: PLC0415
- pool = mp.Pool(mp.cpu_count()-1)
- results = pool.map(add_failrate2pipe, [pipe for pipe in allPipes])
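+    # Evaluate the failure rate of every pipe in parallel, one task per pipe record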
+ pool = mp.Pool(mp.cpu_count() - 1)
+ results = pool.map(add_failrate2pipe, [pipe for pipe in allPipes]) # noqa: C416, F405
pool.close()
-
- df = pd.DataFrame({'DV':{},'MeanFailureProbability':{}})
+
+ df = pd.DataFrame({'DV': {}, 'MeanFailureProbability': {}}) # noqa: PD901
for pipe in results:
-
- failureProbArray = pipe['fail_prob']
- avgFailureProb = np.average(failureProbArray)
+ failureProbArray = pipe['fail_prob'] # noqa: N806
+ avgFailureProb = np.average(failureProbArray) # noqa: N806
pipe_id = pipe['GeneralInformation']['AIM_id']
- print("pipe_id: ",pipe_id)
-# print("failureProbArray: ",failureProbArray)
- print("avgFailureProb: ",avgFailureProb)
-
- df2 = pd.DataFrame({'DV': pipe_id, 'MeanFailureProbability': avgFailureProb}, index=[0])
- df = pd.concat([df,df2], axis=0)
-
-
+ print('pipe_id: ', pipe_id) # noqa: T201
+ # print("failureProbArray: ",failureProbArray)
+ print('avgFailureProb: ', avgFailureProb) # noqa: T201
+
+ df2 = pd.DataFrame(
+ {'DV': pipe_id, 'MeanFailureProbability': avgFailureProb}, index=[0]
+ )
+ df = pd.concat([df, df2], axis=0) # noqa: PD901
+
     # Get the directory for saving the results; assume it is the same directory as the AIM file
- aimDir = os.path.dirname(pipe_info)
- aimFileName = os.path.basename(pipe_info)
-
- saveDir = posixpath.join(aimDir,f'DV_{min_id}-{max_id}.csv')
-
- df.to_csv(saveDir, index = False)
-
- return 0
- #failed_pipes = fail_pipes_number(pipe)
-
+ aimDir = os.path.dirname(pipe_info) # noqa: PTH120, N806
+ aimFileName = os.path.basename(pipe_info) # noqa: PTH119, N806, F841
-if __name__ == '__main__':
+ saveDir = posixpath.join(aimDir, f'DV_{min_id}-{max_id}.csv') # noqa: N806
- #Defining the command line arguments
- workflowArgParser = argparse.ArgumentParser(
- "Run the CB-cities water distribution damage and loss workflow.",
- allow_abbrev=False)
+ df.to_csv(saveDir, index=False)
- workflowArgParser.add_argument("-n", "--nodeInfo",
- default=None,
- help="Node information.")
- workflowArgParser.add_argument("-p", "--pipeInfo",
- default=None,
- help="Pipe Information.")
- workflowArgParser.add_argument("-s", "--save_dir",
- default=None,
- help="Directory where to save the results.")
+ return 0
+ # failed_pipes = fail_pipes_number(pipe)
- #Parsing the command line arguments
- wfArgs = workflowArgParser.parse_args()
+
+if __name__ == '__main__':
+ # Defining the command line arguments
+ workflowArgParser = argparse.ArgumentParser( # noqa: N816
+ 'Run the CB-cities water distribution damage and loss workflow.',
+ allow_abbrev=False,
+ )
+
+ workflowArgParser.add_argument(
+ '-n', '--nodeInfo', default=None, help='Node information.'
+ )
+ workflowArgParser.add_argument(
+ '-p', '--pipeInfo', default=None, help='Pipe Information.'
+ )
+ workflowArgParser.add_argument(
+ '-s', '--save_dir', default=None, help='Directory where to save the results.'
+ )
+
+ # Parsing the command line arguments
+ wfArgs = workflowArgParser.parse_args() # noqa: N816
# update the local app dir with the default - if needed
-# if wfArgs.appDir is None:
-# workflow_dir = Path(os.path.dirname(os.path.abspath(__file__))).resolve()
-# wfArgs.appDir = workflow_dir.parents[1]
+ # if wfArgs.appDir is None:
+ # workflow_dir = Path(os.path.dirname(os.path.abspath(__file__))).resolve()
+ # wfArgs.appDir = workflow_dir.parents[1]
- #Calling the main workflow method and passing the parsed arguments
- main(node_info = wfArgs.nodeInfo, pipe_info = wfArgs.pipeInfo)
+ # Calling the main workflow method and passing the parsed arguments
+ main(node_info=wfArgs.nodeInfo, pipe_info=wfArgs.pipeInfo)
diff --git a/modules/performDL/CBCities/CBCitiesWDNDL.py b/modules/performDL/CBCities/CBCitiesWDNDL.py
index 05adfc550..526511d19 100644
--- a/modules/performDL/CBCities/CBCitiesWDNDL.py
+++ b/modules/performDL/CBCities/CBCitiesWDNDL.py
@@ -1,5 +1,4 @@
-# -*- coding: utf-8 -*-
-#
+# # noqa: INP001, D100
# Copyright (c) 2019 The Regents of the University of California
# Copyright (c) 2019 Leland Stanford Junior University
#
@@ -39,68 +38,72 @@
# Dr. Stevan Gavrilovic
import argparse
-import os, sys, json, posixpath
-import pandas as pd
-import numpy as np
-
+import json
+import posixpath
+import sys
from time import gmtime, strftime
-def log_msg(msg):
+import numpy as np
+import pandas as pd
+
+def log_msg(msg): # noqa: D103
formatted_msg = '{} {}'.format(strftime('%Y-%m-%dT%H:%M:%SZ', gmtime()), msg)
- print(formatted_msg)
+ print(formatted_msg) # noqa: T201
-from operator import itemgetter
-from CBCitiesMethods import *
+from CBCitiesMethods import * # noqa: E402, F403
-def run_DL_calc(aim_file_path, saveDir, output_name):
-
+
+def run_DL_calc(aim_file_path, saveDir, output_name): # noqa: N802, N803, D103
# Load Data
-
- print('Loading the pipeline json file...')
+
+ print('Loading the pipeline json file...') # noqa: T201
# Open the AIM file
- with open(aim_file_path, 'r') as f:
- pipe = AIM_data = json.load(f)
-
- add_failrate2pipe(pipe)
-
- failureRateArray = pipe['fail_prob']
- avgRr = np.average(failureRateArray)
-
- df = pd.DataFrame({'DV': "0", 'RepairRate': avgRr}, index=[0])
-
- savePath = posixpath.join(saveDir,output_name)
-
- df.to_csv(savePath, index = False)
-
+ with open(aim_file_path) as f: # noqa: PLW1514, PTH123
+ pipe = AIM_data = json.load(f) # noqa: N806, F841
+
+ add_failrate2pipe(pipe) # noqa: F405
+
+ failureRateArray = pipe['fail_prob'] # noqa: N806
+ avgRr = np.average(failureRateArray) # noqa: N806
+
+ df = pd.DataFrame({'DV': '0', 'RepairRate': avgRr}, index=[0]) # noqa: PD901
+
+ savePath = posixpath.join(saveDir, output_name) # noqa: N806
+
+ df.to_csv(savePath, index=False)
+
return 0
-
-def main(args):
+def main(args): # noqa: D103
parser = argparse.ArgumentParser()
parser.add_argument('-c', '--filenameDL')
- parser.add_argument('-p', '--demandFile', default = None)
+ parser.add_argument('-p', '--demandFile', default=None)
parser.add_argument('--outputEDP', default='EDP.csv')
- parser.add_argument('--outputDM', default = 'DM.csv')
- parser.add_argument('--outputDV', default = 'DV.csv')
- parser.add_argument('--resource_dir', default = None)
- parser.add_argument('--dirnameOutput', default = None)
+ parser.add_argument('--outputDM', default='DM.csv')
+ parser.add_argument('--outputDV', default='DV.csv')
+ parser.add_argument('--resource_dir', default=None)
+ parser.add_argument('--dirnameOutput', default=None)
args = parser.parse_args(args)
log_msg('Initializing CB-Cities calculation...')
- out = run_DL_calc(aim_file_path = args.filenameDL, saveDir = args.dirnameOutput, output_name = args.outputDV)
+ out = run_DL_calc(
+ aim_file_path=args.filenameDL,
+ saveDir=args.dirnameOutput,
+ output_name=args.outputDV,
+ )
if out == -1:
- log_msg("DL calculation failed.")
+ log_msg('DL calculation failed.')
else:
log_msg('DL calculation completed.')
-if __name__ == '__main__':
+if __name__ == '__main__':
main(sys.argv[1:])
diff --git a/modules/performDL/pelicun3/DL_visuals.py b/modules/performDL/pelicun3/DL_visuals.py
index c0726bbdf..57eaf8b5b 100644
--- a/modules/performDL/pelicun3/DL_visuals.py
+++ b/modules/performDL/pelicun3/DL_visuals.py
@@ -1,5 +1,4 @@
-# -*- coding: utf-8 -*-
-#
+# # noqa: INP001, D100
# Copyright (c) 2023 Leland Stanford Junior University
# Copyright (c) 2023 The Regents of the University of California
#
@@ -37,389 +36,440 @@
# Contributors:
# Adam Zsarnóczay
-import os, sys, json
import argparse
+import json
+import os
import shutil
+import sys
+from copy import deepcopy
from pathlib import Path
from textwrap import wrap
-from copy import deepcopy
from zipfile import ZipFile
+import colorlover as cl
import numpy as np
-from scipy.stats import norm
import pandas as pd
-
-import colorlover as cl
-
-from pelicun.base import pelicun_path
-from pelicun.base import convert_to_MultiIndex
-
+from pelicun.base import convert_to_MultiIndex, pelicun_path
from plotly import graph_objects as go
from plotly.subplots import make_subplots
+from scipy.stats import norm
-import time
-
-#start_time = time.time()
+# start_time = time.time()
-def plot_fragility(comp_db_path, output_path, create_zip="0"):
- if create_zip == "1":
+def plot_fragility(comp_db_path, output_path, create_zip='0'): # noqa: C901, D103
+ if create_zip == '1':
output_path = output_path[:-4]
- if os.path.exists(output_path):
+ if os.path.exists(output_path): # noqa: PTH110
shutil.rmtree(output_path)
- Path(output_path).mkdir(parents=True, exist_ok=True);
-
- #frag_df = convert_to_MultiIndex(pd.read_csv(resource_dir + '/' + frag_DB_file, index_col=0), axis=1)
+ Path(output_path).mkdir(parents=True, exist_ok=True)
+ # frag_df = convert_to_MultiIndex(pd.read_csv(resource_dir + '/' + frag_DB_file, index_col=0), axis=1)
frag_df = convert_to_MultiIndex(pd.read_csv(comp_db_path, index_col=0), axis=1)
- comp_db_meta = comp_db_path[:-3]+'json'
+ comp_db_meta = comp_db_path[:-3] + 'json'
if Path(comp_db_meta).is_file():
- with open(comp_db_meta, 'r') as f:
+ with open(comp_db_meta) as f: # noqa: PLW1514, PTH123
frag_meta = json.load(f)
else:
frag_meta = None
- #for comp_id in frag_df.index[:20]:
- #for comp_id in frag_df.index[400:420]:
- #for comp_id in frag_df.index[438:439]:
- #for comp_id in frag_df.index[695:705]:
- for comp_id in frag_df.index:
-
+ # for comp_id in frag_df.index[:20]:
+ # for comp_id in frag_df.index[400:420]:
+ # for comp_id in frag_df.index[438:439]:
+ # for comp_id in frag_df.index[695:705]:
+ for comp_id in frag_df.index: # noqa: PLR1702
comp_data = frag_df.loc[comp_id]
- if frag_meta != None:
- if comp_id in frag_meta.keys():
+ if frag_meta != None: # noqa: E711
+ if comp_id in frag_meta.keys(): # noqa: SIM118
comp_meta = frag_meta[comp_id]
else:
comp_meta = None
else:
comp_meta = None
- #print(json.dumps(comp_meta, indent=2))
+ # print(json.dumps(comp_meta, indent=2))
fig = go.Figure()
fig = make_subplots(
- rows=1, cols=2,
- specs = [[{"type":"xy"},{"type":"table"}]],
- column_widths = [0.4, 0.6],
- horizontal_spacing = 0.02,
- vertical_spacing=0.02
- )
+ rows=1,
+ cols=2,
+ specs=[[{'type': 'xy'}, {'type': 'table'}]],
+ column_widths=[0.4, 0.6],
+ horizontal_spacing=0.02,
+ vertical_spacing=0.02,
+ )
- limit_states = [val for val in comp_data.index.unique(level=0) if 'LS' in val]
+ limit_states = [
+ val for val in comp_data.index.unique(level=0) if 'LS' in val
+ ]
# mapping to a color in a sequential color scale
colors = {
- 1: [cl.scales['3']['seq']['Reds'][2],],
+ 1: [
+ cl.scales['3']['seq']['Reds'][2],
+ ],
2: cl.scales['3']['seq']['Reds'][1:],
3: cl.scales['4']['seq']['Reds'][1:],
4: cl.scales['5']['seq']['Reds'][1:],
- 5: cl.scales['5']['seq']['Reds']
+ 5: cl.scales['5']['seq']['Reds'],
}
- if comp_data.loc[('Incomplete','')] != 1:
-
+ if comp_data.loc[('Incomplete', '')] != 1:
p_min, p_max = 0.01, 0.9
d_min = np.inf
- d_max = -np.inf
-
- LS_count = 0
- for LS in limit_states:
- if comp_data.loc[(LS,'Family')] == 'normal':
- d_min_i, d_max_i = norm.ppf([p_min, p_max],
- loc=comp_data.loc[(LS,'Theta_0')],
- scale=comp_data.loc[(LS,'Theta_1')]*comp_data.loc[(LS,'Theta_0')])
- elif comp_data.loc[(LS,'Family')] == 'lognormal':
- d_min_i, d_max_i = np.exp(norm.ppf([p_min, p_max],
- loc=np.log(comp_data.loc[(LS,'Theta_0')]),
- scale=comp_data.loc[(LS,'Theta_1')]))
+ d_max = -np.inf
+
+ LS_count = 0 # noqa: N806
+ for LS in limit_states: # noqa: N806
+ if comp_data.loc[(LS, 'Family')] == 'normal':
+ d_min_i, d_max_i = norm.ppf(
+ [p_min, p_max],
+ loc=comp_data.loc[(LS, 'Theta_0')],
+ scale=comp_data.loc[(LS, 'Theta_1')]
+ * comp_data.loc[(LS, 'Theta_0')],
+ )
+ elif comp_data.loc[(LS, 'Family')] == 'lognormal':
+ d_min_i, d_max_i = np.exp(
+ norm.ppf(
+ [p_min, p_max],
+ loc=np.log(comp_data.loc[(LS, 'Theta_0')]),
+ scale=comp_data.loc[(LS, 'Theta_1')],
+ )
+ )
else:
continue
- LS_count += 1
-
+ LS_count += 1 # noqa: N806
+
d_min = np.min([d_min, d_min_i])
d_max = np.max([d_max, d_max_i])
-
- demand_vals = np.linspace(d_min, d_max, num=100)
- for i_ls, LS in enumerate(limit_states):
+ demand_vals = np.linspace(d_min, d_max, num=100)
- if comp_data.loc[(LS,'Family')] == 'normal':
- cdf_vals = norm.cdf(demand_vals,
- loc=comp_data.loc[(LS,'Theta_0')],
- scale=comp_data.loc[(LS,'Theta_1')]*comp_data.loc[(LS,'Theta_0')])
- elif comp_data.loc[(LS,'Family')] == 'lognormal':
- cdf_vals = norm.cdf(np.log(demand_vals),
- loc=np.log(comp_data.loc[(LS,'Theta_0')]),
- scale=comp_data.loc[(LS,'Theta_1')])
+ for i_ls, LS in enumerate(limit_states): # noqa: N806
+ if comp_data.loc[(LS, 'Family')] == 'normal':
+ cdf_vals = norm.cdf(
+ demand_vals,
+ loc=comp_data.loc[(LS, 'Theta_0')],
+ scale=comp_data.loc[(LS, 'Theta_1')]
+ * comp_data.loc[(LS, 'Theta_0')],
+ )
+ elif comp_data.loc[(LS, 'Family')] == 'lognormal':
+ cdf_vals = norm.cdf(
+ np.log(demand_vals),
+ loc=np.log(comp_data.loc[(LS, 'Theta_0')]),
+ scale=comp_data.loc[(LS, 'Theta_1')],
+ )
else:
- continue
-
- fig.add_trace(go.Scatter(
- x = demand_vals,
- y = cdf_vals,
- mode = 'lines',
- line = dict(
- width=3,
- color=colors[LS_count][i_ls]
+ continue
+
+ fig.add_trace(
+ go.Scatter(
+ x=demand_vals,
+ y=cdf_vals,
+ mode='lines',
+ line=dict(width=3, color=colors[LS_count][i_ls]), # noqa: C408
+ name=LS,
),
- name = LS,
- ), row=1, col=1)
+ row=1,
+ col=1,
+ )
else:
- fig.add_trace(go.Scatter(
- x = [0,],
- y = [0,],
- mode = 'lines',
- line = dict(
- width=3,
- color=colors[1][0]
+ fig.add_trace(
+ go.Scatter(
+ x=[
+ 0,
+ ],
+ y=[
+ 0,
+ ],
+ mode='lines',
+ line=dict(width=3, color=colors[1][0]), # noqa: C408
+ name='Incomplete Fragility Data',
),
- name = 'Incomplete Fragility Data',
- ), row=1, col=1)
+ row=1,
+ col=1,
+ )
table_vals = []
- for LS in limit_states:
-
- if np.all(pd.isna(comp_data[LS][['Theta_0','Family','Theta_1','DamageStateWeights']].values)) == False:
- table_vals.append(np.insert(comp_data[LS][['Theta_0','Family','Theta_1','DamageStateWeights']].values, 0, LS))
+ for LS in limit_states: # noqa: N806
+ if (
+ np.all( # noqa: E712
+ pd.isna(
+ comp_data[LS][
+ ['Theta_0', 'Family', 'Theta_1', 'DamageStateWeights']
+ ].values
+ )
+ )
+ == False
+ ):
+ table_vals.append( # noqa: PERF401
+ np.insert(
+ comp_data[LS][
+ ['Theta_0', 'Family', 'Theta_1', 'DamageStateWeights']
+ ].values,
+ 0,
+ LS,
+ )
+ )
table_vals = np.array(table_vals).T
ds_list = []
ds_i = 1
for dsw in table_vals[-1]:
-
- if pd.isna(dsw) == True:
+ if pd.isna(dsw) == True: # noqa: E712
ds_list.append(f'DS{ds_i}')
ds_i += 1
else:
w_list = dsw.split('|')
- ds_list.append('<br>'.join([f'DS{ds_i+i} ({100.0 * float(w):.0f}%)'
- for i, w in enumerate(w_list)]))
+ ds_list.append(
+ '<br>'.join(
+ [
+ f'DS{ds_i + i} ({100.0 * float(w):.0f}%)'
+ for i, w in enumerate(w_list)
+ ]
+ )
+ )
ds_i += len(w_list)
- for i in range(1,5):
- table_vals[-i] = table_vals[-i-1]
+ for i in range(1, 5):
+ table_vals[-i] = table_vals[-i - 1]
table_vals[1] = np.array(ds_list)
- font_size = 16
- if ds_i > 8:
+ font_size = 16
+ if ds_i > 8: # noqa: PLR2004
font_size = 8.5
-
- fig.add_trace(go.Table(
- columnwidth = [50,70,65,95,80],
- header=dict(
- values=['Limit<br>State',
+
+ fig.add_trace(
+ go.Table(
+ columnwidth=[50, 70, 65, 95, 80],
+ header=dict( # noqa: C408
+ values=[
+ 'Limit<br>State',
'Damage State(s)',
' Median<br>Capacity',
' Capacity<br>Distribution',
- ' Capacity<br>Dispersion'],
- align=['center','left','center','center','center'],
- fill = dict(color='rgb(200,200,200)'),
- line = dict(color='black'),
- font = dict(color='black', size=16)
+ ' Capacity<br>Dispersion',
+ ],
+ align=['center', 'left', 'center', 'center', 'center'],
+ fill=dict(color='rgb(200,200,200)'), # noqa: C408
+ line=dict(color='black'), # noqa: C408
+ font=dict(color='black', size=16), # noqa: C408
),
- cells=dict(
- values=table_vals,
- height = 30,
- align=['center','left','center','center','center'],
- fill = dict(color='rgba(0,0,0,0)'),
- line = dict(color='black'),
- font = dict(color='black', size=font_size)
- )
- ), row=1, col=2)
+ cells=dict( # noqa: C408
+ values=table_vals,
+ height=30,
+ align=['center', 'left', 'center', 'center', 'center'],
+ fill=dict(color='rgba(0,0,0,0)'), # noqa: C408
+ line=dict(color='black'), # noqa: C408
+ font=dict(color='black', size=font_size), # noqa: C408
+ ),
+ ),
+ row=1,
+ col=2,
+ )
x_loc = 0.4928
y_loc = 0.697 + 0.123
ds_offset = 0.086
info_font_size = 10
- if ds_i > 8:
+ if ds_i > 8: # noqa: PLR2004
x_loc = 0.4928
y_loc = 0.705 + 0.123
ds_offset = 0.0455
info_font_size = 9
for i_ls, ds_desc in enumerate(ds_list):
+ if comp_meta != None: # noqa: E711
+ ls_meta = comp_meta['LimitStates'][f'LS{i_ls + 1}']
- if comp_meta != None:
- ls_meta = comp_meta['LimitStates'][f'LS{i_ls+1}']
-
- y_loc = y_loc - 0.123
+ y_loc = y_loc - 0.123 # noqa: PLR6104
if '<br>' in ds_desc:
-
ds_vals = ds_desc.split('<br>')
- for i_ds, ds_name in enumerate(ds_vals):
-
+ for i_ds, ds_name in enumerate(ds_vals): # noqa: B007
ds_id = list(ls_meta.keys())[i_ds]
- if ls_meta[ds_id].get('Description', False) != False:
- ds_description = '<br>'.join(wrap(ls_meta[ds_id]["Description"], width=70))
+ if ls_meta[ds_id].get('Description', False) != False: # noqa: E712
+ ds_description = '<br>'.join(
+ wrap(ls_meta[ds_id]['Description'], width=70)
+ )
else:
ds_description = ''
-
- if ls_meta[ds_id].get('RepairAction', False) != False:
- ds_repair = '<br>'.join(wrap(ls_meta[ds_id]["RepairAction"], width=70))
+
+ if ls_meta[ds_id].get('RepairAction', False) != False: # noqa: E712
+ ds_repair = '<br>'.join(
+ wrap(ls_meta[ds_id]['RepairAction'], width=70)
+ )
else:
ds_repair = ''
- if ds_repair != '':
+ if ds_repair != '': # noqa: PLC1901
ds_text = f'{ds_id}<br>{ds_description}<br>Repair Action<br>{ds_repair}'
else:
ds_text = f'{ds_id}<br>{ds_description}'
- y_loc_ds = y_loc - 0.018 - i_ds*ds_offset
+ y_loc_ds = y_loc - 0.018 - i_ds * ds_offset
fig.add_annotation(
- text=f'*',
- hovertext=ds_text,
- xref='paper', yref='paper',
- axref='pixel', ayref='pixel',
- xanchor = 'left', yanchor='bottom',
- font=dict(size=info_font_size),
- showarrow = False,
- ax = 0, ay = 0,
- x = x_loc, y = y_loc_ds)
+ text='*',
+ hovertext=ds_text,
+ xref='paper',
+ yref='paper',
+ axref='pixel',
+ ayref='pixel',
+ xanchor='left',
+ yanchor='bottom',
+ font=dict(size=info_font_size), # noqa: C408
+ showarrow=False,
+ ax=0,
+ ay=0,
+ x=x_loc,
+ y=y_loc_ds,
+ )
y_loc = y_loc_ds - 0.008
-
else:
-
# assuming a single Damage State
- ds_id = list(ls_meta.keys())[0]
+ ds_id = list(ls_meta.keys())[0] # noqa: RUF015
- if ls_meta[ds_id].get('Description', False) != False:
- ds_description = '<br>'.join(wrap(ls_meta[ds_id]["Description"], width=70))
+ if ls_meta[ds_id].get('Description', False) != False: # noqa: E712
+ ds_description = '<br>'.join(
+ wrap(ls_meta[ds_id]['Description'], width=70)
+ )
else:
ds_description = ''
-
- if ls_meta[ds_id].get('RepairAction', False) != False:
- ds_repair = '<br>'.join(wrap(ls_meta[ds_id]["RepairAction"], width=70))
+
+ if ls_meta[ds_id].get('RepairAction', False) != False: # noqa: E712
+ ds_repair = '<br>'.join(
+ wrap(ls_meta[ds_id]['RepairAction'], width=70)
+ )
else:
ds_repair = ''
- if ds_repair != '':
+ if ds_repair != '': # noqa: PLC1901
ds_text = f'{ds_id}<br>{ds_description}<br>Repair Action<br>{ds_repair}'
else:
ds_text = f'{ds_id}<br>{ds_description}'
fig.add_annotation(
- text=f'*',
+ text='*',
hovertext=ds_text,
- xref='paper', yref='paper',
- axref='pixel', ayref='pixel',
- xanchor = 'left', yanchor='bottom',
- font=dict(size=info_font_size),
- showarrow = False,
- ax = 0, ay = 0,
- x = x_loc, y = y_loc)
-
- shared_ax_props = dict(
- showgrid = True,
- linecolor = 'black',
- gridwidth = 0.05,
- gridcolor = 'rgb(192,192,192)'
+ xref='paper',
+ yref='paper',
+ axref='pixel',
+ ayref='pixel',
+ xanchor='left',
+ yanchor='bottom',
+ font=dict(size=info_font_size), # noqa: C408
+ showarrow=False,
+ ax=0,
+ ay=0,
+ x=x_loc,
+ y=y_loc,
+ )
+
+ shared_ax_props = dict( # noqa: C408
+ showgrid=True,
+ linecolor='black',
+ gridwidth=0.05,
+ gridcolor='rgb(192,192,192)',
)
- demand_unit = comp_data.loc[('Demand','Unit')]
+ demand_unit = comp_data.loc[('Demand', 'Unit')]
if demand_unit == 'unitless':
demand_unit = '-'
fig.update_xaxes(
- title_text=f"{comp_data.loc[('Demand','Type')]} [{demand_unit}]",
- **shared_ax_props)
+ title_text=f"{comp_data.loc[('Demand', 'Type')]} [{demand_unit}]",
+ **shared_ax_props,
+ )
+ fig.update_yaxes(
+ title_text='P(LS≥lsi)', range=[0, 1.01], **shared_ax_props
+ )
- fig.update_yaxes(title_text=f'P(LS≥lsi)',
- range=[0,1.01],
- **shared_ax_props)
-
fig.update_layout(
- #title = f'{comp_id}',
- margin=dict(b=5,r=5,l=5,t=5),
+ # title = f'{comp_id}',
+ margin=dict(b=5, r=5, l=5, t=5), # noqa: C408
height=300,
width=950,
paper_bgcolor='rgba(0,0,0,0)',
- plot_bgcolor = 'rgba(0,0,0,0)',
- showlegend=False
+ plot_bgcolor='rgba(0,0,0,0)',
+ showlegend=False,
)
- with open(f'{output_path}/{comp_id}.html', "w") as f:
+ with open(f'{output_path}/{comp_id}.html', 'w') as f: # noqa: FURB103, PLW1514, PTH123
f.write(fig.to_html(full_html=False, include_plotlyjs='cdn'))
# store the source database file(s) in the output directory for future reference
- shutil.copy(comp_db_path, Path(output_path)/Path(comp_db_path).name)
+ shutil.copy(comp_db_path, Path(output_path) / Path(comp_db_path).name)
if frag_meta is not None:
- shutil.copy(comp_db_meta, Path(output_path)/Path(comp_db_meta).name)
+ shutil.copy(comp_db_meta, Path(output_path) / Path(comp_db_meta).name)
- if create_zip == "1":
+ if create_zip == '1':
+ files = [f'{output_path}/{file}' for file in os.listdir(output_path)]
- files = [f"{output_path}/{file}" for file in os.listdir(output_path)]
-
- with ZipFile(output_path+".zip", 'w') as zip:
+ with ZipFile(output_path + '.zip', 'w') as zip: # noqa: A001
for file in files:
- zip.write(file, arcname=Path(file).name)
-
- shutil.rmtree(output_path)
+ zip.write(file, arcname=Path(file).name)
- print("Successfully generated component vulnerability figures.")
+ shutil.rmtree(output_path)
+ print('Successfully generated component vulnerability figures.') # noqa: T201
-def plot_repair(comp_db_path, output_path, create_zip="0"):
- #TODO:
+def plot_repair(comp_db_path, output_path, create_zip='0'): # noqa: C901, D103, PLR0912, PLR0915
+ # TODO: # noqa: TD002
# change limit_states names
- if create_zip == "1":
+ if create_zip == '1':
output_path = output_path[:-4]
# initialize the output dir
# if exists, remove it
- if os.path.exists(output_path):
+ if os.path.exists(output_path): # noqa: PTH110
shutil.rmtree(output_path)
# then create it
- Path(output_path).mkdir(parents=True, exist_ok=True);
-
+ Path(output_path).mkdir(parents=True, exist_ok=True)
# open the input component database
repair_df = convert_to_MultiIndex(
- convert_to_MultiIndex(pd.read_csv(comp_db_path, index_col=0), axis=1),
- axis=0)
+ convert_to_MultiIndex(pd.read_csv(comp_db_path, index_col=0), axis=1), axis=0
+ )
- # The metadata is assumed to be stored at the same location under the same
+ # The metadata is assumed to be stored at the same location under the same
# name, in a JSON file
- comp_db_meta = comp_db_path[:-3]+'json'
+ comp_db_meta = comp_db_path[:-3] + 'json'
# check if the metadata is there and open it
if Path(comp_db_meta).is_file():
- with open(comp_db_meta, 'r') as f:
+ with open(comp_db_meta) as f: # noqa: PLW1514, PTH123
repair_meta = json.load(f)
else:
-
# otherwise, assign None to facilitate checks later
repair_meta = None
# perform the plotting for each component independently
- for comp_id in repair_df.index.unique(level=0): #[410:418]:
-
- # perform plotting for each repair consequence type indepdendently
+ for comp_id in repair_df.index.unique(level=0): # [410:418]: # noqa: PLR1702
+ # perform plotting for each repair consequence type independently
for c_type in repair_df.loc[comp_id].index:
-
# load the component-specific part of the database
comp_data = repair_df.loc[(comp_id, c_type)]
# and the component-specific metadata - if it exists
- if repair_meta != None:
- if comp_id in repair_meta.keys():
+ if repair_meta != None: # noqa: E711
+ if comp_id in repair_meta.keys(): # noqa: SIM118
comp_meta = repair_meta[comp_id]
else:
comp_meta = None
@@ -433,39 +483,40 @@ def plot_repair(comp_db_path, output_path, create_zip="0"):
# create two subplots, one for the curve and one for the tabular data
fig = make_subplots(
- rows=1, cols=3,
- specs = [[{"type":"xy"},{"type":"xy"},{"type":"table"}],],
- shared_yaxes = True,
- column_widths = [0.45,0.05, 0.52],
- horizontal_spacing = 0.02,
- vertical_spacing=0.02
- )
+ rows=1,
+ cols=3,
+ specs=[
+ [{'type': 'xy'}, {'type': 'xy'}, {'type': 'table'}],
+ ],
+ shared_yaxes=True,
+ column_widths=[0.45, 0.05, 0.52],
+ horizontal_spacing=0.02,
+ vertical_spacing=0.02,
+ )
# initialize the table collecting parameters
table_vals = []
# get all potential limit state labels
limit_states = [
- val for val in comp_data.index.unique(level=0) if 'DS' in val]
+ val for val in comp_data.index.unique(level=0) if 'DS' in val
+ ]
# check for each limit state
- for LS in limit_states:
+ for LS in limit_states: # noqa: N806
+ fields = ['Theta_0', 'Family', 'Theta_1']
- fields = ['Theta_0','Family','Theta_1']
-
- comp_data_LS = comp_data[LS]
+ comp_data_LS = comp_data[LS] # noqa: N806
for optional_label in ['Family', 'Theta_1']:
if optional_label not in comp_data_LS.index:
comp_data_LS[optional_label] = None
# if any of the fields above is set
- if np.all(pd.isna(comp_data_LS[fields].values)) == False:
-
+ if np.all(pd.isna(comp_data_LS[fields].values)) == False: # noqa: E712
# Then we assume that is valuable information that needs to be
# shown in the table while the other fields will show 'null'
- table_vals.append(
- np.insert(comp_data_LS[fields].values, 0, LS))
+ table_vals.append(np.insert(comp_data_LS[fields].values, 0, LS))
# transpose the table to work well with plotly's API
table_vals = np.array(table_vals).T
@@ -477,92 +528,101 @@ def plot_repair(comp_db_path, output_path, create_zip="0"):
for ds_i, val in enumerate(table_vals[1]):
if '|' in str(val):
table_vals[1][ds_i] = 'varies'
- elif pd.isna(val) == True:
- table_vals[1][ds_i] = "N/A"
+ elif pd.isna(val) == True: # noqa: E712
+ table_vals[1][ds_i] = 'N/A'
else:
conseq_val = float(val)
if conseq_val < 1:
table_vals[1][ds_i] = f'{conseq_val:.4g}'
- elif conseq_val < 10:
- table_vals[1][ds_i] = f'{conseq_val:.3g}'
- elif conseq_val < 1e6:
+ elif conseq_val < 10: # noqa: PLR2004
+ table_vals[1][ds_i] = f'{conseq_val:.3g}'
+ elif conseq_val < 1e6: # noqa: PLR2004
table_vals[1][ds_i] = f'{conseq_val:.0f}'
else:
table_vals[1][ds_i] = f'{conseq_val:.3g}'
# round dispersion parameters to 2 digits
table_vals[-1] = [
- f'{float(sig):.2f}' if pd.isna(sig)==False else "N/A"
- for sig in table_vals[-1]
+ f'{float(sig):.2f}' if pd.isna(sig) == False else 'N/A' # noqa: E712
+ for sig in table_vals[-1]
]
# replace missing distribution labels with N/A
table_vals[-2] = [
- family if pd.isna(family)==False else "N/A"
+ family if pd.isna(family) == False else 'N/A' # noqa: E712
for family in table_vals[-2]
]
# converted simultaneous damage models might have a lot of DSs
- if table_vals.shape[1] > 8:
+ if table_vals.shape[1] > 8: # noqa: PLR2004
lots_of_ds = True
else:
lots_of_ds = False
# set the font size
- font_size = 16 if lots_of_ds == False else 11
-
+ font_size = 16 if lots_of_ds == False else 11 # noqa: E712
+
# create the table
# properties shared between consequence types
- c_pad = (9-len(c_type))*' '
- table_header = ['Damage<br>State',
- 'Median<br>Conseq.',
- ' Conseq.<br>Distribution',
- f' Conseq.<br>Dispersion']
- cell_alignment = ['center','center','center','center']
- column_widths = [45,45,60,55]
-
- fig.add_trace(go.Table(
- columnwidth = column_widths,
- header=dict(
- values=table_header,
- align=cell_alignment,
- fill = dict(color='rgb(200,200,200)'),
- line = dict(color='black'),
- font = dict(color='black', size=16)
+ c_pad = (9 - len(c_type)) * ' ' # noqa: F841
+ table_header = [
+ 'Damage<br>State',
+ 'Median<br>Conseq.',
+ ' Conseq.<br>Distribution',
+ ' Conseq.<br>Dispersion',
+ ]
+ cell_alignment = ['center', 'center', 'center', 'center']
+ column_widths = [45, 45, 60, 55]
+
+ fig.add_trace(
+ go.Table(
+ columnwidth=column_widths,
+ header=dict( # noqa: C408
+ values=table_header,
+ align=cell_alignment,
+ fill=dict(color='rgb(200,200,200)'), # noqa: C408
+ line=dict(color='black'), # noqa: C408
+ font=dict(color='black', size=16), # noqa: C408
),
- cells=dict(
- values=table_vals,
- height = 30 if lots_of_ds == False else 19,
- align=cell_alignment,
- fill = dict(color='rgba(0,0,0,0)'),
- line = dict(color='black'),
- font = dict(color='black', size=font_size)
- )
- ), row=1, col=3)
+ cells=dict( # noqa: C408
+ values=table_vals,
+ height=30 if lots_of_ds == False else 19, # noqa: E712
+ align=cell_alignment,
+ fill=dict(color='rgba(0,0,0,0)'), # noqa: C408
+ line=dict(color='black'), # noqa: C408
+ font=dict(color='black', size=font_size), # noqa: C408
+ ),
+ ),
+ row=1,
+ col=3,
+ )
# get the number (and label) of damage states
limit_states = model_params[0]
# mapping to a color in a sequential color scale
colors = {
- 1: [cl.scales['3']['seq']['PuBu'][2],],
+ 1: [
+ cl.scales['3']['seq']['PuBu'][2],
+ ],
2: cl.scales['3']['seq']['PuBu'][1:],
3: cl.scales['4']['seq']['PuBu'][1:],
4: cl.scales['6']['seq']['PuBu'][2:],
5: cl.scales['7']['seq']['PuBu'][2:],
6: cl.scales['7']['seq']['PuBu'][1:],
- 7: cl.scales['7']['seq']['PuBu'],
- # Simultaneous elevators have a lot of DSs and need special
+ 7: cl.scales['7']['seq']['PuBu'],
+ # Simultaneous elevators have a lot of DSs and need special
# treatment
- 15: (cl.scales['9']['seq']['PuBu'] +
- cl.scales['8']['seq']['YlGnBu'][::-1][1:-1])
+ 15: (
+ cl.scales['9']['seq']['PuBu']
+ + cl.scales['8']['seq']['YlGnBu'][::-1][1:-1]
+ ),
}
- if comp_data.loc[('Incomplete','')] != 1:
-
+ if comp_data.loc[('Incomplete', '')] != 1:
# set the parameters for displaying uncertainty
- p_min, p_max = 0.16, 0.84 # +- 1 std
+ p_min, p_max = 0.16, 0.84 # +- 1 std # noqa: F841
# initialize quantity limits
q_min = 0
@@ -570,13 +630,12 @@ def plot_repair(comp_db_path, output_path, create_zip="0"):
# walk through median parameters
for mu_capacity in model_params[1]:
-
# if any of them is quantity dependent
if '|' in str(mu_capacity):
-
# then parse the quantity limits
q_lims = np.array(
- mu_capacity.split('|')[1].split(','), dtype=float)
+ mu_capacity.split('|')[1].split(','), dtype=float
+ )
# Add the lower and upper limits to get a q_max that
# will lead to a nice plot
@@ -584,64 +643,75 @@ def plot_repair(comp_db_path, output_path, create_zip="0"):
# if none of the medians is quantity-dependent,
if q_max == -np.inf:
-
# Set q_max to 1.0 to scale the plot appropriately
q_max = 1.0
# anchor locations for annotations providing DS information
- x_loc = 0.533 if lots_of_ds == False else 0.535
- y_space = 0.088 if lots_of_ds == False else 0.0543
- y_loc = 0.784 + y_space if lots_of_ds == False else 0.786 + y_space
- info_font_size = 10 if lots_of_ds == False else 9
+ x_loc = 0.533 if lots_of_ds == False else 0.535 # noqa: E712
+ y_space = 0.088 if lots_of_ds == False else 0.0543 # noqa: E712
+ y_loc = 0.784 + y_space if lots_of_ds == False else 0.786 + y_space # noqa: E712
+ info_font_size = 10 if lots_of_ds == False else 9 # noqa: E712
# x anchor for annotations providing median function data
- x_loc_func = 0.697 if lots_of_ds == False else 0.689
+ x_loc_func = 0.697 if lots_of_ds == False else 0.689 # noqa: E712
need_x_axis = False
-
- for ds_i, mu_capacity in enumerate(model_params[1]):
+ for ds_i, mu_capacity in enumerate(model_params[1]):
# first, check if the median is a function:
if '|' in str(mu_capacity):
-
need_x_axis = True
# get the consequence (Y) and quantity (X) values
- c_vals, q_vals = np.array([
- vals.split(',') for vals in mu_capacity.split('|')],
- dtype = float)
+ c_vals, q_vals = np.array(
+ [vals.split(',') for vals in mu_capacity.split('|')],
+ dtype=float,
+ )
else:
-
- c_vals = np.array([mu_capacity,], dtype=float)
- q_vals = np.array([0.,], dtype=float)
-
- # add one more value to each end to represent the
+ c_vals = np.array(
+ [
+ mu_capacity,
+ ],
+ dtype=float,
+ )
+ q_vals = np.array(
+ [
+ 0.0,
+ ],
+ dtype=float,
+ )
+
+ # add one more value to each end to represent the
# constant parts
- q_vals = np.insert(q_vals,0,q_min)
- c_vals = np.insert(c_vals,0,c_vals[0])
+ q_vals = np.insert(q_vals, 0, q_min)
+ c_vals = np.insert(c_vals, 0, c_vals[0])
- q_vals = np.append(q_vals,q_max)
- c_vals = np.append(c_vals,c_vals[-1])
+ q_vals = np.append(q_vals, q_max)
+ c_vals = np.append(c_vals, c_vals[-1])
# plot the median consequence
- fig.add_trace(go.Scatter(
- x = q_vals,
- y = c_vals,
- mode = 'lines',
- line = dict(
- width=3,
- color=colors[np.min([len(model_params[1]),7])][ds_i % 7]
+ fig.add_trace(
+ go.Scatter(
+ x=q_vals,
+ y=c_vals,
+ mode='lines',
+ line=dict( # noqa: C408
+ width=3,
+ color=colors[np.min([len(model_params[1]), 7])][
+ ds_i % 7
+ ],
+ ),
+ name=model_params[0][ds_i],
+ legendgroup=model_params[0][ds_i],
),
- name = model_params[0][ds_i],
- legendgroup = model_params[0][ds_i]
- ), row=1, col=1)
+ row=1,
+ col=1,
+ )
# check if dispersion is prescribed for this consequence
dispersion = model_params[3][ds_i]
- if ((pd.isna(dispersion) == False) and
- (dispersion != 'N/A')):
-
+ if (pd.isna(dispersion) == False) and (dispersion != 'N/A'): # noqa: E712
dispersion = float(dispersion)
if model_params[2][ds_i] == 'normal':
@@ -651,253 +721,289 @@ def plot_repair(comp_db_path, output_path, create_zip="0"):
std_plus_label = 'mu + std'
std_minus_label = 'mu - std'
elif model_params[2][ds_i] == 'lognormal':
- std_plus = np.exp(np.log(c_vals)+dispersion)
- std_minus = np.exp(np.log(c_vals)-dispersion)
+ std_plus = np.exp(np.log(c_vals) + dispersion)
+ std_minus = np.exp(np.log(c_vals) - dispersion)
std_plus_label = 'mu + lnstd'
std_minus_label = 'mu - lnstd'
else:
- continue
+ continue
# plot the std lines
- fig.add_trace(go.Scatter(
- x = q_vals,
- y = std_plus,
- mode = 'lines',
- line = dict(
- width=1,
- color=colors[np.min([len(model_params[1]),7])][ds_i % 7],
- dash='dash'
- ),
- name = model_params[0][ds_i]+' '+std_plus_label,
- legendgroup = model_params[0][ds_i],
- showlegend = False
- ), row=1, col=1)
-
- fig.add_trace(go.Scatter(
- x = q_vals,
- y = std_minus,
- mode = 'lines',
- line = dict(
- width=1,
- color=colors[np.min([len(model_params[1]),7])][ds_i % 7],
- dash='dash'
+ fig.add_trace(
+ go.Scatter(
+ x=q_vals,
+ y=std_plus,
+ mode='lines',
+ line=dict( # noqa: C408
+ width=1,
+ color=colors[np.min([len(model_params[1]), 7])][
+ ds_i % 7
+ ],
+ dash='dash',
+ ),
+ name=model_params[0][ds_i] + ' ' + std_plus_label,
+ legendgroup=model_params[0][ds_i],
+ showlegend=False,
+ ),
+ row=1,
+ col=1,
+ )
+
+ fig.add_trace(
+ go.Scatter(
+ x=q_vals,
+ y=std_minus,
+ mode='lines',
+ line=dict( # noqa: C408
+ width=1,
+ color=colors[np.min([len(model_params[1]), 7])][
+ ds_i % 7
+ ],
+ dash='dash',
+ ),
+ name=model_params[0][ds_i] + ' ' + std_minus_label,
+ legendgroup=model_params[0][ds_i],
+ showlegend=False,
),
- name = model_params[0][ds_i]+' '+std_minus_label,
- legendgroup = model_params[0][ds_i],
- showlegend = False
- ), row=1, col=1)
+ row=1,
+ col=1,
+ )
# and plot distribution pdfs on top
if model_params[2][ds_i] == 'normal':
sig = c_vals[-1] * dispersion
q_pdf = np.linspace(
- np.max([norm.ppf(0.025, loc=c_vals[-1], scale=sig),0]),
+ np.max(
+ [norm.ppf(0.025, loc=c_vals[-1], scale=sig), 0]
+ ),
norm.ppf(0.975, loc=c_vals[-1], scale=sig),
- num=100
+ num=100,
)
c_pdf = norm.pdf(q_pdf, loc=c_vals[-1], scale=sig)
elif model_params[2][ds_i] == 'lognormal':
q_pdf = np.linspace(
- np.exp(norm.ppf(0.025, loc=np.log(c_vals[-1]),
- scale=dispersion)),
- np.exp(norm.ppf(0.975, loc=np.log(c_vals[-1]),
- scale=dispersion)),
- num=100
+ np.exp(
+ norm.ppf(
+ 0.025,
+ loc=np.log(c_vals[-1]),
+ scale=dispersion,
+ )
+ ),
+ np.exp(
+ norm.ppf(
+ 0.975,
+ loc=np.log(c_vals[-1]),
+ scale=dispersion,
+ )
+ ),
+ num=100,
+ )
+ c_pdf = norm.pdf(
+ np.log(q_pdf),
+ loc=np.log(c_vals[-1]),
+ scale=dispersion,
)
- c_pdf = norm.pdf(np.log(q_pdf), loc=np.log(c_vals[-1]),
- scale=dispersion)
c_pdf /= np.max(c_pdf)
- fig.add_trace(go.Scatter(
- x = c_pdf,
- y = q_pdf,
- mode = 'lines',
- line = dict(
- width=1,
- color=colors[np.min([len(model_params[1]),7])][ds_i % 7]
- ),
- fill = 'tozeroy',
- name = model_params[0][ds_i]+' pdf',
- legendgroup = model_params[0][ds_i],
- showlegend = False
- ), row=1, col=2)
+ fig.add_trace(
+ go.Scatter(
+ x=c_pdf,
+ y=q_pdf,
+ mode='lines',
+ line=dict( # noqa: C408
+ width=1,
+ color=colors[np.min([len(model_params[1]), 7])][
+ ds_i % 7
+ ],
+ ),
+ fill='tozeroy',
+ name=model_params[0][ds_i] + ' pdf',
+ legendgroup=model_params[0][ds_i],
+ showlegend=False,
+ ),
+ row=1,
+ col=2,
+ )
# adjust y_loc for annotations
- y_loc = y_loc - y_space
+ y_loc = y_loc - y_space # noqa: PLR6104
# add annotations for median function parameters, if needed
if '|' in str(mu_capacity):
-
- c_vals, q_vals = [vals.split(',') for vals in mu_capacity.split('|')]
+ c_vals, q_vals = (
+ vals.split(',') for vals in mu_capacity.split('|')
+ )
func_text = f'Multilinear Function Breakpoints<br>Medians: {", ".join(c_vals)}<br>Quantities: {", ".join(q_vals)}'
fig.add_annotation(
- text=f'*',
+ text='*',
hovertext=func_text,
- xref='paper', yref='paper',
- axref='pixel', ayref='pixel',
- xanchor = 'left', yanchor='bottom',
- font=dict(size=info_font_size),
- showarrow = False,
- ax = 0, ay = 0,
- x = x_loc_func, y = y_loc)
-
- # check if metadata is available
- if comp_meta != None:
+ xref='paper',
+ yref='paper',
+ axref='pixel',
+ ayref='pixel',
+ xanchor='left',
+ yanchor='bottom',
+ font=dict(size=info_font_size), # noqa: C408
+ showarrow=False,
+ ax=0,
+ ay=0,
+ x=x_loc_func,
+ y=y_loc,
+ )
- ds_meta = comp_meta['DamageStates'][f'DS{ds_i+1}']
+ # check if metadata is available
+ if comp_meta != None: # noqa: E711
+ ds_meta = comp_meta['DamageStates'][f'DS{ds_i + 1}']
- if ds_meta.get('Description', False) != False:
- ds_description = '<br>'.join(wrap(ds_meta["Description"], width=55))
+ if ds_meta.get('Description', False) != False: # noqa: E712
+ ds_description = '<br>'.join(
+ wrap(ds_meta['Description'], width=55)
+ )
else:
ds_description = ''
-
- if ds_meta.get('RepairAction', False) != False:
- ds_repair = '<br>'.join(wrap(ds_meta["RepairAction"], width=55))
+
+ if ds_meta.get('RepairAction', False) != False: # noqa: E712
+ ds_repair = '<br>'.join(
+ wrap(ds_meta['RepairAction'], width=55)
+ )
else:
- ds_repair = ''
+ ds_repair = ''
- if ds_repair != '':
+ if ds_repair != '': # noqa: PLC1901
ds_text = f'{model_params[0][ds_i]}<br>{ds_description}<br>Repair Action<br>{ds_repair}'
else:
- ds_text = f'{model_params[0][ds_i]}<br>{ds_description}'
+ ds_text = (
+ f'{model_params[0][ds_i]}<br>{ds_description}'
+ )
fig.add_annotation(
- text=f'*',
+ text='*',
hovertext=ds_text,
- xref='paper', yref='paper',
- axref='pixel', ayref='pixel',
- xanchor = 'left', yanchor='bottom',
- font=dict(size=info_font_size),
- showarrow = False,
- ax = 0, ay = 0,
- x = x_loc, y = y_loc)
+ xref='paper',
+ yref='paper',
+ axref='pixel',
+ ayref='pixel',
+ xanchor='left',
+ yanchor='bottom',
+ font=dict(size=info_font_size), # noqa: C408
+ showarrow=False,
+ ax=0,
+ ay=0,
+ x=x_loc,
+ y=y_loc,
+ )
else:
-
# add an empty figure still; to highlight incomplete data
- fig.add_trace(go.Scatter(
- x = [0,],
- y = [0,],
- mode = 'lines',
- line = dict(
- width=3,
- color=colors[1][0]
+ fig.add_trace(
+ go.Scatter(
+ x=[
+ 0,
+ ],
+ y=[
+ 0,
+ ],
+ mode='lines',
+ line=dict(width=3, color=colors[1][0]), # noqa: C408
+ name=f'Incomplete Repair {c_type} Consequence Data',
),
- name = f'Incomplete Repair {c_type} Consequence Data',
- ), row=1, col=2)
-
- shared_ax_props = dict(
- showgrid = True,
- linecolor = 'black',
- gridwidth = 0.05,
- gridcolor = 'rgb(220,220,220)'
+ row=1,
+ col=2,
+ )
+
+ shared_ax_props = dict( # noqa: C408
+ showgrid=True,
+ linecolor='black',
+ gridwidth=0.05,
+ gridcolor='rgb(220,220,220)',
)
- quantity_unit = comp_data.loc[('Quantity','Unit')]
- if quantity_unit in ['unitless','1 EA','1 ea']:
+ quantity_unit = comp_data.loc[('Quantity', 'Unit')]
+ if quantity_unit in ['unitless', '1 EA', '1 ea']: # noqa: PLR6201
quantity_unit = '-'
elif quantity_unit.split()[0] == '1':
quantity_unit = quantity_unit.split()[1]
- dv_unit = comp_data.loc[('DV','Unit')]
- if dv_unit in ['unitless',]:
+ dv_unit = comp_data.loc[('DV', 'Unit')]
+ if dv_unit == 'unitless':
dv_unit = '-'
# layout settings
fig.update_layout(
-
# minimize margins
- margin=dict(b=50,r=5,l=5,t=5),
-
+ margin=dict(b=50, r=5, l=5, t=5), # noqa: C408
# height and width targets single-column web view
height=400,
width=950,
-
# transparent background and paper
paper_bgcolor='rgba(0,0,0,0)',
- plot_bgcolor = 'rgba(0,0,0,0)',
-
+ plot_bgcolor='rgba(0,0,0,0)',
# legend on to allow turning DSs off
showlegend=True,
-
- xaxis1 = dict(
- title_text = f"Damage Quantity [{quantity_unit}]",
+ xaxis1=dict(
+ title_text=f'Damage Quantity [{quantity_unit}]',
range=[q_min, q_max],
- **shared_ax_props
- ) if need_x_axis == True else dict(
- showgrid = False,
- showticklabels = False
- ),
-
- yaxis1 = dict(
- title_text=f"{c_type} [{dv_unit}]",
+ **shared_ax_props,
+ )
+ if need_x_axis == True # noqa: E712
+ else dict(showgrid=False, showticklabels=False), # noqa: C408
+ yaxis1=dict(
+ title_text=f'{c_type} [{dv_unit}]',
rangemode='tozero',
**shared_ax_props,
),
-
- xaxis2 = dict(
- showgrid = False,
- showticklabels = False,
- title_text = "",
+ xaxis2=dict( # noqa: C408
+ showgrid=False,
+ showticklabels=False,
+ title_text='',
),
-
- yaxis2 = dict(
- showgrid = False,
- showticklabels = False
- ),
-
+ yaxis2=dict(showgrid=False, showticklabels=False), # noqa: C408
# position legend to top of the figure
- legend = dict(
- yanchor = 'top',
- xanchor = 'right',
- font = dict(
- size=12
- ),
- orientation = 'v',
- y = 1.0,
- x = -0.08,
- )
+ legend=dict( # noqa: C408
+ yanchor='top',
+ xanchor='right',
+ font=dict(size=12), # noqa: C408
+ orientation='v',
+ y=1.0,
+ x=-0.08,
+ ),
)
# save figure to html
- with open(f'{output_path}/{comp_id}-{c_type}.html', "w") as f:
+ with open(f'{output_path}/{comp_id}-{c_type}.html', 'w') as f: # noqa: FURB103, PLW1514, PTH123
# Minimize size by not saving javascript libraries which means
# internet connection is required to view the figure.
f.write(fig.to_html(full_html=False, include_plotlyjs='cdn'))
# store the source database file(s) in the output directory for future reference
- shutil.copy(comp_db_path, Path(output_path)/Path(comp_db_path).name)
+ shutil.copy(comp_db_path, Path(output_path) / Path(comp_db_path).name)
if repair_meta is not None:
- shutil.copy(comp_db_meta, Path(output_path)/Path(comp_db_meta).name)
+ shutil.copy(comp_db_meta, Path(output_path) / Path(comp_db_meta).name)
- if create_zip == "1":
+ if create_zip == '1':
+ files = [f'{output_path}/{file}' for file in os.listdir(output_path)]
- files = [f"{output_path}/{file}" for file in os.listdir(output_path)]
-
- with ZipFile(output_path+".zip", 'w') as zip:
+ with ZipFile(output_path + '.zip', 'w') as zip: # noqa: A001
for file in files:
- zip.write(file, arcname=Path(file).name)
-
- shutil.rmtree(output_path)
+ zip.write(file, arcname=Path(file).name)
- print("Successfully generated component repair consequence figures.")
+ shutil.rmtree(output_path)
+ print('Successfully generated component repair consequence figures.') # noqa: T201
-def check_diff(comp_db_path, output_path):
+def check_diff(comp_db_path, output_path): # noqa: D103
# if the output path already exists
- if os.path.exists(output_path):
-
+ if os.path.exists(output_path): # noqa: PLR1702, PTH110
# check for both the csv and json files
for ext in ['csv', 'json']:
-
- comp_db = comp_db_path[:-3]+ext
+ comp_db = comp_db_path[:-3] + ext
if not Path(comp_db).is_file():
continue
@@ -908,10 +1014,10 @@ def check_diff(comp_db_path, output_path):
# check if a file with the same name exists in the output dir
if comp_db in os.listdir(output_path):
-
# open the two files and compare them
- with open(Path(source_path)/comp_db, 'r') as f1, open(Path(output_path)/comp_db, 'r') as f2:
-
+ with open(Path(source_path) / comp_db) as f1, open( # noqa: PLW1514, PTH123
+ Path(output_path) / comp_db
+ ) as f2:
if ext == 'csv':
new_file = f1.readlines()
old_file = f2.readlines()
@@ -922,11 +1028,10 @@ def check_diff(comp_db_path, output_path):
continue
# if at least one line does not match, we need to generate
- else:
+ else: # noqa: RET507
return True
elif ext == 'json':
-
new_file = json.load(f1)
old_file = json.load(f2)
@@ -935,7 +1040,7 @@ def check_diff(comp_db_path, output_path):
continue
# otherwise, we need to generate
- else:
+ else: # noqa: RET507
return True
# if there is no db file in the output dir, we need to generate
@@ -946,53 +1051,47 @@ def check_diff(comp_db_path, output_path):
return False
# if the output path does not exist, we need to generate
- else:
+ else: # noqa: RET505
return True
-def main(args):
+def main(args): # noqa: D103
parser = argparse.ArgumentParser()
parser.add_argument('viz_type')
parser.add_argument('comp_db_path')
- parser.add_argument('-o', '--output_path',
- default="./comp_viz/") #replace with None
- parser.add_argument('-z', '--zip', default="0")
+ parser.add_argument(
+ '-o', '--output_path', default='./comp_viz/'
+ ) # replace with None
+ parser.add_argument('-z', '--zip', default='0')
args = parser.parse_args(args)
- if args.viz_type in ['fragility', 'repair']:
-
+ if args.viz_type in ['fragility', 'repair']: # noqa: PLR6201
comp_db_path = args.comp_db_path
output_path = args.output_path
# verify that comp_db_path points to a file
if not Path(comp_db_path).is_file():
- raise FileNotFoundError("comp_db_path does not point to a file.")
+ raise FileNotFoundError('comp_db_path does not point to a file.') # noqa: EM101, TRY003
if check_diff(comp_db_path, output_path):
-
if args.viz_type == 'fragility':
-
plot_fragility(comp_db_path, output_path, args.zip)
elif args.viz_type == 'repair':
-
- plot_repair(comp_db_path, output_path, args.zip)
+ plot_repair(comp_db_path, output_path, args.zip)
else:
-
- print("No need to generate, figures already exist in the output folder.")
+ print('No need to generate, figures already exist in the output folder.') # noqa: T201
elif args.viz_type == 'query':
-
if args.comp_db_path == 'default_db':
+ print(pelicun_path) # noqa: T201
- print(pelicun_path)
+ # print("--- %s seconds ---" % (time.time() - start_time))
- #print("--- %s seconds ---" % (time.time() - start_time))
# python3 DL_visuals.py repair /Users/adamzs/SimCenter/applications/performDL/pelicun3/pelicun/resources/SimCenterDBDL/loss_repair_DB_FEMA_P58_2nd.csv
if __name__ == '__main__':
-
main(sys.argv[1:])
diff --git a/modules/performDL/pelicun3/HDF_to_CSV.py b/modules/performDL/pelicun3/HDF_to_CSV.py
index 13ccbe1bf..88bdd59dc 100644
--- a/modules/performDL/pelicun3/HDF_to_CSV.py
+++ b/modules/performDL/pelicun3/HDF_to_CSV.py
@@ -1,5 +1,4 @@
-# -*- coding: utf-8 -*-
-#
+# # noqa: INP001, D100
# Copyright (c) 2018 Leland Stanford Junior University
# Copyright (c) 2018 The Regents of the University of California
#
@@ -37,30 +36,28 @@
# Contributors:
# Adam Zsarnóczay
-import pandas as pd
-import sys
import argparse
+import sys
from pathlib import Path
+import pandas as pd
-def convert_HDF(HDF_path):
- HDF_ext = HDF_path.split('.')[-1]
- CSV_base = HDF_path[:-len(HDF_ext)-1]
+def convert_HDF(HDF_path): # noqa: N802, N803, D103
+ HDF_ext = HDF_path.split('.')[-1] # noqa: N806
+ CSV_base = HDF_path[: -len(HDF_ext) - 1] # noqa: N806
- HDF_path = Path(HDF_path).resolve()
+ HDF_path = Path(HDF_path).resolve() # noqa: N806
store = pd.HDFStore(HDF_path)
- for key in store.keys():
-
- store[key].to_csv(f'{CSV_base}_{key[1:].replace("/","_")}.csv')
+ for key in store.keys(): # noqa: SIM118
+ store[key].to_csv(f'{CSV_base}_{key[1:].replace("/", "_")}.csv')
store.close()
if __name__ == '__main__':
-
args = sys.argv[1:]
parser = argparse.ArgumentParser()
diff --git a/modules/performDL/pelicun3/pelicun3_wrapper.py b/modules/performDL/pelicun3/pelicun3_wrapper.py
index 4faed1331..f94c9f218 100644
--- a/modules/performDL/pelicun3/pelicun3_wrapper.py
+++ b/modules/performDL/pelicun3/pelicun3_wrapper.py
@@ -1,5 +1,4 @@
-# -*- coding: utf-8 -*-
-#
+# # noqa: INP001, D100
# Copyright (c) 2018 Leland Stanford Junior University
# Copyright (c) 2018 The Regents of the University of California
#
diff --git a/modules/performFEM/OpenSeesPy/createOpenSeesPyDriver.cpp b/modules/performFEM/OpenSeesPy/createOpenSeesPyDriver.cpp
index 8e9d60903..fb492e976 100644
--- a/modules/performFEM/OpenSeesPy/createOpenSeesPyDriver.cpp
+++ b/modules/performFEM/OpenSeesPy/createOpenSeesPyDriver.cpp
@@ -17,7 +17,7 @@ int getRV(json_t *edp, std::vector &rvList);
void eraseAllSubstring(std::string & mainStr, const std::string & toErase)
{
size_t pos = std::string::npos;
- // Search for the substring in string in a loop untill nothing is found
+ // Search for the substring in string in a loop until nothing is found
while ((pos = mainStr.find(toErase) )!= std::string::npos)
{
// If found then erase it from string
diff --git a/modules/performFEM/feappv/createFeapDriver.cpp b/modules/performFEM/feappv/createFeapDriver.cpp
index 28cadec3e..0d5bed60e 100755
--- a/modules/performFEM/feappv/createFeapDriver.cpp
+++ b/modules/performFEM/feappv/createFeapDriver.cpp
@@ -17,7 +17,7 @@ int getRV(json_t *edp, std::vector &rvList);
void eraseAllSubstring(std::string & mainStr, const std::string & toErase)
{
size_t pos = std::string::npos;
- // Search for the substring in string in a loop untill nothing is found
+ // Search for the substring in string in a loop until nothing is found
while ((pos = mainStr.find(toErase) )!= std::string::npos)
{
// If found then erase it from string
diff --git a/modules/performFEM/surrogateGP/gpPredict.py b/modules/performFEM/surrogateGP/gpPredict.py
index bcd77ed40..153035365 100644
--- a/modules/performFEM/surrogateGP/gpPredict.py
+++ b/modules/performFEM/surrogateGP/gpPredict.py
@@ -1,85 +1,92 @@
-import time
-import pickle as pickle
-import numpy as np
+import json as json # noqa: CPY001, D100, INP001, PLC0414
import os
-import sys
-import json as json
+import pickle as pickle # noqa: PLC0414, S403
import shutil
-from scipy.stats import lognorm, norm
-import subprocess
+import subprocess # noqa: S404
+import sys
+import time
+import numpy as np
+from scipy.stats import lognorm, norm
try:
- moduleName = "GPy"
- import GPy as GPy
-except:
- print("Error running surrogate prediction - Failed to import module: Surrogate modeling module uses GPy python package which is facing a version compatibility issue at this moment (01.05.2024). To use the surrogate module, one needs to update manually the GPy version to 1.13. The instruction can be found in the the documentation: https://nheri-simcenter.github.io/quoFEM-Documentation/common/user_manual/usage/desktop/SimCenterUQSurrogate.html#lblsimsurrogate")
- exit(-1)
-
+ moduleName = 'GPy' # noqa: N816
+ import GPy as GPy # noqa: PLC0414
+except: # noqa: E722
+ print( # noqa: T201
+ 'Error running surrogate prediction - Failed to import module: Surrogate modeling module uses GPy python package which is facing a version compatibility issue at this moment (01.05.2024). To use the surrogate module, one needs to update manually the GPy version to 1.13. The instruction can be found in the documentation: https://nheri-simcenter.github.io/quoFEM-Documentation/common/user_manual/usage/desktop/SimCenterUQSurrogate.html#lblsimsurrogate'
+ )
+ exit(-1) # noqa: PLR1722
try:
- moduleName = "GPy"
- import GPy as GPy
- moduleName = "emukit"
- from emukit.multi_fidelity.convert_lists_to_array import convert_x_list_to_array, convert_xy_lists_to_arrays
- moduleName = "Pandas"
+ moduleName = 'GPy' # noqa: N816
+ import GPy as GPy # noqa: PLC0414
+
+ moduleName = 'emukit' # noqa: N816
+ from emukit.multi_fidelity.convert_lists_to_array import (
+ convert_x_list_to_array,
+ )
+
+ moduleName = 'Pandas' # noqa: N816
import pandas as pd
error_tag = False # global variable
-except:
+except: # noqa: E722
error_tag = True
- print("Error running surrogate prediction - Failed to import module:" + moduleName)
- exit(-1)
+ print( # noqa: T201
+ 'Error running surrogate prediction - Failed to import module:' + moduleName
+ )
+ exit(-1) # noqa: PLR1722
# from emukit.multi_fidelity.convert_lists_to_array import convert_x_list_to_array, convert_xy_lists_to_arrays
-def main(params_dir,surrogate_dir,json_dir,result_file,input_json):
- global error_file
+def main(params_dir, surrogate_dir, json_dir, result_file, input_json): # noqa: C901, D103, PLR0912, PLR0914, PLR0915
+ global error_file # noqa: PLW0602
- os_type=sys.platform.lower()
- run_type ='runninglocal'
+ os_type = sys.platform.lower()
+ run_type = 'runninglocal'
#
# create a log file
#
- msg0 = os.path.basename(os.getcwd()) + " : "
- file_object = open('surrogateLog.log', 'a')
+ msg0 = os.path.basename(os.getcwd()) + ' : ' # noqa: PTH109, PTH119
+ file_object = open('surrogateLog.log', 'a') # noqa: PLW1514, PTH123, SIM115
- folderName = os.path.basename(os.getcwd())
- sampNum = folderName.split(".")[-1]
+ folderName = os.path.basename(os.getcwd()) # noqa: PTH109, PTH119, N806
+ sampNum = folderName.split('.')[-1] # noqa: N806
#
# read json -- current input file
#
def error_exit(msg):
- error_file.write(msg) # local
+ error_file.write(msg) # local
error_file.close()
- file_object.write(msg0 + msg) # global file
+ file_object.write(msg0 + msg) # global file
file_object.close()
- print(msg)
- exit(-1)
+ print(msg) # noqa: T201
+ exit(-1) # noqa: PLR1722
def error_warning(msg):
- #error_file.write(msg)
+ # error_file.write(msg)
file_object.write(msg)
- #print(msg)
+ # print(msg)
- if not os.path.exists(json_dir):
- msg = 'Error in surrogate prediction: File not found -' + json_dir
- error_exit(msg)
+ if not os.path.exists(json_dir): # noqa: PTH110
+ msg = 'Error in surrogate prediction: File not found -' + json_dir
+ error_exit(msg)
- with open(json_dir) as f:
+ with open(json_dir) as f: # noqa: PLW1514, PTH123
try:
sur = json.load(f)
except ValueError:
msg = 'invalid json format: ' + json_dir
error_exit(msg)
- isEEUQ = sur["isEEUQ"]
+ isEEUQ = sur['isEEUQ'] # noqa: N806
if isEEUQ:
dakota_path = 'sc_scInput.json'
@@ -87,50 +94,47 @@ def error_warning(msg):
dakota_path = input_json
try:
- with open(dakota_path) as f: # current input file
+ with open(dakota_path) as f: # current input file # noqa: PLW1514, PTH123
inp_tmp = json.load(f)
- except:
+ except: # noqa: E722
try:
- with open('sc_inputRWHALE.json') as f: # current input file
- inp_tmp = json.load(f)
- except:
- pass
-
+ # current input file
+ with open('sc_inputRWHALE.json') as f: # noqa: PLW1514, PTH123
+ inp_tmp = json.load(f)
+ except: # noqa: S110, E722
+ pass
try:
if isEEUQ:
- inp_fem = inp_tmp["Applications"]["Modeling"]
+ inp_fem = inp_tmp['Applications']['Modeling']
else:
- inp_fem = inp_tmp["FEM"]
- except:
- inp_fem={}
- print('invalid json format - dakota.json')
-
- norm_var_thr = inp_fem.get("varThres",0.02)
- when_inaccurate = inp_fem.get("femOption","continue")
- do_mf = False
- myseed = inp_fem.get("gpSeed",None)
- prediction_option = inp_fem.get("predictionOption", "random")
- if myseed==None:
- folderName = os.path.basename(os.path.dirname(os.getcwd()))
- myseed = int(folderName)*int(1.e7)
- np.random.seed(int(myseed)+int(sampNum))
+ inp_fem = inp_tmp['FEM']
+ except: # noqa: E722
+ inp_fem = {}
+ print('invalid json format - dakota.json') # noqa: T201
+
+ norm_var_thr = inp_fem.get('varThres', 0.02)
+ when_inaccurate = inp_fem.get('femOption', 'continue')
+ do_mf = False # noqa: F841
+ myseed = inp_fem.get('gpSeed', None)
+ prediction_option = inp_fem.get('predictionOption', 'random')
+ if myseed == None: # noqa: E711
+ folderName = os.path.basename(os.path.dirname(os.getcwd())) # noqa: PTH109, PTH119, PTH120, N806
+ myseed = int(folderName) * int(1.0e7)
+ np.random.seed(int(myseed) + int(sampNum))
# if no g and rv,
-
#
# read json -- original input for training surrogate
#
-
f.close()
- did_stochastic = sur["doStochastic"]
- did_logtransform = sur["doLogtransform"]
- did_normalization = sur["doNormalization"]
- kernel = sur["kernName"]
-
+ did_stochastic = sur['doStochastic']
+ did_logtransform = sur['doLogtransform']
+ did_normalization = sur['doNormalization']
+ kernel = sur['kernName']
if kernel == 'Radial Basis':
kern_name = 'rbf'
@@ -139,32 +143,32 @@ def error_warning(msg):
elif kernel == 'Matern 3/2':
kern_name = 'Mat32'
elif kernel == 'Matern 5/2':
- kern_name = 'Mat52'
- did_mf = sur["doMultiFidelity"]
+ kern_name = 'Mat52' # noqa: F841
+ did_mf = sur['doMultiFidelity']
# from json
- g_name_sur = list()
+ g_name_sur = list() # noqa: C408
ng_sur = 0
- Y=np.zeros((sur['highFidelityInfo']['valSamp'],sur['ydim']))
+ Y = np.zeros((sur['highFidelityInfo']['valSamp'], sur['ydim'])) # noqa: N806
for g in sur['ylabels']:
g_name_sur += [g]
- Y[:,ng_sur]=np.array(sur['yExact'][g])
+ Y[:, ng_sur] = np.array(sur['yExact'][g])
ng_sur += 1
- rv_name_sur = list()
+ rv_name_sur = list() # noqa: C408
nrv_sur = 0
- X=np.zeros((sur['highFidelityInfo']['valSamp'],sur['xdim']))
+ X = np.zeros((sur['highFidelityInfo']['valSamp'], sur['xdim'])) # noqa: N806
for rv in sur['xlabels']:
rv_name_sur += [rv]
- X[:,nrv_sur]=np.array(sur['xExact'][rv])
+ X[:, nrv_sur] = np.array(sur['xExact'][rv])
nrv_sur += 1
try:
- constIdx = sur['highFidelityInfo']["constIdx"]
- constVal = sur['highFidelityInfo']["constVal"]
- except:
- constIdx = []
- constVal = []
+ constIdx = sur['highFidelityInfo']['constIdx'] # noqa: N806
+ constVal = sur['highFidelityInfo']['constVal'] # noqa: N806
+ except: # noqa: E722
+ constIdx = [] # noqa: N806
+ constVal = [] # noqa: N806
# Read pickles
@@ -180,103 +184,123 @@ def decorator(func):
return decorator
@monkeypatch_method(GPy.likelihoods.Gaussian)
- def gaussian_variance(self, Y_metadata=None):
+ def gaussian_variance(self, Y_metadata=None): # noqa: N803
if Y_metadata is None:
return self.variance
- else:
+ else: # noqa: RET505
return self.variance * Y_metadata['variance_structure']
@monkeypatch_method(GPy.core.GP)
- def set_XY2(self, X=None, Y=None, Y_metadata=None):
+ def set_XY2(self, X=None, Y=None, Y_metadata=None): # noqa: N802, N803
if Y_metadata is not None:
if self.Y_metadata is None:
self.Y_metadata = Y_metadata
else:
self.Y_metadata.update(Y_metadata)
- #print("metadata_updated")
+ # print("metadata_updated")
self.set_XY(X, Y)
- def get_stochastic_variance(X, Y, x, ny):
- #X_unique, X_idx, indices, counts = np.unique(X, axis=0, return_index=True, return_counts=True, return_inverse=True)
- X_unique, dummy, indices, counts = np.unique(X, axis=0, return_index=True, return_counts=True,
- return_inverse=True)
+ def get_stochastic_variance(X, Y, x, ny): # noqa: N803
+ # X_unique, X_idx, indices, counts = np.unique(X, axis=0, return_index=True, return_counts=True, return_inverse=True)
+ X_unique, dummy, indices, counts = np.unique( # noqa: F841, N806
+ X, axis=0, return_index=True, return_counts=True, return_inverse=True
+ )
- idx_repl = [i for i in np.where(counts > 1)[0]]
+ idx_repl = [i for i in np.where(counts > 1)[0]] # noqa: C416
-
- if len(idx_repl)>0:
+ if len(idx_repl) > 0:
n_unique = X_unique.shape[0]
- Y_mean, Y_var = np.zeros((n_unique, 1)), np.zeros((n_unique, 1))
+ Y_mean, Y_var = np.zeros((n_unique, 1)), np.zeros((n_unique, 1)) # noqa: N806
for idx in range(n_unique):
- Y_subset = Y[[i for i in np.where(indices == idx)[0]], :]
+ Y_subset = Y[[i for i in np.where(indices == idx)[0]], :] # noqa: C416, N806
Y_mean[idx, :] = np.mean(Y_subset, axis=0)
Y_var[idx, :] = np.var(Y_subset, axis=0)
- if (np.max(Y_var) / np.var(Y_mean) < 1.e-10) and len(idx_repl) > 0:
+ if (np.max(Y_var) / np.var(Y_mean) < 1.0e-10) and len(idx_repl) > 0: # noqa: PLR2004
return np.ones((X.shape[0], 1))
-
- kernel_var = GPy.kern.Matern52(input_dim=nrv_sur, ARD=True) + GPy.kern.Linear(input_dim=nrv_sur, ARD=True)
+ kernel_var = GPy.kern.Matern52(
+ input_dim=nrv_sur, ARD=True
+ ) + GPy.kern.Linear(input_dim=nrv_sur, ARD=True)
log_vars = np.log(Y_var[idx_repl])
- m_var = GPy.models.GPRegression(X_unique[idx_repl, :], log_vars, kernel_var, normalizer=True, Y_metadata=None)
- #print("Collecting variance field of ny={}".format(ny))
- for key, val in sur["modelInfo"][g_name_sur[ny]+"_Var"].items():
- exec('m_var.' + key + '= np.array(val)')
+ m_var = GPy.models.GPRegression(
+ X_unique[idx_repl, :],
+ log_vars,
+ kernel_var,
+ normalizer=True,
+ Y_metadata=None,
+ )
+ # print("Collecting variance field of ny={}".format(ny))
+ for key, val in sur['modelInfo'][g_name_sur[ny] + '_Var'].items(): # noqa: B007, PERF102
+ exec('m_var.' + key + '= np.array(val)') # noqa: S102
log_var_pred, dum = m_var.predict(X_unique)
var_pred = np.exp(log_var_pred)
if did_normalization:
- Y_normFact = np.var(Y_mean)
+ Y_normFact = np.var(Y_mean) # noqa: N806
else:
- Y_normFact = 1
-
- norm_var_str = (var_pred.T[0]) / Y_normFact # if normalization was used..
+ Y_normFact = 1 # noqa: N806
+ norm_var_str = (
+ (var_pred.T[0]) / Y_normFact
+ ) # if normalization was used..
log_var_pred_x, dum = m_var.predict(x)
nugget_var_pred_x = np.exp(log_var_pred_x.T[0]) / Y_normFact
-
- else:
- X_unique = X
- Y_mean = Y
- indices = range(0, Y.shape[0])
- kernel_var = GPy.kern.Matern52(input_dim=nrv_sur, ARD=True) + GPy.kern.Linear(input_dim=nrv_sur, ARD=True)
- log_vars = np.atleast_2d(sur["modelInfo"][g_name_sur[ny] + "_Var"]["TrainingSamplesY"]).T
- m_var = GPy.models.GPRegression(X, log_vars, kernel_var, normalizer=True,
- Y_metadata=None)
-
- #print("Variance field obtained for ny={}".format(ny))
- for key, val in sur["modelInfo"][g_name_sur[ny] + "_Var"].items():
- exec('m_var.' + key + '= np.array(val)')
+ else:
+ X_unique = X # noqa: N806
+ Y_mean = Y # noqa: N806
+ indices = range(Y.shape[0])
+
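+            # no replicated design points: rebuild the variance-field GP from the
+            # training samples stored in the surrogate metadata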
+ kernel_var = GPy.kern.Matern52(
+ input_dim=nrv_sur, ARD=True
+ ) + GPy.kern.Linear(input_dim=nrv_sur, ARD=True)
+ log_vars = np.atleast_2d(
+ sur['modelInfo'][g_name_sur[ny] + '_Var']['TrainingSamplesY']
+ ).T
+ m_var = GPy.models.GPRegression(
+ X, log_vars, kernel_var, normalizer=True, Y_metadata=None
+ )
+
+ # print("Variance field obtained for ny={}".format(ny))
+ for key, val in sur['modelInfo'][g_name_sur[ny] + '_Var'].items(): # noqa: B007, PERF102
+ exec('m_var.' + key + '= np.array(val)') # noqa: S102
log_var_pred, dum = m_var.predict(X)
var_pred = np.exp(log_var_pred)
if did_normalization:
- Y_normFact = np.var(Y)
+ Y_normFact = np.var(Y) # noqa: N806
else:
- Y_normFact = 1
+ Y_normFact = 1 # noqa: N806
- norm_var_str = (var_pred.T[0]) / Y_normFact # if normalization was used..
+ norm_var_str = (
+ (var_pred.T[0]) / Y_normFact
+ ) # if normalization was used..
- log_var_pred_x, dum = m_var.predict(x)
+ log_var_pred_x, dum = m_var.predict(x) # noqa: F841
nugget_var_pred_x = np.exp(log_var_pred_x.T[0]) / Y_normFact
-
- return X_unique, Y_mean, norm_var_str, counts, nugget_var_pred_x, np.var(Y_mean)
+ return (
+ X_unique,
+ Y_mean,
+ norm_var_str,
+ counts,
+ nugget_var_pred_x,
+ np.var(Y_mean),
+ )
# REQUIRED: rv_name, y_var
# Collect also dummy rvs
- id_vec=[]
+ id_vec = []
rv_name_dummy = []
-
- t_total = time.process_time()
+ t_total = time.process_time() # noqa: F841
first_rv_found = False
first_dummy_found = False
@@ -284,113 +308,127 @@ def get_stochastic_variance(X, Y, x, ny):
# Check how many RVs overlap
#
- with open(params_dir, "r") as x_file:
+ with open(params_dir) as x_file: # noqa: PLW1514, PTH123
data = x_file.readlines()
nrv = int(data[0])
for i in range(nrv):
name_values = data[i + 1].split()
name = name_values[0]
- #print(name)
+ # print(name)
- #= pass if is string. GP cannot handle that
- if ((name == 'MultipleEvent') or (name == 'eventID')) and isEEUQ:
+            # = skip if the value is a string; GP cannot handle strings
+ if ((name == 'MultipleEvent') or (name == 'eventID')) and isEEUQ: # noqa: PLR1714
continue
- if not name_values[1].replace('.','',1).replace('e','',1).replace('-','',1).replace('+','',1).isdigit():
- # surrogate model does not accept descrete
+ if (
+ not name_values[1]
+ .replace('.', '', 1)
+ .replace('e', '', 1)
+ .replace('-', '', 1)
+ .replace('+', '', 1)
+ .isdigit()
+ ):
+ # surrogate model does not accept discrete
continue
- #= atleast_2d because there may be multiple samples
+ # = atleast_2d because there may be multiple samples
samples = np.atleast_2d([float(vals) for vals in name_values[1:]]).T
ns = len(samples)
- if not name in rv_name_sur:
+ if name not in rv_name_sur:
rv_name_dummy += [name]
if not first_dummy_found:
rv_val_dummy = samples
first_dummy_found = True
else:
- rv_val_dummy = np.hstack([rv_val_dummy,samples])
- continue;
+ rv_val_dummy = np.hstack([rv_val_dummy, samples])
+ continue
id_map = rv_name_sur.index(name)
- #print(name)
- #print(rv_name_sur)
- #try:
+ # print(name)
+ # print(rv_name_sur)
+ # try:
# id_map = rv_name_sur.index(name)
- #except ValueError:
+ # except ValueError:
# msg = 'Error importing input data: variable "{}" not identified.'.format(name)
# error_exit(msg)
if not first_rv_found:
nsamp = ns
- rv_tmp =samples
+ rv_tmp = samples
id_vec = [id_map]
first_rv_found = True
else:
rv_tmp = np.hstack([rv_tmp, samples])
id_vec += [id_map]
-
if ns != nsamp:
msg = 'Error importing input data: sample size in params.in is not consistent.'
error_exit(msg)
g_idx = []
- for edp in (inp_tmp["EDP"]):
+ for edp in inp_tmp['EDP']:
edp_names = []
- if edp["length"] == 1:
- edp_names += [edp["name"]]
+ if edp['length'] == 1:
+ edp_names += [edp['name']]
else:
- for i in range(0, edp["length"]):
- edp_names += [edp["name"] + "_" + str(i + 1)]
+ for i in range(edp['length']):
+ edp_names += [edp['name'] + '_' + str(i + 1)]
try:
- for i in range(0, edp["length"]):
+ for i in range(edp['length']):
id_map = g_name_sur.index(edp_names[i])
g_idx += [id_map]
except ValueError:
- msg = 'Error importing input data: qoi "{}" not identified.'.format(edp["name"])
+ msg = 'Error importing input data: qoi "{}" not identified.'.format(
+ edp['name']
+ )
error_exit(msg)
# if eeuq
first_eeuq_found = False
- if sur.get("intensityMeasureInfo") != None:
-
- with open("IMinput.json","w") as f:
- mySurrogateJson = sur["intensityMeasureInfo"]
- json.dump(mySurrogateJson,f)
-
- computeIM = os.path.join(os.path.dirname(os.path.dirname(os.path.dirname(os.path.abspath(__file__)))),
- 'createEVENT', 'groundMotionIM', 'IntensityMeasureComputer.py')
-
-
- pythonEXE = sys.executable
+ if sur.get('intensityMeasureInfo') != None: # noqa: E711
+ with open('IMinput.json', 'w') as f: # noqa: PLW1514, PTH123
+ mySurrogateJson = sur['intensityMeasureInfo'] # noqa: N806
+ json.dump(mySurrogateJson, f)
+
+ computeIM = os.path.join( # noqa: PTH118, N806
+ os.path.dirname( # noqa: PTH120
+ os.path.dirname(os.path.dirname(os.path.abspath(__file__))) # noqa: PTH100, PTH120
+ ),
+ 'createEVENT',
+ 'groundMotionIM',
+ 'IntensityMeasureComputer.py',
+ )
+
+ pythonEXE = sys.executable # noqa: N806
# compute IMs
- if os.path.exists('EVENT.json') and os.path.exists('IMinput.json'):
- os.system(f"{pythonEXE} {computeIM} --filenameAIM IMinput.json --filenameEVENT EVENT.json --filenameIM IM.json --geoMeanVar")
+ if os.path.exists('EVENT.json') and os.path.exists('IMinput.json'): # noqa: PTH110
+ os.system( # noqa: S605
+ f'{pythonEXE} {computeIM} --filenameAIM IMinput.json --filenameEVENT EVENT.json --filenameIM IM.json --geoMeanVar'
+ )
else:
- msg = 'IMinput.json and EVENT.json not found in workdir.{}. Cannot calculate IMs.'.format(sampNum)
+ msg = f'IMinput.json and EVENT.json not found in workdir.{sampNum}. Cannot calculate IMs.'
error_exit(msg)
first_eeuq_found = False
- if os.path.exists( 'IM.csv'):
- #print("IM.csv found")
+ if os.path.exists('IM.csv'): # noqa: PTH110
+ # print("IM.csv found")
tmp1 = pd.read_csv(('IM.csv'), index_col=None)
if tmp1.empty:
- #print("IM.csv in wordir.{} is empty.".format(cur_id))
+ # print("IM.csv in wordir.{} is empty.".format(cur_id))
return
- IMnames = list(map(str, tmp1))
- IMvals = tmp1.to_numpy()
+ IMnames = list(map(str, tmp1)) # noqa: N806
+ IMvals = tmp1.to_numpy() # noqa: N806
nrv2 = len(IMnames)
for i in range(nrv2):
name = IMnames[i]
- samples = np.atleast_2d(IMvals[:,i])
+ samples = np.atleast_2d(IMvals[:, i])
ns = len(samples)
try:
id_map = rv_name_sur.index(name)
except ValueError:
- msg = 'Error importing input data: variable "{}" not identified.'.format(name)
+ msg = f'Error importing input data: variable "{name}" not identified.'
error_exit(msg)
if not first_eeuq_found:
@@ -405,15 +443,18 @@ def get_stochastic_variance(X, Y, x, ny):
if ns != nsamp:
msg = 'Error importing input data: sample size in params.in is not consistent.'
error_exit(msg)
- # todo: fix for different nys m
-
-
- if len(id_vec+id_vec2) != nrv_sur:
- missing_ids = set([i for i in range(len(rv_name_sur))]) - set(id_vec + id_vec2)
- s = [str(rv_name_sur[id]) for id in missing_ids]
-
- if first_eeuq_found and all([missingEDP.endswith("-2") for missingEDP in s]):
- msg = "ground motion dimension does not match with that of the training"
+ # TODO: fix for different nys m # noqa: TD002
+
+ if len(id_vec + id_vec2) != nrv_sur:
+ missing_ids = set([i for i in range(len(rv_name_sur))]) - set( # noqa: C403, C416
+ id_vec + id_vec2
+ )
+ s = [str(rv_name_sur[id]) for id in missing_ids] # noqa: A001
+
+ if first_eeuq_found and all(
+ [missingEDP.endswith('-2') for missingEDP in s] # noqa: C419
+ ):
+            msg = 'ground motion dimension does not match that of the training data'
# for i in range(len(s)):
# name = s[i]
# samples = np.zeros((1,nsamp))
@@ -429,7 +470,7 @@ def get_stochastic_variance(X, Y, x, ny):
if first_eeuq_found:
if first_rv_found:
rv_tmp = np.hstack([rv_tmp, rv_tmp2])
- id_vec = id_vec + id_vec2
+ id_vec = id_vec + id_vec2 # noqa: PLR6104
else:
rv_tmp = np.hstack([rv_tmp2])
id_vec = id_vec2
@@ -437,21 +478,22 @@ def get_stochastic_variance(X, Y, x, ny):
nrv = len(id_vec)
if nrv != nrv_sur:
# missing_ids = set([i for i in range(len(rv_name_sur))]) - set(id_vec)
- missing_ids = set([i for i in range(len(rv_name_sur))]).difference(set(id_vec))
- #print(missing_ids)
- s = [str(rv_name_sur[id]) for id in missing_ids]
+ missing_ids = set([i for i in range(len(rv_name_sur))]).difference( # noqa: C403, C416
+ set(id_vec)
+ )
+ # print(missing_ids)
+ s = [str(rv_name_sur[id]) for id in missing_ids] # noqa: A001
msg = 'Error in Surrogate prediction: Number of dimension inconsistent: Please define '
- msg += ", ".join(s)
- msg += " at RV tab"
+ msg += ', '.join(s)
+ msg += ' at RV tab'
error_exit(msg)
- #if os.path.getsize('../surrogateLog.log') == 0:
+ # if os.path.getsize('../surrogateLog.log') == 0:
# file_object.write("numRV "+ str(nrv+len(rv_name_dummy)) +"\n")
- rv_val = np.zeros((nsamp,nrv))
+ rv_val = np.zeros((nsamp, nrv))
for i in range(nrv):
- rv_val[:,id_vec[i]] = rv_tmp[:,i]
-
+ rv_val[:, id_vec[i]] = rv_tmp[:, i]
if kernel == 'Radial Basis':
kr = GPy.kern.RBF(input_dim=nrv_sur, ARD=True)
@@ -463,51 +505,73 @@ def get_stochastic_variance(X, Y, x, ny):
kr = GPy.kern.Matern52(input_dim=nrv_sur, ARD=True)
if sur['doLinear']:
- kr = kr + GPy.kern.Linear(input_dim=nrv_sur, ARD=True)
+ kr = kr + GPy.kern.Linear(input_dim=nrv_sur, ARD=True) # noqa: PLR6104
if did_logtransform:
- Y = np.log(Y)
+ Y = np.log(Y) # noqa: N806
kg = kr
- m_list = list()
+ m_list = list() # noqa: C408
nugget_var_list = [0] * ng_sur
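+    # one GP model per output quantity; nugget_var_list stores each model's noise
+    # variance rescaled by the output variance (Y_normFact)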
if not did_mf:
-
for ny in range(ng_sur):
-
if did_stochastic[ny]:
-
- m_list = m_list + [GPy.models.GPRegression(X, Y[:, ny][np.newaxis].transpose(), kernel=kg.copy(),
- normalizer=did_normalization)]
- X_unique, Y_mean, norm_var_str, counts, nugget_var_pred, Y_normFact = get_stochastic_variance(X,
- Y[:, ny][
- np.newaxis].T,
- rv_val,
- ny)
- Y_metadata = {'variance_structure': norm_var_str / counts}
+ m_list = m_list + [ # noqa: PLR6104, RUF005
+ GPy.models.GPRegression(
+ X,
+ Y[:, ny][np.newaxis].transpose(),
+ kernel=kg.copy(),
+ normalizer=did_normalization,
+ )
+ ]
+ (
+ X_unique, # noqa: N806
+ Y_mean, # noqa: N806
+ norm_var_str,
+ counts,
+ nugget_var_pred,
+ Y_normFact, # noqa: N806
+ ) = get_stochastic_variance(X, Y[:, ny][np.newaxis].T, rv_val, ny)
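+            # per-point noise scaling: the patched gaussian_variance() multiplies the
+            # learned noise variance by this structure (predicted replicate variance
+            # divided by the replicate count)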
+ Y_metadata = {'variance_structure': norm_var_str / counts} # noqa: N806
m_list[ny].set_XY2(X_unique, Y_mean, Y_metadata=Y_metadata)
- for key, val in sur["modelInfo"][g_name_sur[ny]].items():
- exec('m_list[ny].' + key + '= np.array(val)')
+ for key, val in sur['modelInfo'][g_name_sur[ny]].items(): # noqa: B007, PERF102
+ exec('m_list[ny].' + key + '= np.array(val)') # noqa: S102
- nugget_var_list[ny] = m_list[ny].Gaussian_noise.parameters * nugget_var_pred * Y_normFact
+ nugget_var_list[ny] = (
+ m_list[ny].Gaussian_noise.parameters
+ * nugget_var_pred
+ * Y_normFact
+ )
else:
- m_list = m_list + [
- GPy.models.GPRegression(X, Y[:, ny][np.newaxis].transpose(), kernel=kg.copy(), normalizer=True)]
- for key, val in sur["modelInfo"][g_name_sur[ny]].items():
- exec('m_list[ny].' + key + '= np.array(val)')
-
- Y_normFact = np.var(Y[:, ny])
- nugget_var_list[ny] = np.squeeze(np.array(m_list[ny].Gaussian_noise.parameters) * np.array(Y_normFact))
+ m_list = m_list + [ # noqa: PLR6104, RUF005
+ GPy.models.GPRegression(
+ X,
+ Y[:, ny][np.newaxis].transpose(),
+ kernel=kg.copy(),
+ normalizer=True,
+ )
+ ]
+ for key, val in sur['modelInfo'][g_name_sur[ny]].items(): # noqa: B007, PERF102
+ exec('m_list[ny].' + key + '= np.array(val)') # noqa: S102
+
+ Y_normFact = np.var(Y[:, ny]) # noqa: N806
+ nugget_var_list[ny] = np.squeeze(
+ np.array(m_list[ny].Gaussian_noise.parameters)
+ * np.array(Y_normFact)
+ )
else:
- with open(surrogate_dir, "rb") as file:
- m_list = pickle.load(file)
+ with open(surrogate_dir, 'rb') as file: # noqa: PTH123
+ m_list = pickle.load(file) # noqa: S301
for ny in range(ng_sur):
- Y_normFact = np.var(Y[:, ny])
- nugget_var_list[ny] = m_list[ny].gpy_model["mixed_noise.Gaussian_noise.variance"] * Y_normFact
+ Y_normFact = np.var(Y[:, ny]) # noqa: N806
+ nugget_var_list[ny] = (
+ m_list[ny].gpy_model['mixed_noise.Gaussian_noise.variance']
+ * Y_normFact
+ )
# if did_stochastic:
#
@@ -545,19 +609,16 @@ def get_stochastic_variance(X, Y, x, ny):
# Y_normFact = np.var(Y[:, ny])
# nugget_var_list[ny] = m_list[ny].gpy_model["mixed_noise.Gaussian_noise.variance"]* Y_normFact
-
# to read:::
# kern_name='Mat52'
- #did_logtransform=True
+ # did_logtransform=True
# at ui
-
# f = open(work_dir + '/templatedir/dakota.json')
# inp = json.load(f)
# f.close()
-
# try:
# f = open(surrogate_dir, 'rb')
# except OSError:
@@ -571,76 +632,110 @@ def get_stochastic_variance(X, Y, x, ny):
# with f:
# m_list = pickle.load(f)
-
-
# read param in file and sort input
y_dim = len(m_list)
y_pred_median = np.zeros([nsamp, y_dim])
- y_pred_var_tmp=np.zeros([nsamp, y_dim]) # might be log space
- y_pred_var_m_tmp=np.zeros([nsamp, y_dim]) # might be log space
+ y_pred_var_tmp = np.zeros([nsamp, y_dim]) # might be log space
+ y_pred_var_m_tmp = np.zeros([nsamp, y_dim]) # might be log space
- y_pred_var=np.zeros([nsamp, y_dim])
- y_pred_var_m=np.zeros([nsamp, y_dim])
+ y_pred_var = np.zeros([nsamp, y_dim])
+ y_pred_var_m = np.zeros([nsamp, y_dim])
- y_data_var=np.zeros([nsamp, y_dim])
+ y_data_var = np.zeros([nsamp, y_dim])
y_samp = np.zeros([nsamp, y_dim])
y_q1 = np.zeros([nsamp, y_dim])
y_q3 = np.zeros([nsamp, y_dim])
y_q1m = np.zeros([nsamp, y_dim])
y_q3m = np.zeros([nsamp, y_dim])
for ny in range(y_dim):
- y_data_var[:,ny] = np.var(m_list[ny].Y)
+ y_data_var[:, ny] = np.var(m_list[ny].Y)
if ny in constIdx:
- y_pred_median_tmp, y_pred_var_tmp[ny], y_pred_var_m_tmp[ny] = np.ones([nsamp])*constVal[constIdx.index(ny)], np.zeros([nsamp]), np.zeros([nsamp])
+ y_pred_median_tmp, y_pred_var_tmp[ny], y_pred_var_m_tmp[ny] = (
+ np.ones([nsamp]) * constVal[constIdx.index(ny)],
+ np.zeros([nsamp]),
+ np.zeros([nsamp]),
+ )
else:
- y_pred_median_tmp, y_pred_var_tmp_tmp = predict(m_list[ny], rv_val, did_mf) ## noiseless
+ y_pred_median_tmp, y_pred_var_tmp_tmp = predict(
+ m_list[ny], rv_val, did_mf
+ ) # noiseless
y_pred_median_tmp = np.squeeze(y_pred_median_tmp)
y_pred_var_tmp_tmp = np.squeeze(y_pred_var_tmp_tmp)
y_pred_var_tmp[:, ny] = y_pred_var_tmp_tmp
- y_pred_var_m_tmp[:, ny] = y_pred_var_tmp_tmp + np.squeeze(nugget_var_list[ny])
- y_samp_tmp = np.random.normal(y_pred_median_tmp, np.sqrt(y_pred_var_m_tmp[:, ny]))
+ y_pred_var_m_tmp[:, ny] = y_pred_var_tmp_tmp + np.squeeze(
+ nugget_var_list[ny]
+ )
+ y_samp_tmp = np.random.normal(
+ y_pred_median_tmp, np.sqrt(y_pred_var_m_tmp[:, ny])
+ )
if did_logtransform:
- y_pred_median[:,ny]= np.exp(y_pred_median_tmp)
- y_pred_var[:,ny] = np.exp(2 * y_pred_median_tmp + y_pred_var_tmp[:, ny] ) * (np.exp(y_pred_var_tmp[:, ny]) - 1)
- y_pred_var_m[:,ny] = np.exp(2 * y_pred_median_tmp + y_pred_var_m_tmp[:, ny] ) * (np.exp(y_pred_var_m_tmp[:, ny]) - 1)
-
- y_samp[:,ny] = np.exp(y_samp_tmp)
-
- y_q1[:,ny] = lognorm.ppf(0.05, s=np.sqrt(y_pred_var_tmp[:, ny] ), scale=np.exp(y_pred_median_tmp))
- y_q3[:,ny]= lognorm.ppf(0.95, s=np.sqrt(y_pred_var_tmp[:, ny] ), scale=np.exp(y_pred_median_tmp))
- y_q1m[:,ny] = lognorm.ppf(0.05, s=np.sqrt(y_pred_var_m_tmp[:, ny] ), scale=np.exp(y_pred_median_tmp))
- y_q3m[:,ny]= lognorm.ppf(0.95, s=np.sqrt(y_pred_var_m_tmp[:, ny] ), scale=np.exp(y_pred_median_tmp))
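+                # the GP was trained on log(Y): back-transform with the lognormal
+                # relations median = exp(mu) and var = exp(2*mu + s^2) * (exp(s^2) - 1)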
+ y_pred_median[:, ny] = np.exp(y_pred_median_tmp)
+ y_pred_var[:, ny] = np.exp(
+ 2 * y_pred_median_tmp + y_pred_var_tmp[:, ny]
+ ) * (np.exp(y_pred_var_tmp[:, ny]) - 1)
+ y_pred_var_m[:, ny] = np.exp(
+ 2 * y_pred_median_tmp + y_pred_var_m_tmp[:, ny]
+ ) * (np.exp(y_pred_var_m_tmp[:, ny]) - 1)
+
+ y_samp[:, ny] = np.exp(y_samp_tmp)
+
+ y_q1[:, ny] = lognorm.ppf(
+ 0.05,
+ s=np.sqrt(y_pred_var_tmp[:, ny]),
+ scale=np.exp(y_pred_median_tmp),
+ )
+ y_q3[:, ny] = lognorm.ppf(
+ 0.95,
+ s=np.sqrt(y_pred_var_tmp[:, ny]),
+ scale=np.exp(y_pred_median_tmp),
+ )
+ y_q1m[:, ny] = lognorm.ppf(
+ 0.05,
+ s=np.sqrt(y_pred_var_m_tmp[:, ny]),
+ scale=np.exp(y_pred_median_tmp),
+ )
+ y_q3m[:, ny] = lognorm.ppf(
+ 0.95,
+ s=np.sqrt(y_pred_var_m_tmp[:, ny]),
+ scale=np.exp(y_pred_median_tmp),
+ )
else:
- y_pred_median[:,ny]=y_pred_median_tmp
- y_pred_var[:,ny]= y_pred_var_tmp[:, ny]
- y_pred_var_m[:,ny]= y_pred_var_m_tmp[:, ny]
- y_samp[:,ny] = y_samp_tmp
- y_q1[:,ny] = norm.ppf(0.05, loc=y_pred_median_tmp, scale=np.sqrt(y_pred_var_tmp[:, ny] ))
- y_q3[:,ny] = norm.ppf(0.95, loc=y_pred_median_tmp, scale=np.sqrt(y_pred_var_tmp[:, ny] ))
- y_q1m[:,ny] = norm.ppf(0.05, loc=y_pred_median_tmp, scale=np.sqrt(y_pred_var_m_tmp[:, ny] ))
- y_q3m[:,ny] = norm.ppf(0.95, loc=y_pred_median_tmp, scale=np.sqrt(y_pred_var_m_tmp[:, ny] ))
-
- if np.isnan(y_samp[:,ny]).any():
- y_samp[:,ny] = np.nan_to_num(y_samp[:,ny])
- if np.isnan(y_pred_var[:,ny]).any():
- y_pred_var[:,ny] = np.nan_to_num(y_pred_var[:,ny])
- if np.isnan(y_pred_var_m[:,ny]).any():
- y_pred_m_var[:,ny] = np.nan_to_num(y_pred_m_var[:,ny])
-
-
-
- #for parname in m_list[ny].parameter_names():
+ y_pred_median[:, ny] = y_pred_median_tmp
+ y_pred_var[:, ny] = y_pred_var_tmp[:, ny]
+ y_pred_var_m[:, ny] = y_pred_var_m_tmp[:, ny]
+ y_samp[:, ny] = y_samp_tmp
+ y_q1[:, ny] = norm.ppf(
+ 0.05, loc=y_pred_median_tmp, scale=np.sqrt(y_pred_var_tmp[:, ny])
+ )
+ y_q3[:, ny] = norm.ppf(
+ 0.95, loc=y_pred_median_tmp, scale=np.sqrt(y_pred_var_tmp[:, ny])
+ )
+ y_q1m[:, ny] = norm.ppf(
+ 0.05, loc=y_pred_median_tmp, scale=np.sqrt(y_pred_var_m_tmp[:, ny])
+ )
+ y_q3m[:, ny] = norm.ppf(
+ 0.95, loc=y_pred_median_tmp, scale=np.sqrt(y_pred_var_m_tmp[:, ny])
+ )
+
+ if np.isnan(y_samp[:, ny]).any():
+ y_samp[:, ny] = np.nan_to_num(y_samp[:, ny])
+ if np.isnan(y_pred_var[:, ny]).any():
+ y_pred_var[:, ny] = np.nan_to_num(y_pred_var[:, ny])
+ if np.isnan(y_pred_var_m[:, ny]).any():
+ y_pred_m_var[:, ny] = np.nan_to_num(y_pred_m_var[:, ny]) # noqa: F821
+
+ # for parname in m_list[ny].parameter_names():
# if (kern_name in parname) and parname.endswith('variance'):
# exec('y_pred_prior_var[ny]=m_list[ny].' + parname)
- #error_ratio1 = y_pred_var.T / y_pred_prior_var
+ # error_ratio1 = y_pred_var.T / y_pred_prior_var
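+    # error_ratio2: predictive variance (including the nugget) relative to the
+    # training-output variance; samples exceeding norm_var_thr are flagged inaccurate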
error_ratio2 = y_pred_var_m_tmp / y_data_var
- idx = np.argmax(error_ratio2,axis=1) + 1
+ idx = np.argmax(error_ratio2, axis=1) + 1
- '''
+ """
if np.max(error_ratio1) > norm_var_thr:
is_accurate = False
@@ -648,89 +743,103 @@ def get_stochastic_variance(X, Y, x, ny):
msg = 'Prediction error of output {} is {:.2f}%, which is greater than threshold={:.2f}% '.format(idx, np.max(
error_ratio1)*100, norm_var_thr*100)
- '''
+ """
- is_accurate_array = (np.max(error_ratio2,axis=1) < norm_var_thr)
+ is_accurate_array = np.max(error_ratio2, axis=1) < norm_var_thr
y_pred_subset = np.zeros([nsamp, len(g_idx)])
msg1 = []
- for ns in range(nsamp):
-
- msg0 = folderName.split(".")[0] + "." + str(int(sampNum)+ns) + " : "
+ for ns in range(nsamp): # noqa: PLR1702
+ msg0 = folderName.split('.')[0] + '.' + str(int(sampNum) + ns) + ' : '
if not is_accurate_array[ns]:
- msg1 += ['Prediction error level of output {} is {:.2f}%, which is greater than threshold={:.2f}% '.format(idx[ns], np.max(
- error_ratio2[ns]) * 100, norm_var_thr * 100)]
+ msg1 += [
+ f'Prediction error level of output {idx[ns]} is {np.max(error_ratio2[ns]) * 100:.2f}%, which is greater than threshold={norm_var_thr * 100:.2f}% '
+ ]
else:
msg1 += ['']
if not is_accurate_array[ns]:
-
if when_inaccurate == 'doSimulation':
-
#
# (1) create "workdir.idx " folder :need C++17 to use the files system namespace
#
- templatedirFolder = os.path.join(os.getcwd(), 'templatedir_SIM')
-
- if isEEUQ and nsamp==1: # because stochastic ground motion generation uses folder number when generating random seed.............
- current_dir_i = os.path.join(os.getcwd(), 'subworkdir.{}'.format(sampNum))
+ templatedirFolder = os.path.join(os.getcwd(), 'templatedir_SIM') # noqa: PTH109, PTH118, N806
+
+ if (
+ isEEUQ and nsamp == 1
+                    ):  # because stochastic ground motion generation uses the folder number when generating the random seed
+ current_dir_i = os.path.join( # noqa: PTH118
+ os.getcwd(), # noqa: PTH109
+ f'subworkdir.{sampNum}',
+ )
else:
- current_dir_i = os.path.join(os.getcwd(), 'subworkdir.{}'.format(1 + ns))
+ current_dir_i = os.path.join(os.getcwd(), f'subworkdir.{1 + ns}') # noqa: PTH109, PTH118
try:
shutil.copytree(templatedirFolder, current_dir_i)
- except Exception as ex:
+ except Exception: # noqa: BLE001
try:
shutil.copytree(templatedirFolder, current_dir_i)
- except Exception as ex:
- msg = "Error running FEM: " + str(ex)
+ except Exception as ex: # noqa: BLE001
+ msg = 'Error running FEM: ' + str(ex)
# change directory, create params.in
if isEEUQ:
- shutil.copyfile(os.path.join(os.getcwd(), 'params.in'), os.path.join(current_dir_i, 'params.in'))
- shutil.copyfile(os.path.join(os.getcwd(), 'EVENT.json.sc'), os.path.join(current_dir_i, 'EVENT.json.sc'))
+ shutil.copyfile(
+ os.path.join(os.getcwd(), 'params.in'), # noqa: PTH109, PTH118
+ os.path.join(current_dir_i, 'params.in'), # noqa: PTH118
+ )
+ shutil.copyfile(
+ os.path.join(os.getcwd(), 'EVENT.json.sc'), # noqa: PTH109, PTH118
+ os.path.join(current_dir_i, 'EVENT.json.sc'), # noqa: PTH118
+ )
#
# Replace parts of AIM
#
- with open(os.path.join(current_dir_i, 'AIM.json.sc'),'r') as f:
+ with open(os.path.join(current_dir_i, 'AIM.json.sc')) as f: # noqa: PLW1514, PTH118, PTH123
try:
- AIMsc = json.load(f)
+ AIMsc = json.load(f) # noqa: N806
except ValueError:
                         msg = 'invalid AIM in template. Simulation of original model cannot be performed'
error_exit(msg)
- AIMsc["Events"] = inp_tmp["Events"]
- AIMsc["Applications"]["Events"] = inp_tmp["Applications"]["Events"]
- with open(os.path.join(current_dir_i, 'AIM.json.sc'), 'w') as f:
- json.dump(AIMsc,f, indent=2)
+ AIMsc['Events'] = inp_tmp['Events']
+ AIMsc['Applications']['Events'] = inp_tmp['Applications'][
+ 'Events'
+ ]
+ with open(os.path.join(current_dir_i, 'AIM.json.sc'), 'w') as f: # noqa: PLW1514, PTH118, PTH123
+ json.dump(AIMsc, f, indent=2)
#
# Copy PEER RECORDS
#
for fname in os.listdir(current_dir_i):
- if fname.startswith("PEER-Record-"):
- os.remove(os.path.join(current_dir_i, fname))
- if fname.startswith("RSN") and fname.endswith("AT2"):
- os.remove(os.path.join(current_dir_i, fname))
+ if fname.startswith('PEER-Record-'):
+ os.remove(os.path.join(current_dir_i, fname)) # noqa: PTH107, PTH118
+ if fname.startswith('RSN') and fname.endswith('AT2'):
+ os.remove(os.path.join(current_dir_i, fname)) # noqa: PTH107, PTH118
+
+ for fname in os.listdir(os.getcwd()): # noqa: PTH109
+ if fname.startswith('PEER-Record-'):
+ shutil.copyfile(
+ os.path.join(os.getcwd(), fname), # noqa: PTH109, PTH118
+ os.path.join(current_dir_i, fname), # noqa: PTH118
+ )
- for fname in os.listdir(os.getcwd()):
- if fname.startswith("PEER-Record-"):
- shutil.copyfile(os.path.join(os.getcwd(), fname), os.path.join(current_dir_i, fname))
-
- #
- # Replace parts of drive
- #
+ #
+ # Replace parts of drive
+ #
- if os_type.startswith("win"):
+ if os_type.startswith('win'):
driver_name = 'driver.bat'
else:
driver_name = 'driver'
- with open(os.path.join(os.getcwd(), driver_name), 'r') as f:
+ with open(os.path.join(os.getcwd(), driver_name)) as f: # noqa: PLW1514, PTH109, PTH118, PTH123
event_driver = f.readline()
- with open(os.path.join(current_dir_i, driver_name), 'r+') as f:
+ with open(os.path.join(current_dir_i, driver_name), 'r+') as f: # noqa: PLW1514, PTH118, PTH123
# Read the original contents of the file
contents = f.readlines()
# Modify the first line
@@ -742,10 +851,10 @@ def get_stochastic_variance(X, Y, x, ny):
f.writelines(contents)
else:
- outF = open(current_dir_i + "/params.in", "w")
- outF.write("{}\n".format(nrv))
+ outF = open(current_dir_i + '/params.in', 'w') # noqa: N806, PLW1514, PTH123, SIM115
+ outF.write(f'{nrv}\n')
for i in range(nrv):
- outF.write("{} {}\n".format(rv_name_sur[i], rv_val[ns, i]))
+ outF.write(f'{rv_name_sur[i]} {rv_val[ns, i]}\n')
outF.close()
os.chdir(current_dir_i)
@@ -753,63 +862,75 @@ def get_stochastic_variance(X, Y, x, ny):
# run workflowDriver
if isEEUQ:
- if os_type.lower().startswith('win') and run_type.lower() == 'runninglocal':
- workflowDriver = "sc_driver.bat"
+ if (
+ os_type.lower().startswith('win')
+ and run_type.lower() == 'runninglocal'
+ ):
+ workflowDriver = 'sc_driver.bat' # noqa: N806
else:
- workflowDriver = "sc_driver"
+ workflowDriver = 'sc_driver' # noqa: N806
+ elif (
+ os_type.lower().startswith('win')
+ and run_type.lower() == 'runninglocal'
+ ):
+ workflowDriver = 'driver.bat' # noqa: N806
else:
- if os_type.lower().startswith('win') and run_type.lower() == 'runninglocal':
- workflowDriver = "driver.bat"
- else:
- workflowDriver = "driver"
+ workflowDriver = 'driver' # noqa: N806
- workflow_run_command = '{}/{}'.format(current_dir_i, workflowDriver)
- subprocess.Popen(workflow_run_command, shell=True).wait()
+ workflow_run_command = f'{current_dir_i}/{workflowDriver}'
+ subprocess.Popen(workflow_run_command, shell=True).wait() # noqa: S602
# back to directory, copy result.out
- #shutil.copyfile(os.path.join(sim_dir, 'results.out'), os.path.join(os.getcwd(), 'results.out'))
+ # shutil.copyfile(os.path.join(sim_dir, 'results.out'), os.path.join(os.getcwd(), 'results.out'))
- with open('results.out', 'r') as f:
+ with open('results.out') as f: # noqa: PLW1514, PTH123
y_pred = np.array([np.loadtxt(f)]).flatten()
- y_pred_subset[ns,:] = y_pred[g_idx]
+ y_pred_subset[ns, :] = y_pred[g_idx]
- os.chdir("../")
+ os.chdir('../')
- msg2 = msg0+msg1[ns]+'- RUN original model\n'
+ msg2 = msg0 + msg1[ns] + '- RUN original model\n'
error_warning(msg2)
- #exit(-1)
-
+ # exit(-1)
+
elif when_inaccurate == 'giveError':
- msg2 = msg0+msg1[ns]+'- EXIT\n'
+ msg2 = msg0 + msg1[ns] + '- EXIT\n'
error_exit(msg2)
elif when_inaccurate == 'continue':
- msg2 = msg0+msg1[ns]+'- CONTINUE [Warning: results may not be accurate]\n'
+ msg2 = (
+ msg0
+ + msg1[ns]
+ + '- CONTINUE [Warning: results may not be accurate]\n'
+ )
error_warning(msg2)
- if prediction_option.lower().startswith("median"):
- y_pred_subset[ns,:] = y_pred_median[ns,g_idx]
- elif prediction_option.lower().startswith("rand"):
- y_pred_subset[ns,:] = y_samp[ns,g_idx]
+ if prediction_option.lower().startswith('median'):
+ y_pred_subset[ns, :] = y_pred_median[ns, g_idx]
+ elif prediction_option.lower().startswith('rand'):
+ y_pred_subset[ns, :] = y_samp[ns, g_idx]
else:
- msg3 = msg0+'Prediction error level of output {} is {:.2f}%\n'.format(idx[ns], np.max(error_ratio2[ns])*100)
+ msg3 = (
+ msg0
+ + f'Prediction error level of output {idx[ns]} is {np.max(error_ratio2[ns]) * 100:.2f}%\n'
+ )
error_warning(msg3)
- if prediction_option.lower().startswith("median"):
- y_pred_subset[ns,:] = y_pred_median[ns,g_idx]
- elif prediction_option.lower().startswith("rand"):
- y_pred_subset[ns,:] = y_samp[ns,g_idx]
+ if prediction_option.lower().startswith('median'):
+ y_pred_subset[ns, :] = y_pred_median[ns, g_idx]
+ elif prediction_option.lower().startswith('rand'):
+ y_pred_subset[ns, :] = y_samp[ns, g_idx]
np.savetxt(result_file, y_pred_subset, fmt='%.5e')
- y_pred_median_subset=y_pred_median[:,g_idx]
- y_q1_subset=y_q1[:,g_idx]
- y_q3_subset=y_q3[:,g_idx]
- y_q1m_subset=y_q1m[:,g_idx]
- y_q3m_subset=y_q3m[:,g_idx]
- y_pred_var_subset=y_pred_var[:,g_idx]
- y_pred_var_m_subset=y_pred_var_m[:,g_idx]
+ y_pred_median_subset = y_pred_median[:, g_idx]
+ y_q1_subset = y_q1[:, g_idx]
+ y_q3_subset = y_q3[:, g_idx]
+ y_q1m_subset = y_q1m[:, g_idx]
+ y_q3m_subset = y_q3m[:, g_idx]
+ y_pred_var_subset = y_pred_var[:, g_idx]
+ y_pred_var_m_subset = y_pred_var_m[:, g_idx]
#
# tab file
@@ -819,66 +940,103 @@ def get_stochastic_variance(X, Y, x, ny):
# Add dummy RVs
#
if first_dummy_found:
- rv_name_sur = rv_name_sur + rv_name_dummy
- rv_val = np.hstack([rv_val, rv_val_dummy ])
-
- g_name_subset = [g_name_sur[i] for i in g_idx]
+ rv_name_sur = rv_name_sur + rv_name_dummy # noqa: PLR6104
+ rv_val = np.hstack([rv_val, rv_val_dummy])
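+        # dummy RVs are inputs the surrogate does not use; they are appended here so
+        # they still appear in the surrogateTab output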
+ g_name_subset = [g_name_sur[i] for i in g_idx]
- if int(sampNum)==1:
- with open('../surrogateTabHeader.out', 'w') as header_file:
+ if int(sampNum) == 1:
+ with open('../surrogateTabHeader.out', 'w') as header_file: # noqa: FURB103, PLW1514, PTH123
# write header
# if os.path.getsize('../surrogateTab.out') == 0:
- header_file.write("%eval_id interface " + " ".join(rv_name_sur) + " " + " ".join(
- g_name_subset) + " " + ".median ".join(g_name_subset) + ".median " + ".q5 ".join(
- g_name_subset) + ".q5 " + ".q95 ".join(g_name_subset) + ".q95 " + ".var ".join(
- g_name_subset) + ".var " + ".q5_w_mnoise ".join(
- g_name_subset) + ".q5_w_mnoise " + ".q95_w_mnoise ".join(
- g_name_subset) + ".q95_w_mnoise " + ".var_w_mnoise ".join(g_name_subset) + ".var_w_mnoise \n")
+ header_file.write(
+ '%eval_id interface '
+ + ' '.join(rv_name_sur)
+ + ' '
+ + ' '.join(g_name_subset)
+ + ' '
+ + '.median '.join(g_name_subset)
+ + '.median '
+ + '.q5 '.join(g_name_subset)
+ + '.q5 '
+ + '.q95 '.join(g_name_subset)
+ + '.q95 '
+ + '.var '.join(g_name_subset)
+ + '.var '
+ + '.q5_w_mnoise '.join(g_name_subset)
+ + '.q5_w_mnoise '
+ + '.q95_w_mnoise '.join(g_name_subset)
+ + '.q95_w_mnoise '
+ + '.var_w_mnoise '.join(g_name_subset)
+ + '.var_w_mnoise \n'
+ )
# write values
-
- with open('../surrogateTab.out', 'a') as tab_file:
+ with open('../surrogateTab.out', 'a') as tab_file: # noqa: PLW1514, PTH123
# write header
- #if os.path.getsize('../surrogateTab.out') == 0:
+ # if os.path.getsize('../surrogateTab.out') == 0:
# tab_file.write("%eval_id interface "+ " ".join(rv_name_sur) + " "+ " ".join(g_name_subset) + " " + ".median ".join(g_name_subset) + ".median "+ ".q5 ".join(g_name_subset) + ".q5 "+ ".q95 ".join(g_name_subset) + ".q95 " +".var ".join(g_name_subset) + ".var " + ".q5_w_mnoise ".join(g_name_subset) + ".q5_w_mnoise "+ ".q95_w_mnoise ".join(g_name_subset) + ".q95_w_mnoise " +".var_w_mnoise ".join(g_name_subset) + ".var_w_mnoise \n")
# write values
for ns in range(nsamp):
- rv_list = " ".join("{:e}".format(rv) for rv in rv_val[ns,:])
- ypred_list = " ".join("{:e}".format(yp) for yp in y_pred_subset[ns,:])
- ymedian_list = " ".join("{:e}".format(ym) for ym in y_pred_median_subset[ns,:])
- yQ1_list = " ".join("{:e}".format(yq1) for yq1 in y_q1_subset[ns,:])
- yQ3_list = " ".join("{:e}".format(yq3) for yq3 in y_q3_subset[ns,:])
- ypredvar_list=" ".join("{:e}".format(ypv) for ypv in y_pred_var_subset[ns,:])
- yQ1m_list = " ".join("{:e}".format(yq1) for yq1 in y_q1m_subset[ns,:])
- yQ3m_list = " ".join("{:e}".format(yq3) for yq3 in y_q3m_subset[ns,:])
- ypredvarm_list=" ".join("{:e}".format(ypv) for ypv in y_pred_var_m_subset[ns,:])
-
- tab_file.write(str(int(sampNum)+ns)+" NO_ID "+ rv_list + " "+ ypred_list + " " + ymedian_list+ " "+ yQ1_list + " "+ yQ3_list +" "+ ypredvar_list + " "+ yQ1m_list + " "+ yQ3m_list +" "+ ypredvarm_list + " \n")
+ rv_list = ' '.join(f'{rv:e}' for rv in rv_val[ns, :])
+ ypred_list = ' '.join(f'{yp:e}' for yp in y_pred_subset[ns, :])
+ ymedian_list = ' '.join(f'{ym:e}' for ym in y_pred_median_subset[ns, :])
+ yQ1_list = ' '.join(f'{yq1:e}' for yq1 in y_q1_subset[ns, :]) # noqa: N806
+ yQ3_list = ' '.join(f'{yq3:e}' for yq3 in y_q3_subset[ns, :]) # noqa: N806
+ ypredvar_list = ' '.join(f'{ypv:e}' for ypv in y_pred_var_subset[ns, :])
+ yQ1m_list = ' '.join(f'{yq1:e}' for yq1 in y_q1m_subset[ns, :]) # noqa: N806
+ yQ3m_list = ' '.join(f'{yq3:e}' for yq3 in y_q3m_subset[ns, :]) # noqa: N806
+ ypredvarm_list = ' '.join(
+ f'{ypv:e}' for ypv in y_pred_var_m_subset[ns, :]
+ )
+
+ tab_file.write(
+ str(int(sampNum) + ns)
+ + ' NO_ID '
+ + rv_list
+ + ' '
+ + ypred_list
+ + ' '
+ + ymedian_list
+ + ' '
+ + yQ1_list
+ + ' '
+ + yQ3_list
+ + ' '
+ + ypredvar_list
+ + ' '
+ + yQ1m_list
+ + ' '
+ + yQ3m_list
+ + ' '
+ + ypredvarm_list
+ + ' \n'
+ )
error_file.close()
file_object.close()
-def predict(m, X, did_mf):
+def predict(m, X, did_mf): # noqa: N803, D103
if not did_mf:
return m.predict_noiseless(X)
- else:
- #TODO change below to noiseless
- X_list = convert_x_list_to_array([X, X])
- X_list_l = X_list[:X.shape[0]]
- X_list_h = X_list[X.shape[0]:]
+ else: # noqa: RET505
+ # TODO change below to noiseless # noqa: TD002, TD004
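+        # multi-fidelity model: stack X for both fidelity levels and evaluate the
+        # prediction on the high-fidelity block only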
+ X_list = convert_x_list_to_array([X, X]) # noqa: N806
+ X_list_l = X_list[: X.shape[0]] # noqa: N806, F841
+ X_list_h = X_list[X.shape[0] :] # noqa: N806
return m.predict(X_list_h)
-if __name__ == "__main__":
- error_file = open('../surrogate.err', "w")
- inputArgs = sys.argv
+if __name__ == '__main__':
+ error_file = open('../surrogate.err', 'w') # noqa: PLW1514, PTH123, SIM115
+ inputArgs = sys.argv # noqa: N816
if not inputArgs[2].endswith('.json'):
msg = 'ERROR: surrogate information file (.json) not set'
- error_file.write(msg); exit(-1)
+ error_file.write(msg)
+ exit(-1) # noqa: PLR1722
# elif not inputArgs[3].endswith('.pkl'):
# msg = 'ERROR: surrogate model file (.pkl) not set'
@@ -888,12 +1046,12 @@ def predict(m, X, did_mf):
# msg = 'ERROR: put right number of argv'
# print(msg); error_file.write(msg); exit(-1)
- '''
+ """
params_dir = 'params.in'
surrogate_dir = 'C:/Users/yisan/Desktop/quoFEMexamples/surrogates/SimGpModel_2_better.pkl'
result_file = 'results_GP.out'
- '''
- '''
+ """
+ """
try:
opts, args = getopt.getopt(argv)
except getopt.GetoptError:
@@ -909,21 +1067,19 @@ def predict(m, X, did_mf):
inputfile = arg
elif opt in ("-o", "--ofile"):
outputfile = arg
- '''
-
-
+ """
params_dir = inputArgs[1]
surrogate_meta_dir = inputArgs[2]
- input_json = inputArgs[3] # scInput.json
-
+ input_json = inputArgs[3] # scInput.json
- if len(inputArgs) > 4:
+ if len(inputArgs) > 4: # noqa: PLR2004
surrogate_dir = inputArgs[4]
else:
- surrogate_dir = "dummy" # not used
-
- result_file = "results.out"
+ surrogate_dir = 'dummy' # not used
- sys.exit(main(params_dir,surrogate_dir,surrogate_meta_dir,result_file, input_json))
+ result_file = 'results.out'
+ sys.exit(
+ main(params_dir, surrogate_dir, surrogate_meta_dir, result_file, input_json)
+ )
diff --git a/modules/performHUA/INCORECensusUtil.py b/modules/performHUA/INCORECensusUtil.py
index bd126e16b..2c8092274 100644
--- a/modules/performHUA/INCORECensusUtil.py
+++ b/modules/performHUA/INCORECensusUtil.py
@@ -1,5 +1,4 @@
-# -*- coding: utf-8 -*-
-#
+# # noqa: INP001, D100
# Copyright (c) 2018 The Regents of the University of California
#
# This file is part of the SimCenter Backend Applications
@@ -37,107 +36,123 @@
# Dr. Stevan Gavrilovic, UC Berkeley
#
+import argparse
+import importlib
+import json
import os
+import subprocess # noqa: S404
import sys
-import importlib
-import subprocess
-import argparse, posixpath, json
-
+
if __name__ == '__main__':
-
- print('Pulling census data')
-
-
+ print('Pulling census data') # noqa: T201
+
# Get any missing dependencies
- packageInstalled = False
+ packageInstalled = False # noqa: N816
import requests
+
if not hasattr(requests, 'get'):
- print('Installing the requests package')
- subprocess.check_call([sys.executable, "-m", "pip", "install", 'requests'])
- packageInstalled = True
+ print('Installing the requests package') # noqa: T201
+ subprocess.check_call([sys.executable, '-m', 'pip', 'install', 'requests']) # noqa: S603
+ packageInstalled = True # noqa: N816
-
packages = ['geopandas']
for p in packages:
if importlib.util.find_spec(p) is None:
- subprocess.check_call([sys.executable, "-m", "pip", "install", p])
- packageInstalled = True
- print('Installing the ' +p+ ' package')
+ subprocess.check_call([sys.executable, '-m', 'pip', 'install', p]) # noqa: S603
+ packageInstalled = True # noqa: N816
+ print('Installing the ' + p + ' package') # noqa: T201
- if packageInstalled == True :
- print('New packages were installed. Please restart the process.')
+ if packageInstalled == True: # noqa: E712
+ print('New packages were installed. Please restart the process.') # noqa: T201
sys.exit(0)
parser = argparse.ArgumentParser()
parser.add_argument('--census_config')
args = parser.parse_args()
- with open(args.census_config) as f:
+ with open(args.census_config) as f: # noqa: PLW1514, PTH123
config_info = json.load(f)
# Output directory
output_dir = config_info['OutputDirectory']
-
+
try:
- os.mkdir(f"{output_dir}")
- except:
- print('Output folder already exists.')
-
-
+ os.mkdir(f'{output_dir}') # noqa: PTH102
+ except: # noqa: E722
+ print('Output folder already exists.') # noqa: T201
+
# State counties, e.g., ['01001', '01003']
state_counties = config_info['CountiesArray']
-
# Population demographics vintage, e.g., "2010"
- popDemoVintage = config_info['PopulationDemographicsVintage']
-
+ popDemoVintage = config_info['PopulationDemographicsVintage'] # noqa: N816
+
# Custom census vars
census_vars = config_info['CensusVariablesArray']
-
+
# Custom ACS vars
acs_vars = config_info['ACSVariablesArray']
-
- if popDemoVintage != '2000' and popDemoVintage != '2010' and popDemoVintage != '2020':
- print('Only 2000, 2010, and 2020 decennial census data supported. The provided vintage ',popDemoVintage,' is not supported')
-
+ if (
+ popDemoVintage != '2000' # noqa: PLR1714
+ and popDemoVintage != '2010'
+ and popDemoVintage != '2020'
+ ):
+ print( # noqa: T201
+ 'Only 2000, 2010, and 2020 decennial census data supported. The provided vintage ',
+ popDemoVintage,
+ ' is not supported',
+ )
+
sys.exit(-1)
-
- # Vintage for household demographics
- houseIncomeVintage = config_info['HouseholdIncomeVintage']
- if houseIncomeVintage != '2010' and houseIncomeVintage != '2015' and houseIncomeVintage != '2020':
+ # Vintage for household demographics
+ houseIncomeVintage = config_info['HouseholdIncomeVintage'] # noqa: N816
- print('Only 2010, 2015, and 2020 ACS 5-yr data supported. The provided vintage ',houseIncomeVintage,' is not supported')
+ if (
+ houseIncomeVintage != '2010' # noqa: PLR1714
+ and houseIncomeVintage != '2015'
+ and houseIncomeVintage != '2020'
+ ):
+ print( # noqa: T201
+ 'Only 2010, 2015, and 2020 ACS 5-yr data supported. The provided vintage ',
+ houseIncomeVintage,
+ ' is not supported',
+ )
sys.exit(-1)
from pyincore_data.censusutil import CensusUtil
# Get the population demographics at the block level
- CensusUtil.get_blockdata_for_demographics(state_counties,
- census_vars,
- popDemoVintage,out_csv=False,
- out_shapefile=True,
- out_geopackage=False,
- out_geojson=False,
- file_name="PopulationDemographicsCensus"+popDemoVintage,
- output_dir=output_dir)
-
- #sys.exit(0)
-
- print('Done pulling census population demographics data')
+ CensusUtil.get_blockdata_for_demographics(
+ state_counties,
+ census_vars,
+ popDemoVintage,
+ out_csv=False,
+ out_shapefile=True,
+ out_geopackage=False,
+ out_geojson=False,
+ file_name='PopulationDemographicsCensus' + popDemoVintage,
+ output_dir=output_dir,
+ )
+
+ # sys.exit(0)
+
+ print('Done pulling census population demographics data') # noqa: T201
# Get the household income at the tract (2010 ACS) or block group level (2015 and 2020 ACS)
- CensusUtil.get_blockgroupdata_for_income(state_counties,
- acs_vars,
- houseIncomeVintage,
- out_csv=False,
- out_shapefile=True,
- out_geopackage=False,
- out_geojson=False,
- file_name="HouseholdIncomeACS"+houseIncomeVintage,
- output_dir=output_dir)
-
- print('Done pulling ACS household income data')
-
+ CensusUtil.get_blockgroupdata_for_income(
+ state_counties,
+ acs_vars,
+ houseIncomeVintage,
+ out_csv=False,
+ out_shapefile=True,
+ out_geopackage=False,
+ out_geojson=False,
+ file_name='HouseholdIncomeACS' + houseIncomeVintage,
+ output_dir=output_dir,
+ )
+
+ print('Done pulling ACS household income data') # noqa: T201
+
sys.exit(0)
diff --git a/modules/performHUA/pyincore_data/censusutil.py b/modules/performHUA/pyincore_data/censusutil.py
index 860f93b7b..b34f8f1c9 100644
--- a/modules/performHUA/pyincore_data/censusutil.py
+++ b/modules/performHUA/pyincore_data/censusutil.py
@@ -1,4 +1,4 @@
-# Based on the IN-CORE censusutil method
+# Based on the IN-CORE censusutil method # noqa: INP001, D100
# Modified by Dr. Stevan Gavrilovic, UC Berkeley, SimCenter
@@ -8,109 +8,123 @@
# terms of the Mozilla Public License v2.0 which accompanies this distribution,
# and is available at https://www.mozilla.org/en-US/MPL/2.0/
-import json
-from math import isnan
-from pathlib import Path
-
-import requests
import os
-import pandas as pd
-import geopandas as gpd
import urllib.request
-import shutil
+from pathlib import Path
+from zipfile import ZipFile
+import geopandas as gpd
+import pandas as pd
+import requests
from pyincore_data import globals
-from zipfile import ZipFile
logger = globals.LOGGER
-class CensusUtil():
- """Utility methods for Census data and API"""
+class CensusUtil:
+ """Utility methods for Census data and API""" # noqa: D400
@staticmethod
- def generate_census_api_url(state:str=None, county:str=None, year:str=None, data_source:str=None, columns:str=None,
- geo_type:str = None, data_name:str = None):
+ def generate_census_api_url(
+ state: str = None, # noqa: RUF013
+ county: str = None, # noqa: RUF013
+ year: str = None, # noqa: RUF013
+ data_source: str = None, # noqa: RUF013
+ columns: str = None, # noqa: RUF013
+ geo_type: str = None, # noqa: RUF013
+ data_name: str = None, # noqa: RUF013
+ ):
"""Create url string to access census data api.
- Args:
- state (str): A string of state FIPS with comma separated format. e.g, '41, 42' or '*'
- county (str): A string of county FIPS with comma separated format. e.g, '017,029,045,091,101' or '*'
- year (str): Census Year.
- data_source (str): Census dataset name. Can be found from https://api.census.gov/data.html
- columns (str): Column names for request data with comma separated format.
- e.g, 'GEO_ID,NAME,P005001,P005003,P005004,P005010'
- geo_type (str): Name of geo area. e.g, 'tract:*' or 'block%20group:*'
- data_name (str): Optional for getting different dataset. e.g, 'component'
-
- Returns:
- string: A string for representing census api url
+ Args:
+ ----
+ state (str): A string of state FIPS with comma separated format. e.g, '41, 42' or '*'
+ county (str): A string of county FIPS with comma separated format. e.g, '017,029,045,091,101' or '*'
+ year (str): Census Year.
+ data_source (str): Census dataset name. Can be found from https://api.census.gov/data.html
+ columns (str): Column names for request data with comma separated format.
+ e.g, 'GEO_ID,NAME,P005001,P005003,P005004,P005010'
+ geo_type (str): Name of geo area. e.g, 'tract:*' or 'block%20group:*'
+ data_name (str): Optional for getting different dataset. e.g, 'component'
+
+ Returns
+ -------
+        string: A string representing the census API URL
"""
# check if the state is not none
if state is None:
- error_msg = "State value must be provided."
+ error_msg = 'State value must be provided.'
logger.error(error_msg)
- raise Exception(error_msg)
+ raise Exception(error_msg) # noqa: DOC501, TRY002
if geo_type is not None:
if county is None:
- error_msg = "State and county value must be provided when geo_type is provided."
+ error_msg = 'State and county value must be provided when geo_type is provided.'
logger.error(error_msg)
- raise Exception(error_msg)
+ raise Exception(error_msg) # noqa: DOC501, TRY002
# Set up url for Census API
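+        # illustrative example of the resulting URL (hypothetical values):
+        # https://api.census.gov/data/2010/dec/sf1?get=GEO_ID,NAME,P005001&for=block:*&in=state:01&in=county:001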
base_url = f'https://api.census.gov/data/{year}/{data_source}'
if data_name is not None:
- base_url = f'https://api.census.gov/data/{year}/{data_source}/{data_name}'
+ base_url = (
+ f'https://api.census.gov/data/{year}/{data_source}/{data_name}'
+ )
data_url = f'{base_url}?get={columns}'
if county is None: # only state is provided. There shouldn't be any geo_type
data_url = f'{data_url}&for=state:{state}'
- else: # county has been provided and there could be geo_type or not
- if geo_type is None:
- data_url = f'{data_url}&in=state:{state}&for=county:{county}'
- else:
- data_url = f'{data_url}&for={geo_type}&in=state:{state}&in=county:{county}'
+ elif geo_type is None:
+ data_url = f'{data_url}&in=state:{state}&for=county:{county}'
+ else:
+ data_url = (
+ f'{data_url}&for={geo_type}&in=state:{state}&in=county:{county}'
+ )
return data_url
-
@staticmethod
def request_census_api(data_url):
"""Request census data to api and gets the output data
- Args:
- data_url (str): url for obtaining the data from census api
- Returns:
- dict, object: A json list and a dataframe for census api result
+ Args:
+ ----
+ data_url (str): url for obtaining the data from census api
+ Returns:
+ dict, object: A json list and a dataframe for census api result
- """
+ """ # noqa: D400
# Obtain Census API JSON Data
- request_json = requests.get(data_url)
+ request_json = requests.get(data_url) # noqa: S113
- if request_json.status_code != 200:
- error_msg = "Failed to download the data from Census API. Please check your parameters."
+ if request_json.status_code != 200: # noqa: PLR2004
+ error_msg = 'Failed to download the data from Census API. Please check your parameters.'
# logger.error(error_msg)
- raise Exception(error_msg)
+ raise Exception(error_msg) # noqa: DOC501, TRY002
# Convert the requested json into pandas dataframe
api_json = request_json.json()
api_df = pd.DataFrame(columns=api_json[0], data=api_json[1:])
- return api_df
-
+ return api_df # noqa: RET504
@staticmethod
- def get_blockdata_for_demographics(state_counties: list, census_vars: list, vintage: str = "2010",
- out_csv: bool = False, out_shapefile: bool = False,
- out_geopackage: bool = False, out_geojson: bool = False,
- file_name: str = "file_name", output_dir: str = "output_dir"):
-
+ def get_blockdata_for_demographics( # noqa: C901
+ state_counties: list,
+ census_vars: list,
+ vintage: str = '2010',
+ out_csv: bool = False, # noqa: FBT001, FBT002
+ out_shapefile: bool = False, # noqa: FBT001, FBT002
+ out_geopackage: bool = False, # noqa: FBT001, FBT002
+ out_geojson: bool = False, # noqa: FBT001, FBT002
+ file_name: str = 'file_name',
+ output_dir: str = 'output_dir',
+ ):
"""Generate population demographics dataset from census
Args:
+ ----
state_counties (list): A List of concatenated State and County FIPS Codes.
see full list https://www.nrcs.usda.gov/wps/portal/nrcs/detail/national/home/?cid=nrcs143_013697
vintage (str): Census Year.
@@ -120,170 +134,168 @@ def get_blockdata_for_demographics(state_counties: list, census_vars: list, vint
out_geojson (bool): Save processed census geodataframe as geojson
file_name (str): Name of the output files.
output_dir (str): Name of directory used to save output files.
-
- """
-
+
+ """ # noqa: D400
# ***********************
# Get the population data
# ***********************
-
+
# dataset_name (str): Census dataset name.
dataset_name = 'dec'
-
-
+
get_pop_vars = 'GEO_ID,NAME'
int_vars = census_vars
-
- if vintage == '2000' or vintage == '2010' :
-
+
+ if vintage == '2000' or vintage == '2010': # noqa: PLR1714
dataset_name += '/sf1'
-
+
# If no variable parameters passed by the user, use the default for 2000 and 2010 vintage
if not census_vars:
get_pop_vars += ',P005001,P005003,P005004,P005010'
-
+
# GEO_ID = Geographic ID
# NAME = Geographic Area Name
# P005001 = Total
# P005003 = Total!!Not Hispanic or Latino!!White alone
# P005004 = Total!!Not Hispanic or Latino!!Black or African American alone
# P005010 = Total!!Hispanic or Latino
-
+
# List variables to convert from dtype object to integer
int_vars = ['P005001', 'P005003', 'P005004', 'P005010']
else:
# Add the variables provided by the user
for var in census_vars:
- get_pop_vars +=','+var
-
- elif vintage == '2020' :
-
+ get_pop_vars += ',' + var
+
+ elif vintage == '2020':
dataset_name += '/pl'
-
+
# Variable parameters
# If no variable parameters passed by the user, use the default for 2000 and 2010 vintage
if not census_vars:
get_pop_vars += ',P2_001N,P2_002N,P2_005N,P2_006N'
-
+
# GEO_ID = Geographic ID
# NAME = Geographic Area Name
# P2_001N=!!Total:
# P2_002N=!!Total:!!Hispanic or Latino
# P2_005N=!!Total:!!Not Hispanic or Latino:!!Population of one race:!!White alone
# P2_006N=!!Total:!!Not Hispanic or Latino:!!Population of one race:!!Black or African American alone
-
+
# List variables to convert from dtype object to integer
int_vars = ['P2_001N', 'P2_002N', 'P2_005N', 'P2_006N']
else:
# Add the variables provided by the user
for var in census_vars:
- get_pop_vars +=','+var
+ get_pop_vars += ',' + var
+
+ else:
+ print('Only 2000, 2010, and 2020 decennial census supported') # noqa: T201
+ return None
- else :
- print('Only 2000, 2010, and 2020 decennial census supported')
- return
-
# Make directory to save output
- if not os.path.exists(output_dir):
- os.mkdir(output_dir)
+ if not os.path.exists(output_dir): # noqa: PTH110
+ os.mkdir(output_dir) # noqa: PTH102
# Make a directory to save downloaded shapefiles
- shapefile_dir = Path(output_dir)/'shapefiletemp'
-
- if not os.path.exists(shapefile_dir):
- os.mkdir(shapefile_dir)
+ shapefile_dir = Path(output_dir) / 'shapefiletemp'
+
+ if not os.path.exists(shapefile_dir): # noqa: PTH110
+ os.mkdir(shapefile_dir) # noqa: PTH102
# Set to hold the states - needed for 2020 census shapefile download
- stateSet = set()
-
+ stateSet = set() # noqa: N806
+
# loop through counties
appended_countydata = [] # start an empty container for the county data
for state_county in state_counties:
# deconcatenate state and county values
state = state_county[0:2]
county = state_county[2:5]
- logger.debug('State: '+state)
- logger.debug('County: '+county)
-
+ logger.debug('State: ' + state) # noqa: G003
+ logger.debug('County: ' + county) # noqa: G003
+
# Add the state to the set
stateSet.add(state)
# Set up hyperlink for Census API
api_hyperlink = CensusUtil.generate_census_api_url(
- state, county, vintage, dataset_name, get_pop_vars, 'block:*')
+ state, county, vintage, dataset_name, get_pop_vars, 'block:*'
+ )
- logger.info("Census API data from: " + api_hyperlink)
+ logger.info('Census API data from: ' + api_hyperlink) # noqa: G003
# Obtain Census API JSON Data
apidf = CensusUtil.request_census_api(api_hyperlink)
# Append county data makes it possible to have multiple counties
appended_countydata.append(apidf)
-
+
# Create dataframe from appended county data
cen_block = pd.concat(appended_countydata, ignore_index=True)
# Add variable named "Survey" that identifies Census survey program and survey year
- cen_block['Survey'] = vintage+' '+dataset_name
+ cen_block['Survey'] = vintage + ' ' + dataset_name
# Set block group FIPS code by concatenating state, county, tract, block fips
- cen_block['blockid'] = (cen_block['state']+cen_block['county'] +
- cen_block['tract']+cen_block['block'])
-
+ cen_block['blockid'] = (
+ cen_block['state']
+ + cen_block['county']
+ + cen_block['tract']
+ + cen_block['block']
+ )
# To avoid problems with how the block group id is read saving it
# as a string will reduce possibility for future errors
- cen_block['blockidstr'] = cen_block['blockid'].apply(lambda x: "BLOCK"+str(x).zfill(15))
+ cen_block['blockidstr'] = cen_block['blockid'].apply(
+ lambda x: 'BLOCK' + str(x).zfill(15)
+ )
# Convert variables from dtype object to integer
for var in int_vars:
cen_block[var] = cen_block[var].astype(int)
- #cen_block[var] = pd.to_numeric(cen_block[var], errors='coerce').convert_dtypes()
- print(var+' converted from object to integer')
-
-
- if (vintage == '2000' or vintage == '2010') and not census_vars:
+ # cen_block[var] = pd.to_numeric(cen_block[var], errors='coerce').convert_dtypes()
+ print(var + ' converted from object to integer') # noqa: T201
+
+ if (vintage == '2000' or vintage == '2010') and not census_vars: # noqa: PLR1714
# Generate new variables
cen_block['pwhitebg'] = cen_block['P005003'] / cen_block['P005001'] * 100
cen_block['pblackbg'] = cen_block['P005004'] / cen_block['P005001'] * 100
cen_block['phispbg'] = cen_block['P005010'] / cen_block['P005001'] * 100
-
+
# GEO_ID = Geographic ID
# NAME = Geographic Area Name
# P005001 = Total
# P005003 = Total!!Not Hispanic or Latino!!White alone
# P005004 = Total!!Not Hispanic or Latino!!Black or African American alone
# P005010 = Total!!Hispanic or Latino
-
+
elif vintage == '2020' and not census_vars:
-
cen_block['pwhitebg'] = cen_block['P2_005N'] / cen_block['P2_001N'] * 100
cen_block['pblackbg'] = cen_block['P2_006N'] / cen_block['P2_001N'] * 100
cen_block['phispbg'] = cen_block['P2_002N'] / cen_block['P2_001N'] * 100
-
+
# GEO_ID = Geographic ID
# NAME = Geographic Area Name
# P2_001N=!!Total:
# P2_002N=!!Total:!!Hispanic or Latino
# P2_005N=!!Total:!!Not Hispanic or Latino:!!Population of one race:!!White alone
# P2_006N=!!Total:!!Not Hispanic or Latino:!!Population of one race:!!Black or African American alone
-
-
+
# *******************************
# Download and extract shapefiles
# *******************************
-
-
+
# Download the shapefile information for the block groups in the select counties.
#
# These files can be found online at:
-
+
# For 2010 Census
# https://www2.census.gov/geo/tiger/TIGER2010/TABBLOCK/2010/
-
+
# For 2020 Census
# https://www2.census.gov/geo/tiger/TIGER2020/TABBLOCK20/
-
+
# Block group shapefiles are downloaded for each of the selected counties from
# the Census TIGER/Line Shapefiles at https://www2.census.gov/geo/tiger.
# Each counties file is downloaded as a zipfile and the contents are extracted.
@@ -293,110 +305,131 @@ def get_blockdata_for_demographics(state_counties: list, census_vars: list, vint
# *EPSG: 4326 uses a coordinate system (Lat, Lon)
# This coordinate system is required for mapping with folium.
-
appended_shp_files = [] # start an empty container for the county shapefiles
-
- merge_id = 'GEOID'+vintage[2:4]
+
+ merge_id = 'GEOID' + vintage[2:4]
# Tigerline provides the blocks for each county, thus each county needs to be downloaded individually
- if vintage == '2000' or vintage == '2010' :
-
- if vintage == '2000' :
+ if vintage == '2000' or vintage == '2010': # noqa: PLR1714
+ if vintage == '2000':
merge_id = 'BLKIDFP00'
-
+
# loop through counties
for state_county in state_counties:
-
# county_fips = state+county
- filename = f'tl_2010_{state_county}_tabblock'+vintage[2:4]
-
+ filename = f'tl_2010_{state_county}_tabblock' + vintage[2:4]
+
# Use wget to download the TIGER Shapefile for a county
# options -quiet = turn off wget output
# add directory prefix to save files to folder named after program name
- shapefile_url = f'https://www2.census.gov/geo/tiger/TIGER2010/TABBLOCK/{vintage}/' + filename + '.zip'
- print(('Downloading Census Block Shapefiles for State_County: '
- + state_county + ' from: '+shapefile_url).format(filename=filename))
-
- zip_file = os.path.join(shapefile_dir, filename + '.zip')
- urllib.request.urlretrieve(shapefile_url, zip_file)
-
+ shapefile_url = (
+ f'https://www2.census.gov/geo/tiger/TIGER2010/TABBLOCK/{vintage}/'
+ + filename
+ + '.zip'
+ )
+ print( # noqa: T201
+ (
+ 'Downloading Census Block Shapefiles for State_County: '
+ + state_county
+ + ' from: '
+ + shapefile_url
+ ).format(filename=filename)
+ )
+
+ zip_file = os.path.join(shapefile_dir, filename + '.zip') # noqa: PTH118
+ urllib.request.urlretrieve(shapefile_url, zip_file) # noqa: S310
+
with ZipFile(zip_file, 'r') as zip_obj:
zip_obj.extractall(path=shapefile_dir)
-
+
# Delete the zip file
- os.remove(zip_file)
-
- if Path(zip_file).is_file() == True :
- print("Error deleting the zip file ",zip_file)
-
- print('filename',f'{filename}.shp')
-
+ os.remove(zip_file) # noqa: PTH107
+
+ if Path(zip_file).is_file() == True: # noqa: E712
+ print('Error deleting the zip file ', zip_file) # noqa: T201
+
+ print('filename', f'{filename}.shp') # noqa: T201
+
# Read shapefile to GeoDataFrame
gdf = gpd.read_file(f'{shapefile_dir}/{filename}.shp')
-
+
# Set projection to EPSG 4326, which is required for folium
gdf = gdf.to_crs(epsg=4326)
-
+
# Append county data
appended_shp_files.append(gdf)
-
-
- elif vintage == '2020' :
-
+
+ elif vintage == '2020':
# loop through the states
for state in stateSet:
-
filename = f'tl_2020_{state}_tabblock20'
-
+
# Check if file is cached
path = Path(f'{shapefile_dir}/{filename}.shp')
# if file does not exist
- if path.is_file() == False :
+ if path.is_file() == False: # noqa: E712
# Use wget to download the TIGER Shapefile for a county
# options -quiet = turn off wget output
# add directory prefix to save files to folder named after program name
- shapefile_url = 'https://www2.census.gov/geo/tiger/TIGER2020/TABBLOCK20/' + filename + '.zip'
-
- print(('Downloading Census Block Shapefiles for State: '
- + state + ' from: '+shapefile_url).format(filename=filename))
-
- zip_file = os.path.join(shapefile_dir, filename + '.zip')
- urllib.request.urlretrieve(shapefile_url, zip_file)
-
+ shapefile_url = (
+ 'https://www2.census.gov/geo/tiger/TIGER2020/TABBLOCK20/'
+ + filename
+ + '.zip'
+ )
+
+ print( # noqa: T201
+ (
+ 'Downloading Census Block Shapefiles for State: '
+ + state
+ + ' from: '
+ + shapefile_url
+ ).format(filename=filename)
+ )
+
+ zip_file = os.path.join(shapefile_dir, filename + '.zip') # noqa: PTH118
+ urllib.request.urlretrieve(shapefile_url, zip_file) # noqa: S310
+
with ZipFile(zip_file, 'r') as zip_obj:
zip_obj.extractall(path=shapefile_dir)
-
+
# Delete the zip file
- os.remove(zip_file)
-
- if Path(zip_file).is_file() == True :
- print("Error deleting the zip file ",zip_file)
-
+ os.remove(zip_file) # noqa: PTH107
+
+ if Path(zip_file).is_file() == True: # noqa: E712
+ print('Error deleting the zip file ', zip_file) # noqa: T201
+
else:
- print(f'Found file {filename}.shp in cache')
-
+ print(f'Found file {filename}.shp in cache') # noqa: T201
+
# Read shapefile to GeoDataFrame
gdf = gpd.read_file(f'{shapefile_dir}/{filename}.shp')
-
+
# Set projection to EPSG 4326, which is required for folium
gdf = gdf.to_crs(epsg=4326)
-
+
# Append county data
appended_shp_files.append(gdf)
-
-
+
# Create dataframe from appended block files
shp_block = pd.concat(appended_shp_files)
-
- print('Merging the census population demographics information to the shapefile')
+
+ print( # noqa: T201
+ 'Merging the census population demographics information to the shapefile'
+ )
# Clean Data - Merge Census demographic data to the appended shapefiles
- cen_shp_block_merged = pd.merge(shp_block, cen_block, left_on=merge_id, right_on='blockid', how='left')
+ cen_shp_block_merged = pd.merge( # noqa: PD015
+ shp_block, cen_block, left_on=merge_id, right_on='blockid', how='left'
+ )
+
+ # Set parameters for file save
+ save_columns = [
+ 'blockid',
+ 'blockidstr',
+ 'Survey',
+ ] # set column names to save
- # Set paramaters for file save
- save_columns = ['blockid', 'blockidstr', 'Survey'] # set column names to save
-
if not census_vars:
save_columns.extend(['pblackbg', 'phispbg', 'pwhitebg'])
@@ -405,44 +438,57 @@ def get_blockdata_for_demographics(state_counties: list, census_vars: list, vint
savefile = file_name # set file name
if out_csv:
- CensusUtil.convert_dislocation_pd_to_csv(cen_block, save_columns, output_dir, savefile)
+ CensusUtil.convert_dislocation_pd_to_csv(
+ cen_block, save_columns, output_dir, savefile
+ )
if out_shapefile:
- CensusUtil.convert_dislocation_gpd_to_shapefile(cen_shp_block_merged, output_dir, savefile)
+ CensusUtil.convert_dislocation_gpd_to_shapefile(
+ cen_shp_block_merged, output_dir, savefile
+ )
if out_geopackage:
- CensusUtil.convert_dislocation_gpd_to_geopackage(cen_shp_block_merged, output_dir, savefile)
-
+ CensusUtil.convert_dislocation_gpd_to_geopackage(
+ cen_shp_block_merged, output_dir, savefile
+ )
+
if out_geojson:
- CensusUtil.convert_dislocation_gpd_to_geojson(cen_shp_block_merged, output_dir, savefile)
+ CensusUtil.convert_dislocation_gpd_to_geojson(
+ cen_shp_block_merged, output_dir, savefile
+ )
# clean up shapefile temp directory
# Try to remove tree; if failed show an error using try...except on screen
-# try:
-# shutil.rmtree(shapefile_dir)
-# if not out_shapefile and not out_csv and not out_html and not out_geopackage and not out_geojson:
-# shutil.rmtree(output_dir)
-# except OSError as e:
-# error_msg = "Error: Failed to remove either " + shapefile_dir \
-# + " or " + output_dir + " directory"
-# logger.error(error_msg)
-# raise Exception(error_msg)
-
-
- print("Done creating population demographics shapefile")
-
- return cen_block[save_columns]
+ # try:
+ # shutil.rmtree(shapefile_dir)
+ # if not out_shapefile and not out_csv and not out_html and not out_geopackage and not out_geojson:
+ # shutil.rmtree(output_dir)
+ # except OSError as e:
+ # error_msg = "Error: Failed to remove either " + shapefile_dir \
+ # + " or " + output_dir + " directory"
+ # logger.error(error_msg)
+ # raise Exception(error_msg)
+
+ print('Done creating population demographics shapefile') # noqa: T201
+ return cen_block[save_columns]
@staticmethod
- def get_blockgroupdata_for_income(state_counties: list, acs_vars: list, vintage: str = "2010",
- out_csv: bool = False, out_shapefile: bool = False,
- out_geopackage: bool = False, out_geojson: bool = False,
- file_name: str = "file_name", output_dir: str = "output_dir"):
-
+ def get_blockgroupdata_for_income( # noqa: C901
+ state_counties: list,
+ acs_vars: list,
+ vintage: str = '2010',
+ out_csv: bool = False, # noqa: FBT001, FBT002
+ out_shapefile: bool = False, # noqa: FBT001, FBT002
+ out_geopackage: bool = False, # noqa: FBT001, FBT002
+ out_geojson: bool = False, # noqa: FBT001, FBT002
+ file_name: str = 'file_name',
+ output_dir: str = 'output_dir',
+ ):
"""Generate household income dataset from census
Args:
+ ----
state_counties (list): A List of concatenated State and County FIPS Codes.
see full list https://www.nrcs.usda.gov/wps/portal/nrcs/detail/national/home/?cid=nrcs143_013697
vintage (str): Census Year.
@@ -452,23 +498,21 @@ def get_blockgroupdata_for_income(state_counties: list, acs_vars: list, vintage:
out_geojson (bool): Save processed census geodataframe as geojson
file_name (str): Name of the output files.
output_dir (str): Name of directory used to save output files.
-
- """
+ """ # noqa: D400
# dataset_name (str): ACS dataset name.
dataset_name = 'acs/acs5'
# *****************************
# Get the household income data
# *****************************
-
+
get_income_vars = 'GEO_ID,NAME'
int_vars = acs_vars
-
+
# Use the default vars if none provided by the user
- if not acs_vars :
-
- # Income data varaible tags for 2010, 2015, and 2020 5-year ACS
+ if not acs_vars:
+ # Income data variable tags for 2010, 2015, and 2020 5-year ACS
# B19001_001E - Estimate!!Total
# B19001_002E - Estimate!!Total!!Less than $10,000
# B19001_003E - Estimate!!Total!!$10,000 to $14,999
@@ -487,35 +531,50 @@ def get_blockgroupdata_for_income(state_counties: list, acs_vars: list, vintage:
# B19001_016E - Estimate!!Total!!$150,000 to $199,999
# B19001_017E - Estimate!!Total!!$200,000 or more
# B19013_001E - Estimate!!Median household income in the past 12 months (in 2016 inflation-adjusted dollars)
-
+
get_income_vars += ',B19001_001E,B19001_002E,B19001_003E,B19001_004E,\
B19001_005E,B19001_006E,B19001_007E,B19001_008E,B19001_009E,B19001_010E,\
B19001_011E,B19001_012E,B19001_013E,B19001_014E,B19001_015E,\
B19001_016E,B19001_017E,B19013_001E'
-
- int_vars = ['B19001_001E','B19001_002E','B19001_003E','B19001_004E',\
- 'B19001_005E','B19001_006E','B19001_007E','B19001_008E','B19001_009E','B19001_010E',\
- 'B19001_011E','B19001_012E','B19001_013E','B19001_014E','B19001_015E',\
- 'B19001_016E','B19001_017E','B19013_001E']
+
+ int_vars = [
+ 'B19001_001E',
+ 'B19001_002E',
+ 'B19001_003E',
+ 'B19001_004E',
+ 'B19001_005E',
+ 'B19001_006E',
+ 'B19001_007E',
+ 'B19001_008E',
+ 'B19001_009E',
+ 'B19001_010E',
+ 'B19001_011E',
+ 'B19001_012E',
+ 'B19001_013E',
+ 'B19001_014E',
+ 'B19001_015E',
+ 'B19001_016E',
+ 'B19001_017E',
+ 'B19013_001E',
+ ]
else:
# Add the variables provided by the user
for var in acs_vars:
- get_income_vars +=','+var
-
+ get_income_vars += ',' + var
# Make directory to save output
- if not os.path.exists(output_dir):
- os.mkdir(output_dir)
+ if not os.path.exists(output_dir): # noqa: PTH110
+ os.mkdir(output_dir) # noqa: PTH102
# Make a directory to save downloaded shapefiles
- shapefile_dir = Path(output_dir)/'shapefiletemp'
-
- if not os.path.exists(shapefile_dir):
- os.mkdir(shapefile_dir)
-
+ shapefile_dir = Path(output_dir) / 'shapefiletemp'
+
+ if not os.path.exists(shapefile_dir): # noqa: PTH110
+ os.mkdir(shapefile_dir) # noqa: PTH102
+
# Set to hold the states - needed for 2020 census shapefile download
- stateSet = set()
+ stateSet = set() # noqa: N806
# loop through counties
appended_countydata = [] # start an empty container for the county data
@@ -523,90 +582,105 @@ def get_blockgroupdata_for_income(state_counties: list, acs_vars: list, vintage:
# deconcatenate state and county values
state = state_county[0:2]
county = state_county[2:5]
- logger.debug('State: '+state)
- logger.debug('County: '+county)
-
+ logger.debug('State: ' + state) # noqa: G003
+ logger.debug('County: ' + county) # noqa: G003
+
# Add the state to the set
stateSet.add(state)
# Set up hyperlink for Census API
api_hyperlink = ''
-
- if vintage == '2010' :
+
+ if vintage == '2010':
api_hyperlink = CensusUtil.generate_census_api_url(
- state, county, vintage, dataset_name, get_income_vars, 'tract')
+ state, county, vintage, dataset_name, get_income_vars, 'tract'
+ )
else:
# Set up hyperlink for Census API
api_hyperlink = CensusUtil.generate_census_api_url(
- state, county, vintage, dataset_name, get_income_vars, 'block%20group')
+ state,
+ county,
+ vintage,
+ dataset_name,
+ get_income_vars,
+ 'block%20group',
+ )
- logger.info("Census API data from: " + api_hyperlink)
+ logger.info('Census API data from: ' + api_hyperlink) # noqa: G003
# Obtain Census API JSON Data
apidf = CensusUtil.request_census_api(api_hyperlink)
# Append county data makes it possible to have multiple counties
appended_countydata.append(apidf)
-
# Create dataframe from appended county data
cen_blockgroup = pd.concat(appended_countydata, ignore_index=True)
# Add variable named "Survey" that identifies Census survey program and survey year
- cen_blockgroup['Survey'] = vintage+' '+dataset_name
+ cen_blockgroup['Survey'] = vintage + ' ' + dataset_name
# 2010 ACS API does not support block group level resolution, use tract
- if vintage == '2010' :
+ if vintage == '2010':
# Set tract FIPS code by concatenating state, county, and tract
- cen_blockgroup['tractid'] = (cen_blockgroup['state']+cen_blockgroup['county'] +
- cen_blockgroup['tract'])
-
+ cen_blockgroup['tractid'] = (
+ cen_blockgroup['state']
+ + cen_blockgroup['county']
+ + cen_blockgroup['tract']
+ )
+
# To avoid problems with how the tract id is read saving it
# as a string will reduce possibility for future errors
- cen_blockgroup['tractidstr'] = cen_blockgroup['tractid'].apply(lambda x: "TRACT"+str(x).zfill(11))
+ cen_blockgroup['tractidstr'] = cen_blockgroup['tractid'].apply(
+ lambda x: 'TRACT' + str(x).zfill(11)
+ )
else:
# Set block group FIPS code by concatenating state, county, tract and block group fips
- cen_blockgroup['bgid'] = (cen_blockgroup['state']+cen_blockgroup['county'] +
- cen_blockgroup['tract']+cen_blockgroup['block group'])
-
+ cen_blockgroup['bgid'] = (
+ cen_blockgroup['state']
+ + cen_blockgroup['county']
+ + cen_blockgroup['tract']
+ + cen_blockgroup['block group']
+ )
+
# To avoid problems with how the block group id is read saving it
# as a string will reduce possibility for future errors
- cen_blockgroup['bgidstr'] = cen_blockgroup['bgid'].apply(lambda x: "BG"+str(x).zfill(12))
+ cen_blockgroup['bgidstr'] = cen_blockgroup['bgid'].apply(
+ lambda x: 'BG' + str(x).zfill(12)
+ )
# Convert variables from dtype object to integer
for var in int_vars:
- cen_blockgroup[var] = pd.to_numeric(cen_blockgroup[var], errors='coerce').convert_dtypes()
- #cen_blockgroup[var] = cen_blockgroup[var].astype(int)
- print(var+' converted from object to integer')
+ cen_blockgroup[var] = pd.to_numeric(
+ cen_blockgroup[var], errors='coerce'
+ ).convert_dtypes()
+ # cen_blockgroup[var] = cen_blockgroup[var].astype(int)
+ print(var + ' converted from object to integer') # noqa: T201
-
# ### Obtain Data - Download and extract shapefiles
# The Block Group IDs in the Census data are associated with the Block Group boundaries that can be mapped.
# To map this data, we need the shapefile information for the block groups in the select counties.
#
# These files can be found online at:
# https://www2.census.gov/geo/tiger/TIGER2010/BG/2010/
-
-
# *******************************
# Download and extract shapefiles
# *******************************
-
-
+
# Download the shapefile information for the block groups in the select counties.
#
# These files can be found online at:
-
+
# For 2010 ACS - API only supports up to the tract level
# https://www2.census.gov/geo/tiger/TIGER2010/TRACT/2010/
-
+
# For 2015 and 2020 ACS - API supports up to the block group level
# https://www2.census.gov/geo/tiger/TIGER2020/TABBLOCK20/
-
+
# Block group shapefiles are downloaded for each of the selected counties from
# the Census TIGER/Line Shapefiles at https://www2.census.gov/geo/tiger.
-
+
# Each state/county file is downloaded as a zip file and the contents are extracted.
# The shapefiles are reprojected to EPSG 4326 and appended as a single shapefile
# (as a GeoPandas GeoDataFrame) containing block groups for all of the selected counties.
@@ -614,114 +688,137 @@ def get_blockgroupdata_for_income(state_counties: list, acs_vars: list, vintage:
# *EPSG: 4326 uses a coordinate system (Lat, Lon)
# This coordinate system is required for mapping with folium.
-
appended_shp_files = [] # start an empty container for the county/state shapefiles
-
+
# Feature attributes that need to match to join layers
merge_id_left = 'GEOID'
merge_id_right = ''
-
+
# Tigerline provides the blocks for each county, thus each county needs to be downloaded individually
- if vintage == '2010' :
-
+ if vintage == '2010':
merge_id_left += '10'
-
+
merge_id_right = 'tractid'
-
+
# loop through counties
for state_county in state_counties:
-
# county_fips = state+county
filename = f'tl_2010_{state_county}_tract10'
-
+
# Use wget to download the TIGER Shapefile for a county
# options -quiet = turn off wget output
# add directory prefix to save files to folder named after program name
- shapefile_url = 'https://www2.census.gov/geo/tiger/TIGER2010/TRACT/2010/' + filename + '.zip'
-
- print(('Downloading Census Block Shapefiles for State_County: '
- + state_county + ' from: '+shapefile_url).format(filename=filename))
-
- zip_file = os.path.join(shapefile_dir, filename + '.zip')
- urllib.request.urlretrieve(shapefile_url, zip_file)
-
+ shapefile_url = (
+ 'https://www2.census.gov/geo/tiger/TIGER2010/TRACT/2010/'
+ + filename
+ + '.zip'
+ )
+
+ print( # noqa: T201
+ (
+ 'Downloading Census Block Shapefiles for State_County: '
+ + state_county
+ + ' from: '
+ + shapefile_url
+ ).format(filename=filename)
+ )
+
+ zip_file = os.path.join(shapefile_dir, filename + '.zip') # noqa: PTH118
+ urllib.request.urlretrieve(shapefile_url, zip_file) # noqa: S310
+
with ZipFile(zip_file, 'r') as zip_obj:
zip_obj.extractall(path=shapefile_dir)
-
+
# Delete the zip file
- os.remove(zip_file)
-
- if Path(zip_file).is_file() == True :
- print("Error deleting the zip file ",zip_file)
-
+ os.remove(zip_file) # noqa: PTH107
+
+ if Path(zip_file).is_file() == True: # noqa: E712
+ print('Error deleting the zip file ', zip_file) # noqa: T201
+
# Read shapefile to GeoDataFrame
gdf = gpd.read_file(f'{shapefile_dir}/{filename}.shp')
# Set projection to EPSG 4326, which is required for folium
gdf = gdf.to_crs(epsg=4326)
-
+
# Append county data
appended_shp_files.append(gdf)
-
-
- elif vintage == '2015' or vintage == '2020' :
-
+
+ elif vintage == '2015' or vintage == '2020': # noqa: PLR1714
merge_id_right = 'bgid'
-
+
# loop through the states
for state in stateSet:
-
filename = f'tl_{vintage}_{state}_bg'
-
+
# Check if file is cached
path = Path(f'{shapefile_dir}/{filename}.shp')
# if file does not exist
- if path.is_file() == False :
+ if path.is_file() == False: # noqa: E712
# Use wget to download the TIGER Shapefile for the state
# options -quiet = turn off wget output
# add directory prefix to save files to folder named after program name
- shapefile_url = f'https://www2.census.gov/geo/tiger/TIGER{vintage}/BG/' + filename + '.zip'
-
- print(('Downloading Census Block Shapefiles for State: '
- + state + ' from: '+shapefile_url).format(filename=filename))
-
- zip_file = os.path.join(shapefile_dir, filename + '.zip')
- urllib.request.urlretrieve(shapefile_url, zip_file)
-
+ shapefile_url = (
+ f'https://www2.census.gov/geo/tiger/TIGER{vintage}/BG/'
+ + filename
+ + '.zip'
+ )
+
+ print( # noqa: T201
+ (
+ 'Downloading Census Block Shapefiles for State: '
+ + state
+ + ' from: '
+ + shapefile_url
+ ).format(filename=filename)
+ )
+
+ zip_file = os.path.join(shapefile_dir, filename + '.zip') # noqa: PTH118
+ urllib.request.urlretrieve(shapefile_url, zip_file) # noqa: S310
+
with ZipFile(zip_file, 'r') as zip_obj:
zip_obj.extractall(path=shapefile_dir)
-
+
# Delete the zip file
- os.remove(zip_file)
-
- if Path(zip_file).is_file() == True :
- print("Error deleting the zip file ",zip_file)
-
+ os.remove(zip_file) # noqa: PTH107
+
+ if Path(zip_file).is_file() == True: # noqa: E712
+ print('Error deleting the zip file ', zip_file) # noqa: T201
+
else:
- print(f'Found file {filename}.shp in cache: ',path)
-
+ print(f'Found file {filename}.shp in cache: ', path) # noqa: T201
+
# Read shapefile to GeoDataFrame
gdf = gpd.read_file(f'{shapefile_dir}/{filename}.shp')
-
+
# Set projection to EPSG 4326, which is required for folium
gdf = gdf.to_crs(epsg=4326)
-
+
# Append county data
appended_shp_files.append(gdf)
# Create dataframe from appended county data
shp_blockgroup = pd.concat(appended_shp_files)
-
- print('Merging the ACS household income information to the shapefile')
- # Clean Data - Merge Census demographic data to the appended shapefiles
- cen_shp_blockgroup_merged = pd.merge(shp_blockgroup, cen_blockgroup,
- left_on=merge_id_left, right_on=merge_id_right, how='left')
+ print('Merging the ACS household income information to the shapefile') # noqa: T201
- # Set paramaters for file save
- if vintage == '2010' :
- save_columns = ['tractid', 'tractidstr', 'Survey'] # set column names to save
+ # Clean Data - Merge Census demographic data to the appended shapefiles
+ cen_shp_blockgroup_merged = pd.merge( # noqa: PD015
+ shp_blockgroup,
+ cen_blockgroup,
+ left_on=merge_id_left,
+ right_on=merge_id_right,
+ how='left',
+ )
+
+ # Set parameters for file save
+ if vintage == '2010':
+ save_columns = [
+ 'tractid',
+ 'tractidstr',
+ 'Survey',
+ ] # set column names to save
else:
save_columns = ['bgid', 'bgidstr', 'Survey'] # set column names to save
@@ -730,93 +827,116 @@ def get_blockgroupdata_for_income(state_counties: list, acs_vars: list, vintage:
savefile = file_name # set file name
if out_csv:
- CensusUtil.convert_dislocation_pd_to_csv(cen_blockgroup, save_columns, output_dir, savefile)
+ CensusUtil.convert_dislocation_pd_to_csv(
+ cen_blockgroup, save_columns, output_dir, savefile
+ )
if out_shapefile:
- CensusUtil.convert_dislocation_gpd_to_shapefile(cen_shp_blockgroup_merged, output_dir, savefile)
+ CensusUtil.convert_dislocation_gpd_to_shapefile(
+ cen_shp_blockgroup_merged, output_dir, savefile
+ )
if out_geopackage:
- CensusUtil.convert_dislocation_gpd_to_geopackage(cen_shp_blockgroup_merged, output_dir, savefile)
-
+ CensusUtil.convert_dislocation_gpd_to_geopackage(
+ cen_shp_blockgroup_merged, output_dir, savefile
+ )
+
if out_geojson:
- CensusUtil.convert_dislocation_gpd_to_geojson(cen_shp_blockgroup_merged, output_dir, savefile)
+ CensusUtil.convert_dislocation_gpd_to_geojson(
+ cen_shp_blockgroup_merged, output_dir, savefile
+ )
# clean up shapefile temp directory
# Try to remove tree; if failed show an error using try...except on screen
-# try:
-# shutil.rmtree(shapefile_dir)
-# if not out_shapefile and not out_csv and not out_html and not out_geopackage and not out_geojson:
-# shutil.rmtree(output_dir)
-# except OSError as e:
-# error_msg = "Error: Failed to remove either " + shapefile_dir \
-# + " or " + output_dir + " directory"
-# logger.error(error_msg)
-# raise Exception(error_msg)
-
- print("Done creating household income shapefile")
+ # try:
+ # shutil.rmtree(shapefile_dir)
+ # if not out_shapefile and not out_csv and not out_html and not out_geopackage and not out_geojson:
+ # shutil.rmtree(output_dir)
+ # except OSError as e:
+ # error_msg = "Error: Failed to remove either " + shapefile_dir \
+ # + " or " + output_dir + " directory"
+ # logger.error(error_msg)
+ # raise Exception(error_msg)
+
+ print('Done creating household income shapefile') # noqa: T201
return cen_blockgroup[save_columns]
-
-
@staticmethod
def convert_dislocation_gpd_to_shapefile(in_gpd, programname, savefile):
"""Create shapefile of dislocation geodataframe.
Args:
+ ----
in_gpd (object): Geodataframe of the dislocation.
programname (str): Output directory name.
savefile (str): Output shapefile name.
"""
# save cen_shp_blockgroup_merged shapefile
- print('Shapefile data file saved to: '+programname+'/'+savefile+".shp")
- in_gpd.to_file(programname+'/'+savefile+".shp")
-
+ print( # noqa: T201
+ 'Shapefile data file saved to: ' + programname + '/' + savefile + '.shp'
+ )
+ in_gpd.to_file(programname + '/' + savefile + '.shp')
@staticmethod
def convert_dislocation_gpd_to_geojson(in_gpd, programname, savefile):
"""Create geojson of dislocation geodataframe.
Args:
+ ----
in_gpd (object): Geodataframe of the dislocation.
programname (str): Output directory name.
savefile (str): Output geojson name.
"""
# save cen_shp_blockgroup_merged geojson
- print('Geodatabase data file saved to: '+programname+'/'+savefile+".geojson")
- in_gpd.to_file(programname+'/'+savefile+".geojson", driver="GeoJSON")
-
+ print( # noqa: T201
+            'GeoJSON data file saved to: '
+ + programname
+ + '/'
+ + savefile
+ + '.geojson'
+ )
+ in_gpd.to_file(programname + '/' + savefile + '.geojson', driver='GeoJSON')
@staticmethod
def convert_dislocation_gpd_to_geopackage(in_gpd, programname, savefile):
"""Create shapefile of dislocation geodataframe.
Args:
+ ----
in_gpd (object): Geodataframe of the dislocation.
programname (str): Output directory name.
savefile (str): Output shapefile name.
"""
# save cen_shp_blockgroup_merged shapefile
- print('GeoPackage data file saved to: '+programname+'/'+savefile+".gpkg")
- in_gpd.to_file(programname+'/'+savefile+".gpkg", driver="GPKG", layer=savefile)
+ print( # noqa: T201
+ 'GeoPackage data file saved to: '
+ + programname
+ + '/'
+ + savefile
+ + '.gpkg'
+ )
+ in_gpd.to_file(
+ programname + '/' + savefile + '.gpkg', driver='GPKG', layer=savefile
+ )
@staticmethod
def convert_dislocation_pd_to_csv(in_pd, save_columns, programname, savefile):
"""Create csv of dislocation dataframe using the column names.
Args:
+ ----
in_pd (object): Geodataframe of the dislocation.
save_columns (list): A list of column names to use.
programname (str): Output directory name.
savefile (str): Output csv file name.
"""
-
# Save cen_blockgroup dataframe with save_column variables to csv named savefile
- print('CSV data file saved to: '+programname+'/'+savefile+".csv")
- in_pd[save_columns].to_csv(programname+'/'+savefile+".csv", index=False)
-
-
+ print('CSV data file saved to: ' + programname + '/' + savefile + '.csv') # noqa: T201
+ in_pd[save_columns].to_csv(
+ programname + '/' + savefile + '.csv', index=False
+ )
diff --git a/modules/performHUA/pyincore_data/globals.py b/modules/performHUA/pyincore_data/globals.py
index 68a4717d3..6d262150c 100644
--- a/modules/performHUA/pyincore_data/globals.py
+++ b/modules/performHUA/pyincore_data/globals.py
@@ -1,17 +1,19 @@
-# Copyright (c) 2021 University of Illinois and others. All rights reserved.
+# Copyright (c) 2021 University of Illinois and others. All rights reserved. # noqa: D100, INP001
#
# This program and the accompanying materials are made available under the
# terms of the Mozilla Public License v2.0 which accompanies this distribution,
# and is available at https://www.mozilla.org/en-US/MPL/2.0/
-import os
import logging
+import os
from logging import config as logging_config
-PACKAGE_VERSION = "0.3.0"
+PACKAGE_VERSION = '0.3.0'
-PYINCORE_DATA_ROOT_FOLDER = os.path.dirname(os.path.dirname(__file__))
+PYINCORE_DATA_ROOT_FOLDER = os.path.dirname(os.path.dirname(__file__)) # noqa: PTH120
-LOGGING_CONFIG = os.path.abspath(os.path.join(os.path.abspath(os.path.dirname(__file__)), 'logging.ini'))
+LOGGING_CONFIG = os.path.abspath( # noqa: PTH100
+ os.path.join(os.path.abspath(os.path.dirname(__file__)), 'logging.ini') # noqa: PTH100, PTH118, PTH120
+)
logging_config.fileConfig(LOGGING_CONFIG)
LOGGER = logging.getLogger('pyincore-data')
diff --git a/modules/performREC/pyrecodes/run_pyrecodes.py b/modules/performREC/pyrecodes/run_pyrecodes.py
new file mode 100644
index 000000000..87cbf2123
--- /dev/null
+++ b/modules/performREC/pyrecodes/run_pyrecodes.py
@@ -0,0 +1,365 @@
+import argparse
+import importlib
+import json
+import os
+import sys
+from pathlib import Path
+
+import geopandas as gpd
+import numpy as np
+import pandas as pd
+import shapely
+import ujson
+
+# Delete below when pyrecodes can be installed as a standalone package
+sys.path.insert(0, '/Users/jinyanzhao/Desktop/SimCenterBuild/r2d_pyrecodes/')
+from pyrecodes import main
+
+
+def run_pyrecodes(rec_config, inputRWHALE, parallelType, mpiExec, numPROC):
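+    # Per realization handled by this process, this function:
+    #   1. creates a Results/Recovery/<realization> output directory,
+    #   2. writes a pyrecodes SystemConfiguration.json and main.json that point at
+    #      the realization's R2D damage results,
+    #   3. runs pyrecodes (main.run) and computes resilience metrics,
+    #   4. appends each component's recovery duration to Results_<realization>.json,
+    #   5. aggregates supply/demand/consumption statistics across realizations.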
+
+    # Create the output directory for recovery results
+    rec_output_dir = os.path.join(inputRWHALE['runDir'], "Results", "Recovery")
+    if not os.path.exists(rec_output_dir):
+        os.mkdir(rec_output_dir)
+
+ # Find the realizations to run
+ damage_input = rec_config.pop('DamageInput')
+ realizations_to_run = select_realizations_to_run(\
+ damage_input,inputRWHALE)
+
+ # Replace SimCenterDefault with correct path
+ cmp_lib = rec_config["ComponentLibrary"]
+ if cmp_lib.startswith('SimCenterDefault'):
+ cmp_lib_name = cmp_lib.split('/')[1]
+ cmp_lib_dir = os.path.dirname(os.path.realpath(__file__))
+ cmp_lib = os.path.join(cmp_lib_dir, cmp_lib_name)
+ rec_config["ComponentLibrary"] = cmp_lib
+    # loop through each realization. Needs to be parallelized
+ # Create the base of system configuration json
+ system_configuration = create_system_configuration(rec_config)
+ # Create the base of main json
+ main_json = dict()
+ main_json.update({"ComponentLibrary": {
+ "ComponentLibraryCreatorClass": "JSONComponentLibraryCreator",
+ "ComponentLibraryFile": rec_config["ComponentLibrary"]
+ }})
+
+ # initialize a dict to accumulate recovery results stats
+    result_det_path = os.path.join(inputRWHALE['runDir'], "Results",
+                                   "Results_det.json")
+ with open(result_det_path, 'r') as f:
+ results_det = json.load(f)
+ result_agg = dict()
+ resilience_results = dict()
+
+ # Loop through realizations and run pyrecodes
+ numP = 1
+ procID = 0
+ doParallel = False
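+    # Use mpi4py only when it is installed and parRUN was requested; otherwise
+    # fall back to a serial loop with numP = 1 and procID = 0.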
+ mpi_spec = importlib.util.find_spec("mpi4py")
+ found = mpi_spec is not None
+ if found and parallelType == 'parRUN':
+ import mpi4py
+ from mpi4py import MPI
+ comm = MPI.COMM_WORLD
+ numP = comm.Get_size()
+ procID = comm.Get_rank()
+ if numP < 2:
+ doParallel = False
+ numP = 1
+ procID = 0
+ else:
+ doParallel = True
+ count = 0
+ needsInitiation = True
+ ind_in_rank = 0
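+    # Realizations are distributed round-robin across ranks: the realization at
+    # loop position `count` is handled by the rank where count % numP == procID.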
+ for ind, rlz_ind in enumerate(realizations_to_run):
+ # Create a realization directory
+ if count % numP == procID:
+            rlz_dir = os.path.join(rec_output_dir, str(rlz_ind))
+ if not os.path.exists(rlz_dir):
+ os.mkdir(rlz_dir)
+
+ # Update the system_configuration json
+ damage_rlz_file = os.path.join(inputRWHALE['runDir'],"Results",\
+ f"Results_{int(rlz_ind)}.json")
+ DamageInput = {"Type": "R2DDamageInput",
+ "Parameters": {"DamageFile": damage_rlz_file}}
+ system_configuration.update({"DamageInput":DamageInput})
+
+            # Write the system_configuration to a file
+ system_configuration_file = os.path.join(rlz_dir, \
+ "SystemConfiguration.json")
+ with open(system_configuration_file, 'w') as f:
+ ujson.dump(system_configuration, f)
+
+ # Update the main json
+ main_json.update({"System": {
+ "SystemCreatorClass": "ConcreteSystemCreator",
+ "SystemClass": "BuiltEnvironmentSystem",
+ "SystemConfigurationFile": system_configuration_file
+ }})
+
+ # Write the main json to a file
+ main_file = os.path.join(rlz_dir, "main.json")
+ with open(main_file, 'w') as f:
+ ujson.dump(main_json, f)
+
+ system = main.run(main_file)
+
+ system.calculate_resilience()
+
+ # Append the recovery time to results_rlz
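+            # On the first realization processed by this rank, allocate result
+            # buffers sized for the realizations assigned to this rank
+            # (num_of_rlz_per_rank).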
+ if needsInitiation:
+ needsInitiation = False
+ num_of_rlz_per_rank = int(np.floor(len(realizations_to_run)/numP))
+ if procID < len(realizations_to_run)%numP:
+ num_of_rlz_per_rank += 1
+ # Initialize resilience_results
+ resilience_results_buffer = dict()
+ resilience_calculator_id = 0
+ resilience_results.update({
+ "time_steps": list(range(0, system.MAX_TIME_STEP+1))
+ })
+ resources_to_plot = system.resilience_calculators[resilience_calculator_id].system_supply.keys()
+ for resource_name in resources_to_plot:
+ resilience_results_buffer.update({
+ resource_name: {
+ "Supply": np.zeros([num_of_rlz_per_rank, system.MAX_TIME_STEP+1]),
+ "Demand": np.zeros([num_of_rlz_per_rank, system.MAX_TIME_STEP+1]),
+ "Consumption": np.zeros([num_of_rlz_per_rank, system.MAX_TIME_STEP+1])
+ }
+ })
+ # Initialize result_agg
+ result_agg_buffer = dict()
+ for asset_type, item in results_det.items():
+ asset_type_result = dict()
+ for asset_subtype, asset_subtype_item in item.items():
+ asset_subtype_result = dict()
+ for aim_id, aim in asset_subtype_item.items():
+ asset_subtype_result.update({aim_id:{
+ "RecoveryDuration":np.zeros(num_of_rlz_per_rank)
+ }})
+ asset_type_result.update({asset_subtype:asset_subtype_result})
+ result_agg_buffer.update({asset_type:asset_type_result})
+ del results_det
+
+ resilience_result_rlz_i = dict()
+ for resource_name in resources_to_plot:
+ resilience_result_rlz_i.update({
+ "time_steps": list(range(0, system.time_step+1)),
+ resource_name: {
+ "Supply": system.resilience_calculators[resilience_calculator_id].system_supply[resource_name][:system.time_step+1],
+ "Demand": system.resilience_calculators[resilience_calculator_id].system_demand[resource_name][:system.time_step+1],
+ "Consumption": system.resilience_calculators[resilience_calculator_id].system_consumption[resource_name][:system.time_step+1]
+ }
+ }
+ )
+ resilience_results_buffer[resource_name]['Supply'][ind_in_rank,:system.time_step+1] = \
+ system.resilience_calculators[resilience_calculator_id].system_supply[resource_name][:system.time_step+1]
+ resilience_results_buffer[resource_name]['Demand'][ind_in_rank,:system.time_step+1] = \
+ system.resilience_calculators[resilience_calculator_id].system_demand[resource_name][:system.time_step+1]
+ resilience_results_buffer[resource_name]['Consumption'][ind_in_rank,:system.time_step+1] = \
+ system.resilience_calculators[resilience_calculator_id].system_consumption[resource_name][:system.time_step+1]
+ resilience_result_rlz_i_file = os.path.join(rlz_dir, "ResilienceResult.json")
+ with open(resilience_result_rlz_i_file, 'w') as f:
+ ujson.dump(resilience_result_rlz_i, f)
+ result_file_name = os.path.join(inputRWHALE['runDir'],"Results",
+ f"Results_{rlz_ind}.json")
+ with open(result_file_name, 'r') as f:
+ results = json.load(f)
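+            # Attach a recovery duration (recovery time step minus the disaster time
+            # step, floored at zero) to every component flagged as an R2D component.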
+ for comp in system.components:
+ if getattr(comp, 'r2d_comp', False) is True:
+ recovery_duration = getattr(comp, 'recoverd_time_step',system.MAX_TIME_STEP) - \
+ system.DISASTER_TIME_STEP
+ recovery_duration = max(0, recovery_duration)
+ results[comp.asset_type][comp.asset_subtype][comp.aim_id].update({
+ "Recovery": {"Duration":recovery_duration}
+ })
+ result_agg_buffer[comp.asset_type][comp.asset_subtype][comp.aim_id]\
+ ['RecoveryDuration'][ind_in_rank] = recovery_duration
+ with open(result_file_name, 'w') as f:
+ ujson.dump(results, f)
+
+ ind_in_rank += 1
+ count = count + 1
+
+ # wait for all to finish
+ if doParallel:
+ comm.Barrier()
+
+ # if rank 0, gather result_agg and resilience_results, write to file
+    # note that the gathered results don't follow the order in realizations_to_run
+ # but this order is not needed when calculating mean and std
+ if doParallel:
+ # gather results_agg
+ for asset_type, item in result_agg_buffer.items():
+ asset_type_result = dict()
+ for asset_subtype, asset_subtype_item in item.items():
+ asset_subtype_result = dict()
+ for aim_id, aim in asset_subtype_item.items():
+ asset_subtype_result.update({aim_id:{
+ "RecoveryDuration":comm.gather(result_agg_buffer[asset_type][asset_subtype], root=0)
+ }})
+ asset_type_result.update({asset_subtype:asset_subtype_result})
+ result_agg.update({asset_type:asset_type_result})
+        # gather resilience_results
+ for resource_name in resources_to_plot:
+ if procID == 0:
+ resilience_results.update({
+ resource_name: {
+ "Supply": np.zeros([len(realizations_to_run), system.MAX_TIME_STEP+1]),
+ "Demand": np.zeros([len(realizations_to_run), system.MAX_TIME_STEP+1]),
+ "Consumption": np.zeros([len(realizations_to_run), system.MAX_TIME_STEP+1])
+ }
+ })
+ comm.gather(resilience_results_buffer[resource_name]["Supply"],
+ resilience_results[resource_name]["Supply"], root=0)
+ comm.gather(resilience_results_buffer[resource_name]["Demand"],
+ resilience_results[resource_name]["Demand"], root=0)
+ comm.gather(resilience_results_buffer[resource_name]["Consumption"],
+ resilience_results[resource_name]["Consumption"], root=0)
+ else:
+ for resource_name in resources_to_plot:
+ resilience_results.update({
+ resource_name: resilience_results_buffer[resource_name]
+ })
+ result_agg = result_agg_buffer
+
+ if procID==0:
+ # Calculate stats of the results and add to results_det.json
+ with open(result_det_path, 'r') as f:
+ results_det = json.load(f)
+ for asset_type, item in result_agg.items():
+ for asset_subtype, asset_subtype_item in item.items():
+ for aim_id, aim in asset_subtype_item.items():
+ if 'R2Dres' not in results_det[asset_type][asset_subtype][aim_id].keys():
+ results_det[asset_type][asset_subtype][aim_id].update({'R2Dres':{}})
+ results_det[asset_type][asset_subtype][aim_id]['R2Dres'].update({
+ "R2Dres_mean_RecoveryDuration":aim['RecoveryDuration'].mean(),
+ "R2Dres_std_RecoveryDuration":aim['RecoveryDuration'].std()
+ })
+ with open(result_det_path, 'w') as f:
+ ujson.dump(results_det, f)
+
+        recovery_result_path = os.path.join(rec_output_dir, "ResilienceResult.json")
+ for resource_name in resources_to_plot:
+ resilience_results[resource_name].update({
+ 'R2Dres_mean_Supply':resilience_results[resource_name]['Supply'].mean(axis=0).tolist(),
+ 'R2Dres_std_Supply':resilience_results[resource_name]['Supply'].std(axis=0).tolist(),
+ 'R2Dres_mean_Demand':resilience_results[resource_name]['Demand'].mean(axis=0).tolist(),
+ 'R2Dres_std_Demand':resilience_results[resource_name]['Demand'].std(axis=0).tolist(),
+ 'R2Dres_mean_Consumption':resilience_results[resource_name]['Consumption'].mean(axis=0).tolist(),
+ 'R2Dres_std_Consumption':resilience_results[resource_name]['Consumption'].std(axis=0).tolist()
+ })
+ resilience_results[resource_name].pop("Supply")
+ resilience_results[resource_name].pop("Demand")
+ resilience_results[resource_name].pop("Consumption")
+
+
+ with open(recovery_result_path, 'w') as f:
+ ujson.dump(resilience_results, f)
+
+ # Below are for development use
+ from pyrecodes import GeoVisualizer as gvis
+ geo_visualizer = gvis.R2D_GeoVisualizer(system.components)
+ geo_visualizer.plot_component_localities()
+ from pyrecodes import Plotter
+ plotter_object = Plotter.Plotter()
+ x_axis_label = 'Time step [day]'
+ resources_to_plot = ['Shelter', 'FunctionalHousing', 'ElectricPower', 'PotableWater']
+ resource_units = ['[beds/day]', '[beds/day]', '[MWh/day]', '[RC/day]']
+ # define which resilience calculator to use to plot the supply/demand/consumption of the resources
+ # they are ordered as in the system configuration file
+ resilience_calculator_id = 0
+ for i, resource_name in enumerate(resources_to_plot):
+ y_axis_label = f'{resource_name} {resource_units[i]} | {system.resilience_calculators[resilience_calculator_id].scope}'
+ axis_object = plotter_object.setup_lor_plot_fig(x_axis_label, y_axis_label)
+ time_range = system.time_step+1
+ time_steps_before_event = 10 #
+ plotter_object.plot_single_resource(list(range(-time_steps_before_event, time_range)),
+ resilience_results[resource_name]['R2Dres_mean_Supply'][:time_range],
+ resilience_results[resource_name]['R2Dres_mean_Demand'][:time_range],
+ resilience_results[resource_name]['R2Dres_mean_Consumption'][:time_range],
+ axis_object, warmup=time_steps_before_event)
+ print()
+def create_system_configuration(rec_config):
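+    # Build the pyrecodes system configuration from rec_config: every key except
+    # 'Content' is copied through; 'Content' is loaded from a JSON file when its
+    # Creator is 'FromJsonFile' ('LocalityGeoJSON' is not implemented yet).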
+ content_config = rec_config.pop('Content')
+ system_configuration = rec_config.copy()
+ if content_config['Creator'] == 'FromJsonFile':
+ with open(content_config['FilePath'], 'r') as f:
+ content = json.load(f)
+ system_configuration.update({"Content":content})
+ elif content_config['Creator'] == 'LocalityGeoJSON':
+ # think how users can input RecoveryResourceSupplier and Resources
+ pass
+
+ return system_configuration
+
+
+def select_realizations_to_run(damage_input, inputRWHALE):
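+    # Two selection modes, driven by damage_input['Type']:
+    #   'R2DDamageRealization' - expand Parameters['Filter'] (e.g. '0-3,7' gives
+    #       realizations 0, 1, 2, 3 and 7) and keep those that exist;
+    #   'R2DDamageSample' - randomly draw Parameters['SampleSize'] realizations.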
+ rlzs_num = min([item['ApplicationData']['Realizations'] \
+ for _, item in inputRWHALE['Applications']['DL'].items()])
+ rlzs_available = np.array(range(rlzs_num))
+ if damage_input['Type'] == 'R2DDamageRealization':
+ rlz_filter = damage_input['Parameters']['Filter']
+ rlzs_requested = []
+ for rlzs in rlz_filter.split(','):
+ if "-" in rlzs:
+ rlzs_low, rlzs_high = rlzs.split("-")
+ rlzs_requested += list(range(int(rlzs_low), int(rlzs_high)+1))
+ else:
+ rlzs_requested.append(int(rlzs))
+ rlzs_requested = np.array(rlzs_requested)
+ rlzs_in_available = np.in1d(rlzs_requested, rlzs_available)
+ if rlzs_in_available.sum() != 0:
+ rlzs_to_run = rlzs_requested[
+ np.where(rlzs_in_available)[0]]
+ else:
+ rlzs_to_run = []
+ if damage_input['Type'] == 'R2DDamageSample':
+ sample_size = damage_input['Parameters']['SampleSize']
+ seed = damage_input['Parameters']['SampleSize']
+ if sample_size < rlzs_num:
+ np.random.seed(seed)
+ rlzs_to_run = np.sort(np.random.choice(rlzs_available, sample_size,\
+ replace = False)).tolist()
+ else:
+ rlzs_to_run = np.sort(rlzs_available).tolist()
+ return rlzs_to_run
+
+if __name__ == '__main__':
+
+ #Defining the command line arguments
+
+    workflowArgParser = argparse.ArgumentParser(
+        description="Run Pyrecodes from the NHERI SimCenter rWHALE workflow for a set of assets.",
+        allow_abbrev=False)
+
+    workflowArgParser.add_argument("-c", "--configJsonPath",
+        help="Configuration file for running pyrecodes")
+    workflowArgParser.add_argument("-i", "--inputRWHALEPath",
+        help="Configuration file specifying the rwhale applications and data used")
+    workflowArgParser.add_argument("-p", "--parallelType",
+        default='seqRUN',
+        help="How to run in parallel; options: seqRUN, parSETUP, parRUN")
+    workflowArgParser.add_argument("-m", "--mpiexec",
+        default='mpiexec',
+        help="MPI launcher to use, e.g. ibrun, mpirun, mpiexec")
+    workflowArgParser.add_argument("-n", "--numP",
+        default='8',
+        help="If parallel, how many processes to start with the mpiexec option")
+
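+    # Example invocation (hypothetical paths), assuming a serial run:
+    #   python run_pyrecodes.py -c rec_config.json -i inputRWHALE.json -p seqRUN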
+ #Parsing the command line arguments
+ wfArgs = workflowArgParser.parse_args()
+
+ #Calling the main workflow method and passing the parsed arguments
+ numPROC = int(wfArgs.numP)
+
+ with open(Path(wfArgs.configJsonPath).resolve(), 'r') as f:
+ rec_config = json.load(f)
+ with open(Path(wfArgs.inputRWHALEPath).resolve(), 'r') as f:
+ inputRWHALE = json.load(f)
+
+ run_pyrecodes(rec_config=rec_config,\
+ inputRWHALE=inputRWHALE,
+ parallelType = wfArgs.parallelType,
+ mpiExec = wfArgs.mpiexec,
+ numPROC = numPROC)
+
+
\ No newline at end of file
diff --git a/modules/performRegionalEventSimulation/DummyEventApp/DEA.py b/modules/performRegionalEventSimulation/DummyEventApp/DEA.py
index a85d8b51a..11cc8570d 100644
--- a/modules/performRegionalEventSimulation/DummyEventApp/DEA.py
+++ b/modules/performRegionalEventSimulation/DummyEventApp/DEA.py
@@ -1,5 +1,4 @@
-# -*- coding: utf-8 -*-
-#
+# # noqa: INP001, D100
# Copyright (c) 2018 Leland Stanford Junior University
# Copyright (c) 2018 The Regents of the University of California
#
@@ -41,16 +40,14 @@
import argparse
-def simulate_event(in1, in2):
-
- print(f'SIMULATING EVENT: {in1}, {in2}')
+def simulate_event(in1, in2): # noqa: D103
+ print(f'SIMULATING EVENT: {in1}, {in2}') # noqa: T201
if __name__ == '__main__':
-
parser = argparse.ArgumentParser()
parser.add_argument('--appSpecInput1')
parser.add_argument('--appSpecInput2', type=int)
args = parser.parse_args()
- simulate_event(args.appSpecInput1, args.appSpecInput2)
\ No newline at end of file
+ simulate_event(args.appSpecInput1, args.appSpecInput2)
diff --git a/modules/performRegionalEventSimulation/regionalGroundMotion/ComputeIntensityMeasure.py b/modules/performRegionalEventSimulation/regionalGroundMotion/ComputeIntensityMeasure.py
index 5d58f1304..b605129ea 100644
--- a/modules/performRegionalEventSimulation/regionalGroundMotion/ComputeIntensityMeasure.py
+++ b/modules/performRegionalEventSimulation/regionalGroundMotion/ComputeIntensityMeasure.py
@@ -1,5 +1,4 @@
-# -*- coding: utf-8 -*-
-#
+# # noqa: INP001, D100
# Copyright (c) 2018 Leland Stanford Junior University
# Copyright (c) 2018 The Regents of the University of California
#
@@ -34,953 +33,1260 @@
# You should have received a copy of the BSD 3-Clause License along with
# this file. If not, see .
#
-# The computation method of ground motion intensity map using Markhivida et al. and
+# The computation method of ground motion intensity map using Markhivida et al. and
# the Baker-Jayaram correlation models is contributed by Dr. Anne Husley's
-# seaturtles package (https://github.com/annehulsey/seaturtles).
+# seaturtles package (https://github.com/annehulsey/seaturtles).
#
# Contributors:
# Anne Husley
# Kuanshi Zhong
# Jinyan Zhao
-import warnings, h5py, re
-import numpy as np
+import re
-LOCAL_IM_GMPE = {"DS575H": ["Bommer, Stafford & Alarcon (2009)", "Afshari & Stewart (2016)"],
- "DS595H": ["Bommer, Stafford & Alarcon (2009)", "Afshari & Stewart (2016)"],
- "DS2080H": ["Afshari & Stewart (2016)"],
- "SA":["Chiou & Youngs (2014)", "Abrahamson, Silva & Kamai (2014)",\
- "Boore, Stewart, Seyhan & Atkinson (2014)", "Campbell & Bozorgnia (2014)"],
- "PGA":["Chiou & Youngs (2014)", "Abrahamson, Silva & Kamai (2014)",\
- "Boore, Stewart, Seyhan & Atkinson (2014)", "Campbell & Bozorgnia (2014)"],
- "PGV":["Chiou & Youngs (2014)", "Abrahamson, Silva & Kamai (2014)",\
- "Boore, Stewart, Seyhan & Atkinson (2014)", "Campbell & Bozorgnia (2014)"]}
-
-OPENSHA_IM_GMPE = {"SA": ["Abrahamson, Silva & Kamai (2014)", "Boore, Stewart, Seyhan & Atkinson (2014)",
- "Campbell & Bozorgnia (2014)", "Chiou & Youngs (2014)"],
- "PGA": ["Abrahamson, Silva & Kamai (2014)", "Boore, Stewart, Seyhan & Atkinson (2014)",
- "Campbell & Bozorgnia (2014)", "Chiou & Youngs (2014)"],
- "PGV": ["Abrahamson, Silva & Kamai (2014)", "Boore, Stewart, Seyhan & Atkinson (2014)",
- "Campbell & Bozorgnia (2014)", "Chiou & Youngs (2014)"]}
-
-IM_GMPE = {"LOCAL": LOCAL_IM_GMPE,
- "OPENSHA": OPENSHA_IM_GMPE}
-
-import os
-import subprocess
-import sys
-import json
+import h5py
import numpy as np
-from numpy.lib.utils import source
-import pandas as pd
-from gmpe import SignificantDurationModel, openSHAGMPE
-from tqdm import tqdm
-import time
-from pathlib import Path
-import socket
-import collections
+
+LOCAL_IM_GMPE = {
+ 'DS575H': ['Bommer, Stafford & Alarcon (2009)', 'Afshari & Stewart (2016)'],
+ 'DS595H': ['Bommer, Stafford & Alarcon (2009)', 'Afshari & Stewart (2016)'],
+ 'DS2080H': ['Afshari & Stewart (2016)'],
+ 'SA': [
+ 'Chiou & Youngs (2014)',
+ 'Abrahamson, Silva & Kamai (2014)',
+ 'Boore, Stewart, Seyhan & Atkinson (2014)',
+ 'Campbell & Bozorgnia (2014)',
+ ],
+ 'PGA': [
+ 'Chiou & Youngs (2014)',
+ 'Abrahamson, Silva & Kamai (2014)',
+ 'Boore, Stewart, Seyhan & Atkinson (2014)',
+ 'Campbell & Bozorgnia (2014)',
+ ],
+ 'PGV': [
+ 'Chiou & Youngs (2014)',
+ 'Abrahamson, Silva & Kamai (2014)',
+ 'Boore, Stewart, Seyhan & Atkinson (2014)',
+ 'Campbell & Bozorgnia (2014)',
+ ],
+}
+
+OPENSHA_IM_GMPE = {
+ 'SA': [
+ 'Abrahamson, Silva & Kamai (2014)',
+ 'Boore, Stewart, Seyhan & Atkinson (2014)',
+ 'Campbell & Bozorgnia (2014)',
+ 'Chiou & Youngs (2014)',
+ ],
+ 'PGA': [
+ 'Abrahamson, Silva & Kamai (2014)',
+ 'Boore, Stewart, Seyhan & Atkinson (2014)',
+ 'Campbell & Bozorgnia (2014)',
+ 'Chiou & Youngs (2014)',
+ ],
+ 'PGV': [
+ 'Abrahamson, Silva & Kamai (2014)',
+ 'Boore, Stewart, Seyhan & Atkinson (2014)',
+ 'Campbell & Bozorgnia (2014)',
+ 'Chiou & Youngs (2014)',
+ ],
+}
+
+IM_GMPE = {'LOCAL': LOCAL_IM_GMPE, 'OPENSHA': OPENSHA_IM_GMPE}
+
+import collections # noqa: E402
+import json # noqa: E402
+import os # noqa: E402
+import socket # noqa: E402
+import sys # noqa: E402
+import time # noqa: E402
+from pathlib import Path # noqa: E402
+
+import pandas as pd # noqa: E402
+from gmpe import SignificantDurationModel, openSHAGMPE # noqa: E402
+from tqdm import tqdm # noqa: E402
+
if 'stampede2' not in socket.gethostname():
- from FetchOpenSHA import *
- from FetchOpenQuake import get_site_rup_info_oq
-import threading
-import ujson
-
-class IM_Calculator:
- # Chiou & Youngs (2014) GMPE class
- CY = None
- # Abrahamson, Silvar, & Kamai (2014)
- ASK = None
- # Boore, Stewart, Seyhan & Atkinson (2014)
- BSSA = None
- # Campbell & Bozorgnia (2014)
- CB = None
-
- # profile
- timeGetRuptureInfo = 0
- timeGetIM = 0
- def __init__(self, source_info=dict(), im_dict=dict(), gmpe_dict=dict(),
- gmpe_weights_dict=dict(), im_type=None, site_info=dict()):
-
- # basic set-ups
- self.set_im_gmpe(im_dict, gmpe_dict, gmpe_weights_dict)
- self.set_im_type(im_type)
- self.set_sites(site_info)
- # self.set_source(source_info)
-
-
- def set_source(self, source_info):
- # set seismic source
- self.source_info = source_info.copy()
- gmpe_list = set()
- for _, item in self.gmpe_dict.items():
- gmpe_list = gmpe_list.union(set(item))
- if source_info['Type']=='ERF':
- if 'Chiou & Youngs (2014)' in gmpe_list or 'Abrahamson, Silva & Kamai (2014)' in gmpe_list or\
- 'Boore, Stewart, Seyhan & Atkinson (2014)' in gmpe_list or\
- 'Campbell & Bozorgnia (2014)' in gmpe_list:
- source_index = source_info.get('SourceIndex', None)
- rupture_index = source_info.get('RuptureIndex', None)
- # start = time.process_time_ns()
- site_rup_dict, station_info = get_rupture_info_CY2014(self.erf, source_index, rupture_index, self.site_info)
- # self.timeGetRuptureInfo += time.process_time_ns() - start
- elif source_info['Type']=='PointSource':
- if 'Chiou & Youngs (2014)' in gmpe_list or 'Abrahamson, Silva & Kamai (2014)' in gmpe_list or\
- 'Boore, Stewart, Seyhan & Atkinson (2014)' in gmpe_list or\
- 'Campbell & Bozorgnia (2014)' in gmpe_list:
- # start = time.process_time_ns()
- site_rup_dict, station_info = get_PointSource_info_CY2014(source_info, self.site_info)
- # self.timeGetRuptureInfo += time.process_time_ns() - start
- elif source_info['Type'] == 'oqSourceXML':
- if 'Chiou & Youngs (2014)' in gmpe_list or 'Abrahamson, Silva & Kamai (2014)' in gmpe_list or\
- 'Boore, Stewart, Seyhan & Atkinson (2014)' in gmpe_list or\
- 'Campbell & Bozorgnia (2014)' in gmpe_list:
- # start = time.process_time_ns()
- site_rup_dict, station_info = get_site_rup_info_oq(source_info, self.site_info)
- # self.timeGetRuptureInfo += time.process_time_ns() - start
- self.site_rup_dict = site_rup_dict
- self.site_info = station_info
-
- def set_im_gmpe(self, im_dict, gmpe_dict, gmpe_weights_dict):
- # set im and gmpe information
- self.im_dict = im_dict.copy()
- self.gmpe_dict = gmpe_dict.copy()
- self.gmpe_weights_dict = gmpe_weights_dict.copy()
-
- def set_im_type(self, im_type):
- # set im type
- if im_type is None:
- self.im_type = None
- elif list(self.im_dict.keys()) and (im_type not in list(self.im_dict.keys())):
- print('IM_Calculator.set_im_type: warning - {} is not in the defined IM lists.'.format(im_type))
- self.im_type = None
- else:
- self.im_type = im_type
-
- def set_sites(self, site_info):
- # set sites
- self.site_info = site_info
-
- def calculate_im(self):
- # set up intensity measure calculations
- # current im type
- im_type = self.im_type
- if im_type is None:
- print('IM_Calculator.calculate_im: error - no IM type found.')
- return
- # get current im dict
- cur_im_dict = self.im_dict.get(im_type)
- # get gmpe list
- gmpe_list = self.gmpe_dict.get(im_type, None)
- if gmpe_list is None:
- print('IM_Calculator.calculate_im: error - no GMPE list found for {}.'.format(im_type))
- return
- # get gmpe weights
- gmpe_weights_list = self.gmpe_weights_dict.get(im_type, None)
- # parse the gmpe list (split the list to two - local and opensha)
- gmpe_list_local = []
- gmpe_weigts_list_local = []
- gmpe_list_opensha = []
- gmpe_weigts_list_opensha = []
- for i, cur_gmpe in enumerate(gmpe_list):
- if cur_gmpe in LOCAL_IM_GMPE.get(im_type, []):
- gmpe_list_local.append(cur_gmpe)
- if gmpe_weights_list is not None:
- gmpe_weigts_list_local.append(gmpe_weights_list[i])
- else:
- gmpe_weights_list_local = None
- elif cur_gmpe in OPENSHA_IM_GMPE.get(im_type, []):
- gmpe_list_opensha.append(cur_gmpe)
- if gmpe_weights_list is not None:
- gmpe_weigts_list_opensha.append(gmpe_weights_list[i])
- else:
- gmpe_weights_list_opensha = None
- else:
- print('IM_Calculator.calculate_im: error - {} is not supported.'.format(cur_gmpe))
- return
- # now compute im values
- if len(gmpe_list_local) > 0:
- res_local = self.get_im_from_local(self.source_info, gmpe_list_local, im_type, cur_im_dict, gmpe_weights=gmpe_weights_list_local)
- else:
- res_local = dict()
- if len(gmpe_list_opensha) > 0:
- res_opensha = self.get_im_from_opensha(self.source_info, gmpe_list_opensha, self.gmpe_dict.get('Parameters'), self.erf,
- self.site_info, im_type, cur_im_dict, gmpe_weights=gmpe_weights_list_opensha)
- else:
- res_opensha = dict()
-
- # collect/combine im results
- if len(res_local)+len(res_opensha) == 0:
- print('IM_Calculator.calculate_im: error - no results available... please check GMPE availability')
- return dict()
- if len(res_local) == 0:
- res = res_opensha
- elif len(res_opensha) == 0:
- res = res_local
- else:
- res = compute_weighted_res([res_local, res_opensha],
- [np.sum(gmpe_weights_list_local, np.sum(gmpe_weights_list_opensha))])
-
- # return
- return res
- def get_im_from_opensha(self, source_info, gmpe_list, gmpe_para, erf, station_info, im_type, im_info, gmpe_weights=None):
-
- # Computing IM
- res_list = []
- res = dict()
- curgmpe_info = {}
- station_list = station_info.get('SiteList')
- im_info.update({"Type": im_type})
- for cur_gmpe in gmpe_list:
- # set up site properties
- siteSpec, sites, site_prop = get_site_prop(cur_gmpe, station_list)
- curgmpe_info['Type'] = cur_gmpe
- curgmpe_info['Parameters'] = gmpe_para
- cur_res, station_info = get_IM(curgmpe_info, erf, sites, siteSpec, site_prop, source_info, station_info, im_info)
- cur_res.update({'IM': im_type})
- res_list.append(cur_res)
- # weighting if any
- if gmpe_weights is not None:
- res = compute_weighted_res(res_list, gmpe_weights)
- else:
- res = res_list[0]
- # return
- return res
-
- def get_im_from_local(self, source_info, gmpe_list, im_type, im_info, gmpe_weights=None):
- # initiate
- res_list = []
- res = dict()
- # check IM type
- if im_type not in list(LOCAL_IM_GMPE.keys()):
- print('ComputeIntensityMeasure.get_im_from_local: error - IM type {} not supported'.format(im_type))
- return res
- # get availabel gmpe list
- avail_gmpe = LOCAL_IM_GMPE.get(im_type)
- # back compatibility for now (useful if other local GMPEs for SA is included)
- cur_T = im_info.get('Periods', None)
- # source and rupture
- if source_info['Type'] == 'PointSource':
- # magnitude
- eq_magnitude = source_info['Magnitude']
- eq_loc = [source_info['Location']['Latitude'],
- source_info['Location']['Longitude'],
- source_info['Location']['Depth']]
- # maf
- meanAnnualRate = None
- elif source_info['Type'] == 'ERF':
- source_index = source_info.get('SourceIndex', None)
- rupture_index = source_info.get('RuptureIndex', None)
- if None in [source_index, rupture_index]:
- print('ComputeIntensityMeasure.get_im_from_local: error - source/rupture index not given.')
- return res
- # magnitude
- # eq_magnitude = erf.getSource(source_index).getRupture(rupture_index).getMag()
- eq_magnitude = source_info["Magnitude"]
- # maf
- # timeSpan = erf.getTimeSpan()
- # meanAnnualRate = erf.getSource(source_index).getRupture(rupture_index).getMeanAnnualRate(timeSpan.getDuration())
- meanAnnualRate = source_info["MeanAnnualRate"]
- elif source_info['Type'] == 'oqSourceXML':
- source_index = source_info.get('SourceIndex', None)
- rupture_index = source_info.get('RuptureIndex', None)
- if None in [source_index, rupture_index]:
- print('ComputeIntensityMeasure.get_im_from_local: error - source/rupture index not given.')
- return res
- # magnitude
- eq_magnitude = source_info["Magnitude"]
- # maf
- meanAnnualRate = source_info["MeanAnnualRate"]
- else:
- print('ComputeIntensityMeasure.get_im_from_local: error - source type {} not supported'.format(source_info['Type']))
- return res
- for cur_gmpe in gmpe_list:
- gm_collector = []
- if cur_gmpe not in avail_gmpe:
- print('ComputeIntensityMeasure.get_im_from_local: warning - {} is not available.'.format(cur_gmpe))
- continue
- for cur_site in self.site_info:
- # current site-rupture distance
- cur_dist = cur_site["rRup"]
- cur_vs30 = cur_site['vs30']
- tmpResult = {'Mean': [],
- 'TotalStdDev': [],
- 'InterEvStdDev': [],
- 'IntraEvStdDev': []}
- if cur_gmpe == 'Bommer, Stafford & Alarcon (2009)':
- mean, stdDev, interEvStdDev, intraEvStdDev = SignificantDurationModel.bommer_stafford_alarcon_ds_2009(magnitude=eq_magnitude,
- distance=cur_dist, vs30=cur_vs30,duration_type=im_type)
- tmpResult['Mean'].append(float(mean))
- tmpResult['TotalStdDev'].append(float(stdDev))
- tmpResult['InterEvStdDev'].append(float(interEvStdDev))
- tmpResult['IntraEvStdDev'].append(float(intraEvStdDev))
- elif cur_gmpe == 'Afshari & Stewart (2016)':
- mean, stdDev, interEvStdDev, intraEvStdDev = SignificantDurationModel.afshari_stewart_ds_2016(magnitude=eq_magnitude,
- distance=cur_dist, vs30=cur_vs30, duration_type=im_type)
- tmpResult['Mean'].append(float(mean))
- tmpResult['TotalStdDev'].append(float(stdDev))
- tmpResult['InterEvStdDev'].append(float(interEvStdDev))
- tmpResult['IntraEvStdDev'].append(float(intraEvStdDev))
- elif cur_gmpe == 'Chiou & Youngs (2014)':
- # start = time.process_time_ns()
- tmpResult = self.CY.get_IM(eq_magnitude, self.site_rup_dict, cur_site, im_info)
- # self.timeGetIM += time.process_time_ns() - start
- elif cur_gmpe == 'Abrahamson, Silva & Kamai (2014)':
- # start = time.process_time_ns()
- tmpResult = self.ASK.get_IM(eq_magnitude, self.site_rup_dict, cur_site, im_info)
- # self.timeGetIM += time.process_time_ns() - start
- elif cur_gmpe == 'Boore, Stewart, Seyhan & Atkinson (2014)':
- # start = time.process_time_ns()
- tmpResult = self.BSSA.get_IM(eq_magnitude, self.site_rup_dict, cur_site, im_info)
- # self.timeGetIM += time.process_time_ns() - start
- elif cur_gmpe == 'Campbell & Bozorgnia (2014)':
- # start = time.process_time_ns()
- tmpResult = self.CB.get_IM(eq_magnitude, self.site_rup_dict, cur_site, im_info)
- # self.timeGetIM += time.process_time_ns() - start
- else:
- print('ComputeIntensityMeasure.get_im_from_local: gmpe_name {} is not supported.'.format(cur_gmpe))
- # collect sites
- # gm_collector.append({
- # "Location": {'Latitude':cur_site['lat'], 'Longitude':cur_site['lon']},
- # "SiteData": {key: cur_site[key] for key in cur_site if key not in ['lat','lon']},
- # 'ln'+im_type: tmpResult
- # })
- gm_collector.append({
- 'ln'+im_type: tmpResult
- })
-
- # Final results
- cur_res = {'Magnitude': eq_magnitude,
- 'MeanAnnualRate': meanAnnualRate,
- 'SiteSourceDistance': source_info.get('SiteSourceDistance',None),
- 'SiteRuptureDistance': source_info.get('SiteRuptureDistance',None),
- 'Periods': cur_T,
- 'IM': im_type,
- 'GroundMotions': gm_collector}
- # collect gmpes
- res_list.append(cur_res)
-
- # weighting if any
- if gmpe_weights is not None:
- res = compute_weighted_res(res_list, gmpe_weights)
- else:
- res = res_list[0]
- # return
- return res
-
-def collect_multi_im_res(res_dict):
- res_list = []
- if 'PGA' in res_dict.keys():
- res_list.append(res_dict['PGA'])
- if 'SA' in res_dict.keys():
- res_list.append(res_dict['SA'])
- if 'PGV' in res_dict.keys():
- res_list.append(res_dict['PGV'])
- res = dict()
- num_res = len(res_list)
- if num_res == 0:
- print('IM_Calculator._collect_res: error - the res_list is empty')
- return res
- for i, cur_res in enumerate(res_list):
- if i == 0:
- res = cur_res
- res['IM'] = [cur_res['IM']]
- if cur_res.get('Periods', None) is None:
- res['Periods'] = [None]
- elif type(cur_res.get('Periods')) in [float, int]:
- res['Periods'] = [cur_res.get('Periods')]
- else:
- res['Periods'] = cur_res.get('Periods')
- else:
- res['IM'].append(cur_res['IM'])
- if cur_res.get('Periods', None) is None:
- res['Periods'] = res['Periods']+[None]
- elif type(cur_res.get('Periods')) in [float, int]:
- res['Periods'] = res['Periods']+[cur_res.get('Periods')]
- else:
- res['Periods'] = res['Periods']+cur_res.get('Periods')
- # combine ground motion characteristics
- for j in range(len(cur_res['GroundMotions'])):
- tmp_res = cur_res['GroundMotions'][j].get('ln{}'.format(cur_res['IM']))
- res['GroundMotions'][j].update({'ln{}'.format(cur_res['IM']): tmp_res})
-
- # return
- return res
-
-def collect_multi_im_res_hdf5(res_list, im_list):
- res = dict()
- num_res = len(res_list)
- if num_res == 0:
- print('IM_Calculator._collect_res: error - the res_list is empty')
- return res
- num_sites = len(res_list[list(res_list.keys())[0]]['GroundMotions'])
- collected_mean = np.zeros([num_sites, len(im_list)])
- collected_intraStd = np.zeros([num_sites, len(im_list)])
- collected_interStd = np.zeros([num_sites, len(im_list)])
- for i, im in enumerate(im_list):
- if im.startswith('PGA'):
- collected_mean[:,i] = np.array([x['lnPGA']['Mean'][0] for x in res_list['PGA']['GroundMotions']])
- collected_interStd[:,i] = np.array([x['lnPGA']['InterEvStdDev'][0] for x in res_list['PGA']['GroundMotions']])
- collected_intraStd[:,i] = np.array([x['lnPGA']['IntraEvStdDev'][0] for x in res_list['PGA']['GroundMotions']])
- if im.startswith('SA'):
- period = float(re.search(r'\((.*?)\)', im).group(1))
- period_i = res_list['SA']['Periods'].index(period)
- collected_mean[:,i] = np.array([x['lnSA']['Mean'][period_i] for x in res_list['SA']['GroundMotions']])
- collected_interStd[:,i] = np.array([x['lnSA']['InterEvStdDev'][period_i] for x in res_list['SA']['GroundMotions']])
- collected_intraStd[:,i] = np.array([x['lnSA']['IntraEvStdDev'][period_i] for x in res_list['SA']['GroundMotions']])
- if im.startswith('PGV'):
- collected_mean[:,i] = np.array([x['lnPGV']['Mean'][0] for x in res_list['PGV']['GroundMotions']])
- collected_interStd[:,i] = np.array([x['lnPGV']['InterEvStdDev'][0] for x in res_list['PGV']['GroundMotions']])
- collected_intraStd[:,i] = np.array([x['lnPGV']['IntraEvStdDev'][0] for x in res_list['PGV']['GroundMotions']])
- res.update({'Mean':collected_mean})
- res.update({'InterEvStdDev':collected_interStd})
- res.update({'IntraEvStdDev':collected_intraStd})
- # return
- return res
-
-
-def get_im_dict(im_info):
- if im_info.get("Type", None) == "Vector":
- im_dict = im_info.copy()
- im_dict.pop('Type')
- if ("PGV" in im_dict.keys()):
- PGV_dict = im_dict.pop('PGV')
- im_dict.update({'PGV':PGV_dict})
- else:
- # back compatibility
- im_dict = {im_info.get("Type"): im_info.copy()}
-
- # return
- return im_dict
-
-
-def get_gmpe_from_im_vector(im_info, gmpe_info):
-
- gmpe_dict = dict()
- gmpe_weights_dict = dict()
- # check IM info type
- if not (im_info.get("Type", None) == "Vector"):
- print('ComputeIntensityMeasure.get_gmpe_from_im_vector: error: IntensityMeasure Type should be Vector.')
- return gmpe_dict, gmpe_weights_dict
- else:
- im_keys = list(im_info.keys())
- im_keys.remove('Type')
- for cur_im in im_keys:
- cur_gmpe = im_info[cur_im].get("GMPE", None)
- cur_weights = im_info[cur_im].get("GMPEWeights", None)
- if cur_gmpe is None:
- print('ComputeIntensityMeasure.get_gmpe_from_im_vector: warning: GMPE not found for {}'.format(cur_im))
- else:
- # back compatibility
- if type(cur_gmpe) == str:
- if cur_gmpe == 'NGAWest2 2014 Averaged':
- cur_gmpe = ["Abrahamson, Silva & Kamai (2014)", "Boore, Stewart, Seyhan & Atkinson (2014)",
- "Campbell & Bozorgnia (2014)", "Chiou & Youngs (2014)"]
- cur_weights = [0.25, 0.25, 0.25, 0.25]
- else:
- cur_gmpe = [cur_gmpe]
- cur_weights = None
- gmpe_dict.update({cur_im: cur_gmpe})
- gmpe_weights_dict.update({cur_im: cur_weights})
- # global parameters if any
- gmpe_dict.update({'Parameters': gmpe_info.get('Parameters',dict())})
- # return
- return gmpe_dict, gmpe_weights_dict
-
-
-def get_gmpe_from_im_legency(im_info, gmpe_info, gmpe_weights=None):
-
- # back compatibility for getting ims and gmpes
- gmpe_dict = dict()
- gmpe_weights_dict = dict()
- if gmpe_info['Type'] == 'NGAWest2 2014 Averaged':
- gmpe_list = ["Abrahamson, Silva & Kamai (2014)", "Boore, Stewart, Seyhan & Atkinson (2014)",
- "Campbell & Bozorgnia (2014)", "Chiou & Youngs (2014)"]
- if gmpe_weights is None:
- gmpe_weights = [0.25, 0.25, 0.25, 0.25]
- im_type = im_info.get('Type')
- gmpe_dict = {im_type: gmpe_list}
- else:
- gmpe_list = [gmpe_info['Type']]
- gmpe_weights = None
- im_type = im_info.get('Type')
- # for im_type in im_types:
- gmpe_dict.update({im_type: gmpe_list})
- gmpe_weights_dict = {im_type: gmpe_weights}
- # global parameters if any
- gmpe_dict.update({'Parameters': gmpe_info.get('Parameters',dict())})
- # return
- return gmpe_dict, gmpe_weights_dict
-
-
-def compute_im(scenarios, stations, EqRupture_info, gmpe_info, im_info, generator_info, output_dir, filename='IntensityMeasureMeanStd.hdf5', mth_flag=True):
-
- # Calling OpenSHA to compute median PSA
- if len(scenarios) < 10:
- filename = 'IntensityMeasureMeanStd.json'
- saveInJson = True
- im_raw = {}
- else:
- saveInJson = False
- filename = os.path.join(output_dir, filename)
- im_list = []
- if 'PGA' in im_info.keys():
- im_list.append('PGA')
- if 'SA' in im_info.keys():
- for cur_period in im_info['SA']['Periods']:
- im_list.append('SA({})'.format(str(cur_period)))
- if 'PGV' in im_info.keys():
- im_list.append('PGV')
- # Stations
- station_list = [{
- 'Location': {
- 'Latitude': stations[j]['lat'],
- 'Longitude': stations[j]['lon']
- }
- } for j in range(len(stations))]
- for j in range(len(stations)):
- if stations[j].get('vs30'):
- station_list[j].update({'Vs30': int(stations[j]['vs30'])})
- station_info = {'Type': 'SiteList',
- 'SiteList': station_list}
- # hazard occurrent model
- if generator_info['method']=='Subsampling':
- # check if the period in the hazard curve is in the period list in the intensity measure
- if generator_info['Parameters'].get('IntensityMeasure')=='SA':
- ho_period = generator_info['Parameters'].get('Period')
- if im_info['Type'] == 'Vector':
- if im_info.get('SA') is None:
- sys.exit('SA is used in hazard downsampling but not defined in the intensity measure tab')
- else:
- if ho_period in im_info['SA'].get('Periods'):
- pass
- else:
- tmp_periods = im_info['SA']['Periods']+[ho_period]
- tmp_periods.sort()
- im_info['SA']['Periods'] = tmp_periods
- else:
- if ho_period in im_info['SA'].get('Periods'):
- pass
- else:
- tmp_periods = im_info['SA']['Periods']+[ho_period]
- tmp_periods.sort()
- im_info['SA']['Periods'] = tmp_periods
- # prepare gmpe list for intensity measure
- if gmpe_info['Type'] in ['Vector']:
- gmpe_dict, gmpe_weights_dict = get_gmpe_from_im_vector(im_info, gmpe_info)
- else:
- gmpe_dict, gmpe_weights_dict = get_gmpe_from_im_legency(im_info, gmpe_info)
- # prepare intensity measure dict
- im_dict = get_im_dict(im_info)
-
- t_start = time.time()
- # Loop over scenarios
- if mth_flag is False:
- # create a IM calculator
- im_calculator = IM_Calculator(im_dict=im_dict, gmpe_dict=gmpe_dict,
- gmpe_weights_dict=gmpe_weights_dict, site_info=stations)
- if EqRupture_info['EqRupture']['Type'] in ['ERF']:
- im_calculator.erf = getERF(EqRupture_info)
- else:
- im_calculator.erf = None
- gmpe_set = set()
- for _, item in gmpe_dict.items():
- gmpe_set = gmpe_set.union(set(item))
- for gmpe in gmpe_set:
- if gmpe == "Chiou & Youngs (2014)":
- im_calculator.CY = openSHAGMPE.chiou_youngs_2013()
- if gmpe == 'Abrahamson, Silva & Kamai (2014)':
- im_calculator.ASK = openSHAGMPE.abrahamson_silva_kamai_2014()
- if gmpe == 'Boore, Stewart, Seyhan & Atkinson (2014)':
- im_calculator.BSSA = openSHAGMPE.boore_etal_2014()
- if gmpe == 'Campbell & Bozorgnia (2014)':
- im_calculator.CB = openSHAGMPE.campbell_bozorgnia_2014()
- # for i in tqdm(range(len(scenarios.keys())), desc=f"Evaluate GMPEs for {len(scenarios.keys())} scenarios"):
- # Initialize an hdf5 file for IMmeanStd
- if os.path.exists(filename):
- os.remove(filename)
- for i in tqdm(range(len(scenarios.keys())), desc=f"Evaluate GMPEs for {len(scenarios.keys())} scenarios"):
- # for i, key in enumerate(scenarios.keys()):
- # print('ComputeIntensityMeasure: Scenario #{}/{}'.format(i+1,len(scenarios)))
- # Rupture
- key = int(list(scenarios.keys())[i])
- source_info = scenarios[key]
- im_calculator.set_source(source_info)
- # Computing IM
- res_list = dict()
- for cur_im_type in list(im_dict.keys()):
- im_calculator.set_im_type(cur_im_type)
- res_list.update({cur_im_type:im_calculator.calculate_im()})
- # Collecting outputs
- # collectedResult.update({'SourceIndex':source_info['SourceIndex'], 'RuptureIndex':source_info['RuptureIndex']})
- if saveInJson:
- collectedResult = collect_multi_im_res(res_list)
- im_raw.update({key:collectedResult})
- else:
- collectedResult = collect_multi_im_res_hdf5(res_list, im_list)
- with h5py.File(filename, 'a') as f:
- # Add a group named by the scenario index and has four dataset
- # mean, totalSTd, interStd,itrastd
- grp = f.create_group(str(i))
- grp.create_dataset("Mean", data=collectedResult['Mean'])
- grp.create_dataset("InterEvStdDev", data=collectedResult['InterEvStdDev'])
- grp.create_dataset("IntraEvStdDev", data=collectedResult['IntraEvStdDev'])
- # if (i % 250 == 0):
- # if saveInJson:
- # print(f"Size of im_raw for {i} scenario is {sys.getsizeof(im_raw)}")
- # else:
- # print(f"Another 250 scenarios computed")
-
- if mth_flag:
- res_dict = {}
- sub_ths = []
- num_bins = 200
- bin_size = int(np.ceil(len(scenarios)/num_bins))
- ids_list = []
- scen_list = []
- for k in range(0, len(scenarios), bin_size):
- ids_list.append(list(scenarios.keys())[k:k+bin_size])
- scen_list.append([scenarios[x] for x in list(scenarios.keys())[k:k+bin_size]])
- #print(ids_list)
- for i in range(len(ids_list)):
- th = threading.Thread(target=compute_im_para, args=(ids_list[i], scen_list[i], im_dict, gmpe_dict, gmpe_weights_dict, station_info, res_dict))
- sub_ths.append(th)
- th.start()
-
- for th in sub_ths:
- th.join()
-
- # order the res_dict by id
- res_ordered = collections.OrderedDict(sorted(res_dict.items()))
- for i, cur_res in res_ordered.items():
- im_raw.append(cur_res)
-
- print('ComputeIntensityMeasure: mean and standard deviation of intensity measures {0} sec'.format(time.time() - t_start))
-
- if saveInJson:
- with open(filename, "w") as f:
- ujson.dump(im_raw, f, indent=1)
- # return
- return filename, im_list
-
-
-def compute_im_para(ids, scenario_infos, im_dict, gmpe_dict, gmpe_weights_dict, station_info, res_dict):
-
- for i, id in enumerate(ids):
- print('ComputeIntensityMeasure: Scenario #{}.'.format(id+1))
- scenario_info = scenario_infos[i]
- # create a IM calculator
- im_calculator = IM_Calculator(im_dict=im_dict, gmpe_dict=gmpe_dict,
- gmpe_weights_dict=gmpe_weights_dict, site_info=station_info)
- # set scenario information
- im_calculator.set_source(scenario_info)
- # computing IM
- res_list = []
- for cur_im_type in list(im_dict.keys()):
- im_calculator.set_im_type(cur_im_type)
- res_list.append(im_calculator.calculate_im())
- # clean
- del im_calculator
- # collect multiple ims
- res = collect_multi_im_res(res_list)
- # append res to res_dcit
- res_dict[id] = res
- # return
- return
-
-
-def export_im(stations, im_list, im_data, eq_data, output_dir, filename, csv_flag,\
- gf_im_list, scenario_ids):
- # Rename SA(xxx) to SA_xxx
- for i, im in enumerate(im_list):
- if im.startswith('SA'):
- im_list[i] = im_list[i].split('(')[0] + '_' + im_list[i].split('(')[1][:-1]
- #try:
- # Station number
- num_stations = len(stations)
- # Scenario number
- num_scenarios = len(eq_data)
- eq_data = np.array(eq_data)
- # Saving large files to HDF while small files to JSON
- if num_scenarios > 100000:
- # Pandas DataFrame
- h_scenarios = ['Scenario-'+str(x) for x in range(1, num_scenarios + 1)]
- h_eq = ['Latitude', 'Longitude', 'Vs30', 'Magnitude', 'MeanAnnualRate','SiteSourceDistance','SiteRuptureDistance']
- for x in range(1, im_data[0][0, :, :].shape[1]+1):
- for y in im_list:
- h_eq.append('Record-'+str(x)+'-{}'.format(y))
- index = pd.MultiIndex.from_product([h_scenarios, h_eq])
- columns = ['Site-'+str(x) for x in range(1, num_stations + 1)]
- df = pd.DataFrame(index=index, columns=columns, dtype=float)
- # Data
- for i in range(num_stations):
- tmp = []
- for j in range(num_scenarios):
- tmp.append(stations[i]['lat'])
- tmp.append(stations[i]['lon'])
- tmp.append(int(stations[i]['vs30']))
- tmp.append(eq_data[j][0])
- tmp.append(eq_data[j][1])
- tmp.append(eq_data[j][2])
- tmp.append(eq_data[j][3])
- for x in np.ndarray.tolist(im_data[j][i, :, :].T):
- for y in x:
- tmp.append(y)
- df['Site-'+str(i+1)] = tmp
- # HDF output
- try:
- os.remove(os.path.join(output_dir, filename.replace('.json', '.h5')))
- except:
- pass
- hdf = pd.HDFStore(os.path.join(output_dir, filename.replace('.json', '.h5')))
- hdf.put('SiteIM', df, format='table', complib='zlib')
- hdf.close()
- else:
- res = []
- for i in range(num_stations):
- tmp = {'Location': {
- 'Latitude': stations[i]['lat'],
- 'Longitude': stations[i]['lon']
- },
- 'Vs30': int(stations[i]['vs30'])
- }
- tmp.update({'IMS': im_list})
- tmp_im = []
- for j in range(num_scenarios):
- tmp_im.append(np.ndarray.tolist(im_data[j][i, :, :]))
- if len(tmp_im) == 1:
- # Simplifying the data structure if only one scenario exists
- tmp_im = tmp_im[0]
- tmp.update({'lnIM': tmp_im})
- res.append(tmp)
- maf_out = []
- for ind, cur_eq in enumerate(eq_data):
- if cur_eq[1]:
- mar = cur_eq[1]
- else:
- mar = 'N/A'
- if cur_eq[2]:
- ssd = cur_eq[2]
- else:
- ssd = 'N/A'
- if len(cur_eq)>3 and cur_eq[3]:
- srd = cur_eq[3]
- else:
- srd = 'N/A'
- tmp = {'Magnitude': float(cur_eq[0]),
- 'MeanAnnualRate': mar,
- 'SiteSourceDistance': ssd,
- 'SiteRuputureDistance': srd,
- 'ScenarioIndex': int(scenario_ids[ind])}
- maf_out.append(tmp)
- res = {'Station_lnIM': res,
- 'Earthquake_MAF': maf_out}
- # save SiteIM.json
- with open(os.path.join(output_dir, filename), "w") as f:
- json.dump(res, f, indent=2)
- # export the event grid and station csv files
- if csv_flag:
- # output EventGrid.csv
- station_name = ['site'+str(stations[j]['ID'])+'.csv' for\
- j in range(len(stations))]
- lat = [stations[j]['lat'] for j in range(len(stations))]
- lon = [stations[j]['lon'] for j in range(len(stations))]
- # vs30 = [stations[j]['vs30'] for j in range(len(stations))]
- # zTR = [stations[j]['DepthToRock'] for j in range(len(stations))]
- df = pd.DataFrame({
- 'GP_file': station_name,
- 'Longitude': lon,
- 'Latitude': lat,
- # 'Vs30': vs30,
- # 'DepthToRock': zTR
- })
- # if cur_eq[2]:
- # df['SiteSourceDistance'] = cur_eq[2]
- output_dir = os.path.join(os.path.dirname(Path(output_dir)),
- os.path.basename(Path(output_dir)))
- # seperate directory for IM
- output_dir = os.path.join(output_dir, 'IMs')
- try:
- os.makedirs(output_dir)
- except:
- print('HazardSimulation: output folder already exists.')
- # save the csv
- df.to_csv(os.path.join(output_dir, 'EventGrid.csv'), index = False)
- # output station#.csv
- # csv header
- csvHeader = im_list
- for cur_scen in range(len(im_data)):
- if len(im_data) > 1:
- # IMPORTANT: the scenario index starts with 1 in the front end.
- cur_scen_folder = 'scenario'+str(int(scenario_ids[cur_scen])+1)
- try:
- os.mkdir(os.path.join(output_dir, cur_scen_folder))
- except:
- pass
- # print('ComputeIntensityMeasure: scenario folder already exists.')
- cur_output_dir = os.path.join(output_dir, cur_scen_folder)
- else:
- cur_output_dir = output_dir
- # current IM data
- cur_im_data = im_data[cur_scen]
- for i, site_id in enumerate(station_name):
- df = dict()
- # Loop over all intensity measures
- for cur_im_tag in range(len(csvHeader)):
- if (csvHeader[cur_im_tag].startswith('SA')) or \
- (csvHeader[cur_im_tag] in ['PGA', 'PGV']):
- df.update({
- csvHeader[cur_im_tag]: np.exp(cur_im_data[i, cur_im_tag, :])
- })
- else:
- df.update({
- csvHeader[cur_im_tag]: cur_im_data[i, cur_im_tag, :]
- })
- df = pd.DataFrame(df)
- # Combine PGD from liquefaction, landslide and fault
- if 'liq_PGD_h' in df.columns or 'lsd_PGD_h'in df.columns or 'fd_PGD_h' in df.columns:
- PGD_h = np.zeros(df.shape[0])
- if 'liq_PGD_h' in df.columns:
- PGD_h += df['liq_PGD_h'].to_numpy()
- if 'lsd_PGD_h' in df.columns:
- PGD_h += df['lsd_PGD_h'].to_numpy()
- if 'fd_PGD_h' in df.columns:
- PGD_h += df['fd_PGD_h'].to_numpy()
- df['PGD_h'] = PGD_h
- if 'liq_PGD_v' in df.columns or 'lsd_PGD_v'in df.columns or 'fd_PGD_v' in df.columns:
- PGD_v = np.zeros(df.shape[0])
- if 'liq_PGD_v' in df.columns:
- PGD_v += df['liq_PGD_v'].to_numpy()
- if 'lsd_PGD_v' in df.columns:
- PGD_v += df['lsd_PGD_v'].to_numpy()
- if 'fd_PGD_v' in df.columns:
- PGD_v += df['fd_PGD_v'].to_numpy()
- df['PGD_v'] = PGD_v
- colToDrop = []
- for col in df.columns:
- if (not col.startswith('SA')) and (col not in ['PGA', 'PGV',\
- 'PGD_h', 'PGD_v']) and (col not in gf_im_list):
- colToDrop.append(col)
- df.drop(columns=colToDrop, inplace=True)
- # if 'liq_prob' in df.columns:
- # df.drop(columns=['liq_prob'], inplace=True)
- # if 'liq_susc' in df.columns:
- # df.drop(columns=['liq_susc'], inplace=True)
- df.fillna('NaN', inplace=True)
- df.to_csv(os.path.join(cur_output_dir, site_id), index = False)
-
-
- # output the site#.csv file including all scenarios
- if len(im_data) > 1:
- print('ComputeIntensityMeasure: saving all selected scenarios.')
- # lopp over sites
- for i, site_id in enumerate(station_name):
- df = dict()
- for cur_im_tag in range(len(csvHeader)):
- tmp_list = []
- # loop over all scenarios
- for cur_scen in range(len(im_data)):
- tmp_list = tmp_list + im_data[cur_scen][i, cur_im_tag, :].tolist()
- if (csvHeader[cur_im_tag].startswith('SA')) or \
- (csvHeader[cur_im_tag] in ['PGA', 'PGV']):
- df.update({
- csvHeader[cur_im_tag]: np.exp(tmp_list)
- })
- else:
- df.update({
- csvHeader[cur_im_tag]: tmp_list
- })
- df = pd.DataFrame(df)
- # Combine PGD from liquefaction, landslide and fault
- if 'liq_PGD_h' in df.columns or 'lsd_PGD_h'in df.columns or 'fd_PGD_h' in df.columns:
- PGD_h = np.zeros(df.shape[0])
- if 'liq_PGD_h' in df.columns:
- PGD_h += df['liq_PGD_h'].to_numpy()
- if 'lsd_PGD_h' in df.columns:
- PGD_h += df['lsd_PGD_h'].to_numpy()
- if 'fd_PGD_h' in df.columns:
- PGD_h += df['fd_PGD_h'].to_numpy()
- df['PGD_h'] = PGD_h
- if 'liq_PGD_v' in df.columns or 'lsd_PGD_v'in df.columns or 'fd_PGD_v' in df.columns:
- PGD_v = np.zeros(df.shape[0])
- if 'liq_PGD_v' in df.columns:
- PGD_v += df['liq_PGD_v'].to_numpy()
- if 'lsd_PGD_v' in df.columns:
- PGD_v += df['lsd_PGD_v'].to_numpy()
- if 'fd_PGD_v' in df.columns:
- PGD_v += df['fd_PGD_v'].to_numpy()
- df['PGD_v'] = PGD_v
- colToDrop = []
- for col in df.columns:
- if (not col.startswith('SA')) and (col not in ['PGA', 'PGV',\
- 'PGD_h', 'PGD_v']) and (col not in gf_im_list):
- colToDrop.append(col)
- df.drop(columns=colToDrop, inplace=True)
- df.fillna('NaN', inplace=True)
- df.to_csv(os.path.join(output_dir, site_id), index = False)
- # return
- return 0
- #except:
- # return
- #return 1
-
-
-def compute_weighted_res(res_list, gmpe_weights):
-
- # compute weighted average of gmpe results
- # initialize the return res (these three attributes are identical in different gmpe results)
- res = {'Magnitude': res_list[0]['Magnitude'],
- 'MeanAnnualRate': res_list[0]['MeanAnnualRate'],
- 'SiteSourceDistance': res_list[0].get('SiteSourceDistance',None),
- 'Periods': res_list[0]['Periods'],
- 'IM': res_list[0]['IM']}
- # number of gmpe
- num_gmpe = len(res_list)
- # check number of weights
- if not (num_gmpe == len(gmpe_weights)):
- print('ComputeIntensityMeasure: please check the weights of different GMPEs.')
- return 1
- # site number
- num_site = len(res_list[0]['GroundMotions'])
- # loop over different sites
- gm_collector = []
- for site_tag in range(num_site):
- # loop over different GMPE
- tmp_res = {}
- for i, cur_res in enumerate(res_list):
- cur_gmResults = cur_res['GroundMotions'][site_tag]
- # get keys
- im_keys = list(cur_gmResults.keys())
- for cur_im in im_keys:
- if not (cur_im in list(tmp_res.keys())):
- if cur_im in ['Location','SiteData']:
- tmp_res.update({cur_im: cur_gmResults[cur_im]})
- else:
- tmp_res.update({cur_im: {}})
- if not (cur_im in ['Location','SiteData']):
- # get components
- comp_keys = list(cur_gmResults[cur_im].keys())
- # loop over differen components
- for cur_comp in comp_keys:
- if not (cur_comp in list(tmp_res[cur_im].keys())):
- tmp_res[cur_im].update({cur_comp: []})
- for cur_value in cur_gmResults[cur_im][cur_comp]:
- if 'StdDev' in cur_comp:
- # standard deviation
- tmp_res[cur_im][cur_comp].append(np.sqrt(cur_value ** 2.0 * gmpe_weights[i]))
- else:
- # mean
- tmp_res[cur_im][cur_comp].append(cur_value * gmpe_weights[i])
- else:
- for j, cur_value in enumerate(cur_gmResults[cur_im][cur_comp]):
- if 'StdDev' in cur_comp:
- # standard deviation
- tmp_res[cur_im][cur_comp][j] = np.sqrt(tmp_res[cur_im][cur_comp][j] ** 2.0 + cur_value ** 2.0 * gmpe_weights[i])
- else:
- # mean
- tmp_res[cur_im][cur_comp][j] = tmp_res[cur_im][cur_comp][j] + cur_value * gmpe_weights[i]
- # collector
- gm_collector.append(tmp_res)
- # res
- res.update({'GroundMotions': gm_collector})
- # return
- return res
\ No newline at end of file
+ from FetchOpenQuake import get_site_rup_info_oq
+ from FetchOpenSHA import * # noqa: F403
+import threading # noqa: E402
+
+import ujson # noqa: E402
+
+
+class IM_Calculator: # noqa: D101
+ # Chiou & Youngs (2014) GMPE class
+ CY = None
+ # Abrahamson, Silva & Kamai (2014)
+ ASK = None
+ # Boore, Stewart, Seyhan & Atkinson (2014)
+ BSSA = None
+ # Campbell & Bozorgnia (2014)
+ CB = None
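+ # Instances of the locally implemented NGA-West2 GMPE classes (openSHAGMPE module);
+ # compute_im() attaches them only when the corresponding model is requested.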
+
+ # profile
+ timeGetRuptureInfo = 0 # noqa: N815
+ timeGetIM = 0 # noqa: N815
+
+ def __init__(
+ self,
+ source_info=dict(), # noqa: B006, C408, ARG002
+ im_dict=dict(), # noqa: B006, C408
+ gmpe_dict=dict(), # noqa: B006, C408
+ gmpe_weights_dict=dict(), # noqa: B006, C408
+ im_type=None,
+ site_info=dict(), # noqa: B006, C408
+ ):
+ # basic set-ups
+ self.set_im_gmpe(im_dict, gmpe_dict, gmpe_weights_dict)
+ self.set_im_type(im_type)
+ self.set_sites(site_info)
+ # self.set_source(source_info)
+
+ def set_source(self, source_info): # noqa: D102
+ # set seismic source
+ self.source_info = source_info.copy()
+ gmpe_list = set()
+ for _, item in self.gmpe_dict.items(): # noqa: PERF102
+ gmpe_list = gmpe_list.union(set(item))
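+ # The site-rupture geometry dict is only needed by the locally evaluated
+ # NGA-West2 GMPEs; other models go through OpenSHA (see get_im_from_opensha)
+ # and do not use it.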
+ if source_info['Type'] == 'ERF':
+ if (
+ 'Chiou & Youngs (2014)' in gmpe_list
+ or 'Abrahamson, Silva & Kamai (2014)' in gmpe_list
+ or 'Boore, Stewart, Seyhan & Atkinson (2014)' in gmpe_list
+ or 'Campbell & Bozorgnia (2014)' in gmpe_list
+ ):
+ source_index = source_info.get('SourceIndex', None)
+ rupture_index = source_info.get('RuptureIndex', None)
+ # start = time.process_time_ns()
+ site_rup_dict, station_info = get_rupture_info_CY2014( # noqa: F405
+ self.erf, source_index, rupture_index, self.site_info
+ )
+ # self.timeGetRuptureInfo += time.process_time_ns() - start
+ elif source_info['Type'] == 'PointSource':
+ if (
+ 'Chiou & Youngs (2014)' in gmpe_list
+ or 'Abrahamson, Silva & Kamai (2014)' in gmpe_list
+ or 'Boore, Stewart, Seyhan & Atkinson (2014)' in gmpe_list
+ or 'Campbell & Bozorgnia (2014)' in gmpe_list
+ ):
+ # start = time.process_time_ns()
+ site_rup_dict, station_info = get_PointSource_info_CY2014( # noqa: F405
+ source_info, self.site_info
+ )
+ # self.timeGetRuptureInfo += time.process_time_ns() - start
+ elif source_info['Type'] == 'oqSourceXML':
+ if (
+ 'Chiou & Youngs (2014)' in gmpe_list
+ or 'Abrahamson, Silva & Kamai (2014)' in gmpe_list
+ or 'Boore, Stewart, Seyhan & Atkinson (2014)' in gmpe_list
+ or 'Campbell & Bozorgnia (2014)' in gmpe_list
+ ):
+ # start = time.process_time_ns()
+ site_rup_dict, station_info = get_site_rup_info_oq(
+ source_info, self.site_info
+ )
+ # self.timeGetRuptureInfo += time.process_time_ns() - start
+ self.site_rup_dict = site_rup_dict
+ self.site_info = station_info
+
+ def set_im_gmpe(self, im_dict, gmpe_dict, gmpe_weights_dict): # noqa: D102
+ # set im and gmpe information
+ self.im_dict = im_dict.copy()
+ self.gmpe_dict = gmpe_dict.copy()
+ self.gmpe_weights_dict = gmpe_weights_dict.copy()
+
+ def set_im_type(self, im_type): # noqa: D102
+ # set im type
+ if im_type is None:
+ self.im_type = None
+ elif list(self.im_dict.keys()) and (
+ im_type not in list(self.im_dict.keys())
+ ):
+ print( # noqa: T201
+ f'IM_Calculator.set_im_type: warning - {im_type} is not in the defined IM lists.'
+ )
+ self.im_type = None
+ else:
+ self.im_type = im_type
+
+ def set_sites(self, site_info): # noqa: D102
+ # set sites
+ self.site_info = site_info
+
+ def calculate_im(self): # noqa: C901, D102
+ # set up intensity measure calculations
+ # current im type
+ im_type = self.im_type
+ if im_type is None:
+ print('IM_Calculator.calculate_im: error - no IM type found.') # noqa: T201
+ return None
+ # get current im dict
+ cur_im_dict = self.im_dict.get(im_type)
+ # get gmpe list
+ gmpe_list = self.gmpe_dict.get(im_type, None)
+ if gmpe_list is None:
+ print( # noqa: T201
+ f'IM_Calculator.calculate_im: error - no GMPE list found for {im_type}.'
+ )
+ return None
+ # get gmpe weights
+ gmpe_weights_list = self.gmpe_weights_dict.get(im_type, None)
+ # parse the gmpe list (split the list to two - local and opensha)
+ gmpe_list_local = []
+ gmpe_weights_list_local = []
+ gmpe_list_opensha = []
+ gmpe_weights_list_opensha = []
+ for i, cur_gmpe in enumerate(gmpe_list):
+ if cur_gmpe in LOCAL_IM_GMPE.get(im_type, []):
+ gmpe_list_local.append(cur_gmpe)
+ if gmpe_weights_list is not None:
+ gmpe_weights_list_local.append(gmpe_weights_list[i])
+ else:
+ gmpe_weights_list_local = None
+ elif cur_gmpe in OPENSHA_IM_GMPE.get(im_type, []):
+ gmpe_list_opensha.append(cur_gmpe)
+ if gmpe_weights_list is not None:
+ gmpe_weights_list_opensha.append(gmpe_weights_list[i])
+ else:
+ gmpe_weights_list_opensha = None
+ else:
+ print( # noqa: T201
+ f'IM_Calculator.calculate_im: error - {cur_gmpe} is not supported.'
+ )
+ return None
+ # now compute im values
+ if len(gmpe_list_local) > 0:
+ res_local = self.get_im_from_local(
+ self.source_info,
+ gmpe_list_local,
+ im_type,
+ cur_im_dict,
+ gmpe_weights=gmpe_weights_list_local,
+ )
+ else:
+ res_local = dict() # noqa: C408
+ if len(gmpe_list_opensha) > 0:
+ res_opensha = self.get_im_from_opensha(
+ self.source_info,
+ gmpe_list_opensha,
+ self.gmpe_dict.get('Parameters'),
+ self.erf,
+ self.site_info,
+ im_type,
+ cur_im_dict,
+ gmpe_weights=gmpe_weights_list_opensha,
+ )
+ else:
+ res_opensha = dict() # noqa: C408
+
+ # collect/combine im results
+ if len(res_local) + len(res_opensha) == 0:
+ print( # noqa: T201
+ 'IM_Calculator.calculate_im: error - no results available... please check GMPE availability'
+ )
+ return dict() # noqa: C408
+ if len(res_local) == 0:
+ res = res_opensha
+ elif len(res_opensha) == 0:
+ res = res_local
+ else:
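+ # both local and OpenSHA results exist: combine them, weighting each
+ # group by the sum of its GMPE weights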
+ res = compute_weighted_res(
+ [res_local, res_opensha],
+ [np.sum(gmpe_weights_list_local), np.sum(gmpe_weights_list_opensha)],
+ )
+
+ # return
+ return res
+
+ def get_im_from_opensha( # noqa: D102, PLR6301
+ self,
+ source_info,
+ gmpe_list,
+ gmpe_para,
+ erf,
+ station_info,
+ im_type,
+ im_info,
+ gmpe_weights=None,
+ ):
+ # Computing IM
+ res_list = []
+ res = dict() # noqa: C408
+ curgmpe_info = {}
+ station_list = station_info.get('SiteList')
+ im_info.update({'Type': im_type})
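+ # each requested GMPE is evaluated through the OpenSHA wrapper
+ # (get_site_prop / get_IM); per-GMPE results are weight-averaged below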
+ for cur_gmpe in gmpe_list:
+ # set up site properties
+ siteSpec, sites, site_prop = get_site_prop(cur_gmpe, station_list) # noqa: N806, F405
+ curgmpe_info['Type'] = cur_gmpe
+ curgmpe_info['Parameters'] = gmpe_para
+ cur_res, station_info = get_IM( # noqa: F405
+ curgmpe_info,
+ erf,
+ sites,
+ siteSpec,
+ site_prop,
+ source_info,
+ station_info,
+ im_info,
+ )
+ cur_res.update({'IM': im_type})
+ res_list.append(cur_res)
+ # weighting if any
+ if gmpe_weights is not None:
+ res = compute_weighted_res(res_list, gmpe_weights)
+ else:
+ res = res_list[0]
+ # return
+ return res
+
+ def get_im_from_local( # noqa: C901, D102
+ self,
+ source_info,
+ gmpe_list,
+ im_type,
+ im_info,
+ gmpe_weights=None,
+ ):
+ # initiate
+ res_list = []
+ res = dict() # noqa: C408
+ # check IM type
+ if im_type not in list(LOCAL_IM_GMPE.keys()):
+ print( # noqa: T201
+ f'ComputeIntensityMeasure.get_im_from_local: error - IM type {im_type} not supported'
+ )
+ return res
+ # get available gmpe list
+ avail_gmpe = LOCAL_IM_GMPE.get(im_type)
+ # back compatibility for now (useful if other local GMPEs for SA are included)
+ cur_T = im_info.get('Periods', None) # noqa: N806
+ # source and rupture
+ if source_info['Type'] == 'PointSource':
+ # magnitude
+ eq_magnitude = source_info['Magnitude']
+ eq_loc = [ # noqa: F841
+ source_info['Location']['Latitude'],
+ source_info['Location']['Longitude'],
+ source_info['Location']['Depth'],
+ ]
+ # maf
+ meanAnnualRate = None # noqa: N806
+ elif source_info['Type'] == 'ERF':
+ source_index = source_info.get('SourceIndex', None)
+ rupture_index = source_info.get('RuptureIndex', None)
+ if None in [source_index, rupture_index]: # noqa: PLR6201
+ print( # noqa: T201
+ 'ComputeIntensityMeasure.get_im_from_local: error - source/rupture index not given.'
+ )
+ return res
+ # magnitude
+ # eq_magnitude = erf.getSource(source_index).getRupture(rupture_index).getMag()
+ eq_magnitude = source_info['Magnitude']
+ # maf
+ # timeSpan = erf.getTimeSpan()
+ # meanAnnualRate = erf.getSource(source_index).getRupture(rupture_index).getMeanAnnualRate(timeSpan.getDuration())
+ meanAnnualRate = source_info['MeanAnnualRate'] # noqa: N806
+ elif source_info['Type'] == 'oqSourceXML':
+ source_index = source_info.get('SourceIndex', None)
+ rupture_index = source_info.get('RuptureIndex', None)
+ if None in [source_index, rupture_index]: # noqa: PLR6201
+ print( # noqa: T201
+ 'ComputeIntensityMeasure.get_im_from_local: error - source/rupture index not given.'
+ )
+ return res
+ # magnitude
+ eq_magnitude = source_info['Magnitude']
+ # maf
+ meanAnnualRate = source_info['MeanAnnualRate'] # noqa: N806
+ else:
+ print( # noqa: T201
+ 'ComputeIntensityMeasure.get_im_from_local: error - source type {} not supported'.format(
+ source_info['Type']
+ )
+ )
+ return res
+ for cur_gmpe in gmpe_list:
+ gm_collector = []
+ if cur_gmpe not in avail_gmpe:
+ print( # noqa: T201
+ f'ComputeIntensityMeasure.get_im_from_local: warning - {cur_gmpe} is not available.'
+ )
+ continue
+ for cur_site in self.site_info:
+ # current site-rupture distance
+ cur_dist = cur_site['rRup']
+ cur_vs30 = cur_site['vs30']
+ tmpResult = { # noqa: N806
+ 'Mean': [],
+ 'TotalStdDev': [],
+ 'InterEvStdDev': [],
+ 'IntraEvStdDev': [],
+ }
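+ # The duration GMPEs below return scalar mean/std values that are appended
+ # to these lists; the NGA-West2 model classes return a dict assumed to
+ # carry the same keys ('Mean', 'InterEvStdDev', ...).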
+ if cur_gmpe == 'Bommer, Stafford & Alarcon (2009)':
+ mean, stdDev, interEvStdDev, intraEvStdDev = ( # noqa: N806
+ SignificantDurationModel.bommer_stafford_alarcon_ds_2009(
+ magnitude=eq_magnitude,
+ distance=cur_dist,
+ vs30=cur_vs30,
+ duration_type=im_type,
+ )
+ )
+ tmpResult['Mean'].append(float(mean))
+ tmpResult['TotalStdDev'].append(float(stdDev))
+ tmpResult['InterEvStdDev'].append(float(interEvStdDev))
+ tmpResult['IntraEvStdDev'].append(float(intraEvStdDev))
+ elif cur_gmpe == 'Afshari & Stewart (2016)':
+ mean, stdDev, interEvStdDev, intraEvStdDev = ( # noqa: N806
+ SignificantDurationModel.afshari_stewart_ds_2016(
+ magnitude=eq_magnitude,
+ distance=cur_dist,
+ vs30=cur_vs30,
+ duration_type=im_type,
+ )
+ )
+ tmpResult['Mean'].append(float(mean))
+ tmpResult['TotalStdDev'].append(float(stdDev))
+ tmpResult['InterEvStdDev'].append(float(interEvStdDev))
+ tmpResult['IntraEvStdDev'].append(float(intraEvStdDev))
+ elif cur_gmpe == 'Chiou & Youngs (2014)':
+ # start = time.process_time_ns()
+ tmpResult = self.CY.get_IM( # noqa: N806
+ eq_magnitude, self.site_rup_dict, cur_site, im_info
+ )
+ # self.timeGetIM += time.process_time_ns() - start
+ elif cur_gmpe == 'Abrahamson, Silva & Kamai (2014)':
+ # start = time.process_time_ns()
+ tmpResult = self.ASK.get_IM( # noqa: N806
+ eq_magnitude, self.site_rup_dict, cur_site, im_info
+ )
+ # self.timeGetIM += time.process_time_ns() - start
+ elif cur_gmpe == 'Boore, Stewart, Seyhan & Atkinson (2014)':
+ # start = time.process_time_ns()
+ tmpResult = self.BSSA.get_IM( # noqa: N806
+ eq_magnitude, self.site_rup_dict, cur_site, im_info
+ )
+ # self.timeGetIM += time.process_time_ns() - start
+ elif cur_gmpe == 'Campbell & Bozorgnia (2014)':
+ # start = time.process_time_ns()
+ tmpResult = self.CB.get_IM( # noqa: N806
+ eq_magnitude, self.site_rup_dict, cur_site, im_info
+ )
+ # self.timeGetIM += time.process_time_ns() - start
+ else:
+ print( # noqa: T201
+ f'ComputeIntensityMeasure.get_im_from_local: gmpe_name {cur_gmpe} is not supported.'
+ )
+ # collect sites
+ # gm_collector.append({
+ # "Location": {'Latitude':cur_site['lat'], 'Longitude':cur_site['lon']},
+ # "SiteData": {key: cur_site[key] for key in cur_site if key not in ['lat','lon']},
+ # 'ln'+im_type: tmpResult
+ # })
+ gm_collector.append({'ln' + im_type: tmpResult})
+
+ # Final results
+ cur_res = {
+ 'Magnitude': eq_magnitude,
+ 'MeanAnnualRate': meanAnnualRate,
+ 'SiteSourceDistance': source_info.get('SiteSourceDistance', None),
+ 'SiteRuptureDistance': source_info.get('SiteRuptureDistance', None),
+ 'Periods': cur_T,
+ 'IM': im_type,
+ 'GroundMotions': gm_collector,
+ }
+ # collect gmpes
+ res_list.append(cur_res)
+
+ # weighting if any
+ if gmpe_weights is not None:
+ res = compute_weighted_res(res_list, gmpe_weights)
+ else:
+ res = res_list[0]
+ # return
+ return res
+
+
+def collect_multi_im_res(res_dict): # noqa: C901, D103
+ res_list = []
+ if 'PGA' in res_dict.keys(): # noqa: SIM118
+ res_list.append(res_dict['PGA'])
+ if 'SA' in res_dict.keys(): # noqa: SIM118
+ res_list.append(res_dict['SA'])
+ if 'PGV' in res_dict.keys(): # noqa: SIM118
+ res_list.append(res_dict['PGV'])
+ res = dict() # noqa: C408
+ num_res = len(res_list)
+ if num_res == 0:
+ print('IM_Calculator._collect_res: error - the res_list is empty') # noqa: T201
+ return res
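+ # use the first IM result as the base record, then merge the remaining IM
+ # types into its 'IM', 'Periods', and per-site 'GroundMotions' entries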
+ for i, cur_res in enumerate(res_list):
+ if i == 0:
+ res = cur_res
+ res['IM'] = [cur_res['IM']]
+ if cur_res.get('Periods', None) is None:
+ res['Periods'] = [None]
+ elif type(cur_res.get('Periods')) in [float, int]: # noqa: PLR6201
+ res['Periods'] = [cur_res.get('Periods')]
+ else:
+ res['Periods'] = cur_res.get('Periods')
+ else:
+ res['IM'].append(cur_res['IM'])
+ if cur_res.get('Periods', None) is None:
+ res['Periods'] = res['Periods'] + [None] # noqa: PLR6104
+ elif type(cur_res.get('Periods')) in [float, int]: # noqa: PLR6201
+ res['Periods'] = res['Periods'] + [cur_res.get('Periods')] # noqa: PLR6104
+ else:
+ res['Periods'] = res['Periods'] + cur_res.get('Periods') # noqa: PLR6104
+ # combine ground motion characteristics
+ for j in range(len(cur_res['GroundMotions'])):
+ tmp_res = cur_res['GroundMotions'][j].get(
+ 'ln{}'.format(cur_res['IM'])
+ )
+ res['GroundMotions'][j].update(
+ {'ln{}'.format(cur_res['IM']): tmp_res}
+ )
+
+ # return
+ return res
+
+
+def collect_multi_im_res_hdf5(res_list, im_list): # noqa: D103
+ res = dict() # noqa: C408
+ num_res = len(res_list)
+ if num_res == 0:
+ print('IM_Calculator._collect_res: error - the res_list is empty') # noqa: T201
+ return res
+ num_sites = len(res_list[list(res_list.keys())[0]]['GroundMotions']) # noqa: RUF015
+ collected_mean = np.zeros([num_sites, len(im_list)])
+ collected_intraStd = np.zeros([num_sites, len(im_list)]) # noqa: N806
+ collected_interStd = np.zeros([num_sites, len(im_list)]) # noqa: N806
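+ # Each array is (num_sites x num_IMs) with columns following im_list; for
+ # 'SA(T)' entries the period is parsed from the label (e.g. 'SA(1.0)' -> 1.0)
+ # and matched against the stored period list.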
+ for i, im in enumerate(im_list):
+ if im.startswith('PGA'):
+ collected_mean[:, i] = np.array(
+ [x['lnPGA']['Mean'][0] for x in res_list['PGA']['GroundMotions']]
+ )
+ collected_interStd[:, i] = np.array(
+ [
+ x['lnPGA']['InterEvStdDev'][0]
+ for x in res_list['PGA']['GroundMotions']
+ ]
+ )
+ collected_intraStd[:, i] = np.array(
+ [
+ x['lnPGA']['IntraEvStdDev'][0]
+ for x in res_list['PGA']['GroundMotions']
+ ]
+ )
+ if im.startswith('SA'):
+ period = float(re.search(r'\((.*?)\)', im).group(1))
+ period_i = res_list['SA']['Periods'].index(period)
+ collected_mean[:, i] = np.array(
+ [
+ x['lnSA']['Mean'][period_i]
+ for x in res_list['SA']['GroundMotions']
+ ]
+ )
+ collected_interStd[:, i] = np.array(
+ [
+ x['lnSA']['InterEvStdDev'][period_i]
+ for x in res_list['SA']['GroundMotions']
+ ]
+ )
+ collected_intraStd[:, i] = np.array(
+ [
+ x['lnSA']['IntraEvStdDev'][period_i]
+ for x in res_list['SA']['GroundMotions']
+ ]
+ )
+ if im.startswith('PGV'):
+ collected_mean[:, i] = np.array(
+ [x['lnPGV']['Mean'][0] for x in res_list['PGV']['GroundMotions']]
+ )
+ collected_interStd[:, i] = np.array(
+ [
+ x['lnPGV']['InterEvStdDev'][0]
+ for x in res_list['PGV']['GroundMotions']
+ ]
+ )
+ collected_intraStd[:, i] = np.array(
+ [
+ x['lnPGV']['IntraEvStdDev'][0]
+ for x in res_list['PGV']['GroundMotions']
+ ]
+ )
+ res.update({'Mean': collected_mean})
+ res.update({'InterEvStdDev': collected_interStd})
+ res.update({'IntraEvStdDev': collected_intraStd})
+ # return
+ return res
+
+
+def get_im_dict(im_info): # noqa: D103
+ if im_info.get('Type', None) == 'Vector':
+ im_dict = im_info.copy()
+ im_dict.pop('Type')
+ if 'PGV' in im_dict.keys(): # noqa: SIM118
+ PGV_dict = im_dict.pop('PGV') # noqa: N806
+ im_dict.update({'PGV': PGV_dict})
+ else:
+ # back compatibility
+ im_dict = {im_info.get('Type'): im_info.copy()}
+
+ # return
+ return im_dict
+
+
+def get_gmpe_from_im_vector(im_info, gmpe_info): # noqa: D103
+ gmpe_dict = dict() # noqa: C408
+ gmpe_weights_dict = dict() # noqa: C408
+ # check IM info type
+ if im_info.get('Type', None) != 'Vector':
+ print( # noqa: T201
+ 'ComputeIntensityMeasure.get_gmpe_from_im_vector: error: IntensityMeasure Type should be Vector.'
+ )
+ return gmpe_dict, gmpe_weights_dict
+ else: # noqa: RET505
+ im_keys = list(im_info.keys())
+ im_keys.remove('Type')
+ for cur_im in im_keys:
+ cur_gmpe = im_info[cur_im].get('GMPE', None)
+ cur_weights = im_info[cur_im].get('GMPEWeights', None)
+ if cur_gmpe is None:
+ print( # noqa: T201
+ f'ComputeIntensityMeasure.get_gmpe_from_im_vector: warning: GMPE not found for {cur_im}'
+ )
+ elif type(cur_gmpe) == str: # noqa: E721
+ if cur_gmpe == 'NGAWest2 2014 Averaged':
+ cur_gmpe = [
+ 'Abrahamson, Silva & Kamai (2014)',
+ 'Boore, Stewart, Seyhan & Atkinson (2014)',
+ 'Campbell & Bozorgnia (2014)',
+ 'Chiou & Youngs (2014)',
+ ]
+ cur_weights = [0.25, 0.25, 0.25, 0.25]
+ else:
+ cur_gmpe = [cur_gmpe]
+ cur_weights = None
+ gmpe_dict.update({cur_im: cur_gmpe})
+ gmpe_weights_dict.update({cur_im: cur_weights})
+ # global parameters if any
+ gmpe_dict.update({'Parameters': gmpe_info.get('Parameters', dict())}) # noqa: C408
+ # return
+ return gmpe_dict, gmpe_weights_dict
+
+
+def get_gmpe_from_im_legency(im_info, gmpe_info, gmpe_weights=None): # noqa: D103
+ # back compatibility for getting ims and gmpes
+ gmpe_dict = dict() # noqa: C408
+ gmpe_weights_dict = dict() # noqa: C408
+ if gmpe_info['Type'] == 'NGAWest2 2014 Averaged':
+ gmpe_list = [
+ 'Abrahamson, Silva & Kamai (2014)',
+ 'Boore, Stewart, Seyhan & Atkinson (2014)',
+ 'Campbell & Bozorgnia (2014)',
+ 'Chiou & Youngs (2014)',
+ ]
+ if gmpe_weights is None:
+ gmpe_weights = [0.25, 0.25, 0.25, 0.25]
+ im_type = im_info.get('Type')
+ gmpe_dict = {im_type: gmpe_list}
+ else:
+ gmpe_list = [gmpe_info['Type']]
+ gmpe_weights = None
+ im_type = im_info.get('Type')
+ # for im_type in im_types:
+ gmpe_dict.update({im_type: gmpe_list})
+ gmpe_weights_dict = {im_type: gmpe_weights}
+ # global parameters if any
+ gmpe_dict.update({'Parameters': gmpe_info.get('Parameters', dict())}) # noqa: C408
+ # return
+ return gmpe_dict, gmpe_weights_dict
+
+
+def compute_im( # noqa: C901, D103
+ scenarios,
+ stations,
+ EqRupture_info, # noqa: N803
+ gmpe_info,
+ im_info,
+ generator_info,
+ output_dir,
+ filename='IntensityMeasureMeanStd.hdf5',
+ mth_flag=True, # noqa: FBT002
+):
+ # Compute mean and standard deviation of the requested IMs (locally or via OpenSHA)
+ if len(scenarios) < 10: # noqa: PLR2004
+ filename = 'IntensityMeasureMeanStd.json'
+ saveInJson = True # noqa: N806
+ im_raw = {}
+ else:
+ saveInJson = False # noqa: N806
+ filename = os.path.join(output_dir, filename) # noqa: PTH118
+ im_list = []
+ if 'PGA' in im_info.keys(): # noqa: SIM118
+ im_list.append('PGA')
+ if 'SA' in im_info.keys(): # noqa: SIM118
+ for cur_period in im_info['SA']['Periods']:
+ im_list.append(f'SA({cur_period!s})') # noqa: PERF401
+ if 'PGV' in im_info.keys(): # noqa: SIM118
+ im_list.append('PGV')
+ # Stations
+ station_list = [
+ {
+ 'Location': {
+ 'Latitude': stations[j]['lat'],
+ 'Longitude': stations[j]['lon'],
+ }
+ }
+ for j in range(len(stations))
+ ]
+ for j in range(len(stations)):
+ if stations[j].get('vs30'):
+ station_list[j].update({'Vs30': int(stations[j]['vs30'])})
+ station_info = {'Type': 'SiteList', 'SiteList': station_list}
+ # hazard occurrence model
+ if generator_info['method'] == 'Subsampling':
+ # check that the period used by the hazard occurrence model is included in the SA period list of the intensity measure
+ if generator_info['Parameters'].get('IntensityMeasure') == 'SA':
+ ho_period = generator_info['Parameters'].get('Period')
+ if im_info['Type'] == 'Vector':
+ if im_info.get('SA') is None:
+ sys.exit(
+ 'SA is used in hazard downsampling but not defined in the intensity measure tab'
+ )
+ elif ho_period in im_info['SA'].get('Periods'):
+ pass
+ else:
+ tmp_periods = im_info['SA']['Periods'] + [ho_period]
+ tmp_periods.sort()
+ im_info['SA']['Periods'] = tmp_periods
+ elif ho_period in im_info['SA'].get('Periods'):
+ pass
+ else:
+ tmp_periods = im_info['SA']['Periods'] + [ho_period]
+ tmp_periods.sort()
+ im_info['SA']['Periods'] = tmp_periods
+ # prepare gmpe list for intensity measure
+ if gmpe_info['Type'] == 'Vector':
+ gmpe_dict, gmpe_weights_dict = get_gmpe_from_im_vector(im_info, gmpe_info)
+ else:
+ gmpe_dict, gmpe_weights_dict = get_gmpe_from_im_legency(im_info, gmpe_info)
+ # prepare intensity measure dict
+ im_dict = get_im_dict(im_info)
+
+ t_start = time.time()
+ # Loop over scenarios
+ if mth_flag is False:
+ # create a IM calculator
+ im_calculator = IM_Calculator(
+ im_dict=im_dict,
+ gmpe_dict=gmpe_dict,
+ gmpe_weights_dict=gmpe_weights_dict,
+ site_info=stations,
+ )
+ if EqRupture_info['EqRupture']['Type'] == 'ERF':
+ im_calculator.erf = getERF(EqRupture_info) # noqa: F405
+ else:
+ im_calculator.erf = None
+ gmpe_set = set()
+ for _, item in gmpe_dict.items(): # noqa: PERF102
+ gmpe_set = gmpe_set.union(set(item))
+ for gmpe in gmpe_set:
+ if gmpe == 'Chiou & Youngs (2014)':
+ im_calculator.CY = openSHAGMPE.chiou_youngs_2013()
+ if gmpe == 'Abrahamson, Silva & Kamai (2014)':
+ im_calculator.ASK = openSHAGMPE.abrahamson_silva_kamai_2014()
+ if gmpe == 'Boore, Stewart, Seyhan & Atkinson (2014)':
+ im_calculator.BSSA = openSHAGMPE.boore_etal_2014()
+ if gmpe == 'Campbell & Bozorgnia (2014)':
+ im_calculator.CB = openSHAGMPE.campbell_bozorgnia_2014()
+ # for i in tqdm(range(len(scenarios.keys())), desc=f"Evaluate GMPEs for {len(scenarios.keys())} scenarios"):
+ # Initialize an hdf5 file for IMmeanStd
+ if os.path.exists(filename): # noqa: PTH110
+ os.remove(filename) # noqa: PTH107
+ for i in tqdm(
+ range(len(scenarios.keys())),
+ desc=f'Evaluate GMPEs for {len(scenarios.keys())} scenarios',
+ ):
+ # for i, key in enumerate(scenarios.keys()):
+ # print('ComputeIntensityMeasure: Scenario #{}/{}'.format(i+1,len(scenarios)))
+ # Rupture
+ key = int(list(scenarios.keys())[i])
+ source_info = scenarios[key]
+ im_calculator.set_source(source_info)
+ # Computing IM
+ res_list = dict() # noqa: C408
+ for cur_im_type in list(im_dict.keys()):
+ im_calculator.set_im_type(cur_im_type)
+ res_list.update({cur_im_type: im_calculator.calculate_im()})
+ # Collecting outputs
+ # collectedResult.update({'SourceIndex':source_info['SourceIndex'], 'RuptureIndex':source_info['RuptureIndex']})
+ if saveInJson:
+ collectedResult = collect_multi_im_res(res_list) # noqa: N806
+ im_raw.update({key: collectedResult})
+ else:
+ collectedResult = collect_multi_im_res_hdf5(res_list, im_list) # noqa: N806
+ with h5py.File(filename, 'a') as f:
+ # Add a group named by the scenario index, holding three datasets:
+ # Mean, InterEvStdDev, IntraEvStdDev
+ grp = f.create_group(str(i))
+ grp.create_dataset('Mean', data=collectedResult['Mean'])
+ grp.create_dataset(
+ 'InterEvStdDev', data=collectedResult['InterEvStdDev']
+ )
+ grp.create_dataset(
+ 'IntraEvStdDev', data=collectedResult['IntraEvStdDev']
+ )
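+ # resulting HDF5 layout: one group per scenario counter, each holding
+ # (num_sites x num_IMs) 'Mean', 'InterEvStdDev', and 'IntraEvStdDev'
+ # arrays; the total std dev is not stored here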
+ # if (i % 250 == 0):
+ # if saveInJson:
+ # print(f"Size of im_raw for {i} scenario is {sys.getsizeof(im_raw)}")
+ # else:
+ # print(f"Another 250 scenarios computed")
+
+ if mth_flag:
+ res_dict = {}
+ sub_ths = []
+ num_bins = 200
+ bin_size = int(np.ceil(len(scenarios) / num_bins))
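+ # multi-threaded path: scenarios are split into up to num_bins chunks, each
+ # handled by a worker thread (compute_im_para); results are keyed by
+ # scenario id in res_dict and re-ordered after all threads join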
+ ids_list = []
+ scen_list = []
+ for k in range(0, len(scenarios), bin_size):
+ ids_list.append(list(scenarios.keys())[k : k + bin_size])
+ scen_list.append(
+ [scenarios[x] for x in list(scenarios.keys())[k : k + bin_size]]
+ )
+ # print(ids_list)
+ for i in range(len(ids_list)):
+ th = threading.Thread(
+ target=compute_im_para,
+ args=(
+ ids_list[i],
+ scen_list[i],
+ im_dict,
+ gmpe_dict,
+ gmpe_weights_dict,
+ station_info,
+ res_dict,
+ ),
+ )
+ sub_ths.append(th)
+ th.start()
+
+ for th in sub_ths:
+ th.join()
+
+ # order the res_dict by id
+ res_ordered = collections.OrderedDict(sorted(res_dict.items()))
+ for i, cur_res in res_ordered.items(): # noqa: B007
+ im_raw.append(cur_res)
+
+ print( # noqa: T201
+ f'ComputeIntensityMeasure: mean and standard deviation of intensity measures {time.time() - t_start} sec'
+ )
+
+ if saveInJson:
+ with open(filename, 'w') as f: # noqa: PLW1514, PTH123
+ ujson.dump(im_raw, f, indent=1)
+ # return
+ return filename, im_list
+
+
+def compute_im_para( # noqa: D103
+ ids,
+ scenario_infos,
+ im_dict,
+ gmpe_dict,
+ gmpe_weights_dict,
+ station_info,
+ res_dict,
+):
+ for i, id in enumerate(ids): # noqa: A001
+ print(f'ComputeIntensityMeasure: Scenario #{id + 1}.') # noqa: T201
+ scenario_info = scenario_infos[i]
+ # create a IM calculator
+ im_calculator = IM_Calculator(
+ im_dict=im_dict,
+ gmpe_dict=gmpe_dict,
+ gmpe_weights_dict=gmpe_weights_dict,
+ site_info=station_info,
+ )
+ # set scenario information
+ im_calculator.set_source(scenario_info)
+ # computing IM
+ res_list = []
+ for cur_im_type in list(im_dict.keys()):
+ im_calculator.set_im_type(cur_im_type)
+ res_list.append(im_calculator.calculate_im())
+ # clean
+ del im_calculator
+ # collect multiple ims
+ res = collect_multi_im_res(res_list)
+ # append res to res_dict
+ res_dict[id] = res
+ # return
+
+
+def export_im( # noqa: C901, D103, PLR0912
+ stations,
+ im_list,
+ im_data,
+ eq_data,
+ output_dir,
+ filename,
+ csv_flag,
+ gf_im_list,
+ scenario_ids,
+):
+ # Rename SA(xxx) to SA_xxx
+ for i, im in enumerate(im_list):
+ if im.startswith('SA'):
+ im_list[i] = (
+ im_list[i].split('(')[0] + '_' + im_list[i].split('(')[1][:-1]
+ )
+ # try:
+ # Station number
+ num_stations = len(stations)
+ # Scenario number
+ num_scenarios = len(eq_data)
+ eq_data = np.array(eq_data)
+ # Save to HDF5 when there are many scenarios; otherwise save to JSON
+ if num_scenarios > 100000: # noqa: PLR2004
+ # Pandas DataFrame
+ h_scenarios = ['Scenario-' + str(x) for x in range(1, num_scenarios + 1)]
+ h_eq = [
+ 'Latitude',
+ 'Longitude',
+ 'Vs30',
+ 'Magnitude',
+ 'MeanAnnualRate',
+ 'SiteSourceDistance',
+ 'SiteRuptureDistance',
+ ]
+ for x in range(1, im_data[0][0, :, :].shape[1] + 1):
+ for y in im_list:
+ h_eq.append('Record-' + str(x) + f'-{y}') # noqa: PERF401
+ index = pd.MultiIndex.from_product([h_scenarios, h_eq])
+ columns = ['Site-' + str(x) for x in range(1, num_stations + 1)]
+ df = pd.DataFrame(index=index, columns=columns, dtype=float) # noqa: PD901
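+ # rows form a (scenario, field) MultiIndex (site/earthquake attributes
+ # followed by one entry per record-IM pair); each column is one site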
+ # Data
+ for i in range(num_stations):
+ tmp = []
+ for j in range(num_scenarios):
+ tmp.append(stations[i]['lat']) # noqa: FURB113
+ tmp.append(stations[i]['lon'])
+ tmp.append(int(stations[i]['vs30']))
+ tmp.append(eq_data[j][0])
+ tmp.append(eq_data[j][1])
+ tmp.append(eq_data[j][2])
+ tmp.append(eq_data[j][3])
+ for x in np.ndarray.tolist(im_data[j][i, :, :].T):
+ for y in x:
+ tmp.append(y) # noqa: PERF402
+ df['Site-' + str(i + 1)] = tmp
+ # HDF output
+ try: # noqa: SIM105
+ os.remove(os.path.join(output_dir, filename.replace('.json', '.h5'))) # noqa: PTH107, PTH118
+ except: # noqa: S110, E722
+ pass
+ hdf = pd.HDFStore(os.path.join(output_dir, filename.replace('.json', '.h5'))) # noqa: PTH118
+ hdf.put('SiteIM', df, format='table', complib='zlib')
+ hdf.close()
+ else:
+ res = []
+ for i in range(num_stations):
+ tmp = {
+ 'Location': {
+ 'Latitude': stations[i]['lat'],
+ 'Longitude': stations[i]['lon'],
+ },
+ 'Vs30': int(stations[i]['vs30']),
+ }
+ tmp.update({'IMS': im_list})
+ tmp_im = []
+ for j in range(num_scenarios):
+ tmp_im.append(np.ndarray.tolist(im_data[j][i, :, :])) # noqa: PERF401
+ if len(tmp_im) == 1:
+ # Simplifying the data structure if only one scenario exists
+ tmp_im = tmp_im[0]
+ tmp.update({'lnIM': tmp_im})
+ res.append(tmp)
+ maf_out = []
+ for ind, cur_eq in enumerate(eq_data):
+ if cur_eq[1]:
+ mar = cur_eq[1]
+ else:
+ mar = 'N/A'
+ if cur_eq[2]:
+ ssd = cur_eq[2]
+ else:
+ ssd = 'N/A'
+ if len(cur_eq) > 3 and cur_eq[3]: # noqa: PLR2004
+ srd = cur_eq[3]
+ else:
+ srd = 'N/A'
+ tmp = {
+ 'Magnitude': float(cur_eq[0]),
+ 'MeanAnnualRate': mar,
+ 'SiteSourceDistance': ssd,
+ 'SiteRuputureDistance': srd,
+ 'ScenarioIndex': int(scenario_ids[ind]),
+ }
+ maf_out.append(tmp)
+ res = {'Station_lnIM': res, 'Earthquake_MAF': maf_out}
+ # save SiteIM.json
+ with open(os.path.join(output_dir, filename), 'w') as f: # noqa: PLW1514, PTH118, PTH123
+ json.dump(res, f, indent=2)
+ # export the event grid and station csv files
+ if csv_flag:
+ # output EventGrid.csv
+ station_name = [
+ 'site' + str(stations[j]['ID']) + '.csv' for j in range(len(stations))
+ ]
+ lat = [stations[j]['lat'] for j in range(len(stations))]
+ lon = [stations[j]['lon'] for j in range(len(stations))]
+ # vs30 = [stations[j]['vs30'] for j in range(len(stations))]
+ # zTR = [stations[j]['DepthToRock'] for j in range(len(stations))]
+ df = pd.DataFrame( # noqa: PD901
+ {
+ 'GP_file': station_name,
+ 'Longitude': lon,
+ 'Latitude': lat,
+ # 'Vs30': vs30,
+ # 'DepthToRock': zTR
+ }
+ )
+ # if cur_eq[2]:
+ # df['SiteSourceDistance'] = cur_eq[2]
+ output_dir = os.path.join( # noqa: PTH118
+ os.path.dirname(Path(output_dir)), # noqa: PTH120
+ os.path.basename(Path(output_dir)), # noqa: PTH119
+ )
+ # separate directory for IM
+ output_dir = os.path.join(output_dir, 'IMs') # noqa: PTH118
+ try:
+ os.makedirs(output_dir) # noqa: PTH103
+ except: # noqa: E722
+ print('HazardSimulation: output folder already exists.') # noqa: T201
+ # save the csv
+ df.to_csv(os.path.join(output_dir, 'EventGrid.csv'), index=False) # noqa: PTH118
+ # output station#.csv
+ # csv header
+ csvHeader = im_list # noqa: N806
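+ # one CSV per site: log-space IMs (SA/PGA/PGV) are exponentiated back to
+ # physical units, and with multiple scenarios each scenario is written to
+ # its own sub-folder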
+ for cur_scen in range(len(im_data)):
+ if len(im_data) > 1:
+ # IMPORTANT: the scenario index starts with 1 in the front end.
+ cur_scen_folder = 'scenario' + str(int(scenario_ids[cur_scen]) + 1)
+ try: # noqa: SIM105
+ os.mkdir(os.path.join(output_dir, cur_scen_folder)) # noqa: PTH102, PTH118
+ except: # noqa: S110, E722
+ pass
+ # print('ComputeIntensityMeasure: scenario folder already exists.')
+ cur_output_dir = os.path.join(output_dir, cur_scen_folder) # noqa: PTH118
+ else:
+ cur_output_dir = output_dir
+ # current IM data
+ cur_im_data = im_data[cur_scen]
+ for i, site_id in enumerate(station_name):
+ df = dict() # noqa: C408, PD901
+ # Loop over all intensity measures
+ for cur_im_tag in range(len(csvHeader)):
+ if (csvHeader[cur_im_tag].startswith('SA')) or (
+ csvHeader[cur_im_tag] in ['PGA', 'PGV'] # noqa: PLR6201
+ ):
+ df.update(
+ {
+ csvHeader[cur_im_tag]: np.exp(
+ cur_im_data[i, cur_im_tag, :]
+ )
+ }
+ )
+ else:
+ df.update(
+ {csvHeader[cur_im_tag]: cur_im_data[i, cur_im_tag, :]}
+ )
+ df = pd.DataFrame(df) # noqa: PD901
+ # Combine PGD from liquefaction, landslide and fault
+ if (
+ 'liq_PGD_h' in df.columns
+ or 'lsd_PGD_h' in df.columns
+ or 'fd_PGD_h' in df.columns
+ ):
+ PGD_h = np.zeros(df.shape[0]) # noqa: N806
+ if 'liq_PGD_h' in df.columns:
+ PGD_h += df['liq_PGD_h'].to_numpy() # noqa: N806
+ if 'lsd_PGD_h' in df.columns:
+ PGD_h += df['lsd_PGD_h'].to_numpy() # noqa: N806
+ if 'fd_PGD_h' in df.columns:
+ PGD_h += df['fd_PGD_h'].to_numpy() # noqa: N806
+ df['PGD_h'] = PGD_h
+ if (
+ 'liq_PGD_v' in df.columns
+ or 'lsd_PGD_v' in df.columns
+ or 'fd_PGD_v' in df.columns
+ ):
+ PGD_v = np.zeros(df.shape[0]) # noqa: N806
+ if 'liq_PGD_v' in df.columns:
+ PGD_v += df['liq_PGD_v'].to_numpy() # noqa: N806
+ if 'lsd_PGD_v' in df.columns:
+ PGD_v += df['lsd_PGD_v'].to_numpy() # noqa: N806
+ if 'fd_PGD_v' in df.columns:
+ PGD_v += df['fd_PGD_v'].to_numpy() # noqa: N806
+ df['PGD_v'] = PGD_v
+ colToDrop = [] # noqa: N806
+ for col in df.columns:
+ if (
+ (not col.startswith('SA'))
+ and (col not in ['PGA', 'PGV', 'PGD_h', 'PGD_v']) # noqa: PLR6201
+ and (col not in gf_im_list)
+ ):
+ colToDrop.append(col) # noqa: PERF401
+ df.drop(columns=colToDrop, inplace=True) # noqa: PD002
+ # if 'liq_prob' in df.columns:
+ # df.drop(columns=['liq_prob'], inplace=True)
+ # if 'liq_susc' in df.columns:
+ # df.drop(columns=['liq_susc'], inplace=True)
+ df.fillna('NaN', inplace=True) # noqa: PD002
+ df.to_csv(os.path.join(cur_output_dir, site_id), index=False) # noqa: PTH118
+
+ # output the site#.csv file including all scenarios
+ if len(im_data) > 1:
+ print('ComputeIntensityMeasure: saving all selected scenarios.') # noqa: T201
+ # loop over sites
+ for i, site_id in enumerate(station_name):
+ df = dict() # noqa: C408, PD901
+ for cur_im_tag in range(len(csvHeader)):
+ tmp_list = []
+ # loop over all scenarios
+ for cur_scen in range(len(im_data)):
+ tmp_list = ( # noqa: PLR6104
+ tmp_list + im_data[cur_scen][i, cur_im_tag, :].tolist()
+ )
+ if (csvHeader[cur_im_tag].startswith('SA')) or (
+ csvHeader[cur_im_tag] in ['PGA', 'PGV'] # noqa: PLR6201
+ ):
+ df.update({csvHeader[cur_im_tag]: np.exp(tmp_list)})
+ else:
+ df.update({csvHeader[cur_im_tag]: tmp_list})
+ df = pd.DataFrame(df) # noqa: PD901
+ # Combine PGD from liquefaction, landslide and fault
+ if (
+ 'liq_PGD_h' in df.columns
+ or 'lsd_PGD_h' in df.columns
+ or 'fd_PGD_h' in df.columns
+ ):
+ PGD_h = np.zeros(df.shape[0]) # noqa: N806
+ if 'liq_PGD_h' in df.columns:
+ PGD_h += df['liq_PGD_h'].to_numpy() # noqa: N806
+ if 'lsd_PGD_h' in df.columns:
+ PGD_h += df['lsd_PGD_h'].to_numpy() # noqa: N806
+ if 'fd_PGD_h' in df.columns:
+ PGD_h += df['fd_PGD_h'].to_numpy() # noqa: N806
+ df['PGD_h'] = PGD_h
+ if (
+ 'liq_PGD_v' in df.columns
+ or 'lsd_PGD_v' in df.columns
+ or 'fd_PGD_v' in df.columns
+ ):
+ PGD_v = np.zeros(df.shape[0]) # noqa: N806
+ if 'liq_PGD_v' in df.columns:
+ PGD_v += df['liq_PGD_v'].to_numpy() # noqa: N806
+ if 'lsd_PGD_v' in df.columns:
+ PGD_v += df['lsd_PGD_v'].to_numpy() # noqa: N806
+ if 'fd_PGD_v' in df.columns:
+ PGD_v += df['fd_PGD_v'].to_numpy() # noqa: N806
+ df['PGD_v'] = PGD_v
+ colToDrop = [] # noqa: N806
+ for col in df.columns:
+ if (
+ (not col.startswith('SA'))
+ and (col not in ['PGA', 'PGV', 'PGD_h', 'PGD_v']) # noqa: PLR6201
+ and (col not in gf_im_list)
+ ):
+ colToDrop.append(col)
+ df.drop(columns=colToDrop, inplace=True) # noqa: PD002
+ df.fillna('NaN', inplace=True) # noqa: PD002
+ df.to_csv(os.path.join(output_dir, site_id), index=False) # noqa: PTH118
+ # return
+ return 0
+ # except:
+ # return
+ # return 1
+
+
+def compute_weighted_res(res_list, gmpe_weights): # noqa: C901, D103
+ # compute weighted average of gmpe results
+ # initialize the return res (these three attributes are identical in different gmpe results)
+ res = {
+ 'Magnitude': res_list[0]['Magnitude'],
+ 'MeanAnnualRate': res_list[0]['MeanAnnualRate'],
+ 'SiteSourceDistance': res_list[0].get('SiteSourceDistance', None),
+ 'Periods': res_list[0]['Periods'],
+ 'IM': res_list[0]['IM'],
+ }
+ # number of gmpe
+ num_gmpe = len(res_list)
+ # check number of weights
+ if num_gmpe != len(gmpe_weights):
+ print( # noqa: T201
+ 'ComputeIntensityMeasure: please check the weights of different GMPEs.'
+ )
+ return 1
+ # site number
+ num_site = len(res_list[0]['GroundMotions'])
+ # loop over different sites
+ gm_collector = []
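+ # weighting scheme: means are combined as a weighted arithmetic average,
+ # while standard deviations combine in quadrature, i.e.
+ # sigma_total = sqrt(sum_i w_i * sigma_i**2)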
+ for site_tag in range(num_site): # noqa: PLR1702
+ # loop over different GMPE
+ tmp_res = {}
+ for i, cur_res in enumerate(res_list):
+ cur_gmResults = cur_res['GroundMotions'][site_tag] # noqa: N806
+ # get keys
+ im_keys = list(cur_gmResults.keys())
+ for cur_im in im_keys:
+ if cur_im not in list(tmp_res.keys()):
+ if cur_im in ['Location', 'SiteData']: # noqa: PLR6201
+ tmp_res.update({cur_im: cur_gmResults[cur_im]})
+ else:
+ tmp_res.update({cur_im: {}})
+ if cur_im not in ['Location', 'SiteData']: # noqa: PLR6201
+ # get components
+ comp_keys = list(cur_gmResults[cur_im].keys())
+ # loop over different components
+ for cur_comp in comp_keys:
+ if cur_comp not in list(tmp_res[cur_im].keys()):
+ tmp_res[cur_im].update({cur_comp: []})
+ for cur_value in cur_gmResults[cur_im][cur_comp]:
+ if 'StdDev' in cur_comp:
+ # standard deviation
+ tmp_res[cur_im][cur_comp].append(
+ np.sqrt(cur_value**2.0 * gmpe_weights[i])
+ )
+ else:
+ # mean
+ tmp_res[cur_im][cur_comp].append(
+ cur_value * gmpe_weights[i]
+ )
+ else:
+ for j, cur_value in enumerate(
+ cur_gmResults[cur_im][cur_comp]
+ ):
+ if 'StdDev' in cur_comp:
+ # standard deviation
+ tmp_res[cur_im][cur_comp][j] = np.sqrt(
+ tmp_res[cur_im][cur_comp][j] ** 2.0
+ + cur_value**2.0 * gmpe_weights[i]
+ )
+ else:
+ # mean
+ tmp_res[cur_im][cur_comp][j] = ( # noqa: PLR6104
+ tmp_res[cur_im][cur_comp][j]
+ + cur_value * gmpe_weights[i]
+ )
+ # collector
+ gm_collector.append(tmp_res)
+ # res
+ res.update({'GroundMotions': gm_collector})
+ # return
+ return res
diff --git a/modules/performRegionalEventSimulation/regionalGroundMotion/CreateScenario.py b/modules/performRegionalEventSimulation/regionalGroundMotion/CreateScenario.py
index 842f42abe..12c5c1718 100644
--- a/modules/performRegionalEventSimulation/regionalGroundMotion/CreateScenario.py
+++ b/modules/performRegionalEventSimulation/regionalGroundMotion/CreateScenario.py
@@ -1,5 +1,4 @@
-# -*- coding: utf-8 -*-
-#
+# # noqa: INP001, D100
# Copyright (c) 2018 Leland Stanford Junior University
# Copyright (c) 2018 The Regents of the University of California
#
@@ -38,26 +37,31 @@
# Kuanshi Zhong
#
-import os, time
-import subprocess
import json
+import os
import random
-import numpy as np
-import pandas as pd
import socket
import sys
+import time
+
+import numpy as np
+import pandas as pd
+
if 'stampede2' not in socket.gethostname():
- from FetchOpenSHA import *
+ from FetchOpenSHA import * # noqa: F403
-def get_rups_to_run(scenario_info, user_scenarios, num_scenarios):
+
+def get_rups_to_run(scenario_info, user_scenarios, num_scenarios): # noqa: C901, D103
# If there is a filter
- if scenario_info["Generator"].get("method", None) == "ScenarioSpecific":
- SourceIndex = scenario_info["Generator"].get("SourceIndex", None)
- RupIndex = scenario_info['Generator'].get('RuptureIndex', None)
+ if scenario_info['Generator'].get('method', None) == 'ScenarioSpecific':
+ SourceIndex = scenario_info['Generator'].get('SourceIndex', None) # noqa: N806
+ RupIndex = scenario_info['Generator'].get('RuptureIndex', None) # noqa: N806
if (SourceIndex is None) or (RupIndex is None):
- print("Both SourceIndex and RuptureIndex are needed for"\
- "ScenarioSpecific analysis")
- return
+ print( # noqa: T201
+            'Both SourceIndex and RuptureIndex are needed for '
+ 'ScenarioSpecific analysis'
+ )
+ return None
rups_to_run = []
for ind in range(len(user_scenarios.get('features'))):
cur_rup = user_scenarios.get('features')[ind]
@@ -68,66 +72,86 @@ def get_rups_to_run(scenario_info, user_scenarios, num_scenarios):
if cur_id_rupture == int(RupIndex):
rups_to_run.append(ind)
break
- elif scenario_info["Generator"].get("method", None) == "MonteCarlo":
- rup_filter = scenario_info["Generator"].get("RuptureFilter", None)
- if rup_filter is None or len(rup_filter)==0:
- rups_to_run = list(range(0, num_scenarios))
+ elif scenario_info['Generator'].get('method', None) == 'MonteCarlo':
+ rup_filter = scenario_info['Generator'].get('RuptureFilter', None)
+ if rup_filter is None or len(rup_filter) == 0:
+ rups_to_run = list(range(num_scenarios))
else:
rups_requested = []
for rups in rup_filter.split(','):
- if "-" in rups:
- asset_low, asset_high = rups.split("-")
- rups_requested += list(range(int(asset_low), int(asset_high)+1))
+ if '-' in rups:
+ asset_low, asset_high = rups.split('-')
+ rups_requested += list(
+ range(int(asset_low), int(asset_high) + 1)
+ )
else:
rups_requested.append(int(rups))
rups_requested = np.array(rups_requested)
- rups_requested = rups_requested - 1 # The input index starts from 1, not 0
- rups_available = list(range(0, num_scenarios))
+ rups_requested = ( # noqa: PLR6104
+ rups_requested - 1
+ ) # The input index starts from 1, not 0
+ rups_available = list(range(num_scenarios))
rups_to_run = rups_requested[
- np.where(np.in1d(rups_requested, rups_available))[0]]
+ np.where(np.isin(rups_requested, rups_available))[0]
+ ]
# Select all
- elif scenario_info["Generator"].get("method", None) == "Subsampling":
- rups_to_run = list(range(0, num_scenarios))
+ elif scenario_info['Generator'].get('method', None) == 'Subsampling':
+ rups_to_run = list(range(num_scenarios))
else:
- sys.exit(f'The scenario selection method {scenario_info["Generator"].get("method", None)} is not available')
+ sys.exit(
+ f'The scenario selection method {scenario_info["Generator"].get("method", None)} is not available'
+ )
return rups_to_run
-def load_earthquake_rupFile(scenario_info, rupFilePath):
+
+def load_earthquake_rupFile(scenario_info, rupFilePath): # noqa: N802, N803, D103
# Getting earthquake rupture forecast data
source_type = scenario_info['EqRupture']['Type']
try:
- with open(rupFilePath, 'r') as f:
+ with open(rupFilePath) as f: # noqa: PLW1514, PTH123
user_scenarios = json.load(f)
- except:
- sys.exit('CreateScenario: source file {} not found.'.format(rupFilePath))
+ except: # noqa: E722
+ sys.exit(f'CreateScenario: source file {rupFilePath} not found.')
# number of features (i.e., ruptures)
- num_scenarios = len(user_scenarios.get('features',[]))
+ num_scenarios = len(user_scenarios.get('features', []))
if num_scenarios < 1:
sys.exit('CreateScenario: source file is empty.')
- rups_to_run = get_rups_to_run(scenario_info, user_scenarios, num_scenarios)
+ rups_to_run = get_rups_to_run(scenario_info, user_scenarios, num_scenarios)
# get rupture and source ids
scenario_data = {}
- if source_type == "ERF":
+ if source_type == 'ERF':
# source model
source_model = scenario_info['EqRupture']['Model']
for rup_tag in rups_to_run:
cur_rup = user_scenarios.get('features')[rup_tag]
cur_id_source = cur_rup.get('properties').get('Source', None)
cur_id_rupture = cur_rup.get('properties').get('Rupture', None)
- scenario_data.update({rup_tag: {
- 'Type': source_type,
- 'RuptureForecast': source_model,
- 'Name': cur_rup.get('properties').get('Name', ""),
- 'Magnitude': cur_rup.get('properties').get('Magnitude', None),
- 'MeanAnnualRate': cur_rup.get('properties').get('MeanAnnualRate', None),
- 'SourceIndex': cur_id_source,
- 'RuptureIndex': cur_id_rupture,
- 'SiteSourceDistance': cur_rup.get('properties').get('Distance', None),
- 'SiteRuptureDistance': cur_rup.get('properties').get('DistanceRup', None)
- }})
- elif source_type == "PointSource":
- sourceID = 0
- rupID = 0
+ scenario_data.update(
+ {
+ rup_tag: {
+ 'Type': source_type,
+ 'RuptureForecast': source_model,
+ 'Name': cur_rup.get('properties').get('Name', ''),
+ 'Magnitude': cur_rup.get('properties').get(
+ 'Magnitude', None
+ ),
+ 'MeanAnnualRate': cur_rup.get('properties').get(
+ 'MeanAnnualRate', None
+ ),
+ 'SourceIndex': cur_id_source,
+ 'RuptureIndex': cur_id_rupture,
+ 'SiteSourceDistance': cur_rup.get('properties').get(
+ 'Distance', None
+ ),
+ 'SiteRuptureDistance': cur_rup.get('properties').get(
+ 'DistanceRup', None
+ ),
+ }
+ }
+ )
+ elif source_type == 'PointSource':
+ sourceID = 0 # noqa: N806
+ rupID = 0 # noqa: N806
for rup_tag in rups_to_run:
try:
cur_rup = user_scenarios.get('features')[rup_tag]
@@ -135,22 +159,27 @@ def load_earthquake_rupFile(scenario_info, rupFilePath):
location = cur_rup.get('properties')['Location']
average_rake = cur_rup.get('properties')['AverageRake']
average_dip = cur_rup.get('properties')['AverageDip']
- scenario_data.update({0: {
- 'Type': source_type,
- 'Magnitude': magnitude,
- 'Location': location,
- 'AverageRake': average_rake,
- 'AverageDip': average_dip,
- 'SourceIndex':sourceID,
- 'RuptureIndex':rupID
- }})
- rupID = rupID + 1
- except:
- print('Please check point-source inputs.')
+ scenario_data.update(
+ {
+ 0: {
+ 'Type': source_type,
+ 'Magnitude': magnitude,
+ 'Location': location,
+ 'AverageRake': average_rake,
+ 'AverageDip': average_dip,
+ 'SourceIndex': sourceID,
+ 'RuptureIndex': rupID,
+ }
+ }
+ )
+ rupID = rupID + 1 # noqa: N806, PLR6104
+ except: # noqa: PERF203, E722
+ print('Please check point-source inputs.') # noqa: T201
# return
return scenario_data
-def load_ruptures_openquake(scenario_info, stations, work_dir, siteFile, rupFile):
+
+def load_ruptures_openquake(scenario_info, stations, work_dir, siteFile, rupFile): # noqa: C901, N803, D103
# Collecting all possible earthquake scenarios
lat = []
lon = []
@@ -160,89 +189,119 @@ def load_ruptures_openquake(scenario_info, stations, work_dir, siteFile, rupFile
# Reference location
mlat = np.mean(lat)
mlon = np.mean(lon)
- from openquake.hazardlib import nrml, sourceconverter, site
- from openquake.hazardlib.calc.filters import SourceFilter, get_distances
- from openquake.hazardlib.geo.surface.base import BaseSurface
- from openquake.hazardlib.geo.mesh import Mesh, surface_to_arrays
- from openquake.commonlib import readinput
- import json
+ import json # noqa: PLC0415
+
+ from openquake.commonlib import readinput # noqa: PLC0415
+ from openquake.hazardlib import nrml, site, sourceconverter # noqa: PLC0415
+ from openquake.hazardlib.calc.filters import ( # noqa: PLC0415
+ SourceFilter,
+ get_distances,
+ )
+ from openquake.hazardlib.geo.mesh import Mesh # noqa: PLC0415
+ from openquake.hazardlib.geo.surface.base import BaseSurface # noqa: PLC0415
+
try:
- with open(rupFile, 'r') as f:
+ with open(rupFile) as f: # noqa: PLW1514, PTH123
user_scenarios = json.load(f)
- except:
- sys.exit('CreateScenario: source file {} not found.'.format(rupFile))
+ except: # noqa: E722
+ sys.exit(f'CreateScenario: source file {rupFile} not found.')
# number of features (i.e., ruptures)
- num_scenarios = len(user_scenarios.get('features',[]))
+ num_scenarios = len(user_scenarios.get('features', []))
if num_scenarios < 1:
sys.exit('CreateScenario: source file is empty.')
rups_to_run = get_rups_to_run(scenario_info, user_scenarios, num_scenarios)
- in_dir = os.path.join(work_dir,'Input')
- oq = readinput.get_oqparam(dict(
- calculation_mode='classical',
- inputs = {
- "site_model":[siteFile]},
- intensity_measure_types_and_levels="{'PGA': [0.1], 'SA(0.1)': [0.1]}", #place holder for initiating oqparam. Not used in ERF
- investigation_time=str(scenario_info['EqRupture'].get('investigation_time', '50.0')),
- gsim='AbrahamsonEtAl2014', #place holder for initiating oqparam, not used in ERF
- truncation_level='99.0', # place holder for initiating oqparam. not used in ERF
- maximum_distance=str(scenario_info['EqRupture'].get('maximum_distance', '2000')),
- width_of_mfd_bin = str(scenario_info['EqRupture'].get('width_of_mfd_bin', '1.0')),
- area_source_discretization=str(scenario_info['EqRupture'].get('area_source_discretization', '10'))
- ))
+ in_dir = os.path.join(work_dir, 'Input') # noqa: PTH118
+ oq = readinput.get_oqparam(
+ dict( # noqa: C408
+ calculation_mode='classical',
+ inputs={'site_model': [siteFile]},
+            intensity_measure_types_and_levels="{'PGA': [0.1], 'SA(0.1)': [0.1]}",  # placeholder for initiating oqparam; not used in ERF
+ investigation_time=str(
+ scenario_info['EqRupture'].get('investigation_time', '50.0')
+ ),
+            gsim='AbrahamsonEtAl2014',  # placeholder for initiating oqparam, not used in ERF
+            truncation_level='99.0',  # placeholder for initiating oqparam, not used in ERF
+ maximum_distance=str(
+ scenario_info['EqRupture'].get('maximum_distance', '2000')
+ ),
+ width_of_mfd_bin=str(
+ scenario_info['EqRupture'].get('width_of_mfd_bin', '1.0')
+ ),
+ area_source_discretization=str(
+ scenario_info['EqRupture'].get('area_source_discretization', '10')
+ ),
+ )
+ )
rupture_mesh_spacing = scenario_info['EqRupture']['rupture_mesh_spacing']
- rupture_mesh_spacing = scenario_info['EqRupture']['rupture_mesh_spacing']
- [src_nrml] = nrml.read(os.path.join(in_dir, scenario_info['EqRupture']['sourceFile']))
+ rupture_mesh_spacing = scenario_info['EqRupture']['rupture_mesh_spacing']
+ [src_nrml] = nrml.read(
+ os.path.join(in_dir, scenario_info['EqRupture']['sourceFile']) # noqa: PTH118
+ )
conv = sourceconverter.SourceConverter(
- scenario_info['EqRupture']['investigation_time'],
- rupture_mesh_spacing,
- width_of_mfd_bin=scenario_info['EqRupture']['width_of_mfd_bin'],
- area_source_discretization=scenario_info['EqRupture']['area_source_discretization'])
+ scenario_info['EqRupture']['investigation_time'],
+ rupture_mesh_spacing,
+ width_of_mfd_bin=scenario_info['EqRupture']['width_of_mfd_bin'],
+ area_source_discretization=scenario_info['EqRupture'][
+ 'area_source_discretization'
+ ],
+ )
src_raw = conv.convert_node(src_nrml)
sources = []
sources_dist = []
sources_id = []
- id = 0
- siteMeanCol = site.SiteCollection.from_points([mlon], [mlat])
+ id = 0 # noqa: A001
+ siteMeanCol = site.SiteCollection.from_points([mlon], [mlat]) # noqa: N806
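+    # a single site at the mean location is used to screen out sources beyond the maximum distance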
srcfilter = SourceFilter(siteMeanCol, oq.maximum_distance)
for i in range(len(src_nrml)):
subnode = src_nrml[i]
- subSrc = src_raw[i]
- tag = subnode.tag.rsplit('}')[1] if subnode.tag.startswith('{') else subnode.tag
- if tag == "sourceGroup":
+ subSrc = src_raw[i] # noqa: N806
+ tag = (
+ subnode.tag.rsplit('}')[1]
+ if subnode.tag.startswith('{')
+ else subnode.tag
+ )
+ if tag == 'sourceGroup':
for j in range(len(subnode)):
subsubnode = subnode[j]
- subsubSrc = subSrc[j]
- subtag = subsubnode.tag.rsplit('}')[1] if subsubnode.tag.startswith('{') else subsubnode.tag
- if subtag.endswith('Source') and srcfilter.get_close_sites(subsubSrc) is not None:
+ subsubSrc = subSrc[j] # noqa: N806
+ subtag = (
+ subsubnode.tag.rsplit('}')[1]
+ if subsubnode.tag.startswith('{')
+ else subsubnode.tag
+ )
+ if (
+ subtag.endswith('Source')
+ and srcfilter.get_close_sites(subsubSrc) is not None
+ ):
subsubSrc.id = id
sources_id.append(id)
- id += 1
+ id += 1 # noqa: A001
sources.append(subsubSrc)
- sourceMesh = subsubSrc.polygon.discretize(rupture_mesh_spacing)
- sourceSurface = BaseSurface(sourceMesh)
- siteMesh = Mesh(siteMeanCol.lon, siteMeanCol.lat)
- sources_dist. append(sourceSurface.get_min_distance(siteMesh))
- elif tag.endswith('Source') and srcfilter.get_close_sites(subSrc) is not None:
+ sourceMesh = subsubSrc.polygon.discretize(rupture_mesh_spacing) # noqa: N806
+ sourceSurface = BaseSurface(sourceMesh) # noqa: N806
+ siteMesh = Mesh(siteMeanCol.lon, siteMeanCol.lat) # noqa: N806
+ sources_dist.append(sourceSurface.get_min_distance(siteMesh))
+ elif (
+ tag.endswith('Source') and srcfilter.get_close_sites(subSrc) is not None
+ ):
subSrc.id = id
sources_id.append(id)
- id += 1
+ id += 1 # noqa: A001
sources.append(subSrc)
- sourceMesh = subSrc.polygon.discretize(rupture_mesh_spacing)
- sourceSurface = BaseSurface(sourceMesh)
- siteMesh = Mesh(siteMeanCol.lon, siteMeanCol.lat)
- sources_dist. append(sourceSurface.get_min_distance(siteMesh))
- sources_df = pd.DataFrame.from_dict({
- 'source': sources,
- 'sourceDist': sources_dist,
- 'sourceID':sources_id
- })
- sources_df = sources_df.sort_values(['sourceDist'], ascending = (True))
+ sourceMesh = subSrc.polygon.discretize(rupture_mesh_spacing) # noqa: N806
+ sourceSurface = BaseSurface(sourceMesh) # noqa: N806
+ siteMesh = Mesh(siteMeanCol.lon, siteMeanCol.lat) # noqa: N806
+ sources_dist.append(sourceSurface.get_min_distance(siteMesh))
+ sources_df = pd.DataFrame.from_dict(
+ {'source': sources, 'sourceDist': sources_dist, 'sourceID': sources_id}
+ )
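+    # sort sources from nearest to farthest relative to the mean site location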
+ sources_df = sources_df.sort_values(['sourceDist'], ascending=(True))
sources_df = sources_df.set_index('sourceID')
allrups = []
- allrups_rRup = []
- allrups_srcId = []
+ allrups_rRup = [] # noqa: N806
+ allrups_srcId = [] # noqa: N806
allrups_mar = []
- for src in sources_df["source"]:
+ for src in sources_df['source']:
src_rups = list(src.iter_ruptures())
for i, rup in enumerate(src_rups):
rup.rup_id = src.offset + i
@@ -250,48 +309,56 @@ def load_ruptures_openquake(scenario_info, stations, work_dir, siteFile, rupFile
allrups_rRup.append(rup.surface.get_min_distance(siteMeanCol))
allrups_srcId.append(src.id)
allrups_mar.append(rup.occurrence_rate)
- rups_df = pd.DataFrame.from_dict({
- 'rups':allrups,
- 'rups_rRup':allrups_rRup,
- 'rups_srcId':allrups_srcId,
- 'MeanAnnualRate':allrups_mar
- })
- rups_df = rups_df.sort_values(['rups_rRup'], ascending = (True))
- rups_df = rups_df[rups_df["rups_rRup"] > 0]
+ rups_df = pd.DataFrame.from_dict(
+ {
+ 'rups': allrups,
+ 'rups_rRup': allrups_rRup,
+ 'rups_srcId': allrups_srcId,
+ 'MeanAnnualRate': allrups_mar,
+ }
+ )
+ rups_df = rups_df.sort_values(['rups_rRup'], ascending=(True))
+ rups_df = rups_df[rups_df['rups_rRup'] > 0]
maf_list_n = [-x for x in rups_df['MeanAnnualRate']]
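+    # negate the rates so that argsort orders ruptures by descending mean annual rate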
sort_ids = np.argsort(maf_list_n)
rups_df = rups_df.iloc[sort_ids]
- rups_df.reset_index(drop=True, inplace=True)
+ rups_df.reset_index(drop=True, inplace=True) # noqa: PD002
# rups_df = rups_df = rups_df.sort_values(['MeanAnnualRate'], ascending = (False))
- rups_df = rups_df.loc[rups_to_run,:]
+ rups_df = rups_df.loc[rups_to_run, :]
scenario_data = {}
for ind in rups_df.index:
- src_id = int(rups_df.loc[ind,"rups_srcId"])
- name = sources_df.loc[src_id, "source"].name
- rup = rups_df.loc[ind, "rups"]
- scenario_data.update({ind: {
- 'Type': "oqSourceXML",
- 'RuptureForecast': "oqERF",
- 'Name': name,
- 'Magnitude': float(rup.mag),
- 'MeanAnnualRate': getattr(rup, "occurrence_rate", None),
- 'SourceIndex': src_id,
- 'RuptureIndex': int(rup.rup_id),
- 'SiteSourceDistance': sources_df.loc[src_id, "sourceDist"][0],
- 'SiteRuptureDistance': get_distances(rup, siteMeanCol, 'rrup')[0],
- 'rup':rup
- }})
+ src_id = int(rups_df.loc[ind, 'rups_srcId'])
+ name = sources_df.loc[src_id, 'source'].name
+ rup = rups_df.loc[ind, 'rups']
+ scenario_data.update(
+ {
+ ind: {
+ 'Type': 'oqSourceXML',
+ 'RuptureForecast': 'oqERF',
+ 'Name': name,
+ 'Magnitude': float(rup.mag),
+ 'MeanAnnualRate': getattr(rup, 'occurrence_rate', None),
+ 'SourceIndex': src_id,
+ 'RuptureIndex': int(rup.rup_id),
+ 'SiteSourceDistance': sources_df.loc[src_id, 'sourceDist'][0],
+ 'SiteRuptureDistance': get_distances(rup, siteMeanCol, 'rrup')[
+ 0
+ ],
+ 'rup': rup,
+ }
+ }
+ )
return scenario_data
-def load_earthquake_scenarios(scenario_info, stations, dir_info):
+def load_earthquake_scenarios(scenario_info, stations, dir_info): # noqa: D103
# Number of scenarios
- source_num = scenario_info.get('Number', 1)
+ source_num = scenario_info.get('Number', 1) # noqa: F841
# sampling method
- samp_method = scenario_info['EqRupture'].get('Sampling','Random')
+ samp_method = scenario_info['EqRupture'].get('Sampling', 'Random') # noqa: F841
# source model
source_model = scenario_info['EqRupture']['Model']
- eq_source = getERF(scenario_info)
+ eq_source = getERF(scenario_info) # noqa: F405
# Getting earthquake rupture forecast data
source_type = scenario_info['EqRupture']['Type']
# Collecting all sites
@@ -301,17 +368,19 @@ def load_earthquake_scenarios(scenario_info, stations, dir_info):
lat.append(s['Latitude'])
lon.append(s['Longitude'])
# load scenario file
- user_scenario_file = os.path.join(dir_info.get('Input'), scenario_info.get('EqRupture').get('UserScenarioFile'))
+ user_scenario_file = os.path.join( # noqa: PTH118
+ dir_info.get('Input'), scenario_info.get('EqRupture').get('UserScenarioFile')
+ )
try:
- with open(user_scenario_file, 'r') as f:
+ with open(user_scenario_file) as f: # noqa: PLW1514, PTH123
user_scenarios = json.load(f)
- except:
- print('CreateScenario: source file {} not found.'.format(user_scenario_file))
+ except: # noqa: E722
+ print(f'CreateScenario: source file {user_scenario_file} not found.') # noqa: T201
return {}
# number of features (i.e., ruptures)
- num_scenarios = len(user_scenarios.get('features',[]))
+ num_scenarios = len(user_scenarios.get('features', []))
if num_scenarios < 1:
- print('CreateScenario: source file is empty.')
+ print('CreateScenario: source file is empty.') # noqa: T201
return {}
# get rupture and source ids
scenario_data = {}
@@ -320,39 +389,59 @@ def load_earthquake_scenarios(scenario_info, stations, dir_info):
cur_id_source = cur_rup.get('properties').get('Source', None)
cur_id_rupture = cur_rup.get('properties').get('Rupture', None)
if cur_id_rupture is None or cur_id_source is None:
- print('CreateScenario: rupture #{} does not have valid source/rupture ID - skipped.'.format(rup_tag))
+ print( # noqa: T201
+ f'CreateScenario: rupture #{rup_tag} does not have valid source/rupture ID - skipped.'
+ )
continue
- cur_source, cur_rupture = get_source_rupture(eq_source, cur_id_source, cur_id_rupture)
- scenario_data.update({rup_tag: {
- 'Type': source_type,
- 'RuptureForecast': source_model,
- 'Name': str(cur_source.getName()),
- 'Magnitude': float(cur_rupture.getMag()),
- 'MeanAnnualRate': float(cur_rupture.getMeanAnnualRate(eq_source.getTimeSpan().getDuration())),
- 'SourceIndex': cur_id_source,
- 'RuptureIndex': cur_id_rupture,
- 'SiteSourceDistance': get_source_distance(eq_source, cur_id_source, lat, lon),
- 'SiteRuptureDistance': get_rupture_distance(eq_source, cur_id_source, cur_id_rupture, lat, lon)
- }})
-
+ cur_source, cur_rupture = get_source_rupture( # noqa: F405
+ eq_source, cur_id_source, cur_id_rupture
+ )
+ scenario_data.update(
+ {
+ rup_tag: {
+ 'Type': source_type,
+ 'RuptureForecast': source_model,
+ 'Name': str(cur_source.getName()),
+ 'Magnitude': float(cur_rupture.getMag()),
+ 'MeanAnnualRate': float(
+ cur_rupture.getMeanAnnualRate(
+ eq_source.getTimeSpan().getDuration()
+ )
+ ),
+ 'SourceIndex': cur_id_source,
+ 'RuptureIndex': cur_id_rupture,
+ 'SiteSourceDistance': get_source_distance( # noqa: F405
+ eq_source, cur_id_source, lat, lon
+ ),
+ 'SiteRuptureDistance': get_rupture_distance( # noqa: F405
+ eq_source, cur_id_source, cur_id_rupture, lat, lon
+ ),
+ }
+ }
+ )
+
# return
return scenario_data
-
-def create_earthquake_scenarios(scenario_info, stations, work_dir, openquakeSiteFile = None):
+def create_earthquake_scenarios( # noqa: C901, D103
+ scenario_info,
+ stations,
+ work_dir,
+ openquakeSiteFile=None, # noqa: N803
+):
# # Number of scenarios
# source_num = scenario_info.get('Number', 1)
# if source_num == 'All':
# # Large number to consider all sources in the ERF
# source_num = 10000000
- out_dir = os.path.join(work_dir,"Output")
+ out_dir = os.path.join(work_dir, 'Output') # noqa: PTH118
if scenario_info['Generator'] == 'Simulation':
- # TODO:
- print('Physics-based earthquake simulation is under development.')
+ # TODO: # noqa: TD002
+ print('Physics-based earthquake simulation is under development.') # noqa: T201
return 1
# Searching earthquake ruptures that fulfill the request
- elif scenario_info['Generator'] == 'Selection':
+ elif scenario_info['Generator'] == 'Selection': # noqa: RET505
# Collecting all possible earthquake scenarios
lat = []
lon = []
@@ -367,47 +456,69 @@ def create_earthquake_scenarios(scenario_info, stations, work_dir, openquakeSite
source_type = scenario_info['EqRupture']['Type']
t_start = time.time()
if source_type == 'ERF':
- if 'SourceIndex' in scenario_info['EqRupture'].keys() and 'RuptureIndex' in scenario_info['EqRupture'].keys():
+ if (
+ 'SourceIndex' in scenario_info['EqRupture'].keys() # noqa: SIM118
+ and 'RuptureIndex' in scenario_info['EqRupture'].keys() # noqa: SIM118
+ ):
source_model = scenario_info['EqRupture']['Model']
- eq_source = getERF(scenario_info)
+ eq_source = getERF(scenario_info) # noqa: F405
# check source index list and rupture index list
- if type(scenario_info['EqRupture']['SourceIndex']) == int:
+ if type(scenario_info['EqRupture']['SourceIndex']) == int: # noqa: E721
source_index_list = [scenario_info['EqRupture']['SourceIndex']]
else:
source_index_list = scenario_info['EqRupture']['SourceIndex']
- if type(scenario_info['EqRupture']['RuptureIndex']) == int:
+ if type(scenario_info['EqRupture']['RuptureIndex']) == int: # noqa: E721
rup_index_list = [scenario_info['EqRupture']['RuptureIndex']]
else:
rup_index_list = scenario_info['EqRupture']['RuptureIndex']
- if not(len(source_index_list) == len(rup_index_list)):
- print('CreateScenario: source number {} should be matched by rupture number {}'.format(len(source_index_list),len(rup_index_list)))
- return dict()
+ if len(source_index_list) != len(rup_index_list):
+ print( # noqa: T201
+                    f'CreateScenario: the number of sources ({len(source_index_list)}) must match the number of ruptures ({len(rup_index_list)})'
+ )
+ return dict() # noqa: C408
# loop over all scenarios
- scenario_data = dict()
+ scenario_data = dict() # noqa: C408
for i in range(len(source_index_list)):
cur_source_index = source_index_list[i]
cur_rup_index = rup_index_list[i]
- distToSource = get_source_distance(eq_source, cur_source_index, lat, lon)
- scenario_data.update({i: {
- 'Type': source_type,
- 'RuptureForecast': source_model,
- 'SourceIndex': cur_source_index,
- 'RuptureIndex': cur_rup_index,
- 'SiteSourceDistance': distToSource,
- 'SiteRuptureDistance': get_rupture_distance(eq_source, cur_source_index, cur_rup_index, lat, lon)
- }})
+ distToSource = get_source_distance( # noqa: N806, F405
+ eq_source, cur_source_index, lat, lon
+ )
+ scenario_data.update(
+ {
+ i: {
+ 'Type': source_type,
+ 'RuptureForecast': source_model,
+ 'SourceIndex': cur_source_index,
+ 'RuptureIndex': cur_rup_index,
+ 'SiteSourceDistance': distToSource,
+ 'SiteRuptureDistance': get_rupture_distance( # noqa: F405
+ eq_source,
+ cur_source_index,
+ cur_rup_index,
+ lat,
+ lon,
+ ),
+ }
+ }
+ )
return scenario_data
- else:
+ else: # noqa: RET505
source_model = scenario_info['EqRupture']['Model']
source_name = scenario_info['EqRupture'].get('Name', None)
- min_M = scenario_info['EqRupture'].get('min_Mag', 5.0)
- max_M = scenario_info['EqRupture'].get('max_Mag', 9.0)
- max_R = scenario_info['EqRupture'].get('max_Dist', 1000.0)
- eq_source = getERF(scenario_info)
- erf_data = export_to_json(eq_source, ref_station, outfile = os.path.join(out_dir,'RupFile.geojson'), \
- EqName = source_name, minMag = min_M, \
- maxMag = max_M, maxDistance = max_R, \
- )
+ min_M = scenario_info['EqRupture'].get('min_Mag', 5.0) # noqa: N806
+ max_M = scenario_info['EqRupture'].get('max_Mag', 9.0) # noqa: N806
+ max_R = scenario_info['EqRupture'].get('max_Dist', 1000.0) # noqa: N806
+ eq_source = getERF(scenario_info) # noqa: F405
+ erf_data = export_to_json( # noqa: F405, F841
+ eq_source,
+ ref_station,
+ outfile=os.path.join(out_dir, 'RupFile.geojson'), # noqa: PTH118
+ EqName=source_name,
+ minMag=min_M,
+ maxMag=max_M,
+ maxDistance=max_R,
+ )
# Parsing data
# feat = erf_data['features']
# """
@@ -445,42 +556,62 @@ def create_earthquake_scenarios(scenario_info, stations, work_dir, openquakeSite
# del erf_data
elif source_type == 'PointSource':
# Export to a geojson format RupFile.json
- outfile = os.path.join(out_dir,'RupFile.geojson')
- pointSource_data = {"type": "FeatureCollection"}
+ outfile = os.path.join(out_dir, 'RupFile.geojson') # noqa: PTH118
+ pointSource_data = {'type': 'FeatureCollection'} # noqa: N806
feature_collection = []
- newRup = {
- 'type': "Feature",
- "properties":{
+ newRup = { # noqa: N806
+ 'type': 'Feature',
+ 'properties': {
'Type': source_type,
'Magnitude': scenario_info['EqRupture']['Magnitude'],
'Location': scenario_info['EqRupture']['Location'],
'AverageRake': scenario_info['EqRupture']['AverageRake'],
'AverageDip': scenario_info['EqRupture']['AverageDip'],
- 'Source':0,
- 'Rupture':0}
+ 'Source': 0,
+ 'Rupture': 0,
+ },
}
- newRup['geometry'] = dict()
+ newRup['geometry'] = dict() # noqa: C408
newRup['geometry'].update({'type': 'Point'})
- newRup['geometry'].update({'coordinates': [scenario_info['EqRupture']['Location']['Longitude'], scenario_info['EqRupture']['Location']['Latitude']]})
+ newRup['geometry'].update(
+ {
+ 'coordinates': [
+ scenario_info['EqRupture']['Location']['Longitude'],
+ scenario_info['EqRupture']['Location']['Latitude'],
+ ]
+ }
+ )
feature_collection.append(newRup)
- pointSource_data.update({'features':feature_collection})
+ pointSource_data.update({'features': feature_collection})
if outfile is not None:
- print('The collected point source ruptures are saved in {}'.format(outfile))
- with open(outfile, 'w') as f:
+ print(f'The collected point source ruptures are saved in {outfile}') # noqa: T201
+ with open(outfile, 'w') as f: # noqa: PLW1514, PTH123
json.dump(pointSource_data, f, indent=2)
- elif source_type=='oqSourceXML':
- import FetchOpenQuake
- siteFile = os.path.join(work_dir,'Input',openquakeSiteFile)
- FetchOpenQuake.export_rupture_to_json(scenario_info, mlon, mlat, siteFile, work_dir)
- print('CreateScenario: all scenarios configured {0} sec'.format(time.time() - t_start))
- # return
- return
+ elif source_type == 'oqSourceXML':
+ import FetchOpenQuake # noqa: PLC0415
+ siteFile = os.path.join(work_dir, 'Input', openquakeSiteFile) # noqa: PTH118, N806
+ FetchOpenQuake.export_rupture_to_json(
+ scenario_info, mlon, mlat, siteFile, work_dir
+ )
+ print( # noqa: T201
+        f'CreateScenario: all scenarios configured in {time.time() - t_start} sec'
+ )
+ # return
+ return None
-def sample_scenarios(rup_info=[], sample_num=1, sample_type='Random', source_name=None, min_M=0.0):
+def sample_scenarios( # noqa: D103
+ rup_info=[], # noqa: B006
+ sample_num=1,
+ sample_type='Random',
+ source_name=None,
+ min_M=0.0, # noqa: N803
+):
if len(rup_info) == 0:
- print('CreateScenario.sample_scenarios: no available scenario provided - please relax earthquake filters.')
+ print( # noqa: T201
+ 'CreateScenario.sample_scenarios: no available scenario provided - please relax earthquake filters.'
+ )
return []
feat = rup_info
@@ -491,30 +622,30 @@ def sample_scenarios(rup_info=[], sample_num=1, sample_type='Random', source_nam
if min_M > cur_f['properties']['Magnitude']:
continue
tag.append(i)
-
+
if sample_type == 'Random':
s_tag = random.sample(tag, min(sample_num, len(tag)))
-
+
elif sample_type == 'MAF':
# maf list
maf_list = [feat[x]['properties']['MeanAnnualRate'] for x in tag]
# normalize maf list
sum_maf = np.sum(maf_list)
- maf_list_n = [x/sum_maf for x in maf_list]
+ maf_list_n = [x / sum_maf for x in maf_list]
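+        # sampling probability of each rupture is proportional to its mean annual rate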
# get sample
s_tag = np.random.choice(tag, sample_num, p=maf_list_n).tolist()
else:
- print('CreateScenario.sample_scenarios: please specify a sampling method.')
+ print('CreateScenario.sample_scenarios: please specify a sampling method.') # noqa: T201
s_tag = []
# return
return s_tag
-def create_wind_scenarios(scenario_info, stations, data_dir):
+def create_wind_scenarios(scenario_info, stations, data_dir): # noqa: D103
# Number of scenarios
- source_num = scenario_info.get('Number', 1)
+ source_num = scenario_info.get('Number', 1) # noqa: F841
# Directly defining earthquake ruptures
if scenario_info['Generator'] == 'Simulation':
# Collecting site locations
@@ -524,51 +655,58 @@ def create_wind_scenarios(scenario_info, stations, data_dir):
lat.append(s['Latitude'])
lon.append(s['Longitude'])
# Save Stations.csv
- df = pd.DataFrame({
- 'lat': lat,
- 'lon': lon
- })
- df.to_csv(data_dir + 'Stations.csv', index = False, header = False)
+ df = pd.DataFrame({'lat': lat, 'lon': lon}) # noqa: PD901
+ df.to_csv(data_dir + 'Stations.csv', index=False, header=False)
# Save Lat_w.csv
lat_w = np.linspace(min(lat) - 0.5, max(lat) + 0.5, 100)
- df = pd.DataFrame({'lat_w': lat_w})
- df.to_csv(data_dir + 'Lat_w.csv', index = False, header = False)
+ df = pd.DataFrame({'lat_w': lat_w}) # noqa: PD901
+ df.to_csv(data_dir + 'Lat_w.csv', index=False, header=False)
# Parsing Terrain info
- df = pd.read_csv(data_dir + scenario_info['Terrain']['Longitude'],
- header = None, index_col = None)
- df.to_csv(data_dir + 'Long_wr.csv', header = False, index = False)
- df = pd.read_csv(data_dir + scenario_info['Terrain']['Latitude'],
- header = None, index_col = None)
- df.to_csv(data_dir + 'Lat_wr.csv', header = False, index = False)
- df = pd.read_csv(data_dir + scenario_info['Terrain']['Size'],
- header = None, index_col = None)
- df.to_csv(data_dir + 'wr_sizes.csv', header = False, index = False)
- df = pd.read_csv(data_dir + scenario_info['Terrain']['z0'],
- header = None, index_col = None)
- df.to_csv(data_dir + 'z0r.csv', header = False, index = False)
+ df = pd.read_csv( # noqa: PD901
+ data_dir + scenario_info['Terrain']['Longitude'],
+ header=None,
+ index_col=None,
+ )
+ df.to_csv(data_dir + 'Long_wr.csv', header=False, index=False)
+ df = pd.read_csv( # noqa: PD901
+ data_dir + scenario_info['Terrain']['Latitude'],
+ header=None,
+ index_col=None,
+ )
+ df.to_csv(data_dir + 'Lat_wr.csv', header=False, index=False)
+ df = pd.read_csv( # noqa: PD901
+ data_dir + scenario_info['Terrain']['Size'], header=None, index_col=None
+ )
+ df.to_csv(data_dir + 'wr_sizes.csv', header=False, index=False)
+ df = pd.read_csv( # noqa: PD901
+ data_dir + scenario_info['Terrain']['z0'], header=None, index_col=None
+ )
+ df.to_csv(data_dir + 'z0r.csv', header=False, index=False)
# Parsing storm properties
param = []
- param.append(scenario_info['Storm']['Landfall']['Latitude'])
+ param.append(scenario_info['Storm']['Landfall']['Latitude']) # noqa: FURB113
param.append(scenario_info['Storm']['Landfall']['Longitude'])
param.append(scenario_info['Storm']['LandingAngle'])
param.append(scenario_info['Storm']['Pressure'])
param.append(scenario_info['Storm']['Speed'])
param.append(scenario_info['Storm']['Radius'])
- df = pd.DataFrame({'param': param})
- df.to_csv(data_dir + 'param.csv', index = False, header = False)
- df = pd.read_csv(data_dir + scenario_info['Storm']['Track'],
- header = None, index_col = None)
- df.to_csv(data_dir + 'Track.csv', header = False, index = False)
+ df = pd.DataFrame({'param': param}) # noqa: PD901
+ df.to_csv(data_dir + 'param.csv', index=False, header=False)
+ df = pd.read_csv( # noqa: PD901
+ data_dir + scenario_info['Storm']['Track'], header=None, index_col=None
+ )
+ df.to_csv(data_dir + 'Track.csv', header=False, index=False)
# Saving del_par.csv
- del_par = [0, 0, 0] # default
- df =pd.DataFrame({'del_par': del_par})
- df.to_csv(data_dir + 'del_par.csv', header = False, index = False)
+ del_par = [0, 0, 0] # default
+ df = pd.DataFrame({'del_par': del_par}) # noqa: PD901
+ df.to_csv(data_dir + 'del_par.csv', header=False, index=False)
# Parsing resolution data
- delta_p = [1000., scenario_info['Resolution']['DivRad'], 1000000.]
- delta_p.extend([0., scenario_info['Resolution']['DivDeg'], 360.])
- delta_p.extend([scenario_info['MeasureHeight'], 10,
- scenario_info['MeasureHeight']])
- df = pd.DataFrame({'delta_p': delta_p})
- df.to_csv(data_dir + 'delta_p.csv', header = False, index = False)
+ delta_p = [1000.0, scenario_info['Resolution']['DivRad'], 1000000.0]
+ delta_p.extend([0.0, scenario_info['Resolution']['DivDeg'], 360.0])
+ delta_p.extend(
+ [scenario_info['MeasureHeight'], 10, scenario_info['MeasureHeight']]
+ )
+ df = pd.DataFrame({'delta_p': delta_p}) # noqa: PD901
+ df.to_csv(data_dir + 'delta_p.csv', header=False, index=False)
else:
- print('Currently only supporting Simulation generator.')
+ print('Currently only supporting Simulation generator.') # noqa: T201
diff --git a/modules/performRegionalEventSimulation/regionalGroundMotion/CreateStation.py b/modules/performRegionalEventSimulation/regionalGroundMotion/CreateStation.py
index 8e53e3dd5..0bd879000 100644
--- a/modules/performRegionalEventSimulation/regionalGroundMotion/CreateStation.py
+++ b/modules/performRegionalEventSimulation/regionalGroundMotion/CreateStation.py
@@ -1,5 +1,4 @@
-# -*- coding: utf-8 -*-
-#
+# # noqa: INP001, D100
# Copyright (c) 2018 Leland Stanford Junior University
# Copyright (c) 2018 The Regents of the University of California
#
@@ -38,54 +37,66 @@
# Kuanshi Zhong
#
-import json, copy
-import numpy as np
-import pandas as pd
import socket
import sys
+
+import numpy as np
+import pandas as pd
from tqdm import tqdm
-if 'stampede2' not in socket.gethostname():
- from FetchOpenSHA import get_site_vs30_from_opensha
- from FetchOpenSHA import get_site_z1pt0_from_opensha, get_site_z2pt5_from_opensha
+if 'stampede2' not in socket.gethostname():
+ from FetchOpenSHA import (
+ get_site_vs30_from_opensha,
+ get_site_z1pt0_from_opensha,
+ get_site_z2pt5_from_opensha,
+ )
-def get_label(options, labels, label_name):
+def get_label(options, labels, label_name): # noqa: D103
for option in options:
if option in labels:
labels = labels[labels != option]
return option, labels
- print(f'WARNING: Could not identify the label for the {label_name}')
+ print(f'WARNING: Could not identify the label for the {label_name}') # noqa: T201, RET503
class Station:
- """
- A class for stations in an earthquake scenario
- """
- def __init__(self, lon, lat, vs30 = None, z2p5 = None):
+ """A class for stations in an earthquake scenario""" # noqa: D400
+
+ def __init__(self, lon, lat, vs30=None, z2p5=None):
# Initializing the location, vs30, z2.5, Tcond and other Tags
self.lon = lon
self.lat = lat
self.vs30 = vs30
self.z2p5 = z2p5
- def get_location(self):
+ def get_location(self): # noqa: D102
# Returning the geo location
return self.lon, self.lat
- def get_vs30(self):
+ def get_vs30(self): # noqa: D102
# Returning the Vs30 at the station
return self.vs30
- def get_z2p5(self):
+ def get_z2p5(self): # noqa: D102
# Returning the z2.5 of the station
return self.z2p5
-def create_stations(input_file, output_file, filterIDs, vs30Config, z1Config, z25Config, zTR_tag=0, soil_flag=False, soil_model_type=None, soil_user_fun=None):
- """
- Reading input csv file for stations and saving data to output json file
+def create_stations( # noqa: C901, PLR0912, PLR0915
+ input_file,
+ output_file,
+ filterIDs, # noqa: N803
+ vs30Config, # noqa: N803
+ z1Config, # noqa: N803
+ z25Config, # noqa: N803
+ zTR_tag=0, # noqa: N803
+ soil_flag=False, # noqa: FBT002
+ soil_model_type=None,
+ soil_user_fun=None,
+):
+ """Reading input csv file for stations and saving data to output json file
Input:
input_file: the filename of the station csv file
output_file: the filename of the output json file
@@ -96,27 +107,28 @@ def create_stations(input_file, output_file, filterIDs, vs30Config, z1Config, z2
z2pt5_tag: z2pt5 tag: 1 - using empirical equation, 0 - leave it as null
Output:
stn_file: dictionary of station data
- """
+ """ # noqa: D205, D400, D401
# Reading csv data
run_tag = 1
try:
stn_df = pd.read_csv(input_file, header=0, index_col=0)
- except:
+ except: # noqa: E722
run_tag = 0
- return run_tag
+ return run_tag # noqa: RET504
# Max and Min IDs
- if len(filterIDs)>0:
+ if len(filterIDs) > 0:
stns_requested = []
for stns in filterIDs.split(','):
- if "-" in stns:
- stn_low, stn_high = stns.split("-")
- stns_requested += list(range(int(stn_low), int(stn_high)+1))
+ if '-' in stns:
+ stn_low, stn_high = stns.split('-')
+ stns_requested += list(range(int(stn_low), int(stn_high) + 1))
else:
stns_requested.append(int(stns))
stns_requested = np.array(stns_requested)
- stns_available = stn_df.index.values
+ stns_available = stn_df.index.values # noqa: PD011
stns_to_run = stns_requested[
- np.where(np.in1d(stns_requested, stns_available))[0]]
+ np.where(np.isin(stns_requested, stns_available))[0]
+ ]
selected_stn = stn_df.loc[stns_to_run]
else:
selected_stn = stn_df
@@ -131,48 +143,82 @@ def create_stations(input_file, output_file, filterIDs, vs30Config, z1Config, z2
# selected_stn = copy.copy(stn_df.loc[min_id:max_id, :])
selected_stn.index = list(range(len(selected_stn.index)))
# Extracting data
- labels = selected_stn.columns.values
- lon_label, labels = get_label(['Longitude', 'longitude', 'lon', 'Lon'], labels, 'longitude')
- lat_label, labels = get_label(['Latitude', 'latitude', 'lat', 'Lat'], labels, 'latitude')
- if any([i in ['Vs30', 'vs30', 'Vs_30', 'vs_30'] for i in labels]):
- vs30_label, labels = get_label(['Vs30', 'vs30', 'Vs_30', 'vs_30'], labels, 'vs30')
+ labels = selected_stn.columns.values # noqa: PD011
+ lon_label, labels = get_label(
+ ['Longitude', 'longitude', 'lon', 'Lon'], labels, 'longitude'
+ )
+ lat_label, labels = get_label(
+ ['Latitude', 'latitude', 'lat', 'Lat'], labels, 'latitude'
+ )
+ if any([i in ['Vs30', 'vs30', 'Vs_30', 'vs_30'] for i in labels]): # noqa: C419, PLR6201
+ vs30_label, labels = get_label(
+ ['Vs30', 'vs30', 'Vs_30', 'vs_30'], labels, 'vs30'
+ )
else:
vs30_label = 'Vs30'
- if any([i in ['Z2p5', 'z2p5', 'Z2pt5', 'z2pt5', 'Z25', 'z25', 'Z2.5', 'z2.5'] for i in labels]):
- z2p5_label, labels = get_label(['Z2p5', 'z2p5', 'Z2pt5', 'z2pt5', 'Z25', 'z25', 'Z2.5', 'z2.5'], labels, 'z2p5')
+ if any(
+ [ # noqa: C419
+ i in ['Z2p5', 'z2p5', 'Z2pt5', 'z2pt5', 'Z25', 'z25', 'Z2.5', 'z2.5'] # noqa: PLR6201
+ for i in labels
+ ]
+ ):
+ z2p5_label, labels = get_label(
+ ['Z2p5', 'z2p5', 'Z2pt5', 'z2pt5', 'Z25', 'z25', 'Z2.5', 'z2.5'],
+ labels,
+ 'z2p5',
+ )
else:
z2p5_label = 'z2p5'
- if any([i in ['Z1p0', 'z1p0', 'Z1pt0', 'z1pt0', 'Z1', 'z1', 'Z1.0', 'z1.0'] for i in labels]):
- z1p0_label, labels = get_label(['Z1p0', 'z1p0', 'Z1pt0', 'z1pt0', 'Z1', 'z1', 'Z1.0', 'z1.0'], labels, 'z1p0')
+ if any(
+ [ # noqa: C419
+ i in ['Z1p0', 'z1p0', 'Z1pt0', 'z1pt0', 'Z1', 'z1', 'Z1.0', 'z1.0'] # noqa: PLR6201
+ for i in labels
+ ]
+ ):
+ z1p0_label, labels = get_label(
+ ['Z1p0', 'z1p0', 'Z1pt0', 'z1pt0', 'Z1', 'z1', 'Z1.0', 'z1.0'],
+ labels,
+ 'z1p0',
+ )
else:
z1p0_label = 'z1p0'
- if any([i in ['zTR', 'ztr', 'ZTR', 'DepthToRock'] for i in labels]):
- zTR_label, labels = get_label(['zTR', 'ztr', 'ZTR', 'DepthToRock'], labels, 'zTR')
+ if any([i in ['zTR', 'ztr', 'ZTR', 'DepthToRock'] for i in labels]): # noqa: C419, PLR6201
+ zTR_label, labels = get_label( # noqa: N806
+ ['zTR', 'ztr', 'ZTR', 'DepthToRock'], labels, 'zTR'
+ )
else:
- zTR_label = 'DepthToRock'
+ zTR_label = 'DepthToRock' # noqa: N806
if soil_flag:
- if any([i in ['Model', 'model', 'SoilModel', 'soilModel'] for i in labels]):
- soil_model_label, labels = get_label(['Model', 'model', 'SoilModel', 'soilModel'], labels, 'Model')
+ if any([i in ['Model', 'model', 'SoilModel', 'soilModel'] for i in labels]): # noqa: C419, PLR6201
+ soil_model_label, labels = get_label(
+ ['Model', 'model', 'SoilModel', 'soilModel'], labels, 'Model'
+ )
else:
soil_model_label = 'Model'
if soil_model_type is not None:
- model_map = {'Elastic Isotropic': 'EI',
- 'Multiaxial Cyclic Plasticity': 'BA',
- 'User': 'USER'}
+ model_map = {
+ 'Elastic Isotropic': 'EI',
+ 'Multiaxial Cyclic Plasticity': 'BA',
+ 'User': 'USER',
+ }
soil_model_tag = model_map.get(soil_model_type, 'EI')
# add a 'Model' column to selected_stn
- selected_stn[soil_model_label] = [soil_model_tag for x in range(len(selected_stn.index))]
- STN = []
- stn_file = {
- 'Stations': []
- }
+ selected_stn[soil_model_label] = [
+ soil_model_tag for x in range(len(selected_stn.index))
+ ]
+ STN = [] # noqa: N806
+ stn_file = {'Stations': []}
# Get Vs30
- if vs30Config['Type'] == "User-specified":
- if vs30_label not in selected_stn.keys():
- sys.exit("ERROR: User-specified option is selected for Vs30 model but the provided."+
- "but the provided Site File doesn't contain a column named 'Vs30'."+
- "\nNote: the User-specified Vs30 model is only supported for Scattering Locations site definition.")
- tmp = selected_stn.iloc[:,list(selected_stn.keys()).index(vs30_label)].values.tolist()
+ if vs30Config['Type'] == 'User-specified':
+ if vs30_label not in selected_stn.keys(): # noqa: SIM118
+ sys.exit(
+ 'ERROR: User-specified option is selected for Vs30 model but the provided.' # noqa: ISC003
+                'ERROR: User-specified option is selected for the Vs30 model '  # noqa: ISC003
+ + '\nNote: the User-specified Vs30 model is only supported for Scattering Locations site definition.'
+ )
+ tmp = selected_stn.iloc[ # noqa: PD011
+ :, list(selected_stn.keys()).index(vs30_label)
+ ].values.tolist()
if len(tmp):
nan_loc = [x[0] for x in np.argwhere(np.isnan(tmp)).tolist()]
else:
@@ -181,32 +227,58 @@ def create_stations(input_file, output_file, filterIDs, vs30Config, z1Config, z2
nan_loc = list(range(len(selected_stn.index)))
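+    # vs30_tag: 1 = global Vs30 grid, 2 = Thompson map, 3 = National Crustal Model, 0 = OpenSHA site data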
if 'Global Vs30' in vs30Config['Type']:
vs30_tag = 1
- elif 'Thompson' in vs30Config['Type']:
+ elif 'Thompson' in vs30Config['Type']:
vs30_tag = 2
- elif 'NCM' in vs30Config['Type']:
+ elif 'NCM' in vs30Config['Type']:
vs30_tag = 3
else:
vs30_tag = 0
if len(nan_loc) and vs30_tag == 1:
- print('CreateStation: Interpolating global Vs30 map for defined stations.')
- selected_stn.loc[nan_loc,vs30_label] = get_vs30_global(selected_stn.iloc[nan_loc,list(selected_stn.keys()).index(lat_label)].values.tolist(),
- selected_stn.iloc[nan_loc,list(selected_stn.keys()).index(lon_label)].values.tolist())
- if len(nan_loc) and vs30_tag == 2:
- print('CreateStation: Interpolating Thompson Vs30 map for defined stations.')
- selected_stn.loc[nan_loc,vs30_label] = get_vs30_thompson(selected_stn.iloc[nan_loc,list(selected_stn.keys()).index(lat_label)].values.tolist(),
- selected_stn.iloc[nan_loc,list(selected_stn.keys()).index(lon_label)].values.tolist())
- if len(nan_loc) and vs30_tag == 3:
- print('CreateStation: Fetch National Crustal Model Vs for defined stations.')
- selected_stn.loc[nan_loc,vs30_label] = get_vs30_ncm(selected_stn.iloc[nan_loc,list(selected_stn.keys()).index(lat_label)].values.tolist(),
- selected_stn.iloc[nan_loc,list(selected_stn.keys()).index(lon_label)].values.tolist())
+ print('CreateStation: Interpolating global Vs30 map for defined stations.') # noqa: T201
+ selected_stn.loc[nan_loc, vs30_label] = get_vs30_global(
+ selected_stn.iloc[ # noqa: PD011
+ nan_loc, list(selected_stn.keys()).index(lat_label)
+ ].values.tolist(),
+ selected_stn.iloc[ # noqa: PD011
+ nan_loc, list(selected_stn.keys()).index(lon_label)
+ ].values.tolist(),
+ )
+ if len(nan_loc) and vs30_tag == 2: # noqa: PLR2004
+ print('CreateStation: Interpolating Thompson Vs30 map for defined stations.') # noqa: T201
+ selected_stn.loc[nan_loc, vs30_label] = get_vs30_thompson(
+ selected_stn.iloc[ # noqa: PD011
+ nan_loc, list(selected_stn.keys()).index(lat_label)
+ ].values.tolist(),
+ selected_stn.iloc[ # noqa: PD011
+ nan_loc, list(selected_stn.keys()).index(lon_label)
+ ].values.tolist(),
+ )
+ if len(nan_loc) and vs30_tag == 3: # noqa: PLR2004
+ print('CreateStation: Fetch National Crustal Model Vs for defined stations.') # noqa: T201
+ selected_stn.loc[nan_loc, vs30_label] = get_vs30_ncm(
+ selected_stn.iloc[ # noqa: PD011
+ nan_loc, list(selected_stn.keys()).index(lat_label)
+ ].values.tolist(),
+ selected_stn.iloc[ # noqa: PD011
+ nan_loc, list(selected_stn.keys()).index(lon_label)
+ ].values.tolist(),
+ )
if len(nan_loc) and vs30_tag == 0:
- print('CreateStation: Fetch OpenSHA Vs30 map for defined stations.')
- selected_stn.loc[nan_loc,vs30_label] = get_site_vs30_from_opensha(selected_stn.iloc[nan_loc,list(selected_stn.keys()).index(lat_label)].values.tolist(),
- selected_stn.iloc[nan_loc,list(selected_stn.keys()).index(lon_label)].values.tolist())
-
+ print('CreateStation: Fetch OpenSHA Vs30 map for defined stations.') # noqa: T201
+ selected_stn.loc[nan_loc, vs30_label] = get_site_vs30_from_opensha(
+ selected_stn.iloc[ # noqa: PD011
+ nan_loc, list(selected_stn.keys()).index(lat_label)
+ ].values.tolist(),
+ selected_stn.iloc[ # noqa: PD011
+ nan_loc, list(selected_stn.keys()).index(lon_label)
+ ].values.tolist(),
+ )
+
# Get zTR
- if zTR_label in selected_stn.keys():
- tmp = selected_stn.iloc[:,list(selected_stn.keys()).index(zTR_label)].values.tolist()
+ if zTR_label in selected_stn.keys(): # noqa: SIM118
+ tmp = selected_stn.iloc[ # noqa: PD011
+ :, list(selected_stn.keys()).index(zTR_label)
+ ].values.tolist()
if len(tmp):
nan_loc = [x[0] for x in np.argwhere(np.isnan(tmp)).tolist()]
else:
@@ -214,38 +286,74 @@ def create_stations(input_file, output_file, filterIDs, vs30Config, z1Config, z2
else:
nan_loc = list(range(len(selected_stn.index)))
if len(nan_loc) and zTR_tag == 0:
- print('CreateStation: Interpolating global depth to rock map for defined stations.')
- selected_stn.loc[nan_loc, zTR_label] = [max(0,x) for x in get_zTR_global(selected_stn.iloc[nan_loc,list(selected_stn.keys()).index(lat_label)].values.tolist(),
- selected_stn.iloc[nan_loc,list(selected_stn.keys()).index(lon_label)].values.tolist())]
+ print( # noqa: T201
+ 'CreateStation: Interpolating global depth to rock map for defined stations.'
+ )
+ selected_stn.loc[nan_loc, zTR_label] = [
+ max(0, x)
+ for x in get_zTR_global(
+ selected_stn.iloc[ # noqa: PD011
+ nan_loc, list(selected_stn.keys()).index(lat_label)
+ ].values.tolist(),
+ selected_stn.iloc[ # noqa: PD011
+ nan_loc, list(selected_stn.keys()).index(lon_label)
+ ].values.tolist(),
+ )
+ ]
elif len(nan_loc) and zTR_tag == 1:
- print('CreateStation: Interpolating depth to rock map from National Crustal Model.')
- selected_stn.loc[nan_loc, zTR_label] = [max(0,x) for x in get_zTR_ncm(selected_stn.iloc[nan_loc,list(selected_stn.keys()).index(lat_label)].values.tolist(),
- selected_stn.iloc[nan_loc,list(selected_stn.keys()).index(lon_label)].values.tolist())]
+ print( # noqa: T201
+ 'CreateStation: Interpolating depth to rock map from National Crustal Model.'
+ )
+ selected_stn.loc[nan_loc, zTR_label] = [
+ max(0, x)
+ for x in get_zTR_ncm(
+ selected_stn.iloc[ # noqa: PD011
+ nan_loc, list(selected_stn.keys()).index(lat_label)
+ ].values.tolist(),
+ selected_stn.iloc[ # noqa: PD011
+ nan_loc, list(selected_stn.keys()).index(lon_label)
+ ].values.tolist(),
+ )
+ ]
elif len(nan_loc):
- print('CreateStation: Default zore depth to rock for sites missing the data.')
+ print( # noqa: T201
+                'CreateStation: Defaulting depth to rock to zero for sites missing the data.'
+ )
selected_stn[zTR_label] = [0.0 for x in range(len(selected_stn.index))]
-
+
# rename column headers to standard keywords
- selected_stn = selected_stn.rename(columns={lat_label: 'Latitude', lon_label: 'Longitude', vs30_label: 'Vs30',
- z1p0_label: 'z1p0', z2p5_label: 'z2p5', zTR_label: 'DepthToRock'})
+ selected_stn = selected_stn.rename(
+ columns={
+ lat_label: 'Latitude',
+ lon_label: 'Longitude',
+ vs30_label: 'Vs30',
+ z1p0_label: 'z1p0',
+ z2p5_label: 'z2p5',
+ zTR_label: 'DepthToRock',
+ }
+ )
if soil_flag:
selected_stn.rename(columns={soil_model_label: 'Model'})
- # get soil model
+ # get soil model
if soil_flag:
# get soil_model
- soil_model = selected_stn.iloc[:, list(selected_stn.keys()).index('Model')].values.tolist()
+ soil_model = selected_stn.iloc[ # noqa: PD011
+ :, list(selected_stn.keys()).index('Model')
+ ].values.tolist()
         # elastic isotropic model
- row_EI = [i for i, x in enumerate(soil_model) if x == 'EI']
+ row_EI = [i for i, x in enumerate(soil_model) if x == 'EI'] # noqa: N806
# Borja & Amier model
- row_BA = [i for i, x in enumerate(soil_model) if x == 'BA']
+ row_BA = [i for i, x in enumerate(soil_model) if x == 'BA'] # noqa: N806
# User-defined model
- row_USER = [i for i, x in enumerate(soil_model) if x == 'USER']
+ row_USER = [i for i, x in enumerate(soil_model) if x == 'USER'] # noqa: N806
if len(row_EI):
cur_param_list = ['Den']
for cur_param in cur_param_list:
- if cur_param in selected_stn.keys():
- tmp = selected_stn.iloc[row_EI,list(selected_stn.keys()).index(cur_param)].values.tolist()
+ if cur_param in selected_stn.keys(): # noqa: SIM118
+ tmp = selected_stn.iloc[ # noqa: PD011
+ row_EI, list(selected_stn.keys()).index(cur_param)
+ ].values.tolist()
if len(tmp):
nan_loc = [x[0] for x in np.argwhere(np.isnan(tmp)).tolist()]
else:
@@ -253,13 +361,18 @@ def create_stations(input_file, output_file, filterIDs, vs30Config, z1Config, z2
else:
nan_loc = list(range(len(row_EI)))
if len(nan_loc):
- selected_stn.loc[row_EI,cur_param] = [get_soil_model_ei(param=cur_param) for x in range(len(row_EI))]
-
+ selected_stn.loc[row_EI, cur_param] = [
+ get_soil_model_ei(param=cur_param)
+ for x in range(len(row_EI))
+ ]
+
if len(row_BA):
cur_param_list = ['Su_rat', 'Den', 'h/G', 'm', 'h0', 'chi']
for cur_param in cur_param_list:
- if cur_param in selected_stn.keys():
- tmp = selected_stn.iloc[row_BA,list(selected_stn.keys()).index(cur_param)].values.tolist()
+ if cur_param in selected_stn.keys(): # noqa: SIM118
+ tmp = selected_stn.iloc[ # noqa: PD011
+ row_BA, list(selected_stn.keys()).index(cur_param)
+ ].values.tolist()
if len(tmp):
nan_loc = [x[0] for x in np.argwhere(np.isnan(tmp)).tolist()]
else:
@@ -267,38 +380,73 @@ def create_stations(input_file, output_file, filterIDs, vs30Config, z1Config, z2
else:
nan_loc = list(range(len(row_BA)))
if len(nan_loc):
- selected_stn.loc[row_BA,cur_param] = [get_soil_model_ba(param=cur_param) for x in range(len(row_BA))]
+ selected_stn.loc[row_BA, cur_param] = [
+ get_soil_model_ba(param=cur_param)
+ for x in range(len(row_BA))
+ ]
user_param_list = []
if len(row_USER):
if soil_user_fun is None:
- print('CreateStation: no fetching is conducted for the User soil model- please ensure all needed parameters are defined.')
+ print( # noqa: T201
+                'CreateStation: no fetching is conducted for the User soil model - please ensure all needed parameters are defined.'
+ )
for cur_param in list(selected_stn.keys()):
- if cur_param not in ['Longitude', 'Latitude', 'Vs30', 'DepthToRock', 'z1p0',
- 'z2p5', 'Model', 'Su_rat', 'Den', 'h/G', 'm', 'h0', 'chi']:
- user_param_list.append(cur_param)
+ if cur_param not in [ # noqa: PLR6201
+ 'Longitude',
+ 'Latitude',
+ 'Vs30',
+ 'DepthToRock',
+ 'z1p0',
+ 'z2p5',
+ 'Model',
+ 'Su_rat',
+ 'Den',
+ 'h/G',
+ 'm',
+ 'h0',
+ 'chi',
+ ]:
+ user_param_list.append(cur_param) # noqa: PERF401
else:
selected_stn = get_soil_model_user(selected_stn, soil_user_fun)
user_param_list = list(selected_stn.keys())
for cur_param in user_param_list:
- if cur_param in ['Longitude', 'Latitude', 'Vs30', 'DepthToRock', 'z1p0',
- 'z2p5', 'Model', 'Su_rat', 'Den', 'h/G', 'm', 'h0', 'chi']:
- user_param_list.pop(user_param_list.index(cur_param))
+ if cur_param in [ # noqa: PLR6201
+ 'Longitude',
+ 'Latitude',
+ 'Vs30',
+ 'DepthToRock',
+ 'z1p0',
+ 'z2p5',
+ 'Model',
+ 'Su_rat',
+ 'Den',
+ 'h/G',
+ 'm',
+ 'h0',
+ 'chi',
+ ]:
+ user_param_list.pop(user_param_list.index(cur_param)) # noqa: B909
ground_failure_input_keys = set()
for ind in tqdm(range(selected_stn.shape[0]), desc='Stations'):
- stn = selected_stn.iloc[ind,:]
+ stn = selected_stn.iloc[ind, :]
stn_id = selected_stn.index[ind]
- # for stn_id, stn in selected_stn.iterrows():
+ # for stn_id, stn in selected_stn.iterrows():
# Creating a Station object
- STN.append(Station(
- stn['Longitude'], stn['Latitude'],
- stn.get('Vs30', 760.0), stn.get('z2p5', 9.0)
- ))
+ STN.append(
+ Station(
+ stn['Longitude'],
+ stn['Latitude'],
+ stn.get('Vs30', 760.0),
+ stn.get('z2p5', 9.0),
+ )
+ )
# Collecting station data
tmp = {
'ID': stn_id,
'Longitude': stn['Longitude'],
- 'Latitude': stn['Latitude']
+ 'Latitude': stn['Latitude'],
}
if stn.get('Vs30'):
@@ -315,16 +463,20 @@ def create_stations(input_file, output_file, filterIDs, vs30Config, z1Config, z2
elif vs30_tag == 0:
tmp.update({'Vs30': get_site_vs30_from_opensha([stn[lat_label]], [stn[lon_label]])[0]})
"""
- if (z1Config["Type"]=="User-specified") and stn.get('z1pt0'):
+ if (z1Config['Type'] == 'User-specified') and stn.get('z1pt0'):
tmp.update({'z1pt0': stn.get('z1pt0')})
- elif (z1Config["Type"]=="User-specified") and z1Config.get('Parameters', False):
- tmp.update({'z1pt0': float(z1Config["Parameters"]["value"])})
- elif z1Config["Type"]=="OpenSHA default model":
- z1_tag = z1Config["z1_tag"]
- if z1_tag==1:
+ elif (z1Config['Type'] == 'User-specified') and z1Config.get(
+ 'Parameters', False
+ ):
+ tmp.update({'z1pt0': float(z1Config['Parameters']['value'])})
+ elif z1Config['Type'] == 'OpenSHA default model':
+ z1_tag = z1Config['z1_tag']
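+            # z1_tag: 1 = empirical z1.0 from Vs30, 2 = OpenSHA map with empirical fallback, otherwise empirical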
+ if z1_tag == 1:
tmp.update({'z1pt0': get_z1(tmp['Vs30'])})
- elif z1_tag==2:
- z1pt0 = get_site_z1pt0_from_opensha(tmp['Latitude'], tmp['Longitude'])
+ elif z1_tag == 2: # noqa: PLR2004
+ z1pt0 = get_site_z1pt0_from_opensha(
+ tmp['Latitude'], tmp['Longitude']
+ )
if np.isnan(z1pt0):
z1pt0 = get_z1(tmp.get('Vs30'))
tmp.update({'z1pt0': z1pt0})
@@ -332,88 +484,114 @@ def create_stations(input_file, output_file, filterIDs, vs30Config, z1Config, z2
z1pt0 = get_z1(tmp.get('Vs30'))
tmp.update({'z1pt0': z1pt0})
- if (z25Config["Type"]=="User-specified") and stn.get('z2pt5'):
+ if (z25Config['Type'] == 'User-specified') and stn.get('z2pt5'):
tmp.update({'z2pt5': stn.get('z2pt5')})
- elif (z25Config["Type"]=="User-specified") and z25Config.get('Parameters', False):
- tmp.update({'z2pt5': float(z25Config["Parameters"]["value"])})
- elif z25Config["Type"]=="OpenSHA default model":
- z25_tag = z25Config["z25_tag"]
- if z25_tag==1:
+ elif (z25Config['Type'] == 'User-specified') and z25Config.get(
+ 'Parameters', False
+ ):
+ tmp.update({'z2pt5': float(z25Config['Parameters']['value'])})
+ elif z25Config['Type'] == 'OpenSHA default model':
+ z25_tag = z25Config['z25_tag']
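+            # z25_tag: 1 = empirical z2.5 from z1.0, 2 = OpenSHA map with empirical fallback, 0 = empirical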
+ if z25_tag == 1:
tmp.update({'z2pt5': get_z25(tmp['z1pt0'])})
- elif z25_tag==2:
- z2pt5 = get_site_z2pt5_from_opensha(tmp['Latitude'], tmp['Longitude'])
+ elif z25_tag == 2: # noqa: PLR2004
+ z2pt5 = get_site_z2pt5_from_opensha(
+ tmp['Latitude'], tmp['Longitude']
+ )
if np.isnan(z2pt5):
z2pt5 = get_z25(tmp['z1pt0'])
tmp.update({'z2pt5': z2pt5})
- elif z25_tag ==0:
+ elif z25_tag == 0:
z2pt5 = get_z25(tmp['z1pt0'])
tmp.update({'z2pt5': z2pt5})
if 'DepthToRock' in stn.index:
tmp.update({'DepthToRock': stn.get('DepthToRock')})
else:
- #tmp.update({'zTR': max(0,get_zTR_global([stn[lat_label]], [stn[lon_label]])[0])})
+ # tmp.update({'zTR': max(0,get_zTR_global([stn[lat_label]], [stn[lon_label]])[0])})
tmp.update({'DepthToRock': 0.0})
if soil_flag:
tmp.update({'Model': stn.get('Model', 'EI')})
- for cur_param in ['Su_rat', 'Den', 'h/G', 'm', 'h0', 'chi']+user_param_list:
+ for cur_param in [ # noqa: RUF005
+ 'Su_rat',
+ 'Den',
+ 'h/G',
+ 'm',
+ 'h0',
+ 'chi',
+ ] + user_param_list:
tmp.update({cur_param: stn.get(cur_param, None)})
-
+
if stn.get('vsInferred'):
- if stn.get('vsInferred') not in [0, 1]:
- sys.exit("CreateStation: Only '0' or '1' can be assigned to the"+
- " 'vsInferred' column in the Site File (.csv), where 0 stands for false and 1 stands for true." )
+ if stn.get('vsInferred') not in [0, 1]: # noqa: PLR6201
+ sys.exit(
+ "CreateStation: Only '0' or '1' can be assigned to the" # noqa: ISC003
+ + " 'vsInferred' column in the Site File (.csv), where 0 stands for false and 1 stands for true."
+ )
# print(f"CreateStation: A value of 'vsInferred' is provided for station {stn_id} in the Site File (.csv)"+
# " and the 'vsInferred' defined in the Vs30 model pane is overwritten.")
tmp.update({'vsInferred': stn.get('vsInferred')})
else:
- tmp.update({'vsInferred': (1 if vs30Config['Parameters']['vsInferred'] else 0) })
- for key in ['liqSusc', 'gwDepth', 'distWater', 'distCoast', 'distRiver',\
- 'precipitation', 'slope', 'slopeThickness', 'gammaSoil', 'phiSoil',\
- 'cohesionSoil']:
+ tmp.update(
+ {'vsInferred': (1 if vs30Config['Parameters']['vsInferred'] else 0)}
+ )
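+ # carry through optional ground-failure inputs (e.g., liquefaction susceptibility, groundwater depth, slope properties) when present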
+ for key in [
+ 'liqSusc',
+ 'gwDepth',
+ 'distWater',
+ 'distCoast',
+ 'distRiver',
+ 'precipitation',
+ 'slope',
+ 'slopeThickness',
+ 'gammaSoil',
+ 'phiSoil',
+ 'cohesionSoil'
+ ]:
if stn.get(key, None) is not None:
- tmp.update({key:stn.get(key)})
+ tmp.update({key: stn.get(key)})
ground_failure_input_keys.add(key)
-
-
stn_file['Stations'].append(tmp)
- #stn_file['Stations'].append({
+ # stn_file['Stations'].append({
# 'ID': stn_id,
# 'Longitude': stn[lon_label],
# 'Latitude': stn[lat_label],
# 'Vs30': stn.get(vs30_label, 760.0),
# 'z2.5': stn.get(z2p5_label, 9.0)
- #})
+ # })
# Saving data to the output file
df_csv = {
- 'ID': [x['ID'] for x in stn_file['Stations']],
- 'lon': [x['Longitude'] for x in stn_file['Stations']],
- 'lat': [x['Latitude'] for x in stn_file['Stations']],
- 'vs30': [x.get('Vs30',760) for x in stn_file['Stations']],
- 'z1pt0': [x.get('z1pt0',9) for x in stn_file['Stations']],
- 'z2pt5': [x.get('z2pt5',12) for x in stn_file['Stations']],
- 'vsInferred': [x.get('vsInferred',0) for x in stn_file['Stations']]
- # DepthToRock is not used in NGA-West2 GMPEs and is not saved
- # 'DepthToRock': [x.get('DepthToRock',0) for x in stn_file['Stations']]
- }
- # no backarc by default
+ 'ID': [x['ID'] for x in stn_file['Stations']],
+ 'lon': [x['Longitude'] for x in stn_file['Stations']],
+ 'lat': [x['Latitude'] for x in stn_file['Stations']],
+ 'vs30': [x.get('Vs30', 760) for x in stn_file['Stations']],
+ 'z1pt0': [x.get('z1pt0', 9) for x in stn_file['Stations']],
+ 'z2pt5': [x.get('z2pt5', 12) for x in stn_file['Stations']],
+ 'vsInferred': [x.get('vsInferred', 0) for x in stn_file['Stations']],
+ # DepthToRock is not used in NGA-West2 GMPEs and is not saved
+ # 'DepthToRock': [x.get('DepthToRock',0) for x in stn_file['Stations']]
+ }
+ # no backarc by default
for key in ground_failure_input_keys:
- df_csv.update({key:[x[key] for x in stn_file['Stations']]})
- if stn_file['Stations'][0].get('backarc',None):
- df_csv.update({
- 'backarc': [x.get('backarc') for x in stn_file['Stations']]
- })
+ df_csv.update({key: [x[key] for x in stn_file['Stations']]})
+ if stn_file['Stations'][0].get('backarc', None):
+ df_csv.update({'backarc': [x.get('backarc') for x in stn_file['Stations']]})
pd.DataFrame.from_dict(df_csv).to_csv(output_file, index=False)
# Returning the final run state
return stn_file
-def create_gridded_stations(input_file, output_file, div_lon = 2, div_lat = 2,
- delta_lon = None, delta = None):
- """
- Reading input csv file for the grid, generating stations, and saving data
+def create_gridded_stations(
+ input_file,
+ output_file, # noqa: ARG001
+ div_lon=2,
+ div_lat=2,
+ delta_lon=None,
+ delta=None, # noqa: ARG001
+):
+ """Reading input csv file for the grid, generating stations, and saving data
to output json file
Input:
input_file: the filename of the station csv file
@@ -423,25 +601,29 @@ def create_gridded_stations(input_file, output_file, div_lon = 2, div_lat = 2,
delta_lon: delta degree along longitude
delta_lat: delta degree along latitude
Output:
- run_tag: 0 - success, 1 - input failure, 2 - outupt failure
- """
+ run_tag: 0 - success, 1 - input failure, 2 - output failure
+ """ # noqa: D205, D400, D401
# Reading csv data
run_tag = 0
try:
gstn_df = pd.read_csv(input_file, header=0, index_col=0)
- except:
+ except: # noqa: E722
run_tag = 1
- return run_tag
- if np.max(gstn_df.index.values) != 2:
+ return run_tag # noqa: RET504
+ if np.max(gstn_df.index.values) != 2: # noqa: PLR2004
run_tag = 1
- return run_tag
- else:
- labels = gstn_df.columns.values
- lon_label, labels = get_label(['Longitude', 'longitude', 'lon', 'Lon'], labels, 'longitude')
- lat_label, labels = get_label(['Latitude', 'latitude', 'lat', 'Lat'], labels, 'latitude')
+ return run_tag # noqa: RET504
+ else: # noqa: RET505
+ labels = gstn_df.columns.values # noqa: PD011
+ lon_label, labels = get_label(
+ ['Longitude', 'longitude', 'lon', 'Lon'], labels, 'longitude'
+ )
+ lat_label, labels = get_label(
+ ['Latitude', 'latitude', 'lat', 'Lat'], labels, 'latitude'
+ )
lon_temp = []
lat_temp = []
- for gstn_id, gstn in gstn_df.iterrows():
+ for gstn_id, gstn in gstn_df.iterrows(): # noqa: B007
lon_temp.append(gstn[lon_label])
lat_temp.append(gstn[lat_label])
# Generating the grid
@@ -449,178 +631,183 @@ def create_gridded_stations(input_file, output_file, div_lon = 2, div_lat = 2,
dlat = (np.max(lat_temp) - np.min(lat_temp)) / div_lat
if delta_lon is not None:
delta_lon = np.min([delta_lon, dlon])
- if delta_lat is not None:
- delta_lat = np.min([delta_lat, dlat])
- glon, glat = np.meshgrid(
+ if delta_lat is not None: # noqa: F821
+ delta_lat = np.min([delta_lat, dlat]) # noqa: F821
+ glon, glat = np.meshgrid( # noqa: F841, RET503
np.arange(np.min(lon_temp), np.max(lon_temp), delta_lon),
- np.arange(np.min(lat_temp), np.max(lat_temp), delta_lat)
+ np.arange(np.min(lat_temp), np.max(lat_temp), delta_lat),
)
def get_vs30_global(lat, lon):
- """
- Interpolate global Vs30 at given latitude and longitude
+ """Interpolate global Vs30 at given latitude and longitude
Input:
lat: list of latitude
lon: list of longitude
Output:
vs30: list of vs30
- """
- import pickle
- import os
- from scipy import interpolate
+ """ # noqa: D205, D400
+ import os # noqa: PLC0415
+ import pickle # noqa: S403, PLC0415
+
+ from scipy import interpolate # noqa: PLC0415
+
# Loading global Vs30 data
- cwd = os.path.dirname(os.path.realpath(__file__))
- with open(cwd+'/database/site/global_vs30_4km.pkl', 'rb') as f:
- vs30_global = pickle.load(f)
+ cwd = os.path.dirname(os.path.realpath(__file__)) # noqa: PTH120
+ with open(cwd + '/database/site/global_vs30_4km.pkl', 'rb') as f: # noqa: PTH123
+ vs30_global = pickle.load(f) # noqa: S301
# Interpolation function (linear)
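+ # note: scipy.interpolate.interp2d is deprecated in newer SciPy releases (removed in SciPy 1.14), so this path requires an older SciPy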
- interpFunc = interpolate.interp2d(vs30_global['Longitude'], vs30_global['Latitude'], vs30_global['Vs30'])
- vs30 = [float(interpFunc(x, y)) for x,y in zip(lon, lat)]
+ interpFunc = interpolate.interp2d( # noqa: N806
+ vs30_global['Longitude'], vs30_global['Latitude'], vs30_global['Vs30']
+ )
+ vs30 = [float(interpFunc(x, y)) for x, y in zip(lon, lat)]
# return
- return vs30
+ return vs30 # noqa: RET504
def get_vs30_thompson(lat, lon):
- """
- Interpolate global Vs30 at given latitude and longitude
+ """Interpolate global Vs30 at given latitude and longitude
Input:
lat: list of latitude
lon: list of longitude
Output:
vs30: list of vs30
- """
- import pickle
- import os
- from scipy import interpolate
+ """ # noqa: D205, D400
+ import os # noqa: PLC0415
+ import pickle # noqa: S403, PLC0415
+
+ from scipy import interpolate # noqa: PLC0415
+
# Loading Thompson Vs30 data
- cwd = os.path.dirname(os.path.realpath(__file__))
- with open(cwd+'/database/site/thompson_vs30_4km.pkl', 'rb') as f:
- vs30_thompson = pickle.load(f)
+ cwd = os.path.dirname(os.path.realpath(__file__)) # noqa: PTH120
+ with open(cwd + '/database/site/thompson_vs30_4km.pkl', 'rb') as f: # noqa: PTH123
+ vs30_thompson = pickle.load(f) # noqa: S301
# Interpolation function (linear)
# Thompson's map gives zero values for water-covered region and outside CA -> use 760 for default
- print('CreateStation: Warning - approximate 760 m/s for sites not supported by Thompson Vs30 map (water/outside CA).')
- vs30_thompson['Vs30'][vs30_thompson['Vs30']<0.1] = 760
- interpFunc = interpolate.interp2d(vs30_thompson['Longitude'], vs30_thompson['Latitude'], vs30_thompson['Vs30'])
- vs30 = [float(interpFunc(x, y)) for x,y in zip(lon, lat)]
-
+ print( # noqa: T201
+ 'CreateStation: Warning - using 760 m/s for sites not supported by the Thompson Vs30 map (water/outside CA).'
+ )
+ vs30_thompson['Vs30'][vs30_thompson['Vs30'] < 0.1] = 760 # noqa: PLR2004
+ interpFunc = interpolate.interp2d( # noqa: N806
+ vs30_thompson['Longitude'], vs30_thompson['Latitude'], vs30_thompson['Vs30']
+ )
+ vs30 = [float(interpFunc(x, y)) for x, y in zip(lon, lat)]
+
# return
- return vs30
+ return vs30 # noqa: RET504
def get_z1(vs30):
- """
- Compute z1 based on the prediction equation by Chiou and Youngs (2013) (unit of vs30 is meter/second and z1 is meter)
- """
-
- z1 = np.exp(-7.15 / 4.0 * np.log((vs30 ** 4 + 571.0 ** 4) / (1360.0 ** 4 + 571.0 ** 4)))
+ """Compute z1 based on the prediction equation by Chiou and Youngs (2013) (unit of vs30 is meter/second and z1 is meter)""" # noqa: D400
+ z1 = np.exp(-7.15 / 4.0 * np.log((vs30**4 + 571.0**4) / (1360.0**4 + 571.0**4)))
# return
- return z1
+ return z1 # noqa: RET504
def get_z25(z1):
- """
- Compute z25 based on the prediction equation by Campbell and Bozorgnia (2013)
- """
+ """Compute z25 based on the prediction equation by Campbell and Bozorgnia (2013)""" # noqa: D400
z25 = 0.748 + 2.218 * z1
# return
- return z25
+ return z25 # noqa: RET504
-def get_z25fromVs(vs):
- """
- Compute z25 (m) based on the prediction equation 33 by Campbell and Bozorgnia (2014)
+
+def get_z25fromVs(vs): # noqa: N802
+ """Compute z25 (m) based on the prediction equation 33 by Campbell and Bozorgnia (2014)
Vs is m/s
- """
- z25 = (7.089 - 1.144 * np.log(vs))*1000
+ """ # noqa: D205, D400
+ z25 = (7.089 - 1.144 * np.log(vs)) * 1000
# return
- return z25
+ return z25 # noqa: RET504
-def get_zTR_global(lat, lon):
- """
- Interpolate depth to rock at given latitude and longitude
+def get_zTR_global(lat, lon): # noqa: N802
+ """Interpolate depth to rock at given latitude and longitude
Input:
lat: list of latitude
lon: list of longitude
Output:
zTR: list of zTR
- """
- import pickle
- import os
- from scipy import interpolate
+ """ # noqa: D205, D400
+ import os # noqa: PLC0415
+ import pickle # noqa: S403, PLC0415
+
+ from scipy import interpolate # noqa: PLC0415
+
# Loading depth to rock data
- cwd = os.path.dirname(os.path.realpath(__file__))
- with open(cwd+'/database/site/global_zTR_4km.pkl', 'rb') as f:
- zTR_global = pickle.load(f)
+ cwd = os.path.dirname(os.path.realpath(__file__)) # noqa: PTH120
+ with open(cwd + '/database/site/global_zTR_4km.pkl', 'rb') as f: # noqa: PTH123
+ zTR_global = pickle.load(f) # noqa: S301, N806
# Interpolation function (linear)
- interpFunc = interpolate.interp2d(zTR_global['Longitude'], zTR_global['Latitude'], zTR_global['zTR'])
- zTR = [float(interpFunc(x, y)) for x,y in zip(lon, lat)]
+ interpFunc = interpolate.interp2d( # noqa: N806
+ zTR_global['Longitude'], zTR_global['Latitude'], zTR_global['zTR']
+ )
+ zTR = [float(interpFunc(x, y)) for x, y in zip(lon, lat)] # noqa: N806
# return
- return zTR
+ return zTR # noqa: RET504
def export_site_prop(stn_file, output_dir, filename):
- """
- saving a csv file for stations
+ """Saving a csv file for stations
Input:
stn_file: a dictionary of station data
output_path: output directory
filename: output filename
Output:
- run_tag: 0 - success, 1 - outupt failure
- """
- import os
- from pathlib import Path
+ run_tag: 0 - success, 1 - output failure
+ """ # noqa: D205, D400, D401
+ import os # noqa: PLC0415
+ from pathlib import Path # noqa: PLC0415
- print(stn_file)
- station_name = ['site'+str(j)+'.csv' for j in range(len(stn_file))]
+ print(stn_file) # noqa: T201
+ station_name = ['site' + str(j) + '.csv' for j in range(len(stn_file))]
lat = [stn_file[j]['Latitude'] for j in range(len(stn_file))]
lon = [stn_file[j]['Longitude'] for j in range(len(stn_file))]
vs30 = [stn_file[j]['Vs30'] for j in range(len(stn_file))]
- df = pd.DataFrame({
- 'GP_file': station_name,
- 'Longitude': lon,
- 'Latitude': lat,
- 'Vs30': vs30
- })
- df = pd.DataFrame.from_dict(stn_file)
-
- output_dir = os.path.join(os.path.dirname(Path(output_dir)),
- os.path.basename(Path(output_dir)))
+ df = pd.DataFrame( # noqa: PD901
+ {'GP_file': station_name, 'Longitude': lon, 'Latitude': lat, 'Vs30': vs30}
+ )
+ df = pd.DataFrame.from_dict(stn_file) # noqa: PD901
+
+ output_dir = os.path.join( # noqa: PTH118
+ os.path.dirname(Path(output_dir)), # noqa: PTH120
+ os.path.basename(Path(output_dir)), # noqa: PTH119
+ )
try:
- os.makedirs(output_dir)
- except:
- print('HazardSimulation: output folder already exists.')
+ os.makedirs(output_dir) # noqa: PTH103
+ except: # noqa: E722
+ print('HazardSimulation: output folder already exists.') # noqa: T201
# save the csv
- df.to_csv(os.path.join(output_dir, filename), index = False)
+ df.to_csv(os.path.join(output_dir, filename), index=False) # noqa: PTH118
-def get_zTR_ncm(lat, lon):
- """
- Call USGS National Crustal Model services for zTR
+def get_zTR_ncm(lat, lon): # noqa: N802
+ """Call USGS National Crustal Model services for zTR
https://earthquake.usgs.gov/nshmp/ncm
Input:
lat: list of latitude
lon: list of longitude
Output:
zTR: list of depth to bedrock
- """
- import requests
+ """ # noqa: D205, D400
+ import requests # noqa: PLC0415
- zTR = []
+ zTR = [] # noqa: N806
# Looping over sites
for cur_lat, cur_lon in zip(lat, lon):
- url_geology = 'https://earthquake.usgs.gov/ws/nshmp/ncm/ws/nshmp/ncm/geologic-framework?location={}%2C{}'.format(cur_lat,cur_lon)
+ url_geology = f'https://earthquake.usgs.gov/ws/nshmp/ncm/ws/nshmp/ncm/geologic-framework?location={cur_lat}%2C{cur_lon}'
# geological data (depth to bedrock)
- r1 = requests.get(url_geology)
+ r1 = requests.get(url_geology) # noqa: S113
cur_res = r1.json()
if not cur_res['response']['results'][0]['profiles']:
# the current site is out of the available range of NCM (Western US only, 06/2021)
# just append 0.0 to zTR
- print('CreateStation: Warning in NCM API call - could not get the site geological data and approximate 0.0 for zTR for site {}, {}'.format(cur_lat,cur_lon))
+ print( # noqa: T201
+ f'CreateStation: Warning in NCM API call - could not get the site geological data; using 0.0 for zTR at site {cur_lat}, {cur_lon}'
+ )
zTR.append(0.0)
continue
- else:
+ else: # noqa: RET507
# get the top bedrock data
zTR.append(abs(cur_res['response']['results'][0]['profiles'][0]['top']))
# return
@@ -628,8 +815,7 @@ def get_zTR_ncm(lat, lon):
def get_vsp_ncm(lat, lon, depth):
- """
- Call USGS National Crustal Model services for Vs30 profile
+ """Call USGS National Crustal Model services for Vs30 profile
https://earthquake.usgs.gov/nshmp/ncm
Input:
lat: list of latitude
@@ -637,26 +823,30 @@ def get_vsp_ncm(lat, lon, depth):
depth: [depthMin, depthInc, depthMax]
Output:
vsp: list of shear-wave velocity profile
- """
- import requests
+ """ # noqa: D205, D400
+ import requests # noqa: PLC0415
vsp = []
- depthMin, depthInc, depthMax = [abs(x) for x in depth]
+ depthMin, depthInc, depthMax = (abs(x) for x in depth) # noqa: N806
# Looping over sites
for cur_lat, cur_lon in zip(lat, lon):
- url_geophys = 'https://earthquake.usgs.gov/ws/nshmp/ncm/ws/nshmp/ncm/geophysical?location={}%2C{}&depths={}%2C{}%2C{}'.format(cur_lat,cur_lon,depthMin,depthInc,depthMax)
- r1 = requests.get(url_geophys)
+ url_geophys = f'https://earthquake.usgs.gov/ws/nshmp/ncm/ws/nshmp/ncm/geophysical?location={cur_lat}%2C{cur_lon}&depths={depthMin}%2C{depthInc}%2C{depthMax}'
+ r1 = requests.get(url_geophys) # noqa: S113
cur_res = r1.json()
if cur_res['status'] == 'error':
# the current site is out of the available range of NCM (Western US only, 06/2021)
# just append -1 to zTR
- print('CreateStation: Warning in NCM API call - could not get the site geopyhsical data.')
+ print( # noqa: T201
+ 'CreateStation: Warning in NCM API call - could not get the site geophysical data.'
+ )
vsp.append([])
continue
- else:
+ else: # noqa: RET507
# get vs30 profile
- vsp.append([abs(x) for x in cur_res['response']['results'][0]['profile']['vs']])
+ vsp.append(
+ [abs(x) for x in cur_res['response']['results'][0]['profile']['vs']]
+ )
if len(vsp) == 1:
vsp = vsp[0]
# return
@@ -664,59 +854,58 @@ def get_vsp_ncm(lat, lon, depth):
def compute_vs30_from_vsp(depthp, vsp):
- """
- Compute the Vs30 given the depth and Vs profile
+ """Compute the Vs30 given the depth and Vs profile
Input:
depthp: list of depth for Vs profile
vsp: Vs profile
Output:
vs30p: average VS for the upper 30-m depth
- """
+ """ # noqa: D205, D400
# Computing the depth interval
- delta_depth = np.diff([0] + depthp)
+ delta_depth = np.diff([0] + depthp) # noqa: RUF005
# Computing the wave-travel time
- delta_t = [x / y for x,y in zip(delta_depth, vsp)]
+ delta_t = [x / y for x, y in zip(delta_depth, vsp)]
# Computing the Vs30
vs30p = 30.0 / np.sum(delta_t)
# return
- return vs30p
+ return vs30p # noqa: RET504
def get_vs30_ncm(lat, lon):
- """
- Fetch Vs30 at given latitude and longitude from NCM
+ """Fetch Vs30 at given latitude and longitude from NCM
Input:
lat: list of latitude
lon: list of longitude
Output:
vs30: list of vs30
- """
+ """ # noqa: D205, D400
# Depth list (in meter)
depth = [1.0, 1.0, 30.0]
depthp = np.arange(depth[0], depth[2] + 1.0, depth[1])
# Getting Vs profile
- vsp = [get_vsp_ncm([x], [y], depth) for x,y in zip(lat, lon)]
+ vsp = [get_vsp_ncm([x], [y], depth) for x, y in zip(lat, lon)]
# Computing Vs30
vs30 = []
for cur_vsp in vsp:
if cur_vsp:
vs30.append(compute_vs30_from_vsp(depthp, cur_vsp))
else:
- print('CreateStation: Warning - approximate 760 m/s for sites not supported by NCM (Western US).')
+ print( # noqa: T201
+ 'CreateStation: Warning - using 760 m/s for sites not supported by NCM (Western US).'
+ )
vs30.append(760.0)
# return
return vs30
def get_soil_model_ba(param=None):
- """
- Get modeling parameters for Borja and Amies 1994 J2 model
+ """Get modeling parameters for Borja and Amies 1994 J2 model
Currently just assign default values
- Can be extended to have input soil properties to predict this pararmeters
- """
+ Can be extended to have input soil properties to predict these parameters
+ """ # noqa: D205, D400
su_rat = 0.26
density = 2.0
- h_to_G = 1.0
+ h_to_G = 1.0 # noqa: N806
m = 1.0
h0 = 0.2
chi = 0.0
@@ -740,11 +929,10 @@ def get_soil_model_ba(param=None):
def get_soil_model_ei(param=None):
- """
- Get modeling parameters for elastic isotropic
+ """Get modeling parameters for elastic isotropic
Currently just assign default values
- Can be extended to have input soil properties to predict this pararmeters
- """
+ Can be extended to have input soil properties to predict this parameter
+ """ # noqa: D205, D400
density = 2.0
if param == 'Den':
@@ -755,37 +943,46 @@ def get_soil_model_ei(param=None):
return res
-def get_soil_model_user(df_stn, model_fun):
-
+def get_soil_model_user(df_stn, model_fun): # noqa: D103
# check if mode_fun exists
- import os, sys, importlib
- if not os.path.isfile(model_fun):
- print('CreateStation.get_soil_model_user: {} is not found.'.format(model_fun))
+ import importlib # noqa: PLC0415
+ import os # noqa: PLC0415
+ import sys # noqa: PLC0415
+
+ if not os.path.isfile(model_fun): # noqa: PTH113
+ print(f'CreateStation.get_soil_model_user: {model_fun} is not found.') # noqa: T201
return df_stn, []
# try to load the model file
- from pathlib import Path
+ from pathlib import Path # noqa: PLC0415
+
try:
path_model_fun = Path(model_fun).resolve()
- sys.path.insert(0, str(path_model_fun.parent)+'/')
+ sys.path.insert(0, str(path_model_fun.parent) + '/')
# load the function
- user_model= importlib.__import__(path_model_fun.name[:-3], globals(), locals(), [], 0)
- except:
- print('CreateStation.get_soil_model_user: {} cannot be loaded.'.format(model_fun))
+ user_model = importlib.__import__(
+ path_model_fun.name[:-3], globals(), locals(), [], 0
+ )
+ except: # noqa: E722
+ print(f'CreateStation.get_soil_model_user: {model_fun} cannot be loaded.') # noqa: T201
return df_stn
# try to load the standard function: soil_model_fun(site_info=None)
try:
soil_model = user_model.soil_model
- except:
- print('CreateStation.get_soil_model_user: soil_model is nto found in {}.'.format(model_fun))
+ except: # noqa: E722
+ print( # noqa: T201
+ f'CreateStation.get_soil_model_user: soil_model is not found in {model_fun}.'
+ )
return df_stn
-
+
# get the parameters from soil_model_fun
try:
df_stn_new = soil_model(site_info=df_stn)
- except:
- print('CreateStation.get_soil_model_user: error in soil_model_fun(site_info=None).')
+ except: # noqa: E722
+ print( # noqa: T201
+ 'CreateStation.get_soil_model_user: error in soil_model_fun(site_info=None).'
+ )
return df_stn
- return df_stn_new
\ No newline at end of file
+ return df_stn_new
diff --git a/modules/performRegionalEventSimulation/regionalGroundMotion/FetchOpenQuake.py b/modules/performRegionalEventSimulation/regionalGroundMotion/FetchOpenQuake.py
index 54bb935ef..a09550e27 100644
--- a/modules/performRegionalEventSimulation/regionalGroundMotion/FetchOpenQuake.py
+++ b/modules/performRegionalEventSimulation/regionalGroundMotion/FetchOpenQuake.py
@@ -1,5 +1,4 @@
-# -*- coding: utf-8 -*-
-#
+# # noqa: INP001, D100
# Copyright (c) 2018 Leland Stanford Junior University
# Copyright (c) 2018 The Regents of the University of California
#
@@ -38,64 +37,87 @@
# Kuanshi Zhong
#
-import numpy as np
-import pandas as pd
-import os
import getpass
import logging
-from os.path import getsize
-import sys
+import os
import shutil
-import stat
-import subprocess
-import time
-import importlib
import socket
+import subprocess # noqa: S404
+import sys
+import time
+import numpy as np
+import pandas as pd
install_requires = []
default_oq_version = '3.17.1'
-def openquake_config(site_info, scen_info, event_info, workDir):
+def openquake_config(site_info, scen_info, event_info, workDir): # noqa: C901, N803, D103, PLR0912, PLR0915
+ dir_input = os.path.join(workDir, 'Input') # noqa: PTH118
+ dir_output = os.path.join(workDir, 'Output') # noqa: PTH118
+ import configparser # noqa: PLC0415
- dir_input = os.path.join(workDir, "Input")
- dir_output = os.path.join(workDir, "Output")
- import configparser
cfg = configparser.ConfigParser()
# general section
- if scen_info['EqRupture']['Type'] == 'oqSourceXML': #OpenQuakeScenario
- cfg['general'] = {'description': 'Scenario Hazard Config File',
- 'calculation_mode': 'scenario'}
+ if scen_info['EqRupture']['Type'] == 'oqSourceXML': # OpenQuakeScenario
+ cfg['general'] = {
+ 'description': 'Scenario Hazard Config File',
+ 'calculation_mode': 'scenario',
+ }
elif scen_info['EqRupture']['Type'] == 'OpenQuakeEventBased':
- cfg['general'] = {'description': 'Scenario Hazard Config File',
- 'calculation_mode': 'event_based',
- 'ses_seed': scen_info['EqRupture'].get('Seed', 24)}
+ cfg['general'] = {
+ 'description': 'Scenario Hazard Config File',
+ 'calculation_mode': 'event_based',
+ 'ses_seed': scen_info['EqRupture'].get('Seed', 24),
+ }
cfg['logic_tree'] = {'number_of_logic_tree_samples': 0}
elif scen_info['EqRupture']['Type'] == 'OpenQuakeClassicalPSHA':
- cfg['general'] = {'description': 'Scenario Hazard Config File',
- 'calculation_mode': 'classical',
- 'random_seed': scen_info['EqRupture'].get('Seed', 24)}
- cfg['logic_tree'] = {'number_of_logic_tree_samples': 0} # 0 here indicates full logic tree realization
- elif scen_info['EqRupture']['Type'] in ['OpenQuakeUserConfig','OpenQuakeClassicalPSHA-User']:
+ cfg['general'] = {
+ 'description': 'Scenario Hazard Config File',
+ 'calculation_mode': 'classical',
+ 'random_seed': scen_info['EqRupture'].get('Seed', 24),
+ }
+ cfg['logic_tree'] = {
+ 'number_of_logic_tree_samples': 0
+ } # 0 here indicates full logic tree realization
+ elif scen_info['EqRupture']['Type'] in [ # noqa: PLR6201
+ 'OpenQuakeUserConfig',
+ 'OpenQuakeClassicalPSHA-User',
+ ]:
filename_ini = scen_info['EqRupture'].get('ConfigFile', None)
if filename_ini is None:
- print('FetchOpenQuake: please specify Scenario[\'EqRupture\'][\'ConfigFile\'].')
+ print( # noqa: T201
+ "FetchOpenQuake: please specify Scenario['EqRupture']['ConfigFile']."
+ )
return 0
- else:
- filename_ini = os.path.join(dir_input, filename_ini)
+ else: # noqa: RET505
+ filename_ini = os.path.join(dir_input, filename_ini) # noqa: PTH118
# updating the export_dir
cfg.read(filename_ini)
cfg['output']['export_dir'] = dir_output
else:
- print('FetchOpenQuake: please specify Scenario[\'Generator\'], options: OpenQuakeScenario, OpenQuakeEventBased, OpenQuakeClassicalPSHA, or OpenQuakeUserConfig.')
+ print( # noqa: T201
+ "FetchOpenQuake: please specify Scenario['Generator'], options: OpenQuakeScenario, OpenQuakeEventBased, OpenQuakeClassicalPSHA, or OpenQuakeUserConfig."
+ )
return 0
- if scen_info['EqRupture']['Type'] in ['OpenQuakeUserConfig','OpenQuakeClassicalPSHA-User']:
+ if scen_info['EqRupture']['Type'] in [ # noqa: PLR1702, PLR6201
+ 'OpenQuakeUserConfig',
+ 'OpenQuakeClassicalPSHA-User',
+ ]:
# sites
- tmpSites = pd.read_csv(os.path.join(dir_input, site_info['input_file']), header=0, index_col=0)
- tmpSitesLoc = tmpSites.loc[:, ['Longitude','Latitude']]
- tmpSitesLoc.loc[site_info['min_ID']:site_info['max_ID']].to_csv(os.path.join(dir_input, 'sites_oq.csv'), header=False, index=False)
+ tmpSites = pd.read_csv( # noqa: N806
+ os.path.join(dir_input, site_info['input_file']), # noqa: PTH118
+ header=0,
+ index_col=0,
+ )
+ tmpSitesLoc = tmpSites.loc[:, ['Longitude', 'Latitude']] # noqa: N806
+ tmpSitesLoc.loc[site_info['min_ID'] : site_info['max_ID']].to_csv(
+ os.path.join(dir_input, 'sites_oq.csv'), # noqa: PTH118
+ header=False,
+ index=False,
+ )
if cfg.has_section('geometry'):
cfg['geometry']['sites_csv'] = 'sites_oq.csv'
else:
@@ -105,16 +127,21 @@ def openquake_config(site_info, scen_info, event_info, workDir):
else:
cfg['site_params'] = {'site_model_file': site_info['output_file']}
# copy that file to the rundir
- shutil.copy(os.path.join(dir_input,site_info['output_file']), os.path.join(dir_output,site_info['output_file']))
+ shutil.copy(
+ os.path.join(dir_input, site_info['output_file']), # noqa: PTH118
+ os.path.join(dir_output, site_info['output_file']), # noqa: PTH118
+ )
# im type and period
- tmp0 = cfg['calculation'].get('intensity_measure_types_and_levels').split('"')
+ tmp0 = (
+ cfg['calculation'].get('intensity_measure_types_and_levels').split('"')
+ )
tmp = []
for jj, cur_tmp in enumerate(tmp0):
- if jj%2:
+ if jj % 2:
tmp.append(cur_tmp)
im_type = []
- tmp_T = []
+ tmp_T = [] # noqa: N806
for cur_tmp in tmp:
if 'PGA' in cur_tmp:
im_type = 'PGA'
@@ -125,209 +152,352 @@ def openquake_config(site_info, scen_info, event_info, workDir):
pass
event_info['IntensityMeasure']['Type'] = im_type
event_info['IntensityMeasure']['Periods'] = tmp_T
- cfg['calculation']['source_model_logic_tree_file'] = os.path.join(cfg['calculation'].get('source_model_logic_tree_file'))
- cfg['calculation']['gsim_logic_tree_file'] = os.path.join(cfg['calculation'].get('gsim_logic_tree_file'))
+ cfg['calculation']['source_model_logic_tree_file'] = os.path.join( # noqa: PTH118
+ cfg['calculation'].get('source_model_logic_tree_file')
+ )
+ cfg['calculation']['gsim_logic_tree_file'] = os.path.join( # noqa: PTH118
+ cfg['calculation'].get('gsim_logic_tree_file')
+ )
else:
# sites
# tmpSites = pd.read_csv(site_info['siteFile'], header=0, index_col=0)
# tmpSitesLoc = tmpSites.loc[:, ['Longitude','Latitude']]
# tmpSitesLoc.to_csv(os.path.join(dir_input, 'sites_oq.csv'), header=False, index=False)
# cfg['geometry'] = {'sites_csv': 'sites_oq.csv'}
- cfg['geometry'] = {'sites_csv': os.path.basename(site_info['siteFile'])}
+ cfg['geometry'] = {'sites_csv': os.path.basename(site_info['siteFile'])} # noqa: PTH119
# rupture
- cfg['erf'] = {'rupture_mesh_spacing': scen_info['EqRupture'].get('RupMesh', 2.0),
- 'width_of_mfd_bin': scen_info['EqRupture'].get('MagFreqDistBin', 0.1),
- 'area_source_discretization': scen_info['EqRupture'].get('AreaMesh', 10.0)}
+ cfg['erf'] = {
+ 'rupture_mesh_spacing': scen_info['EqRupture'].get('RupMesh', 2.0),
+ 'width_of_mfd_bin': scen_info['EqRupture'].get('MagFreqDistBin', 0.1),
+ 'area_source_discretization': scen_info['EqRupture'].get(
+ 'AreaMesh', 10.0
+ ),
+ }
# site_params (saved in the output_file)
cfg['site_params'] = {'site_model_file': 'tmp_oq_site_model.csv'}
# hazard_calculation
- mapGMPE = {'Abrahamson, Silva & Kamai (2014)': 'AbrahamsonEtAl2014',
- 'AbrahamsonEtAl2014': 'AbrahamsonEtAl2014',
- 'Boore, Stewart, Seyhan & Atkinson (2014)': 'BooreEtAl2014',
- 'BooreEtAl2014': 'BooreEtAl2014',
- 'Campbell & Bozorgnia (2014)': 'CampbellBozorgnia2014',
- 'CampbellBozorgnia2014': 'CampbellBozorgnia2014',
- 'Chiou & Youngs (2014)': 'ChiouYoungs2014',
- 'ChiouYoungs2014': 'ChiouYoungs2014'
- }
-
- if scen_info['EqRupture']['Type'] == 'oqSourceXML':#OpenQuakeScenario
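+ # map GUI display names (and short names) of the NGA-West2 GMPEs to OpenQuake GSIM class names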
+ mapGMPE = { # noqa: N806
+ 'Abrahamson, Silva & Kamai (2014)': 'AbrahamsonEtAl2014',
+ 'AbrahamsonEtAl2014': 'AbrahamsonEtAl2014',
+ 'Boore, Stewart, Seyhan & Atkinson (2014)': 'BooreEtAl2014',
+ 'BooreEtAl2014': 'BooreEtAl2014',
+ 'Campbell & Bozorgnia (2014)': 'CampbellBozorgnia2014',
+ 'CampbellBozorgnia2014': 'CampbellBozorgnia2014',
+ 'Chiou & Youngs (2014)': 'ChiouYoungs2014',
+ 'ChiouYoungs2014': 'ChiouYoungs2014',
+ }
+
+ if scen_info['EqRupture']['Type'] == 'oqSourceXML': # OpenQuakeScenario
imt = ''
if event_info['IntensityMeasure']['Type'] == 'SA':
- for curT in event_info['IntensityMeasure']['Periods']:
+ for curT in event_info['IntensityMeasure']['Periods']: # noqa: N806
imt = imt + 'SA(' + str(curT) + '), '
imt = imt[:-2]
else:
imt = event_info['IntensityMeasure']['Type']
- cfg['calculation'] = {'rupture_model_file': scen_info['EqRupture']['sourceFile'],
- 'gsim': mapGMPE[event_info['GMPE']['Type']],
- 'intensity_measure_types': imt,
- 'random_seed': 42,
- 'truncation_level': event_info['IntensityMeasure'].get('Truncation', 3.0),
- 'maximum_distance': scen_info['EqRupture'].get('max_Dist', 500.0),
- 'number_of_ground_motion_fields': event_info['NumberPerSite']}
+ cfg['calculation'] = {
+ 'rupture_model_file': scen_info['EqRupture']['sourceFile'],
+ 'gsim': mapGMPE[event_info['GMPE']['Type']],
+ 'intensity_measure_types': imt,
+ 'random_seed': 42,
+ 'truncation_level': event_info['IntensityMeasure'].get(
+ 'Truncation', 3.0
+ ),
+ 'maximum_distance': scen_info['EqRupture'].get('max_Dist', 500.0),
+ 'number_of_ground_motion_fields': event_info['NumberPerSite'],
+ }
elif scen_info['EqRupture']['Type'] == 'OpenQuakeEventBased':
imt = ''
- imt_levels = event_info['IntensityMeasure'].get('Levels', [0.01,10,100])
+ imt_levels = event_info['IntensityMeasure'].get(
+ 'Levels', [0.01, 10, 100]
+ )
imt_scale = event_info['IntensityMeasure'].get('Scale', 'Log')
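+ # assemble the intensity_measure_types_and_levels string: one '"IMT": levels' entry per IMT, with levels given as logscale(min, max, n) or an explicit list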
if event_info['IntensityMeasure']['Type'] == 'SA':
- for curT in event_info['IntensityMeasure']['Periods']:
- #imt = imt + '"SA(' + str(curT) + ')": {}, '.format(imt_levels)
+ for curT in event_info['IntensityMeasure']['Periods']: # noqa: N806
+ # imt = imt + '"SA(' + str(curT) + ')": {}, '.format(imt_levels)
if imt_scale == 'Log':
- imt = imt + '"SA(' + str(curT) + ')": logscale({}, {}, {}), '.format(float(imt_levels[0]),float(imt_levels[1]),int(imt_levels[2]))
+ imt = (
+ imt
+ + '"SA('
+ + str(curT)
+ + f')": logscale({float(imt_levels[0])}, {float(imt_levels[1])}, {int(imt_levels[2])}), '
+ )
else:
- imt_values = np.linspace(float(imt_levels[0]),float(imt_levels[1]),int(imt_levels[2]))
+ imt_values = np.linspace(
+ float(imt_levels[0]),
+ float(imt_levels[1]),
+ int(imt_levels[2]),
+ )
imt_strings = ''
for imt_v in imt_values:
- imt_strings = imt_strings+str(imt_v)+', '
+ imt_strings = imt_strings + str(imt_v) + ', '
imt_strings = imt_strings[:-2]
- imt = imt + '"SA(' + str(curT) + ')": [{}], '.format(imt_strings)
+ imt = imt + '"SA(' + str(curT) + f')": [{imt_strings}], '
imt = imt[:-2]
elif event_info['IntensityMeasure']['Type'] == 'PGA':
if imt_scale == 'Log':
- imt = '"PGA": logscale({}, {}, {}), '.format(float(imt_levels[0]),float(imt_levels[1]),int(imt_levels[2]))
+ imt = f'"PGA": logscale({float(imt_levels[0])}, {float(imt_levels[1])}, {int(imt_levels[2])}), '
else:
- imt_values = np.linspace(float(imt_levels[0]),float(imt_levels[1]),int(imt_levels[2]))
+ imt_values = np.linspace(
+ float(imt_levels[0]),
+ float(imt_levels[1]),
+ int(imt_levels[2]),
+ )
imt_strings = ''
for imt_v in imt_values:
- imt_strings = imt_strings+str(imt_v)+', '
+ imt_strings = imt_strings + str(imt_v) + ', '
imt_strings = imt_strings[:-2]
- imt = 'PGA": [{}], '.format(imt_strings)
+ imt = f'PGA": [{imt_strings}], '
else:
- imt = event_info['IntensityMeasure']['Type'] + ': logscale(1, 200, 45)'
- cfg['calculation'] = {'source_model_logic_tree_file': scen_info['EqRupture']['Filename'],
- 'gsim_logic_tree_file': event_info['GMPE']['Parameters'],
- 'investigation_time': scen_info['EqRupture']['TimeSpan'],
- 'intensity_measure_types_and_levels': '{' + imt + '}',
- 'random_seed': 42,
- 'truncation_level': event_info['IntensityMeasure'].get('Truncation', 3.0),
- 'maximum_distance': scen_info['EqRupture'].get('max_Dist', 500.0),
- 'number_of_ground_motion_fields': event_info['NumberPerSite']}
+ imt = (
+ event_info['IntensityMeasure']['Type'] + ': logscale(1, 200, 45)'
+ )
+ cfg['calculation'] = {
+ 'source_model_logic_tree_file': scen_info['EqRupture']['Filename'],
+ 'gsim_logic_tree_file': event_info['GMPE']['Parameters'],
+ 'investigation_time': scen_info['EqRupture']['TimeSpan'],
+ 'intensity_measure_types_and_levels': '{' + imt + '}',
+ 'random_seed': 42,
+ 'truncation_level': event_info['IntensityMeasure'].get(
+ 'Truncation', 3.0
+ ),
+ 'maximum_distance': scen_info['EqRupture'].get('max_Dist', 500.0),
+ 'number_of_ground_motion_fields': event_info['NumberPerSite'],
+ }
elif scen_info['EqRupture']['Type'] == 'OpenQuakeClassicalPSHA':
imt = ''
- imt_levels = event_info['IntensityMeasure'].get('Levels', [0.01,10,100])
+ imt_levels = event_info['IntensityMeasure'].get(
+ 'Levels', [0.01, 10, 100]
+ )
imt_scale = event_info['IntensityMeasure'].get('Scale', 'Log')
if event_info['IntensityMeasure']['Type'] == 'SA':
- for curT in event_info['IntensityMeasure']['Periods']:
- #imt = imt + '"SA(' + str(curT) + ')": {}, '.format(imt_levels)
+ for curT in event_info['IntensityMeasure']['Periods']: # noqa: N806
+ # imt = imt + '"SA(' + str(curT) + ')": {}, '.format(imt_levels)
if imt_scale == 'Log':
- imt = imt + '"SA(' + str(curT) + ')": logscale({}, {}, {}), '.format(float(imt_levels[0]),float(imt_levels[1]),int(imt_levels[2]))
+ imt = (
+ imt
+ + '"SA('
+ + str(curT)
+ + f')": logscale({float(imt_levels[0])}, {float(imt_levels[1])}, {int(imt_levels[2])}), '
+ )
else:
- imt_values = np.linspace(float(imt_levels[0]),float(imt_levels[1]),int(imt_levels[2]))
+ imt_values = np.linspace(
+ float(imt_levels[0]),
+ float(imt_levels[1]),
+ int(imt_levels[2]),
+ )
imt_strings = ''
for imt_v in imt_values:
- imt_strings = imt_strings+str(imt_v)+', '
+ imt_strings = imt_strings + str(imt_v) + ', '
imt_strings = imt_strings[:-2]
- imt = imt + '"SA(' + str(curT) + ')": [{}], '.format(imt_strings)
+ imt = imt + '"SA(' + str(curT) + f')": [{imt_strings}], '
imt = imt[:-2]
elif event_info['IntensityMeasure']['Type'] == 'PGA':
if imt_scale == 'Log':
- imt = '"PGA": logscale({}, {}, {}), '.format(float(imt_levels[0]),float(imt_levels[1]),int(imt_levels[2]))
+ imt = f'"PGA": logscale({float(imt_levels[0])}, {float(imt_levels[1])}, {int(imt_levels[2])}), '
else:
- imt_values = np.linspace(float(imt_levels[0]),float(imt_levels[1]),int(imt_levels[2]))
+ imt_values = np.linspace(
+ float(imt_levels[0]),
+ float(imt_levels[1]),
+ int(imt_levels[2]),
+ )
imt_strings = ''
for imt_v in imt_values:
- imt_strings = imt_strings+str(imt_v)+', '
+ imt_strings = imt_strings + str(imt_v) + ', '
imt_strings = imt_strings[:-2]
- imt = '"PGA": [{}], '.format(imt_strings)
+ imt = f'"PGA": [{imt_strings}], '
else:
- imt = event_info['IntensityMeasure']['Type'] + ': logscale(1, 200, 45)'
- cfg['calculation'] = {'source_model_logic_tree_file': scen_info['EqRupture']['Filename'],
- 'gsim_logic_tree_file': event_info['GMPE']['Parameters'],
- 'investigation_time': scen_info['EqRupture']['TimeSpan'],
- 'intensity_measure_types_and_levels': '{' + imt + '}',
- 'truncation_level': event_info['IntensityMeasure'].get('Truncation', 3.0),
- 'maximum_distance': scen_info['EqRupture'].get('max_Dist', 500.0)}
- cfg_quan = ''
- cfg['output'] = {'export_dir': dir_output,
- 'individual_curves': scen_info['EqRupture'].get('IndivHazCurv', False),
- 'mean': scen_info['EqRupture'].get('MeanHazCurv', True),
- 'quantiles': ' '.join([str(x) for x in scen_info['EqRupture'].get('Quantiles', [0.05, 0.5, 0.95])]),
- 'hazard_maps': scen_info['EqRupture'].get('HazMap', False),
- 'uniform_hazard_spectra': scen_info['EqRupture'].get('UHS', False),
- 'poes': np.round(1-np.exp(-float(scen_info['EqRupture']['TimeSpan'])*1.0/float(scen_info['EqRupture'].get('ReturnPeriod', 100))),decimals=3)}
+ imt = (
+ event_info['IntensityMeasure']['Type'] + ': logscale(1, 200, 45)'
+ )
+ cfg['calculation'] = {
+ 'source_model_logic_tree_file': scen_info['EqRupture']['Filename'],
+ 'gsim_logic_tree_file': event_info['GMPE']['Parameters'],
+ 'investigation_time': scen_info['EqRupture']['TimeSpan'],
+ 'intensity_measure_types_and_levels': '{' + imt + '}',
+ 'truncation_level': event_info['IntensityMeasure'].get(
+ 'Truncation', 3.0
+ ),
+ 'maximum_distance': scen_info['EqRupture'].get('max_Dist', 500.0),
+ }
+ cfg_quan = '' # noqa: F841
+ cfg['output'] = {
+ 'export_dir': dir_output,
+ 'individual_curves': scen_info['EqRupture'].get(
+ 'IndivHazCurv', False
+ ),
+ 'mean': scen_info['EqRupture'].get('MeanHazCurv', True),
+ 'quantiles': ' '.join(
+ [
+ str(x)
+ for x in scen_info['EqRupture'].get(
+ 'Quantiles', [0.05, 0.5, 0.95]
+ )
+ ]
+ ),
+ 'hazard_maps': scen_info['EqRupture'].get('HazMap', False),
+ 'uniform_hazard_spectra': scen_info['EqRupture'].get('UHS', False),
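+ # Poisson occurrence assumption: poe = 1 - exp(-investigation_time / return_period), rounded to 3 decimals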
+ 'poes': np.round(
+ 1
+ - np.exp(
+ -float(scen_info['EqRupture']['TimeSpan'])
+ * 1.0
+ / float(scen_info['EqRupture'].get('ReturnPeriod', 100))
+ ),
+ decimals=3,
+ ),
+ }
else:
- print('FetchOpenQuake: please specify Scenario[\'Generator\'], options: OpenQuakeScenario, OpenQuakeEventBased, OpenQuakeClassicalPSHA, or OpenQuakeUserConfig.')
+ print( # noqa: T201
+ "FetchOpenQuake: please specify Scenario['Generator'], options: OpenQuakeScenario, OpenQuakeEventBased, OpenQuakeClassicalPSHA, or OpenQuakeUserConfig."
+ )
return 0
# Write the ini
- filename_ini = os.path.join(dir_input, 'oq_job.ini')
- with open(filename_ini, 'w') as configfile:
+ filename_ini = os.path.join(dir_input, 'oq_job.ini') # noqa: PTH118
+ with open(filename_ini, 'w') as configfile: # noqa: PLW1514, PTH123
cfg.write(configfile)
# openquake module
oq_ver_loaded = None
try:
- from importlib_metadata import version
- except:
- from importlib.metadata import version
- if scen_info['EqRupture'].get('OQLocal',None):
+ from importlib_metadata import version # noqa: PLC0415
+ except: # noqa: E722
+ from importlib.metadata import version # noqa: PLC0415
+ if scen_info['EqRupture'].get('OQLocal', None):
# using user-specific local OQ
# first to validate the path
- if not os.path.isdir(scen_info['EqRupture'].get('OQLocal')):
- print('FetchOpenQuake: Local OpenQuake instance {} not found.'.format(scen_info['EqRupture'].get('OQLocal')))
+ if not os.path.isdir(scen_info['EqRupture'].get('OQLocal')): # noqa: PTH112
+ print( # noqa: T201
+ 'FetchOpenQuake: Local OpenQuake instance {} not found.'.format(
+ scen_info['EqRupture'].get('OQLocal')
+ )
+ )
return 0
- else:
+ else: # noqa: RET505
# getting version
try:
oq_ver = version('openquake.engine')
if oq_ver:
- print('FetchOpenQuake: Removing previous installation of OpenQuake {}.'.format(oq_ver))
+ print( # noqa: T201
+ f'FetchOpenQuake: Removing previous installation of OpenQuake {oq_ver}.'
+ )
sys.modules.pop('openquake')
- subprocess.check_call([sys.executable, "-m", "pip", "uninstall", "-y", "openquake.engine"])
- except:
+ subprocess.check_call( # noqa: S603
+ [
+ sys.executable,
+ '-m',
+ 'pip',
+ 'uninstall',
+ '-y',
+ 'openquake.engine',
+ ]
+ )
+ except: # noqa: E722
# no installed OQ python package
# do nothing
- print('FetchOpenQuake: No previous installation of OpenQuake python package found.')
+ print( # noqa: T201
+ 'FetchOpenQuake: No previous installation of OpenQuake python package found.'
+ )
# load the local OQ
try:
- print('FetchOpenQuake: Setting up the user-specified local OQ.')
- sys.path.insert(0,os.path.dirname(scen_info['EqRupture'].get('OQLocal')))
- #owd = os.getcwd()
- #os.chdir(os.path.dirname(scen_info['EqRupture'].get('OQLocal')))
+ print('FetchOpenQuake: Setting up the user-specified local OQ.') # noqa: T201
+ sys.path.insert(
+ 0,
+ os.path.dirname(scen_info['EqRupture'].get('OQLocal')), # noqa: PTH120
+ )
+ # owd = os.getcwd()
+ # os.chdir(os.path.dirname(scen_info['EqRupture'].get('OQLocal')))
if 'openquake' in list(sys.modules.keys()):
sys.modules.pop('openquake')
- from openquake import baselib
+ from openquake import baselib # noqa: PLC0415
+
oq_ver_loaded = baselib.__version__
- #sys.modules.pop('openquake')
- #os.chdir(owd)
- except:
- print('FetchOpenQuake: {} cannot be loaded.'.format(scen_info['EqRupture'].get('OQLocal')))
+ # sys.modules.pop('openquake')
+ # os.chdir(owd)
+ except: # noqa: E722
+ print( # noqa: T201
+ 'FetchOpenQuake: {} cannot be loaded.'.format(
+ scen_info['EqRupture'].get('OQLocal')
+ )
+ )
else:
- # using the offical released OQ
+ # using the official released OQ
try:
oq_ver = version('openquake.engine')
- if oq_ver != scen_info['EqRupture'].get('OQVersion',default_oq_version):
- print('FetchOpenQuake: Required OpenQuake version is not found and being installed now.')
+ if oq_ver != scen_info['EqRupture'].get('OQVersion', default_oq_version):
+ print( # noqa: T201
+ 'FetchOpenQuake: Required OpenQuake version is not found and is being installed now.'
+ )
if oq_ver:
# pop the old version first
sys.modules.pop('openquake')
- subprocess.check_call([sys.executable, "-m", "pip", "uninstall", "-y", "openquake.engine"])
-
+ subprocess.check_call( # noqa: S603
+ [
+ sys.executable,
+ '-m',
+ 'pip',
+ 'uninstall',
+ '-y',
+ 'openquake.engine',
+ ]
+ )
+
# install the required version
- subprocess.check_call([sys.executable, "-m", "pip", "install", "openquake.engine=="+scen_info['EqRupture'].get('OQVersion',default_oq_version), "--user"])
+ subprocess.check_call( # noqa: S603
+ [
+ sys.executable,
+ '-m',
+ 'pip',
+ 'install',
+ 'openquake.engine=='
+ + scen_info['EqRupture'].get(
+ 'OQVersion', default_oq_version
+ ),
+ '--user',
+ ]
+ )
oq_ver_loaded = version('openquake.engine')
else:
oq_ver_loaded = oq_ver
- except:
- print('FetchOpenQuake: No OpenQuake is not found and being installed now.')
+ except: # noqa: E722
+ print( # noqa: T201
+ 'FetchOpenQuake: OpenQuake is not found and is being installed now.'
+ )
try:
- subprocess.check_call([sys.executable, "-m", "pip", "install", "openquake.engine=="+scen_info['EqRupture'].get('OQVersion',default_oq_version), "--user"])
+ subprocess.check_call( # noqa: S603
+ [
+ sys.executable,
+ '-m',
+ 'pip',
+ 'install',
+ 'openquake.engine=='
+ + scen_info['EqRupture'].get(
+ 'OQVersion', default_oq_version
+ ),
+ '--user',
+ ]
+ )
oq_ver_loaded = version('openquake.engine')
- except:
- print('FetchOpenQuake: Install of OpenQuake {} failed - please check the version.'.format(scen_info['EqRupture'].get('OQVersion',default_oq_version)))
+ except: # noqa: E722
+ print( # noqa: T201
+ 'FetchOpenQuake: Install of OpenQuake {} failed - please check the version.'.format(
+ scen_info['EqRupture'].get('OQVersion', default_oq_version)
+ )
+ )
- print('FetchOpenQuake: OpenQuake configured.')
+ print('FetchOpenQuake: OpenQuake configured.') # noqa: T201
# return
return filename_ini, oq_ver_loaded, event_info
+
# this function writes a openquake.cfg for setting global configurations
-# tested while not used so far but might be useful in future if moving to
-# other os...
+# tested but not used so far; might be useful in the future when moving to
+# other operating systems...
"""
def get_cfg(job_ini):
# writing an openquake.cfg
@@ -376,33 +546,43 @@ def get_cfg(job_ini):
return oq_cfg
"""
-def oq_run_classical_psha(job_ini, exports='csv', oq_version=default_oq_version, dir_info=None):
- """
- Run a classical PSHA by OpenQuake
+
+def oq_run_classical_psha( # noqa: C901
+ job_ini,
+ exports='csv',
+ oq_version=default_oq_version,
+ dir_info=None,
+):
+ """Run a classical PSHA by OpenQuake
:param job_ini:
Path to configuration file/archive or
dictionary of parameters with at least a key "calculation_mode"
- """
+ """ # noqa: D400
# the run() method has been turned into private since v3.11
# the get_last_calc_id() and get_datadir() have been moved to commonlib.logs since v3.12
# the datastore has been moved to commonlib since v3.12
# Note: the extracting realizations method was kindly shared by Dr. Anne Husley
vtag = int(oq_version.split('.')[1])
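+ # dispatch on the OpenQuake minor version: the run/logging/datastore entry points differ for <=3.10, 3.11, and >=3.12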
- if vtag <= 10:
+ if vtag <= 10: # noqa: PLR2004
try:
- print('FetchOpenQuake: running Version {}.'.format(oq_version))
- # reloading
- from openquake.commands.run import run
- from openquake.calculators.export.hazard import export_realizations
-
- #run.main([job_ini], exports=exports)
- # invoke/modify deeper openquake commands here to make it compatible with
- # the pylauncher on stampede2 for parallel runs...
- from openquake.baselib import datastore, performance, general
- from openquake.server import dbserver
- from openquake.calculators import base
- from openquake.commonlib import readinput, logs
+ print(f'FetchOpenQuake: running Version {oq_version}.') # noqa: T201
+ # reloading
+ # run.main([job_ini], exports=exports)
+ # invoke/modify deeper openquake commands here to make it compatible with
+ # the pylauncher on stampede2 for parallel runs...
+ from openquake.baselib import ( # noqa: PLC0415
+ datastore,
+ general,
+ performance,
+ )
+ from openquake.calculators import base # noqa: PLC0415
+ from openquake.calculators.export.hazard import ( # noqa: PLC2701, PLC0415, RUF100
+ export_realizations,
+ )
+ from openquake.commonlib import logs, readinput # noqa: PLC0415
+ from openquake.server import dbserver # noqa: PLC0415
+
dbserver.ensure_on()
global calc_path
loglevel = 'info'
@@ -411,19 +591,19 @@ def oq_run_classical_psha(job_ini, exports='csv', oq_version=default_oq_version,
concurrent_tasks = None
pdb = None
hc_id = None
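+ # retry (up to 1000 attempts, 10 ms apart) until a calculation log can be initialized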
- for i in range(1000):
+ for i in range(1000): # noqa: B007
try:
calc_id = logs.init('nojob', getattr(logging, loglevel.upper()))
- except:
+ except: # noqa: PERF203, E722
time.sleep(0.01)
continue
else:
- print('FetchOpenQuake: log created.')
+ print('FetchOpenQuake: log created.') # noqa: T201
break
# disable gzip_input
- base.BaseCalculator.gzip_inputs = lambda self: None
+ base.BaseCalculator.gzip_inputs = lambda self: None # noqa: ARG005
with performance.Monitor('total runtime', measuremem=True) as monitor:
- if os.environ.get('OQ_DISTRIBUTE') not in ('no', 'processpool'):
+ if os.environ.get('OQ_DISTRIBUTE') not in ('no', 'processpool'): # noqa: PLR6201
os.environ['OQ_DISTRIBUTE'] = 'processpool'
oqparam = readinput.get_oqparam(job_ini, hc_id=hc_id)
if hc_id and hc_id < 0: # interpret negative calculation ids
@@ -431,35 +611,45 @@ def oq_run_classical_psha(job_ini, exports='csv', oq_version=default_oq_version,
try:
hc_id = calc_ids[hc_id]
except IndexError:
- raise SystemExit(
+ raise SystemExit( # noqa: B904, DOC501
'There are %d old calculations, cannot '
- 'retrieve the %s' % (len(calc_ids), hc_id))
+ 'retrieve the %s' % (len(calc_ids), hc_id)
+ )
calc = base.calculators(oqparam, calc_id)
- calc.run(concurrent_tasks=concurrent_tasks, pdb=pdb,
- exports=exports, hazard_calculation_id=hc_id,
- rlz_ids=())
+ calc.run(
+ concurrent_tasks=concurrent_tasks,
+ pdb=pdb,
+ exports=exports,
+ hazard_calculation_id=hc_id,
+ rlz_ids=(),
+ )
calc_id = datastore.get_last_calc_id()
- path = os.path.join(datastore.get_datadir(), 'calc_%d.hdf5' % calc_id)
+ path = os.path.join(datastore.get_datadir(), 'calc_%d.hdf5' % calc_id) # noqa: PTH118
dstore = datastore.read(path)
export_realizations('realizations', dstore)
- except:
- print('FetchOpenQuake: Classical PSHA failed.')
+ except: # noqa: E722
+ print('FetchOpenQuake: Classical PSHA failed.') # noqa: T201
return 1
- elif vtag == 11:
+ elif vtag == 11: # noqa: PLR2004
try:
- print('FetchOpenQuake: running Version {}.'.format(oq_version))
- # reloading
- from openquake.commands import run
- from openquake.calculators.export.hazard import export_realizations
-
- #run.main([job_ini], exports=exports)
- # invoke/modify deeper openquake commands here to make it compatible with
- # the pylauncher on stampede2 for parallel runs...
- from openquake.baselib import datastore, performance, general
- from openquake.server import dbserver
- from openquake.calculators import base
- from openquake.commonlib import readinput, logs
+ print(f'FetchOpenQuake: running Version {oq_version}.') # noqa: T201
+ # reloading
+ # run.main([job_ini], exports=exports)
+ # invoke/modify deeper openquake commands here to make it compatible with
+ # the pylauncher on stampede2 for parallel runs...
+ from openquake.baselib import ( # noqa: PLC0415
+ datastore,
+ general,
+ performance,
+ )
+ from openquake.calculators import base # noqa: PLC0415
+ from openquake.calculators.export.hazard import ( # noqa: PLC0415
+ export_realizations,
+ )
+ from openquake.commonlib import logs, readinput # noqa: PLC0415
+ from openquake.server import dbserver # noqa: PLC0415
+
dbserver.ensure_on()
global calc_path
loglevel = 'info'
@@ -467,19 +657,19 @@ def oq_run_classical_psha(job_ini, exports='csv', oq_version=default_oq_version,
reuse_input = False
concurrent_tasks = None
pdb = False
- for i in range(1000):
+ for i in range(1000): # noqa: B007
try:
calc_id = logs.init('nojob', getattr(logging, loglevel.upper()))
- except:
+ except: # noqa: PERF203, E722
time.sleep(0.01)
continue
else:
- print('FetchOpenQuake: log created.')
+ print('FetchOpenQuake: log created.') # noqa: T201
break
# disable gzip_input
- base.BaseCalculator.gzip_inputs = lambda self: None
+ base.BaseCalculator.gzip_inputs = lambda self: None # noqa: ARG005
with performance.Monitor('total runtime', measuremem=True) as monitor:
- if os.environ.get('OQ_DISTRIBUTE') not in ('no', 'processpool'):
+ if os.environ.get('OQ_DISTRIBUTE') not in ('no', 'processpool'): # noqa: PLR6201
os.environ['OQ_DISTRIBUTE'] = 'processpool'
if 'hazard_calculation_id' in params:
hc_id = int(params['hazard_calculation_id'])
@@ -490,55 +680,61 @@ def oq_run_classical_psha(job_ini, exports='csv', oq_version=default_oq_version,
try:
params['hazard_calculation_id'] = str(calc_ids[hc_id])
except IndexError:
- raise SystemExit(
+ raise SystemExit( # noqa: B904, DOC501
'There are %d old calculations, cannot '
- 'retrieve the %s' % (len(calc_ids), hc_id))
+ 'retrieve the %s' % (len(calc_ids), hc_id)
+ )
oqparam = readinput.get_oqparam(job_ini, kw=params)
calc = base.calculators(oqparam, calc_id)
if reuse_input: # enable caching
oqparam.cachedir = datastore.get_datadir()
- calc.run(concurrent_tasks=concurrent_tasks, pdb=pdb,exports=exports)
-
+ calc.run(concurrent_tasks=concurrent_tasks, pdb=pdb, exports=exports)
+
calc_id = datastore.get_last_calc_id()
- path = os.path.join(datastore.get_datadir(), 'calc_%d.hdf5' % calc_id)
+ path = os.path.join(datastore.get_datadir(), 'calc_%d.hdf5' % calc_id) # noqa: PTH118
dstore = datastore.read(path)
export_realizations('realizations', dstore)
- except:
- print('FetchOpenQuake: Classical PSHA failed.')
- return 1
+ except: # noqa: E722
+ print('FetchOpenQuake: Classical PSHA failed.') # noqa: T201
+ return 1
else:
try:
- print('FetchOpenQuake: running Version {}.'.format(oq_version))
- # reloading
- from openquake.commands import run
- from openquake.commonlib import logs, datastore
- from openquake.calculators.export.hazard import export_realizations
-
- #run.main([job_ini], exports=exports)
- # invoke/modify deeper openquake commands here to make it compatible with
- # the pylauncher on stampede2 for parallel runs...
- from openquake.baselib import performance, general
- from openquake.server import dbserver
- from openquake.calculators import base
+ print(f'FetchOpenQuake: running Version {oq_version}.') # noqa: T201
+ # reloading
+ # run.main([job_ini], exports=exports)
+ # invoke/modify deeper openquake commands here to make it compatible with
+ # the pylauncher on stampede2 for parallel runs...
+ from openquake.baselib import general, performance # noqa: PLC0415
+ from openquake.calculators import base # noqa: PLC0415
+ from openquake.calculators.export.hazard import ( # noqa: PLC0415
+ export_realizations,
+ )
+ from openquake.commonlib import datastore, logs # noqa: PLC0415
+ from openquake.server import dbserver # noqa: PLC0415
+
dbserver.ensure_on()
- global calc_path
+ global calc_path # noqa: PLW0602
loglevel = 'info'
params = {}
reuse_input = False
concurrent_tasks = None
pdb = False
- for i in range(1000):
+ for i in range(1000): # noqa: B007
try:
- log = logs.init("job", job_ini, getattr(logging, loglevel.upper()))
- except:
+ log = logs.init(
+ 'job', job_ini, getattr(logging, loglevel.upper())
+ )
+ except: # noqa: PERF203, E722
time.sleep(0.01)
continue
else:
- print('FetchOpenQuake: log created.')
+ print('FetchOpenQuake: log created.') # noqa: T201
break
log.params.update(params)
- base.BaseCalculator.gzip_inputs = lambda self: None
- with log, performance.Monitor('total runtime', measuremem=True) as monitor:
+ base.BaseCalculator.gzip_inputs = lambda self: None # noqa: ARG005
+ with log, performance.Monitor(
+ 'total runtime', measuremem=True
+ ) as monitor:
calc = base.calculators(log.get_oqparam(), log.calc_id)
if reuse_input: # enable caching
calc.oqparam.cachedir = datastore.get_datadir()
@@ -546,75 +742,73 @@ def oq_run_classical_psha(job_ini, exports='csv', oq_version=default_oq_version,
logging.info('Total time spent: %s s', monitor.duration)
logging.info('Memory allocated: %s', general.humansize(monitor.mem))
- print('See the output with silx view %s' % calc.datastore.filename)
+ print('See the output with silx view %s' % calc.datastore.filename) # noqa: T201, UP031
calc_id = logs.get_last_calc_id()
- path = os.path.join(logs.get_datadir(), 'calc_%d.hdf5' % calc_id)
+ path = os.path.join(logs.get_datadir(), 'calc_%d.hdf5' % calc_id) # noqa: PTH118
dstore = datastore.read(path)
export_realizations('realizations', dstore)
- except:
- print('FetchOpenQuake: Classical PSHA failed.')
- return 1
+ except: # noqa: E722
+ print('FetchOpenQuake: Classical PSHA failed.') # noqa: T201
+ return 1
# h5 clear for stampede2 (this is somewhat inelegant...)
if 'stampede2' in socket.gethostname():
# h5clear
if oq_h5clear(path) == 0:
- print('FetchOpenQuake.oq_run_classical_psha: h5clear completed')
+ print('FetchOpenQuake.oq_run_classical_psha: h5clear completed') # noqa: T201
else:
- print('FetchOpenQuake.oq_run_classical_psha: h5clear failed')
-
+ print('FetchOpenQuake.oq_run_classical_psha: h5clear failed') # noqa: T201
+
# copy the calc file to output directory
if dir_info:
dir_output = dir_info['Output']
try:
shutil.copy2(path, dir_output)
- print('FetchOpenQuake: calc hdf file saved.')
- except:
- print('FetchOpenQuake: failed to copy calc hdf file.')
-
- return 0
+ print('FetchOpenQuake: calc hdf file saved.') # noqa: T201
+ except: # noqa: E722
+ print('FetchOpenQuake: failed to copy calc hdf file.') # noqa: T201
+ return 0
-def oq_h5clear(hdf5_file):
- #h5clear = os.path.join(os.path.dirname(os.path.abspath(__file__)),'lib/hdf5/bin/h5clear')
- #print(h5clear)
- print(hdf5_file)
- #subprocess.run(["chmod", "a+rx", h5clear])
- subprocess.run(["chmod", "a+rx", hdf5_file])
- tmp = subprocess.run(["h5clear", "-s", hdf5_file])
- print(tmp)
+def oq_h5clear(hdf5_file): # noqa: D103
+ # h5clear = os.path.join(os.path.dirname(os.path.abspath(__file__)),'lib/hdf5/bin/h5clear')
+ # print(h5clear)
+ print(hdf5_file) # noqa: T201
+ # subprocess.run(["chmod", "a+rx", h5clear])
+ subprocess.run(['chmod', 'a+rx', hdf5_file], check=False) # noqa: S603, S607
+ tmp = subprocess.run(['h5clear', '-s', hdf5_file], check=False) # noqa: S603, S607
+ print(tmp) # noqa: T201
run_flag = tmp.returncode
- return run_flag
+ return run_flag # noqa: RET504
def oq_read_uhs_classical_psha(scen_info, event_info, dir_info):
- """
- Collect the UHS from a classical PSHA by OpenQuake
- """
- import glob
- import random
+ """Collect the UHS from a classical PSHA by OpenQuake""" # noqa: D400
+ import glob # noqa: PLC0415
+ import random # noqa: PLC0415
+
# number of scenario
num_scen = scen_info['Number']
if num_scen > 1:
- print('FetchOpenQuake: currently only supporting a single scenario for PHSA')
+ print('FetchOpenQuake: currently only supporting a single scenario for PSHA') # noqa: T201
num_scen = 1
# number of realizations per site
num_rlz = event_info['NumberPerSite']
# directory of the UHS
res_dir = dir_info['Output']
# mean UHS
- cur_uhs_file = glob.glob(os.path.join(res_dir,'hazard_uhs-mean_*.csv'))[0]
- print(cur_uhs_file)
+ cur_uhs_file = glob.glob(os.path.join(res_dir, 'hazard_uhs-mean_*.csv'))[0] # noqa: PTH118, PTH207
+ print(cur_uhs_file) # noqa: T201
# read csv
- tmp = pd.read_csv(cur_uhs_file,skiprows=1)
+ tmp = pd.read_csv(cur_uhs_file, skiprows=1)
# number of stations
num_stn = len(tmp.index)
# number of IMs
- num_IMs = len(tmp.columns) - 2
+ num_IMs = len(tmp.columns) - 2 # noqa: N806
# IM list
- list_IMs = tmp.columns.tolist()[2:]
+ list_IMs = tmp.columns.tolist()[2:] # noqa: N806
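+ # column headers beyond the first two (site lon/lat) look like '<poe>~<IMT>';
+ # the IMT token is kept here and the leading token is reused below in mag_maf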
im_list = [x.split('~')[1] for x in list_IMs]
ln_psa_mr = []
mag_maf = []
@@ -625,85 +819,103 @@ def oq_read_uhs_classical_psha(scen_info, event_info, dir_info):
if num_rlz == 1:
ln_psa[:, :, 0] = np.log(tmp.iloc[:, 2:])
else:
- num_r1 = np.min([len(glob.glob(os.path.join(res_dir,'hazard_uhs-rlz-*.csv'))), num_rlz])
- for i in range(num_r1):
- cur_uhs_file = glob.glob(os.path.join(res_dir,'hazard_uhs-rlz-*.csv'))[i]
- tmp = pd.read_csv(cur_uhs_file,skiprows=1)
+ num_r1 = np.min(
+ [
+ len(glob.glob(os.path.join(res_dir, 'hazard_uhs-rlz-*.csv'))), # noqa: PTH118, PTH207
+ num_rlz,
+ ]
+ )
+ for i in range(num_r1): # noqa: PLW2901
+ cur_uhs_file = glob.glob( # noqa: PTH207
+ os.path.join(res_dir, 'hazard_uhs-rlz-*.csv') # noqa: PTH118
+ )[i]
+ tmp = pd.read_csv(cur_uhs_file, skiprows=1)
ln_psa[:, :, i] = np.log(tmp.iloc[:, 2:])
if num_rlz > num_r1:
# randomly resampling available spectra
- for i in range(num_rlz-num_r1):
+ for i in range(num_rlz - num_r1): # noqa: PLW2901
rnd_tag = random.randrange(num_r1)
- print(int(rnd_tag))
- cur_uhs_file = glob.glob(os.path.join(res_dir,'hazard_uhs-rlz-*.csv'))[int(rnd_tag)]
- tmp = pd.read_csv(cur_uhs_file,skiprows=1)
+ print(int(rnd_tag)) # noqa: T201
+ cur_uhs_file = glob.glob( # noqa: PTH207
+ os.path.join(res_dir, 'hazard_uhs-rlz-*.csv') # noqa: PTH118
+ )[int(rnd_tag)]
+ tmp = pd.read_csv(cur_uhs_file, skiprows=1)
ln_psa[:, :, i] = np.log(tmp.iloc[:, 2:])
ln_psa_mr.append(ln_psa)
- mag_maf.append([0.0,float(list_IMs[0].split('~')[0]),0.0])
-
+ mag_maf.append([0.0, float(list_IMs[0].split('~')[0]), 0.0])
+
# return
return ln_psa_mr, mag_maf, im_list
-
-class OpenQuakeHazardCalc:
- def __init__(self, job_ini, event_info, oq_version, dir_info=None, no_distribute=False):
- """
- Initialize a calculation (reinvented from openquake.engine.engine)
+class OpenQuakeHazardCalc: # noqa: D101
+ def __init__( # noqa: C901
+ self,
+ job_ini,
+ event_info,
+ oq_version,
+ dir_info=None,
+ no_distribute=False, # noqa: FBT002
+ ):
+ """Initialize a calculation (reinvented from openquake.engine.engine)
:param job_ini:
Path to configuration file/archive or
dictionary of parameters with at least a key "calculation_mode"
- """
-
+ """ # noqa: D400
self.vtag = int(oq_version.split('.')[1])
self.dir_info = dir_info
- from openquake.baselib import config, performance, general, zeromq, hdf5, parallel
- from openquake.hazardlib import const, calc, gsim
- from openquake import commonlib
- from openquake.commonlib import readinput, logictree, logs
- if self.vtag >= 12:
- from openquake.commonlib import datastore
+ from openquake.baselib import ( # noqa: PLC0415
+ config,
+ )
+ from openquake.commonlib import logs, readinput # noqa: PLC0415
+
+ if self.vtag >= 12: # noqa: PLR2004
+ from openquake.commonlib import datastore # noqa: PLC0415
else:
- from openquake.baselib import datastore
- from openquake.calculators import base
- from openquake.server import dbserver
- from openquake.commands import dbserver as cdbs
+ from openquake.baselib import datastore # noqa: PLC0415
+ from openquake.calculators import base # noqa: PLC0415
+ from openquake.server import dbserver # noqa: PLC0415
- user_name = getpass.getuser()
+ user_name = getpass.getuser() # noqa: F841
if no_distribute:
os.environ['OQ_DISTRIBUTE'] = 'no'
# check if the datadir exists
datadir = datastore.get_datadir()
- if not os.path.exists(datadir):
- os.makedirs(datadir)
+ if not os.path.exists(datadir): # noqa: PTH110
+ os.makedirs(datadir) # noqa: PTH103
- #dbserver.ensure_on()
+ # dbserver.ensure_on()
if dbserver.get_status() == 'not-running':
if config.dbserver.multi_user:
- sys.exit('Please start the DbServer: '
- 'see the documentation for details')
+ sys.exit(
+ 'Please start the DbServer: see the documentation for details'
+ )
# otherwise start the DbServer automatically; NB: I tried to use
# multiprocessing.Process(target=run_server).start() and apparently
# it works, but then run-demos.sh hangs after the end of the first
# calculation, but only if the DbServer is started by oq engine (!?)
# Here is a trick to activate OpenQuake's dbserver
# We first cd to the openquake directory and invoke subprocess to open/hold on dbserver
- # Then, we cd back to the original working directory
- owd = os.getcwd()
- os.chdir(os.path.dirname(os.path.realpath(__file__)))
- self.prc = subprocess.Popen([sys.executable, '-m', 'openquake.commands', 'dbserver', 'start'])
+ # Then, we cd back to the original working directory
+ owd = os.getcwd() # noqa: PTH109
+ os.chdir(os.path.dirname(os.path.realpath(__file__))) # noqa: PTH120
+ self.prc = subprocess.Popen( # noqa: S603
+ [sys.executable, '-m', 'openquake.commands', 'dbserver', 'start']
+ )
os.chdir(owd)
# wait for the dbserver to start
waiting_seconds = 30
while dbserver.get_status() == 'not-running':
if waiting_seconds == 0:
- sys.exit('The DbServer cannot be started after 30 seconds. '
- 'Please check the configuration')
+ sys.exit(
+ 'The DbServer cannot be started after 30 seconds. '
+ 'Please check the configuration'
+ )
time.sleep(1)
waiting_seconds -= 1
else:
@@ -718,14 +930,14 @@ def __init__(self, job_ini, event_info, oq_version, dir_info=None, no_distribute
self.event_info = event_info
# Create a job
- #self.job = logs.init("job", job_ini, logging.INFO, None, None, None)
- if self.vtag >= 11:
+ # self.job = logs.init("job", job_ini, logging.INFO, None, None, None)
+ if self.vtag >= 11: # noqa: PLR2004
dic = readinput.get_params(job_ini)
else:
dic = readinput.get_params([job_ini])
- #dic['hazard_calculation_id'] = self.job.calc_id
+ # dic['hazard_calculation_id'] = self.job.calc_id
- if self.vtag >= 12:
+ if self.vtag >= 12: # noqa: PLR2004
# Create the job log
self.log = logs.init('job', dic, logging.INFO, None, None, None)
# Get openquake parameters
@@ -741,100 +953,126 @@ def __init__(self, job_ini, event_info, oq_version, dir_info=None, no_distribute
# Create the calculator
self.calculator.from_engine = True
- print('FetchOpenQuake: OpenQuake Hazard Calculator initiated.')
+ print('FetchOpenQuake: OpenQuake Hazard Calculator initiated.') # noqa: T201
- def run_calc(self):
- """
- Run a calculation and return results (reinvented from openquake.calculators.base)
- """
+ def run_calc(self): # noqa: C901
+ """Run a calculation and return results (reinvented from openquake.calculators.base)""" # noqa: D400
+ from openquake.baselib import config, performance, zeromq # noqa: PLC0415
+ from openquake.calculators import base, getters # noqa: PLC0415
+ # logictree is needed by the branch below that handles a missing rupture_model
+ from openquake.commonlib import logictree # noqa: PLC0415
- from openquake.calculators import base, getters
- from openquake.baselib import config, performance, zeromq
- if self.vtag >= 11:
- from openquake.baselib import version
+ if self.vtag >= 11: # noqa: PLR2004
+ from openquake.baselib import version # noqa: PLC0415
else:
- from openquake.baselib import __version__ as version
+ from openquake.baselib import __version__ as version # noqa: PLC0415
- with self.calculator._monitor:
- self.calculator._monitor.username = ''
+ with self.calculator._monitor: # noqa: SLF001
+ self.calculator._monitor.username = '' # noqa: SLF001
try:
# Pre-execute setups
self.calculator.pre_execute()
- #self.calculator.datastore.swmr_on()
+ # self.calculator.datastore.swmr_on()
oq = self.calculator.oqparam
dstore = self.calculator.datastore
self.calculator.set_param()
self.calculator.offset = 0
# Source model
- #print('self.__dict__ = ')
- #print(self.calculator.__dict__)
+ # print('self.__dict__ = ')
+ # print(self.calculator.__dict__)
if oq.hazard_calculation_id: # from ruptures
dstore.parent = self.calculator.datastore.read(
- oq.hazard_calculation_id)
+ oq.hazard_calculation_id
+ )
elif hasattr(self.calculator, 'csm'): # from sources
self.calculator_build_events_from_sources()
- #self.calculator.build_events_from_sources()
- if (oq.ground_motion_fields is False and oq.hazard_curves_from_gmfs is False):
+ # self.calculator.build_events_from_sources()
+ if (
+ oq.ground_motion_fields is False
+ and oq.hazard_curves_from_gmfs is False
+ ):
return {}
elif 'rupture_model' not in oq.inputs:
logging.warning(
'There is no rupture_model, the calculator will just '
- 'import data without performing any calculation')
- fake = logictree.FullLogicTree.fake()
+ 'import data without performing any calculation'
+ )
+ fake = logictree.FullLogicTree.fake() # noqa: F821
dstore['full_lt'] = fake # needed to expose the outputs
- dstore['weights'] = [1.]
+ dstore['weights'] = [1.0]
return {}
else: # scenario
- self.calculator._read_scenario_ruptures()
- if (oq.ground_motion_fields is False and oq.hazard_curves_from_gmfs is False):
+ self.calculator._read_scenario_ruptures() # noqa: SLF001
+ if (
+ oq.ground_motion_fields is False
+ and oq.hazard_curves_from_gmfs is False
+ ):
return {}
# Intensity measure models
if oq.ground_motion_fields:
- if self.vtag >= 12:
+ if self.vtag >= 12: # noqa: PLR2004
imts = oq.get_primary_imtls()
nrups = len(dstore['ruptures'])
base.create_gmf_data(dstore, imts, oq.get_sec_imts())
- dstore.create_dset('gmf_data/sigma_epsilon',
- getters.sig_eps_dt(oq.imtls))
- dstore.create_dset('gmf_data/time_by_rup',
- getters.time_dt, (nrups,), fillvalue=None)
- elif self.vtag == 11:
+ dstore.create_dset(
+ 'gmf_data/sigma_epsilon', getters.sig_eps_dt(oq.imtls)
+ )
+ dstore.create_dset(
+ 'gmf_data/time_by_rup',
+ getters.time_dt,
+ (nrups,),
+ fillvalue=None,
+ )
+ elif self.vtag == 11: # noqa: PLR2004
imts = oq.get_primary_imtls()
nrups = len(dstore['ruptures'])
base.create_gmf_data(dstore, len(imts), oq.get_sec_imts())
- dstore.create_dset('gmf_data/sigma_epsilon',
- getters.sig_eps_dt(oq.imtls))
- dstore.create_dset('gmf_data/time_by_rup',
- getters.time_dt, (nrups,), fillvalue=None)
+ dstore.create_dset(
+ 'gmf_data/sigma_epsilon', getters.sig_eps_dt(oq.imtls)
+ )
+ dstore.create_dset(
+ 'gmf_data/time_by_rup',
+ getters.time_dt,
+ (nrups,),
+ fillvalue=None,
+ )
else:
pass
# Prepare inputs for GmfGetter
nr = len(dstore['ruptures'])
- logging.info('Reading {:_d} ruptures'.format(nr))
- if self.vtag >= 12:
- rgetters = getters.get_rupture_getters(dstore, oq.concurrent_tasks * 1.25,
- srcfilter=self.calculator.srcfilter)
- elif self.vtag == 11:
- rgetters = getters.gen_rupture_getters(dstore, oq.concurrent_tasks)
+ logging.info(f'Reading {nr:_d} ruptures')
+ if self.vtag >= 12: # noqa: PLR2004
+ rgetters = getters.get_rupture_getters(
+ dstore,
+ oq.concurrent_tasks * 1.25,
+ srcfilter=self.calculator.srcfilter,
+ )
+ elif self.vtag == 11: # noqa: PLR2004
+ rgetters = getters.gen_rupture_getters(
+ dstore, oq.concurrent_tasks
+ )
else:
- rgetters = getters.gen_rupture_getters(dstore, self.calculator.srcfilter, oq.concurrent_tasks)
+ rgetters = getters.gen_rupture_getters(
+ dstore, self.calculator.srcfilter, oq.concurrent_tasks
+ )
-
args = [(rgetter, self.calculator.param) for rgetter in rgetters]
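+ # each entry pairs one rupture getter with the calculator parameters; the
+ # monitor appended below becomes the last element and is skipped when
+ # eval_calc loops over self.args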
mon = performance.Monitor()
mon.version = version
mon.config = config
- rcvr = 'tcp://%s:%s' % (config.dbserver.listen,
- config.dbserver.receiver_ports)
- skt = zeromq.Socket(rcvr, zeromq.zmq.PULL, 'bind').__enter__()
- mon.backurl = 'tcp://%s:%s' % (config.dbserver.host, skt.port)
+ rcvr = 'tcp://%s:%s' % ( # noqa: UP031
+ config.dbserver.listen,
+ config.dbserver.receiver_ports,
+ )
+ skt = zeromq.Socket(rcvr, zeromq.zmq.PULL, 'bind').__enter__() # noqa: PLC2801
+ mon.backurl = 'tcp://%s:%s' % (config.dbserver.host, skt.port) # noqa: UP031
mon = mon.new(
- operation='total ' + self.calculator.core_task.__func__.__name__, measuremem=True)
- mon.weight = getattr(args[0], 'weight', 1.) # used in task_info
+ operation='total ' + self.calculator.core_task.__func__.__name__,
+ measuremem=True,
+ )
+ mon.weight = getattr(args[0], 'weight', 1.0) # used in task_info
mon.task_no = 1 # initialize the task number
args += (mon,)
@@ -843,81 +1081,85 @@ def run_calc(self):
self.dstore = dstore
finally:
- print('FetchOpenQuake: OpenQuake Hazard Calculator defined.')
+ print('FetchOpenQuake: OpenQuake Hazard Calculator defined.') # noqa: T201
# parallel.Starmap.shutdown()
- def eval_calc(self):
- """
- Evaluate each calculators for different IMs
- """
-
+ def eval_calc(self): # noqa: C901, PLR0912, PLR0915
+ """Evaluate each calculators for different IMs""" # noqa: D400
# Define the GmfGetter
- #for args_tag in range(len(self.args)-1):
- # Looping over all source models (Note: the last attribute in self.args is a monitor - so skipping it)
-
- from openquake.calculators import getters
- from openquake.baselib import general
- from openquake.hazardlib import const, calc, gsim
- from openquake.commands import dbserver as cdbs
- if self.vtag >= 12:
- from openquake.hazardlib.const import StdDev
- if self.vtag >= 12:
- from openquake.commonlib import datastore
- else:
- from openquake.baselib import datastore
+ # for args_tag in range(len(self.args)-1):
+ # Looping over all source models (Note: the last attribute in self.args is a monitor - so skipping it)
+
+ from openquake.baselib import general # noqa: PLC0415
+ from openquake.calculators import getters # noqa: PLC0415
+ from openquake.commands import dbserver as cdbs # noqa: PLC0415
+ from openquake.hazardlib import calc, const, gsim # noqa: PLC0415
- cur_getter = getters.GmfGetter(self.args[0][0], calc.filters.SourceFilter(
- self.dstore['sitecol'], self.dstore['oqparam'].maximum_distance),
- self.calculator.param['oqparam'], self.calculator.param['amplifier'],
- self.calculator.param['sec_perils'])
+ if self.vtag >= 12: # noqa: PLR2004
+ from openquake.hazardlib.const import StdDev # noqa: PLC0415
+ if self.vtag >= 12: # noqa: PLR2004
+ from openquake.commonlib import datastore # noqa: PLC0415
+ else:
+ from openquake.baselib import datastore # noqa: PLC0415
+
+ cur_getter = getters.GmfGetter(
+ self.args[0][0],
+ calc.filters.SourceFilter(
+ self.dstore['sitecol'], self.dstore['oqparam'].maximum_distance
+ ),
+ self.calculator.param['oqparam'],
+ self.calculator.param['amplifier'],
+ self.calculator.param['sec_perils'],
+ )
# Evaluate each computer
- print('FetchOpenQuake: Evaluting ground motion models.')
- for computer in cur_getter.gen_computers(self.mon):
+ print('FetchOpenQuake: Evaluating ground motion models.') # noqa: T201
+ for computer in cur_getter.gen_computers(self.mon): # noqa: PLR1702
# Looping over rupture(s) in the current realization
- sids = computer.sids
- #print('eval_calc: site ID sids = ')
- #print(sids)
- eids_by_rlz = computer.ebrupture.get_eids_by_rlz(
- cur_getter.rlzs_by_gsim)
+ sids = computer.sids # noqa: F841
+ # print('eval_calc: site ID sids = ')
+ # print(sids)
+ eids_by_rlz = computer.ebrupture.get_eids_by_rlz(cur_getter.rlzs_by_gsim)
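+ # group this rupture's event ids by logic-tree realization; GMPEs whose
+ # realizations received no events are skipped below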
mag = computer.ebrupture.rupture.mag
im_list = []
- data = general.AccumDict(accum=[])
- cur_T = self.event_info['IntensityMeasure'].get('Periods', None)
+ data = general.AccumDict(accum=[]) # noqa: F841
+ cur_T = self.event_info['IntensityMeasure'].get('Periods', None) # noqa: N806
for cur_gs, rlzs in cur_getter.rlzs_by_gsim.items():
# Looping over GMPE(s)
- #print('eval_calc: cur_gs = ')
- #print(cur_gs)
+ # print('eval_calc: cur_gs = ')
+ # print(cur_gs)
num_events = sum(len(eids_by_rlz[rlz]) for rlz in rlzs)
if num_events == 0: # it may happen
continue
# NB: the trick for performance is to keep the call to
# .compute outside of the loop over the realizations;
# it is better to have few calls producing big arrays
- tmpMean = []
+ tmpMean = [] # noqa: N806
tmpstdtot = []
tmpstdinter = []
tmpstdintra = []
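+ # accumulators for this GMPE: the mean and the total/inter/intra-event sigmas
+ # of every IM are concatenated below and later written into gm_collector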
- if self.vtag >= 12:
- mean_stds_all = computer.cmaker.get_mean_stds([computer.ctx], StdDev.EVENT)[0]
- for imti, imt in enumerate(computer.imts):
+ if self.vtag >= 12: # noqa: PLR2004
+ mean_stds_all = computer.cmaker.get_mean_stds(
+ [computer.ctx], StdDev.EVENT
+ )[0]
+ for imti, imt in enumerate(computer.imts):
# Looping over IM(s)
- #print('eval_calc: imt = ', imt)
- if str(imt) in ['PGA', 'PGV', 'PGD']:
- cur_T = [0.0]
+ # print('eval_calc: imt = ', imt)
+ if str(imt) in ['PGA', 'PGV', 'PGD']: # noqa: PLR6201
+ cur_T = [0.0] # noqa: N806
im_list.append(str(imt))
- imTag = 'ln' + str(imt)
+ imTag = 'ln' + str(imt) # noqa: N806
else:
if 'SA' not in im_list:
im_list.append('SA')
- imTag = 'lnSA'
+ imTag = 'lnSA' # noqa: N806
if isinstance(cur_gs, gsim.multi.MultiGMPE):
gs = cur_gs[str(imt)] # MultiGMPE
else:
gs = cur_gs # regular GMPE
try:
- if self.vtag >= 12:
+ if self.vtag >= 12: # noqa: PLR2004
mean_stds = mean_stds_all[:, imti]
num_sids = len(computer.sids)
num_stds = len(mean_stds)
@@ -925,23 +1167,35 @@ def eval_calc(self):
# no standard deviation is available
# for truncation_level = 0 there is only mean, no stds
if computer.correlation_model:
- raise ValueError('truncation_level=0 requires '
- 'no correlation model')
+ raise ValueError( # noqa: DOC501, TRY003, TRY301
+ 'truncation_level=0 requires ' # noqa: EM101
+ 'no correlation model'
+ )
mean = mean_stds[0]
stddev_intra = 0
stddev_inter = 0
stddev_total = 0
if imti == 0:
- tmpMean = mean
- tmpstdinter = np.concatenate((tmpstdinter, stddev_inter), axis=1)
- tmpstdintra = np.concatenate((tmpstdintra, stddev_intra), axis=1)
+ tmpMean = mean # noqa: N806
+ tmpstdinter = np.concatenate(
+ (tmpstdinter, stddev_inter), axis=1
+ )
+ tmpstdintra = np.concatenate(
+ (tmpstdintra, stddev_intra), axis=1
+ )
tmpstdtot = stddev_total
else:
- tmpMean = np.concatenate((tmpMean, mean), axis=0)
- tmpstdinter = np.concatenate((tmpstdinter, stddev_inter), axis=1)
- tmpstdintra = np.concatenate((tmpstdintra, stddev_intra), axis=1)
- tmpstdtot = np.concatenate((tmpstdtot, stddev_total), axis=0)
- elif num_stds == 2:
+ tmpMean = np.concatenate((tmpMean, mean), axis=0) # noqa: N806
+ tmpstdinter = np.concatenate(
+ (tmpstdinter, stddev_inter), axis=1
+ )
+ tmpstdintra = np.concatenate(
+ (tmpstdintra, stddev_intra), axis=1
+ )
+ tmpstdtot = np.concatenate(
+ (tmpstdtot, stddev_total), axis=0
+ )
+ elif num_stds == 2: # noqa: PLR2004
# If the GSIM provides only total standard deviation, we need
# to compute mean and total standard deviation at the sites
# of interest.
@@ -949,149 +1203,275 @@ def eval_calc(self):
# By default, we evaluate stddev_inter as the stddev_total
if self.correlation_model:
- raise CorrelationButNoInterIntraStdDevs(
- self.correlation_model, gsim)
+ raise CorrelationButNoInterIntraStdDevs( # noqa: DOC501, TRY301
+ self.correlation_model, gsim
+ )
mean, stddev_total = mean_stds
- stddev_total = stddev_total.reshape(stddev_total.shape + (1, ))
- mean = mean.reshape(mean.shape + (1, ))
+ stddev_total = stddev_total.reshape(
+ stddev_total.shape + (1,) # noqa: RUF005
+ )
+ mean = mean.reshape(mean.shape + (1,)) # noqa: RUF005
stddev_inter = stddev_total
stddev_intra = 0
if imti == 0:
- tmpMean = mean
- tmpstdinter = np.concatenate((tmpstdinter, stddev_inter), axis=1)
- tmpstdintra = np.concatenate((tmpstdintra, stddev_intra), axis=1)
+ tmpMean = mean # noqa: N806
+ tmpstdinter = np.concatenate(
+ (tmpstdinter, stddev_inter), axis=1
+ )
+ tmpstdintra = np.concatenate(
+ (tmpstdintra, stddev_intra), axis=1
+ )
tmpstdtot = stddev_total
else:
- tmpMean = np.concatenate((tmpMean, mean), axis=0)
- tmpstdinter = np.concatenate((tmpstdinter, stddev_inter), axis=1)
- tmpstdintra = np.concatenate((tmpstdintra, stddev_intra), axis=1)
- tmpstdtot = np.concatenate((tmpstdtot, stddev_total), axis=0)
+ tmpMean = np.concatenate((tmpMean, mean), axis=0) # noqa: N806
+ tmpstdinter = np.concatenate(
+ (tmpstdinter, stddev_inter), axis=1
+ )
+ tmpstdintra = np.concatenate(
+ (tmpstdintra, stddev_intra), axis=1
+ )
+ tmpstdtot = np.concatenate(
+ (tmpstdtot, stddev_total), axis=0
+ )
else:
mean, stddev_inter, stddev_intra = mean_stds
- stddev_intra = stddev_intra.reshape(stddev_intra.shape + (1, ))
- stddev_inter = stddev_inter.reshape(stddev_inter.shape + (1, ))
- mean = mean.reshape(mean.shape + (1, ))
+ stddev_intra = stddev_intra.reshape(
+ stddev_intra.shape + (1,) # noqa: RUF005
+ )
+ stddev_inter = stddev_inter.reshape(
+ stddev_inter.shape + (1,) # noqa: RUF005
+ )
+ mean = mean.reshape(mean.shape + (1,)) # noqa: RUF005
if imti == 0:
- tmpMean = mean
+ tmpMean = mean # noqa: N806
tmpstdinter = stddev_inter
tmpstdintra = stddev_intra
- tmpstdtot = np.sqrt(stddev_inter * stddev_inter + stddev_intra * stddev_intra)
+ tmpstdtot = np.sqrt(
+ stddev_inter * stddev_inter
+ + stddev_intra * stddev_intra
+ )
else:
- tmpMean = np.concatenate((tmpMean, mean), axis=1)
- tmpstdinter = np.concatenate((tmpstdinter, stddev_inter), axis=1)
- tmpstdintra = np.concatenate((tmpstdintra, stddev_intra), axis=1)
- tmpstdtot = np.concatenate((tmpstdtot,np.sqrt(stddev_inter * stddev_inter + stddev_intra * stddev_intra)), axis=1)
-
- elif self.vtag == 11:
+ tmpMean = np.concatenate((tmpMean, mean), axis=1) # noqa: N806
+ tmpstdinter = np.concatenate(
+ (tmpstdinter, stddev_inter), axis=1
+ )
+ tmpstdintra = np.concatenate(
+ (tmpstdintra, stddev_intra), axis=1
+ )
+ tmpstdtot = np.concatenate(
+ (
+ tmpstdtot,
+ np.sqrt(
+ stddev_inter * stddev_inter
+ + stddev_intra * stddev_intra
+ ),
+ ),
+ axis=1,
+ )
+
+ elif self.vtag == 11: # noqa: PLR2004
# v11
- dctx = computer.dctx.roundup(
- cur_gs.minimum_distance)
+ dctx = computer.dctx.roundup(cur_gs.minimum_distance)
if computer.distribution is None:
if computer.correlation_model:
- raise ValueError('truncation_level=0 requires '
- 'no correlation model')
+ raise ValueError( # noqa: DOC501, TRY003, TRY301
+ 'truncation_level=0 requires ' # noqa: EM101
+ 'no correlation model'
+ )
mean, _stddevs = cur_gs.get_mean_and_stddevs(
- computer.sctx, computer.rctx, dctx, imt, stddev_types=[])
+ computer.sctx,
+ computer.rctx,
+ dctx,
+ imt,
+ stddev_types=[],
+ )
num_sids = len(computer.sids)
- if cur_gs.DEFINED_FOR_STANDARD_DEVIATION_TYPES == {const.StdDev.TOTAL}:
+ if {
+ const.StdDev.TOTAL
+ } == cur_gs.DEFINED_FOR_STANDARD_DEVIATION_TYPES:
# If the GSIM provides only total standard deviation, we need
# to compute mean and total standard deviation at the sites
# of interest.
# In this case, we also assume no correlation model is used.
if computer.correlation_model:
- raise CorrelationButNoInterIntraStdDevs(
- computer.correlation_model, cur_gs)
+ raise CorrelationButNoInterIntraStdDevs( # noqa: DOC501, TRY301
+ computer.correlation_model, cur_gs
+ )
mean, [stddev_total] = cur_gs.get_mean_and_stddevs(
- computer.sctx, computer.rctx, dctx, imt, [const.StdDev.TOTAL])
+ computer.sctx,
+ computer.rctx,
+ dctx,
+ imt,
+ [const.StdDev.TOTAL],
+ )
stddev_total = stddev_total.reshape(
- stddev_total.shape + (1, ))
- mean = mean.reshape(mean.shape + (1, ))
+ stddev_total.shape + (1,) # noqa: RUF005
+ )
+ mean = mean.reshape(mean.shape + (1,)) # noqa: RUF005
if imti == 0:
- tmpMean = mean
+ tmpMean = mean # noqa: N806
tmpstdtot = stddev_total
else:
- tmpMean = np.concatenate((tmpMean, mean), axis=0)
- tmpstdtot = np.concatenate((tmpstdtot, stddev_total), axis=0)
+ tmpMean = np.concatenate((tmpMean, mean), axis=0) # noqa: N806
+ tmpstdtot = np.concatenate(
+ (tmpstdtot, stddev_total), axis=0
+ )
else:
- mean, [stddev_inter, stddev_intra] = cur_gs.get_mean_and_stddevs(
- computer.sctx, computer.rctx, dctx, imt,
- [const.StdDev.INTER_EVENT, const.StdDev.INTRA_EVENT])
+ mean, [stddev_inter, stddev_intra] = (
+ cur_gs.get_mean_and_stddevs(
+ computer.sctx,
+ computer.rctx,
+ dctx,
+ imt,
+ [
+ const.StdDev.INTER_EVENT,
+ const.StdDev.INTRA_EVENT,
+ ],
+ )
+ )
stddev_intra = stddev_intra.reshape(
- stddev_intra.shape + (1, ))
+ stddev_intra.shape + (1,) # noqa: RUF005
+ )
stddev_inter = stddev_inter.reshape(
- stddev_inter.shape + (1, ))
- mean = mean.reshape(mean.shape + (1, ))
+ stddev_inter.shape + (1,) # noqa: RUF005
+ )
+ mean = mean.reshape(mean.shape + (1,)) # noqa: RUF005
if imti == 0:
- tmpMean = mean
+ tmpMean = mean # noqa: N806
tmpstdinter = stddev_inter
tmpstdintra = stddev_intra
- tmpstdtot = np.sqrt(stddev_inter * stddev_inter + stddev_intra * stddev_intra)
+ tmpstdtot = np.sqrt(
+ stddev_inter * stddev_inter
+ + stddev_intra * stddev_intra
+ )
else:
- tmpMean = np.concatenate((tmpMean, mean), axis=1)
- tmpstdinter = np.concatenate((tmpstdinter, stddev_inter), axis=1)
- tmpstdintra = np.concatenate((tmpstdintra, stddev_intra), axis=1)
- tmpstdtot = np.concatenate((tmpstdtot,np.sqrt(stddev_inter * stddev_inter + stddev_intra * stddev_intra)), axis=1)
+ tmpMean = np.concatenate((tmpMean, mean), axis=1) # noqa: N806
+ tmpstdinter = np.concatenate(
+ (tmpstdinter, stddev_inter), axis=1
+ )
+ tmpstdintra = np.concatenate(
+ (tmpstdintra, stddev_intra), axis=1
+ )
+ tmpstdtot = np.concatenate(
+ (
+ tmpstdtot,
+ np.sqrt(
+ stddev_inter * stddev_inter
+ + stddev_intra * stddev_intra
+ ),
+ ),
+ axis=1,
+ )
else:
# v10
- dctx = computer.dctx.roundup(
- cur_gs.minimum_distance)
+ dctx = computer.dctx.roundup(cur_gs.minimum_distance)
if computer.truncation_level == 0:
if computer.correlation_model:
- raise ValueError('truncation_level=0 requires '
- 'no correlation model')
+ raise ValueError( # noqa: DOC501, TRY003, TRY301
+ 'truncation_level=0 requires ' # noqa: EM101
+ 'no correlation model'
+ )
mean, _stddevs = cur_gs.get_mean_and_stddevs(
- computer.sctx, computer.rctx, dctx, imt, stddev_types=[])
- num_sids = len(computer.sids)
- if cur_gs.DEFINED_FOR_STANDARD_DEVIATION_TYPES == {const.StdDev.TOTAL}:
+ computer.sctx,
+ computer.rctx,
+ dctx,
+ imt,
+ stddev_types=[],
+ )
+ num_sids = len(computer.sids) # noqa: F841
+ if {
+ const.StdDev.TOTAL
+ } == cur_gs.DEFINED_FOR_STANDARD_DEVIATION_TYPES:
# If the GSIM provides only total standard deviation, we need
# to compute mean and total standard deviation at the sites
# of interest.
# In this case, we also assume no correlation model is used.
if computer.correlation_model:
- raise CorrelationButNoInterIntraStdDevs(
- computer.correlation_model, cur_gs)
+ raise CorrelationButNoInterIntraStdDevs( # noqa: DOC501, TRY301
+ computer.correlation_model, cur_gs
+ )
mean, [stddev_total] = cur_gs.get_mean_and_stddevs(
- computer.sctx, computer.rctx, dctx, imt, [const.StdDev.TOTAL])
+ computer.sctx,
+ computer.rctx,
+ dctx,
+ imt,
+ [const.StdDev.TOTAL],
+ )
stddev_total = stddev_total.reshape(
- stddev_total.shape + (1, ))
- mean = mean.reshape(mean.shape + (1, ))
+ stddev_total.shape + (1,) # noqa: RUF005
+ )
+ mean = mean.reshape(mean.shape + (1,)) # noqa: RUF005
if imti == 0:
- tmpMean = mean
+ tmpMean = mean # noqa: N806
tmpstdtot = stddev_total
else:
- tmpMean = np.concatenate((tmpMean, mean), axis=0)
- tmpstdtot = np.concatenate((tmpstdtot, stddev_total), axis=0)
+ tmpMean = np.concatenate((tmpMean, mean), axis=0) # noqa: N806
+ tmpstdtot = np.concatenate(
+ (tmpstdtot, stddev_total), axis=0
+ )
else:
- mean, [stddev_inter, stddev_intra] = cur_gs.get_mean_and_stddevs(
- computer.sctx, computer.rctx, dctx, imt,
- [const.StdDev.INTER_EVENT, const.StdDev.INTRA_EVENT])
+ mean, [stddev_inter, stddev_intra] = (
+ cur_gs.get_mean_and_stddevs(
+ computer.sctx,
+ computer.rctx,
+ dctx,
+ imt,
+ [
+ const.StdDev.INTER_EVENT,
+ const.StdDev.INTRA_EVENT,
+ ],
+ )
+ )
stddev_intra = stddev_intra.reshape(
- stddev_intra.shape + (1, ))
+ stddev_intra.shape + (1,) # noqa: RUF005
+ )
stddev_inter = stddev_inter.reshape(
- stddev_inter.shape + (1, ))
- mean = mean.reshape(mean.shape + (1, ))
+ stddev_inter.shape + (1,) # noqa: RUF005
+ )
+ mean = mean.reshape(mean.shape + (1,)) # noqa: RUF005
if imti == 0:
- tmpMean = mean
+ tmpMean = mean # noqa: N806
tmpstdinter = stddev_inter
tmpstdintra = stddev_intra
- tmpstdtot = np.sqrt(stddev_inter * stddev_inter + stddev_intra * stddev_intra)
+ tmpstdtot = np.sqrt(
+ stddev_inter * stddev_inter
+ + stddev_intra * stddev_intra
+ )
else:
- tmpMean = np.concatenate((tmpMean, mean), axis=1)
- tmpstdinter = np.concatenate((tmpstdinter, stddev_inter), axis=1)
- tmpstdintra = np.concatenate((tmpstdintra, stddev_intra), axis=1)
- tmpstdtot = np.concatenate((tmpstdtot,np.sqrt(stddev_inter * stddev_inter + stddev_intra * stddev_intra)), axis=1)
-
- except Exception as exc:
- raise RuntimeError(
- '(%s, %s, source_id=%r) %s: %s' %
- (gs, imt, computer.source_id.decode('utf8'),
- exc.__class__.__name__, exc)
+ tmpMean = np.concatenate((tmpMean, mean), axis=1) # noqa: N806
+ tmpstdinter = np.concatenate(
+ (tmpstdinter, stddev_inter), axis=1
+ )
+ tmpstdintra = np.concatenate(
+ (tmpstdintra, stddev_intra), axis=1
+ )
+ tmpstdtot = np.concatenate(
+ (
+ tmpstdtot,
+ np.sqrt(
+ stddev_inter * stddev_inter
+ + stddev_intra * stddev_intra
+ ),
+ ),
+ axis=1,
+ )
+
+ except Exception as exc: # noqa: BLE001
+ raise RuntimeError( # noqa: B904
+ '(%s, %s, source_id=%r) %s: %s' # noqa: UP031
+ % (
+ gs,
+ imt,
+ computer.source_id.decode('utf8'),
+ exc.__class__.__name__,
+ exc,
+ )
).with_traceback(exc.__traceback__)
# initialize
@@ -1099,83 +1479,109 @@ def eval_calc(self):
gm_collector = []
# collect data
for k in range(tmpMean.shape[0]):
- imResult = {}
+ imResult = {} # noqa: N806
if len(tmpMean):
- imResult.update({'Mean': [float(x) for x in tmpMean[k].tolist()]})
+ imResult.update(
+ {'Mean': [float(x) for x in tmpMean[k].tolist()]}
+ )
if len(tmpstdtot):
- imResult.update({'TotalStdDev': [float(x) for x in tmpstdtot[k].tolist()]})
+ imResult.update(
+ {
+ 'TotalStdDev': [
+ float(x) for x in tmpstdtot[k].tolist()
+ ]
+ }
+ )
if len(tmpstdinter):
- imResult.update({'InterEvStdDev': [float(x) for x in tmpstdinter[k].tolist()]})
+ imResult.update(
+ {
+ 'InterEvStdDev': [
+ float(x) for x in tmpstdinter[k].tolist()
+ ]
+ }
+ )
if len(tmpstdintra):
- imResult.update({'IntraEvStdDev': [float(x) for x in tmpstdintra[k].tolist()]})
+ imResult.update(
+ {
+ 'IntraEvStdDev': [
+ float(x) for x in tmpstdintra[k].tolist()
+ ]
+ }
+ )
gm_collector.append({imTag: imResult})
- #print(gm_collector)
-
+ # print(gm_collector)
+
# close datastore instance
self.calculator.datastore.close()
-
+
# stop dbserver
- if self.vtag >= 11:
+ if self.vtag >= 11: # noqa: PLR2004
cdbs.main('stop')
else:
cdbs.dbserver('stop')
-
+
# terminate the subprocess
if self.prc:
self.prc.kill()
# copy calc hdf file
- if self.vtag >= 11:
+ if self.vtag >= 11: # noqa: PLR2004
calc_id = datastore.get_last_calc_id()
- path = os.path.join(datastore.get_datadir(), 'calc_%d.hdf5' % calc_id)
+ path = os.path.join(datastore.get_datadir(), 'calc_%d.hdf5' % calc_id) # noqa: PTH118
else:
- path = os.path.join(datastore.get_datadir(), 'calc_%d.hdf5' % self.calc_id)
+ path = os.path.join( # noqa: PTH118
+ datastore.get_datadir(), 'calc_%d.hdf5' % self.calc_id
+ )
if self.dir_info:
dir_output = self.dir_info['Output']
try:
shutil.copy2(path, dir_output)
- print('FetchOpenQuake: calc hdf file saved.')
- except:
- print('FetchOpenQuake: failed to copy calc hdf file.')
+ print('FetchOpenQuake: calc hdf file saved.') # noqa: T201
+ except: # noqa: E722
+ print('FetchOpenQuake: failed to copy calc hdf file.') # noqa: T201
# Final results
- res = {'Magnitude': mag,
- 'Periods': cur_T,
- 'IM': im_list,
- 'GroundMotions': gm_collector}
-
+ res = {
+ 'Magnitude': mag,
+ 'Periods': cur_T,
+ 'IM': im_list,
+ 'GroundMotions': gm_collector,
+ }
+
# return
- return res
+ return res # noqa: RET504
- def calculator_build_events_from_sources(self):
- """
- Prefilter the composite source model and store the source_info
- """
+ def calculator_build_events_from_sources(self): # noqa: C901
+ """Prefilter the composite source model and store the source_info""" # noqa: D400
gsims_by_trt = self.calculator.csm.full_lt.get_gsims_by_trt()
- print('FetchOpenQuake: self.calculator.csm.src_groups = ')
- print(self.calculator.csm.src_groups)
+ print('FetchOpenQuake: self.calculator.csm.src_groups = ') # noqa: T201
+ print(self.calculator.csm.src_groups) # noqa: T201
sources = self.calculator.csm.get_sources()
- print('FetchOpenQuake: sources = ')
- print(sources)
+ print('FetchOpenQuake: sources = ') # noqa: T201
+ print(sources) # noqa: T201
for src in sources:
src.nsites = 1 # avoid 0 weight
src.num_ruptures = src.count_ruptures()
maxweight = sum(sg.weight for sg in self.calculator.csm.src_groups) / (
- self.calculator.oqparam.concurrent_tasks or 1)
- print('FetchOpenQuake: weights = ')
- print([sg.weight for sg in self.calculator.csm.src_groups])
- print('FetchOpenQuake: maxweight = ')
- print(maxweight)
- eff_ruptures = general.AccumDict(accum=0) # trt => potential ruptures
- calc_times = general.AccumDict(accum=np.zeros(3, np.float32)) # nr, ns, dt
+ self.calculator.oqparam.concurrent_tasks or 1
+ )
+ print('FetchOpenQuake: weights = ') # noqa: T201
+ print([sg.weight for sg in self.calculator.csm.src_groups]) # noqa: T201
+ print('FetchOpenQuake: maxweight = ') # noqa: T201
+ print(maxweight) # noqa: T201
+ # trt => potential ruptures
+ eff_ruptures = general.AccumDict(accum=0) # noqa: F821
+ # nr, ns, dt
+ calc_times = general.AccumDict(accum=np.zeros(3, np.float32)) # noqa: F821
allargs = []
if self.calculator.oqparam.is_ucerf():
# manage the filtering in a special way
for sg in self.calculator.csm.src_groups:
for src in sg:
src.src_filter = self.calculator.srcfilter
- srcfilter = calc.filters.nofilter # otherwise it would be ultra-slow
+ # otherwise it would be ultra-slow
+ srcfilter = calc.filters.nofilter # noqa: F821
else:
srcfilter = self.calculator.srcfilter
logging.info('Building ruptures')
@@ -1186,23 +1592,25 @@ def calculator_build_events_from_sources(self):
par = self.calculator.param.copy()
par['gsims'] = gsims_by_trt[sg.trt]
for src_group in sg.split(maxweight):
- allargs.append((src_group, srcfilter, par))
+ allargs.append((src_group, srcfilter, par)) # noqa: PERF401
- smap = []
+ smap = []
for curargs in allargs:
- smap.append(calc.stochastic.sample_ruptures(curargs[0], curargs[1], curargs[2]))
+ smap.append( # noqa: PERF401
+ calc.stochastic.sample_ruptures(curargs[0], curargs[1], curargs[2]) # noqa: F821
+ )
- print('smap = ')
- print(smap)
+ print('smap = ') # noqa: T201
+ print(smap) # noqa: T201
self.calculator.nruptures = 0
mon = self.calculator.monitor('saving ruptures')
for tmp in smap:
dic = next(tmp)
- print(dic)
+ print(dic) # noqa: T201
# NB: dic should be a dictionary, but when the calculation dies
# for an OOM it can become None, thus giving a very confusing error
if dic is None:
- raise MemoryError('You ran out of memory!')
+ raise MemoryError('You ran out of memory!') # noqa: DOC501, EM101, TRY003
rup_array = dic['rup_array']
if len(rup_array) == 0:
continue
@@ -1213,158 +1621,192 @@ def calculator_build_events_from_sources(self):
with mon:
n = len(rup_array)
rup_array['id'] = np.arange(
- self.calculator.nruptures, self.calculator.nruptures + n)
+ self.calculator.nruptures, self.calculator.nruptures + n
+ )
self.calculator.nruptures += n
- hdf5.extend(self.calculator.datastore['ruptures'], rup_array)
- hdf5.extend(self.calculator.datastore['rupgeoms'], rup_array.geom)
+ hdf5.extend(self.calculator.datastore['ruptures'], rup_array) # noqa: F821
+ hdf5.extend(self.calculator.datastore['rupgeoms'], rup_array.geom) # noqa: F821
if len(self.calculator.datastore['ruptures']) == 0:
- raise RuntimeError('No ruptures were generated, perhaps the '
- 'investigation time is too short')
+ raise RuntimeError( # noqa: DOC501, TRY003
+ 'No ruptures were generated, perhaps the ' # noqa: EM101
+ 'investigation time is too short'
+ )
# must be called before storing the events
self.calculator.store_rlz_info(eff_ruptures) # store full_lt
self.calculator.store_source_info(calc_times)
- imp = commonlib.calc.RuptureImporter(self.calculator.datastore)
- print('self.calculator.datastore.getitem(ruptures)')
- print(self.calculator.datastore.getitem('ruptures'))
+ imp = commonlib.calc.RuptureImporter(self.calculator.datastore) # noqa: F821
+ print('self.calculator.datastore.getitem(ruptures)') # noqa: T201
+ print(self.calculator.datastore.getitem('ruptures')) # noqa: T201
with self.calculator.monitor('saving ruptures and events'):
- imp.import_rups_events(self.calculator.datastore.getitem('ruptures')[()], getters.get_rupture_getters)
-
+ imp.import_rups_events(
+ self.calculator.datastore.getitem('ruptures')[()],
+ getters.get_rupture_getters, # noqa: F821
+ )
-class CorrelationButNoInterIntraStdDevs(Exception):
+class CorrelationButNoInterIntraStdDevs(Exception): # noqa: N818, D101
def __init__(self, corr, gsim):
self.corr = corr
self.gsim = gsim
- def __str__(self):
- return '''\
- You cannot use the correlation model %s with the GSIM %s, \
- that defines only the total standard deviation. If you want to use a \
- correlation model you have to select a GMPE that provides the inter and \
- intra event standard deviations.''' % (
- self.corr.__class__.__name__, self.gsim.__class__.__name__)
+ def __str__(self): # noqa: D105
+ return (
+ f'You cannot use the correlation model '
+ f'{self.corr.__class__.__name__} with the '
+ f'GSIM {self.gsim.__class__.__name__}, '
+ f'which defines only the total standard deviation. '
+ f'If you want to use a correlation model you '
+ f'have to select a GMPE that provides the inter- '
+ f'and intra-event standard deviations.'
+ )
def to_imt_unit_values(vals, imt):
- """
- Exponentiate the values unless the IMT is MMI
- """
+ """Exponentiate the values unless the IMT is MMI""" # noqa: D400
if str(imt) == 'MMI':
return vals
return np.exp(vals)
-def export_rupture_to_json(scenario_info, mlon, mlat, siteFile, work_dir):
- from openquake.hazardlib import nrml, sourceconverter, site
- from openquake.hazardlib.calc.filters import SourceFilter, get_distances
- from openquake.hazardlib.geo.surface.base import BaseSurface
- from openquake.hazardlib.geo.mesh import Mesh, surface_to_arrays
- from openquake.commonlib import readinput
- import json
- in_dir = os.path.join(work_dir,'Input')
- outfile = os.path.join(work_dir,'Output','RupFile.geojson')
- erf_data = {"type": "FeatureCollection"}
- oq = readinput.get_oqparam(dict(
- calculation_mode='classical',
- inputs = {
- "site_model":[siteFile]},
- intensity_measure_types_and_levels="{'PGA': [0.1], 'SA(0.1)': [0.1]}", #place holder for initiating oqparam. Not used in ERF
- investigation_time=str(scenario_info['EqRupture'].get('investigation_time', '50.0')),
- gsim='AbrahamsonEtAl2014', #place holder for initiating oqparam, not used in ERF
- truncation_level='99.0', # place holder for initiating oqparam. not used in ERF
- maximum_distance=str(scenario_info['EqRupture'].get('maximum_distance', '2000')),
- width_of_mfd_bin = str(scenario_info['EqRupture'].get('width_of_mfd_bin', '1.0')),
- area_source_discretization=str(scenario_info['EqRupture'].get('area_source_discretization', '10'))
- ))
+def export_rupture_to_json(scenario_info, mlon, mlat, siteFile, work_dir): # noqa: C901, N803, D103
+ import json # noqa: PLC0415
+
+ from openquake.commonlib import readinput # noqa: PLC0415
+ from openquake.hazardlib import nrml, site, sourceconverter # noqa: PLC0415
+ from openquake.hazardlib.calc.filters import ( # noqa: PLC0415
+ SourceFilter,
+ get_distances,
+ )
+ from openquake.hazardlib.geo.mesh import Mesh, surface_to_arrays # noqa: PLC0415
+ from openquake.hazardlib.geo.surface.base import BaseSurface # noqa: PLC0415
+
+ in_dir = os.path.join(work_dir, 'Input') # noqa: PTH118
+ outfile = os.path.join(work_dir, 'Output', 'RupFile.geojson') # noqa: PTH118
+ erf_data = {'type': 'FeatureCollection'}
+ oq = readinput.get_oqparam(
+ dict( # noqa: C408
+ calculation_mode='classical',
+ inputs={'site_model': [siteFile]},
+ intensity_measure_types_and_levels="{'PGA': [0.1], 'SA(0.1)': [0.1]}", # placeholder for initializing oqparam; not used in the ERF
+ investigation_time=str(
+ scenario_info['EqRupture'].get('investigation_time', '50.0')
+ ),
+ gsim='AbrahamsonEtAl2014', # placeholder for initializing oqparam; not used in the ERF
+ truncation_level='99.0', # placeholder for initializing oqparam; not used in the ERF
+ maximum_distance=str(
+ scenario_info['EqRupture'].get('maximum_distance', '2000')
+ ),
+ width_of_mfd_bin=str(
+ scenario_info['EqRupture'].get('width_of_mfd_bin', '1.0')
+ ),
+ area_source_discretization=str(
+ scenario_info['EqRupture'].get('area_source_discretization', '10')
+ ),
+ )
+ )
 rupture_mesh_spacing = scenario_info['EqRupture']['rupture_mesh_spacing']
- rupture_mesh_spacing = scenario_info['EqRupture']['rupture_mesh_spacing']
- [src_nrml] = nrml.read(os.path.join(in_dir, scenario_info['EqRupture']['sourceFile']))
+ [src_nrml] = nrml.read(
+ os.path.join(in_dir, scenario_info['EqRupture']['sourceFile']) # noqa: PTH118
+ )
conv = sourceconverter.SourceConverter(
- scenario_info['EqRupture']['investigation_time'],
- rupture_mesh_spacing,
- width_of_mfd_bin=scenario_info['EqRupture']['width_of_mfd_bin'],
- area_source_discretization=scenario_info['EqRupture']['area_source_discretization'])
+ scenario_info['EqRupture']['investigation_time'],
+ rupture_mesh_spacing,
+ width_of_mfd_bin=scenario_info['EqRupture']['width_of_mfd_bin'],
+ area_source_discretization=scenario_info['EqRupture'][
+ 'area_source_discretization'
+ ],
+ )
src_raw = conv.convert_node(src_nrml)
sources = []
sources_dist = []
sources_id = []
- id = 0
- siteMeanCol = site.SiteCollection.from_points([mlon], [mlat])
+ id = 0 # noqa: A001
+ siteMeanCol = site.SiteCollection.from_points([mlon], [mlat]) # noqa: N806
srcfilter = SourceFilter(siteMeanCol, oq.maximum_distance)
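+ # sources are prefiltered against a single 'mean' site at (mlon, mlat); only
+ # sources with sites within oq.maximum_distance are kept below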
- minMag = scenario_info['EqRupture']['min_mag']
- maxMag = scenario_info['EqRupture']['max_mag']
+ minMag = scenario_info['EqRupture']['min_mag'] # noqa: N806
+ maxMag = scenario_info['EqRupture']['max_mag'] # noqa: N806
for i in range(len(src_nrml)):
subnode = src_nrml[i]
- subSrc = src_raw[i]
- tag = subnode.tag.rsplit('}')[1] if subnode.tag.startswith('{') else subnode.tag
- if tag == "sourceGroup":
+ subSrc = src_raw[i] # noqa: N806
+ tag = (
+ subnode.tag.rsplit('}')[1]
+ if subnode.tag.startswith('{')
+ else subnode.tag
+ )
+ if tag == 'sourceGroup':
for j in range(len(subnode)):
subsubnode = subnode[j]
- subsubSrc = subSrc[j]
- subtag = subsubnode.tag.rsplit('}')[1] if subsubnode.tag.startswith('{') else subsubnode.tag
- if subtag.endswith('Source') and srcfilter.get_close_sites(subsubSrc) is not None:
+ subsubSrc = subSrc[j] # noqa: N806
+ subtag = (
+ subsubnode.tag.rsplit('}')[1]
+ if subsubnode.tag.startswith('{')
+ else subsubnode.tag
+ )
+ if (
+ subtag.endswith('Source')
+ and srcfilter.get_close_sites(subsubSrc) is not None
+ ):
subsubSrc.id = id
sources_id.append(id)
- id += 1
+ id += 1 # noqa: A001
sources.append(subsubSrc)
- sourceMesh = subsubSrc.polygon.discretize(rupture_mesh_spacing)
- sourceSurface = BaseSurface(sourceMesh)
- siteMesh = Mesh(siteMeanCol.lon, siteMeanCol.lat)
- sources_dist. append(sourceSurface.get_min_distance(siteMesh))
- elif tag.endswith('Source') and srcfilter.get_close_sites(subSrc) is not None:
+ sourceMesh = subsubSrc.polygon.discretize(rupture_mesh_spacing) # noqa: N806
+ sourceSurface = BaseSurface(sourceMesh) # noqa: N806
+ siteMesh = Mesh(siteMeanCol.lon, siteMeanCol.lat) # noqa: N806
+ sources_dist.append(sourceSurface.get_min_distance(siteMesh))
+ elif (
+ tag.endswith('Source') and srcfilter.get_close_sites(subSrc) is not None
+ ):
subSrc.id = id
sources_id.append(id)
- id += 1
+ id += 1 # noqa: A001
sources.append(subSrc)
- sourceMesh = subSrc.polygon.discretize(rupture_mesh_spacing)
- sourceSurface = BaseSurface(sourceMesh)
- siteMesh = Mesh(siteMeanCol.lon, siteMeanCol.lat)
- sources_dist. append(sourceSurface.get_min_distance(siteMesh))
- sources_df = pd.DataFrame.from_dict({
- 'source': sources,
- 'sourceDist': sources_dist,
- 'sourceID':sources_id
- })
- sources_df = sources_df.sort_values(['sourceDist'], ascending = (True))
+ sourceMesh = subSrc.polygon.discretize(rupture_mesh_spacing) # noqa: N806
+ sourceSurface = BaseSurface(sourceMesh) # noqa: N806
+ siteMesh = Mesh(siteMeanCol.lon, siteMeanCol.lat) # noqa: N806
+ sources_dist.append(sourceSurface.get_min_distance(siteMesh))
+ sources_df = pd.DataFrame.from_dict(
+ {'source': sources, 'sourceDist': sources_dist, 'sourceID': sources_id}
+ )
+ sources_df = sources_df.sort_values(['sourceDist'], ascending=(True))
sources_df = sources_df.set_index('sourceID')
allrups = []
- allrups_rRup = []
- allrups_srcId = []
- for src in sources_df["source"]:
+ allrups_rRup = [] # noqa: N806
+ allrups_srcId = [] # noqa: N806
+ for src in sources_df['source']:
src_rups = list(src.iter_ruptures())
for i, rup in enumerate(src_rups):
rup.rup_id = src.offset + i
allrups.append(rup)
allrups_rRup.append(rup.surface.get_min_distance(siteMeanCol))
allrups_srcId.append(src.id)
- rups_df = pd.DataFrame.from_dict({
- 'rups':allrups,
- 'rups_rRup':allrups_rRup,
- 'rups_srcId':allrups_srcId
- })
- rups_df = rups_df.sort_values(['rups_rRup'], ascending = (True))
+ rups_df = pd.DataFrame.from_dict(
+ {'rups': allrups, 'rups_rRup': allrups_rRup, 'rups_srcId': allrups_srcId}
+ )
+ rups_df = rups_df.sort_values(['rups_rRup'], ascending=(True))
feature_collection = []
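+ # each qualifying rupture becomes one GeoJSON Feature: 'properties' carries the
+ # name, magnitude, rate and site distances, 'geometry' carries the top edge of
+ # the rupture surface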
for ind in rups_df.index:
cur_dict = {'type': 'Feature'}
- cur_dist = rups_df.loc[ind, "rups_rRup"]
- if cur_dist <= 0.:
+ cur_dist = rups_df.loc[ind, 'rups_rRup']
+ if cur_dist <= 0.0:
# skipping ruptures with distance exceeding the maxDistance
continue
- rup = rups_df.loc[ind, "rups"]
+ rup = rups_df.loc[ind, 'rups']
# s0=number of multi surfaces, s1=number of rows, s2=number of columns
arrays = surface_to_arrays(rup.surface) # shape (s0, 3, s1, s2)
- src_id = rups_df.loc[ind,"rups_srcId"]
+ src_id = rups_df.loc[ind, 'rups_srcId']
maf = rup.occurrence_rate
- if maf <= 0.:
+ if maf <= 0.0:
continue
- ruptureSurface = rup.surface
+ ruptureSurface = rup.surface # noqa: N806, F841
# Properties
- cur_dict['properties'] = dict()
- name = sources_df.loc[src_id, "source"].name
+ cur_dict['properties'] = dict() # noqa: C408
+ name = sources_df.loc[src_id, 'source'].name
cur_dict['properties'].update({'Name': name})
- Mag = float(rup.mag)
+ Mag = float(rup.mag) # noqa: N806
if (Mag < minMag) or (Mag > maxMag):
continue
cur_dict['properties'].update({'Magnitude': Mag})
@@ -1375,37 +1817,56 @@ def export_rupture_to_json(scenario_info, mlon, mlat, siteFile, work_dir):
cur_dict['properties'].update({'Lat': rup.hypocenter.y})
cur_dict['properties'].update({'Depth': rup.hypocenter.z})
cur_dict['properties'].update({'trt': rup.tectonic_region_type})
- cur_dict['properties'].update({'mesh' : json.dumps(
- [[[[round(float(z), 5) for z in y] for y in x] for x in array]
- for array in arrays])})
+ cur_dict['properties'].update(
+ {
+ 'mesh': json.dumps(
+ [
+ [[[round(float(z), 5) for z in y] for y in x] for x in array]
+ for array in arrays
+ ]
+ )
+ }
+ )
if hasattr(rup, 'probs_occur'):
cur_dict['properties'].update({'Probability': rup.probs_occur})
else:
cur_dict['properties'].update({'MeanAnnualRate': rup.occurrence_rate})
if hasattr(rup, 'weight'):
cur_dict['properties'].update({'weight': rup.weight})
- cur_dict['properties'].update({'Distance': get_distances(rup, siteMeanCol, 'rrup')[0]})
- cur_dict['properties'].update({'DistanceRup': get_distances(rup, siteMeanCol, 'rrup')[0]})
+ cur_dict['properties'].update(
+ {'Distance': get_distances(rup, siteMeanCol, 'rrup')[0]}
+ )
+ cur_dict['properties'].update(
+ {'DistanceRup': get_distances(rup, siteMeanCol, 'rrup')[0]}
+ )
# cur_dict['properties'].update({'DistanceSeis': get_distances(rup, siteMeanCol, 'rrup')})
- cur_dict['properties'].update({'DistanceJB': get_distances(rup, siteMeanCol, 'rjb')[0]})
- cur_dict['properties'].update({'DistanceX': get_distances(rup, siteMeanCol, 'rx')[0]})
- cur_dict['geometry'] = dict()
+ cur_dict['properties'].update(
+ {'DistanceJB': get_distances(rup, siteMeanCol, 'rjb')[0]}
+ )
+ cur_dict['properties'].update(
+ {'DistanceX': get_distances(rup, siteMeanCol, 'rx')[0]}
+ )
+ cur_dict['geometry'] = dict() # noqa: C408
# if (len(arrays)==1 and arrays[0].shape[1]==1 and arrays[0].shape[2]==1):
# # Point Source
# cur_dict['geometry'].update({'type': 'Point'})
# cur_dict['geometry'].update({'coordinates': [arrays[0][0][0][0], arrays[0][1][0][0]]})
# elif len(rup.surface.mesh.shape)==1:
- if len(rup.surface.mesh.shape)==1:
+ if len(rup.surface.mesh.shape) == 1:
# Point Source or area source
- top_edge = rup.surface.mesh # See the get_top_edge_depth method of the BaseSurface class
+ top_edge = (
+ rup.surface.mesh
+ ) # See the get_top_edge_depth method of the BaseSurface class
coordinates = []
for i in range(len(top_edge.lats)):
- coordinates.append([top_edge.lons[i], top_edge.lats[i]])
+ coordinates.append([top_edge.lons[i], top_edge.lats[i]]) # noqa: PERF401
cur_dict['geometry'].update({'type': 'LineString'})
cur_dict['geometry'].update({'coordinates': coordinates})
else:
# Line source
- top_edge = rup.surface.mesh[0:1] # See the get_top_edge_depth method of the BaseSurface class
+ top_edge = rup.surface.mesh[
+ 0:1
+ ] # See the get_top_edge_depth method of the BaseSurface class
coordinates = []
for i in range(len(top_edge.lats[0])):
coordinates.append([top_edge.lons[0][i], top_edge.lats[0][i]])
@@ -1417,30 +1878,38 @@ def export_rupture_to_json(scenario_info, mlon, mlat, siteFile, work_dir):
feature_collection_sorted = [feature_collection[i] for i in sort_ids]
del feature_collection
erf_data.update({'features': feature_collection_sorted})
- print('FetchOpenquake: total {} ruptures are collected.'.format(len(feature_collection_sorted)))
+ print( # noqa: T201
+ f'FetchOpenQuake: {len(feature_collection_sorted)} ruptures were collected in total.'
+ )
# Output
if outfile is not None:
- print('The collected ruptures are sorted by MeanAnnualRate and saved in {}'.format(outfile))
- with open(outfile, 'w') as f:
+ print( # noqa: T201
+ f'The collected ruptures are sorted by MeanAnnualRate and saved in {outfile}'
+ )
+ with open(outfile, 'w') as f: # noqa: PLW1514, PTH123
json.dump(erf_data, f, indent=2)
-
-def get_site_rup_info_oq(source_info, siteList):
- from openquake.hazardlib import site
- from openquake.hazardlib.calc.filters import get_distances
+
+
+def get_site_rup_info_oq(source_info, siteList): # noqa: N803, D103
+ from openquake.hazardlib import site # noqa: PLC0415
+ from openquake.hazardlib.calc.filters import get_distances # noqa: PLC0415
+
rup = source_info['rup']
- distToRupture = []
- distJB = []
- distX = []
+ distToRupture = [] # noqa: N806, F841
+ distJB = [] # noqa: N806, F841
+ distX = [] # noqa: N806, F841
for i in range(len(siteList)):
- siteMeanCol = site.SiteCollection.from_points([siteList[i]['lon']], [siteList[i]['lat']])
- siteList[i].update({"rRup":get_distances(rup, siteMeanCol, 'rrup')[0]})
- siteList[i].update({"rJB":get_distances(rup, siteMeanCol, 'rjb')[0]})
- siteList[i].update({"rX":get_distances(rup, siteMeanCol, 'rx')[0]})
+ siteMeanCol = site.SiteCollection.from_points( # noqa: N806
+ [siteList[i]['lon']], [siteList[i]['lat']]
+ )
+ siteList[i].update({'rRup': get_distances(rup, siteMeanCol, 'rrup')[0]})
+ siteList[i].update({'rJB': get_distances(rup, siteMeanCol, 'rjb')[0]})
+ siteList[i].update({'rX': get_distances(rup, siteMeanCol, 'rx')[0]})
site_rup_info = {
- "dip" : float(rup.surface.get_dip()),
- "width" : float(rup.surface.get_width()),
- "zTop" : float(rup.rake),
- "zHyp" : float(rup.hypocenter.depth),
- "aveRake" : float(rup.rake)
- }
- return site_rup_info, siteList
\ No newline at end of file
+ 'dip': float(rup.surface.get_dip()),
+ 'width': float(rup.surface.get_width()),
+ 'zTop': float(rup.surface.get_top_edge_depth()), # depth to the top of the rupture surface
+ 'zHyp': float(rup.hypocenter.depth),
+ 'aveRake': float(rup.rake),
+ }
+ return site_rup_info, siteList
diff --git a/modules/performRegionalEventSimulation/regionalGroundMotion/FetchOpenSHA.py b/modules/performRegionalEventSimulation/regionalGroundMotion/FetchOpenSHA.py
index 24856cc11..57d4c635c 100644
--- a/modules/performRegionalEventSimulation/regionalGroundMotion/FetchOpenSHA.py
+++ b/modules/performRegionalEventSimulation/regionalGroundMotion/FetchOpenSHA.py
@@ -1,5 +1,4 @@
-# -*- coding: utf-8 -*-
-#
+# # noqa: INP001, D100
# Copyright (c) 2018 Leland Stanford Junior University
# Copyright (c) 2018 The Regents of the University of California
#
@@ -38,59 +37,56 @@
# Kuanshi Zhong
#
-import json
import numpy as np
import pandas as pd
-from tqdm import tqdm
-from gmpe import SignificantDurationModel
import ujson
-
-from java.io import *
-from java.lang import *
-from java.lang.reflect import *
-from java.util import *
-
-from org.opensha.commons.data import *
-from org.opensha.commons.data.siteData import *
-from org.opensha.commons.data.function import *
-from org.opensha.commons.exceptions import ParameterException
-from org.opensha.commons.geo import *
-from org.opensha.commons.param import *
-from org.opensha.commons.param.event import *
-from org.opensha.commons.param.constraint import *
-from org.opensha.commons.util import ServerPrefUtils
-from org.opensha.commons.param.impl import DoubleParameter
+from java.io import * # noqa: F403
+from java.lang import * # noqa: F403
+from java.lang.reflect import * # noqa: F403
+from java.util import * # noqa: F403
+from org.opensha.commons.data import * # noqa: F403
+from org.opensha.commons.data.function import * # noqa: F403
+from org.opensha.commons.data.siteData import * # noqa: F403
+from org.opensha.commons.geo import * # noqa: F403
+from org.opensha.commons.param import * # noqa: F403
+from org.opensha.commons.param.constraint import * # noqa: F403
+from org.opensha.commons.param.event import * # noqa: F403
+from org.opensha.sha.calc import * # noqa: F403
+from org.opensha.sha.earthquake import * # noqa: F403
+from org.opensha.sha.earthquake.param import * # noqa: F403
+from org.opensha.sha.earthquake.rupForecastImpl.Frankel02 import (
+ Frankel02_AdjustableEqkRupForecast,
+)
+from org.opensha.sha.earthquake.rupForecastImpl.WGCEP_UCERF1 import (
+ WGCEP_UCERF1_EqkRupForecast,
+)
+from org.opensha.sha.earthquake.rupForecastImpl.WGCEP_UCERF_2_Final import UCERF2
+from org.opensha.sha.earthquake.rupForecastImpl.WGCEP_UCERF_2_Final.MeanUCERF2 import (
+ MeanUCERF2,
+)
+from org.opensha.sha.faultSurface import * # noqa: F403
from org.opensha.sha.faultSurface.utils import PtSrcDistCorr
+from org.opensha.sha.imr import * # noqa: F403
+from org.opensha.sha.imr.attenRelImpl import * # noqa: F403
+from org.opensha.sha.imr.attenRelImpl.ngaw2 import * # noqa: F403
+from org.opensha.sha.imr.attenRelImpl.ngaw2.NGAW2_Wrappers import * # noqa: F403
+from org.opensha.sha.imr.param.IntensityMeasureParams import * # noqa: F403
+from org.opensha.sha.imr.param.OtherParams import * # noqa: F403
+from org.opensha.sha.util import * # noqa: F403
+from tqdm import tqdm
-from org.opensha.sha.earthquake import *
-from org.opensha.sha.earthquake.param import *
-from org.opensha.sha.earthquake.rupForecastImpl.Frankel02 import Frankel02_AdjustableEqkRupForecast
-from org.opensha.sha.earthquake.rupForecastImpl.WGCEP_UCERF1 import WGCEP_UCERF1_EqkRupForecast
-from org.opensha.sha.earthquake.rupForecastImpl.WGCEP_UCERF_2_Final import UCERF2
-from org.opensha.sha.earthquake.rupForecastImpl.WGCEP_UCERF_2_Final.MeanUCERF2 import MeanUCERF2
-from org.opensha.sha.faultSurface import *
-from org.opensha.sha.imr import *
-from org.opensha.sha.imr.attenRelImpl import *
-from org.opensha.sha.imr.attenRelImpl.ngaw2 import *
-from org.opensha.sha.imr.attenRelImpl.ngaw2.NGAW2_Wrappers import *
-from org.opensha.sha.imr.param.IntensityMeasureParams import *
-from org.opensha.sha.imr.param.OtherParams import *
-from org.opensha.sha.imr.param.SiteParams import Vs30_Param
-from org.opensha.sha.calc import *
-from org.opensha.sha.util import *
try:
from scratch.UCERF3.erf.mean import MeanUCERF3
except ModuleNotFoundError:
- MeanUCERF3 = jpype.JClass("scratch.UCERF3.erf.mean.MeanUCERF3")
-
-from org.opensha.sha.gcim.imr.attenRelImpl import *
-from org.opensha.sha.gcim.imr.param.IntensityMeasureParams import *
-from org.opensha.sha.gcim.imr.param.EqkRuptureParams import *
-from org.opensha.sha.gcim.calc import *
+ MeanUCERF3 = jpype.JClass('scratch.UCERF3.erf.mean.MeanUCERF3') # noqa: F405
+from org.opensha.sha.gcim.calc import * # noqa: F403
+from org.opensha.sha.gcim.imr.attenRelImpl import * # noqa: F403
+from org.opensha.sha.gcim.imr.param.EqkRuptureParams import * # noqa: F403
+from org.opensha.sha.gcim.imr.param.IntensityMeasureParams import * # noqa: F403
-def getERF(scenario_info, update_flag=True):
+def getERF(scenario_info, update_flag=True): # noqa: FBT002, C901, N802, D103
# Initialization
erf = None
erf_name = scenario_info['EqRupture']['Model']
@@ -98,13 +94,16 @@ def getERF(scenario_info, update_flag=True):
# ERF model options
if erf_name == 'WGCEP (2007) UCERF2 - Single Branch':
erf = MeanUCERF2()
- if (erf_selection.get('Background Seismicity',None) == "Exclude") and \
- ("Treat Background Seismicity As" in erf_selection.keys()):
- value = erf_selection.pop("Treat Background Seismicity As")
- print(f"Background Seismicvity is set as Excluded, Treat Background Seismicity As: {value} is ignored")
+ if (erf_selection.get('Background Seismicity', None) == 'Exclude') and (
+ 'Treat Background Seismicity As' in erf_selection.keys() # noqa: SIM118
+ ):
+ value = erf_selection.pop('Treat Background Seismicity As')
+ print( # noqa: T201
+ f'Background Seismicity is set as Excluded, Treat Background Seismicity As: {value} is ignored'
+ )
for key, value in erf_selection.items():
if type(value) is int:
- value = float(value)
+ value = float(value) # noqa: PLW2901
erf.setParameter(key, value)
# erf.getParameter(key).setValue(value)
elif erf_name == 'USGS/CGS 2002 Adj. Cal. ERF':
@@ -113,55 +112,85 @@ def getERF(scenario_info, update_flag=True):
erf = WGCEP_UCERF1_EqkRupForecast()
elif erf_name == 'Mean UCERF3':
tmp = MeanUCERF3()
- if erf_selection.get("preset", None) == "(POISSON ONLY) Both FM Branch Averaged":
+ if (
+ erf_selection.get('preset', None)
+ == '(POISSON ONLY) Both FM Branch Averaged'
+ ):
tmp.setPreset(MeanUCERF3.Presets.BOTH_FM_BRANCH_AVG)
- if (erf_selection.get('Background Seismicity',None) == "Exclude") and \
- ("Treat Background Seismicity As" in erf_selection.keys()):
- value = erf_selection.pop("Treat Background Seismicity As")
- print(f"Background Seismicvity is set as Excluded, Treat Background Seismicity As: {value} is ignored")
+ if (erf_selection.get('Background Seismicity', None) == 'Exclude') and (
+ 'Treat Background Seismicity As' in erf_selection.keys() # noqa: SIM118
+ ):
+ value = erf_selection.pop('Treat Background Seismicity As')
+ print( # noqa: T201
+ f'Background Seismicity is set as Excluded, Treat Background Seismicity As: {value} is ignored'
+ )
# Some parameters in MeanUCERF3 have overloaded setValue() Need to set one by one
# Set Apply Aftershock Filter
if erf_selection.get('Apply Aftershock Filter', None):
- tmp.setParameter("Apply Aftershock Filter", erf_selection["Apply Aftershock Filter"])
+ tmp.setParameter(
+ 'Apply Aftershock Filter',
+ erf_selection['Apply Aftershock Filter'],
+ )
# Set Aleatory mag-area stdDev
if erf_selection.get('Aleatory Mag-Area StdDev', None):
- tmp.setParameter("Aleatory Mag-Area StdDev", erf_selection["Aleatory Mag-Area StdDev"])
+ tmp.setParameter(
+ 'Aleatory Mag-Area StdDev',
+ erf_selection['Aleatory Mag-Area StdDev'],
+ )
# Set IncludeBackgroundOption
setERFbackgroundOptions(tmp, erf_selection)
# Set Treat Background Seismicity As Option
setERFtreatBackgroundOptions(tmp, erf_selection)
- elif erf_selection.get("preset", None) == "FM3.1 Branch Averaged":
+ elif erf_selection.get('preset', None) == 'FM3.1 Branch Averaged':
tmp.setPreset(MeanUCERF3.Presets.FM3_1_BRANCH_AVG)
- if (erf_selection.get('Background Seismicity',None) == "Exclude") and \
- ("Treat Background Seismicity As" in erf_selection.keys()):
- value = erf_selection.pop("Treat Background Seismicity As")
- print(f"Background Seismicvity is set as Excluded, Treat Background Seismicity As: {value} is ignored")
+ if (erf_selection.get('Background Seismicity', None) == 'Exclude') and (
+ 'Treat Background Seismicity As' in erf_selection.keys() # noqa: SIM118
+ ):
+ value = erf_selection.pop('Treat Background Seismicity As')
+ print( # noqa: T201
+ f'Background Seismicity is set as Excluded, Treat Background Seismicity As: {value} is ignored'
+ )
# Some parameters in MeanUCERF3 have overloaded setValue() Need to set one by one
# Set Apply Aftershock Filter
if erf_selection.get('Apply Aftershock Filter', None):
- tmp.setParameter("Apply Aftershock Filter", erf_selection["Apply Aftershock Filter"])
+ tmp.setParameter(
+ 'Apply Aftershock Filter',
+ erf_selection['Apply Aftershock Filter'],
+ )
# Set Aleatory mag-area stdDev
if erf_selection.get('Aleatory Mag-Area StdDev', None):
- tmp.setParameter("Aleatory Mag-Area StdDev", erf_selection["Aleatory Mag-Area StdDev"])
+ tmp.setParameter(
+ 'Aleatory Mag-Area StdDev',
+ erf_selection['Aleatory Mag-Area StdDev'],
+ )
# Set IncludeBackgroundOption
setERFbackgroundOptions(tmp, erf_selection)
# Set Treat Background Seismicity As Option
setERFtreatBackgroundOptions(tmp, erf_selection)
# Set Probability Model Option
setERFProbabilityModelOptions(tmp, erf_selection)
- elif erf_selection.get("preset", None) == "FM3.2 Branch Averaged":
+ elif erf_selection.get('preset', None) == 'FM3.2 Branch Averaged':
tmp.setPreset(MeanUCERF3.Presets.FM3_2_BRANCH_AVG)
- if (erf_selection.get('Background Seismicity',None) == "Exclude") and \
- ("Treat Background Seismicity As" in erf_selection.keys()):
- value = erf_selection.pop("Treat Background Seismicity As")
- print(f"Background Seismicvity is set as Excluded, Treat Background Seismicity As: {value} is ignored")
+ if (erf_selection.get('Background Seismicity', None) == 'Exclude') and (
+ 'Treat Background Seismicity As' in erf_selection.keys() # noqa: SIM118
+ ):
+ value = erf_selection.pop('Treat Background Seismicity As')
+ print( # noqa: T201
+ f'Background Seismicity is set as Excluded, Treat Background Seismicity As: {value} is ignored'
+ )
# Some parameters in MeanUCERF3 have overloaded setValue() Need to set one by one
# Set Apply Aftershock Filter
if erf_selection.get('Apply Aftershock Filter', None):
- tmp.setParameter("Apply Aftershock Filter", erf_selection["Apply Aftershock Filter"])
+ tmp.setParameter(
+ 'Apply Aftershock Filter',
+ erf_selection['Apply Aftershock Filter'],
+ )
# Set Aleatory mag-area stdDev
if erf_selection.get('Aleatory Mag-Area StdDev', None):
- tmp.setParameter("Aleatory Mag-Area StdDev", erf_selection["Aleatory Mag-Area StdDev"])
+ tmp.setParameter(
+ 'Aleatory Mag-Area StdDev',
+ erf_selection['Aleatory Mag-Area StdDev'],
+ )
# Set IncludeBackgroundOption
setERFbackgroundOptions(tmp, erf_selection)
# Set Treat Background Seismicity As Option
@@ -169,280 +198,373 @@ def getERF(scenario_info, update_flag=True):
# Set Probability Model Option
setERFProbabilityModelOptions(tmp, erf_selection)
else:
- print(f"""The specified Mean UCERF3 preset {erf_selection.get("preset", None)} is not implemented""")
+ print( # noqa: T201
+ f"""The specified Mean UCERF3 preset {erf_selection.get("preset", None)} is not implemented"""
+ )
erf = tmp
del tmp
elif erf_name == 'WGCEP Eqk Rate Model 2 ERF':
erf = UCERF2()
else:
- print('Please check the ERF model name.')
+ print('Please check the ERF model name.') # noqa: T201
if erf_name and update_flag:
erf.updateForecast()
# return
return erf
-def setERFbackgroundOptions(erf, selection):
+
+def setERFbackgroundOptions(erf, selection): # noqa: N802, D103
option = selection.get('Background Seismicity', None)
- if option == "Include":
- erf.setParameter('Background Seismicity', IncludeBackgroundOption.INCLUDE)
- elif option == "Exclude":
- erf.setParameter('Background Seismicity', IncludeBackgroundOption.EXCLUDE)
- elif option == "Only":
- erf.setParameter('Background Seismicity', IncludeBackgroundOption.ONLY)
-
-def setERFtreatBackgroundOptions(erf, selection):
+ if option == 'Include':
+ erf.setParameter('Background Seismicity', IncludeBackgroundOption.INCLUDE) # noqa: F405
+ elif option == 'Exclude':
+ erf.setParameter('Background Seismicity', IncludeBackgroundOption.EXCLUDE) # noqa: F405
+ elif option == 'Only':
+ erf.setParameter('Background Seismicity', IncludeBackgroundOption.ONLY) # noqa: F405
+
+
+def setERFtreatBackgroundOptions(erf, selection): # noqa: N802, D103
option = selection.get('Treat Background Seismicity As', None)
if option is None:
pass
elif option == 'Point Sources':
- erf.setParameter('Treat Background Seismicity As', BackgroundRupType.POINT)
- elif option == "Single Random Strike Faults":
- erf.setParameter('Treat Background Seismicity As', BackgroundRupType.FINITE)
- elif option == "Two Perpendicular Faults":
- erf.setParameter('Treat Background Seismicity As', BackgroundRupType.CROSSHAIR)
+ erf.setParameter('Treat Background Seismicity As', BackgroundRupType.POINT) # noqa: F405
+ elif option == 'Single Random Strike Faults':
+ erf.setParameter('Treat Background Seismicity As', BackgroundRupType.FINITE) # noqa: F405
+ elif option == 'Two Perpendicular Faults':
+ erf.setParameter(
+ 'Treat Background Seismicity As',
+ BackgroundRupType.CROSSHAIR, # noqa: F405
+ )
+
-def setERFProbabilityModelOptions(erf, selection):
+def setERFProbabilityModelOptions(erf, selection): # noqa: N802, D103
option = selection.get('Probability Model', None)
if option is None:
pass
elif option == 'Poisson':
- erf.setParameter('Probability Model', ProbabilityModelOptions.POISSON)
- elif option == "UCERF3 BPT":
- erf.setParameter('Probability Model', ProbabilityModelOptions.U3_BPT)
- erf.setParameter('Historic Open Interval', selection.get('Historic Open Interval'))
+ erf.setParameter('Probability Model', ProbabilityModelOptions.POISSON) # noqa: F405
+ elif option == 'UCERF3 BPT':
+ erf.setParameter('Probability Model', ProbabilityModelOptions.U3_BPT) # noqa: F405
+ erf.setParameter(
+ 'Historic Open Interval', selection.get('Historic Open Interval')
+ )
setERFMagDependentAperiodicityOptions(erf, selection)
setERFBPTAveragingTypeOptions(erf, selection)
- elif option == "UCERF3 Preferred Blend":
- erf.setParameter('Probability Model', ProbabilityModelOptions.U3_PREF_BLEND)
- erf.setParameter('Historic Open Interval', selection.get('Historic Open Interval'))
+ elif option == 'UCERF3 Preferred Blend':
+ erf.setParameter('Probability Model', ProbabilityModelOptions.U3_PREF_BLEND) # noqa: F405
+ erf.setParameter(
+ 'Historic Open Interval', selection.get('Historic Open Interval')
+ )
setERFBPTAveragingTypeOptions(erf, selection)
- elif option == "WG02 BPT":
- erf.setParameter('Probability Model', ProbabilityModelOptions.WG02_BPT)
- erf.setParameter('Historic Open Interval', selection.get('Historic Open Interval'))
+ elif option == 'WG02 BPT':
+ erf.setParameter('Probability Model', ProbabilityModelOptions.WG02_BPT) # noqa: F405
+ erf.setParameter(
+ 'Historic Open Interval', selection.get('Historic Open Interval')
+ )
setERFMagDependentAperiodicityOptions(erf, selection)
-def setERFMagDependentAperiodicityOptions(erf, selection):
+
+def setERFMagDependentAperiodicityOptions(erf, selection): # noqa: C901, N802, D103
option = selection.get('Aperiodicity', None)
if option is None:
pass
- elif option =='0.4,0.3,0.2,0.1':
- erf.setParameter('Aperiodicity', MagDependentAperiodicityOptions.LOW_VALUES)
- elif option =='0.5,0.4,0.3,0.2':
- erf.setParameter('Aperiodicity', MagDependentAperiodicityOptions.MID_VALUES)
- elif option =='0.6,0.5,0.4,0.3':
- erf.setParameter('Aperiodicity', MagDependentAperiodicityOptions.HIGH_VALUES)
- elif option =='All 0.1':
- erf.setParameter('Aperiodicity', MagDependentAperiodicityOptions.ALL_PT1_VALUES)
- elif option =='All 0.2':
- erf.setParameter('Aperiodicity', MagDependentAperiodicityOptions.ALL_PT2_VALUES)
- elif option =='All 0.3':
- erf.setParameter('Aperiodicity', MagDependentAperiodicityOptions.ALL_PT3_VALUES)
- elif option =='All 0.4':
- erf.setParameter('Aperiodicity', MagDependentAperiodicityOptions.ALL_PT4_VALUES)
- elif option =='All 0.5':
- erf.setParameter('Aperiodicity', MagDependentAperiodicityOptions.ALL_PT5_VALUES)
- elif option =='All 0.6':
- erf.setParameter('Aperiodicity', MagDependentAperiodicityOptions.ALL_PT6_VALUES)
- elif option =='All 0.7':
- erf.setParameter('Aperiodicity', MagDependentAperiodicityOptions.ALL_PT7_VALUES)
- elif option =='All 0.8':
- erf.setParameter('Aperiodicity', MagDependentAperiodicityOptions.ALL_PT8_VALUES)
-
-def setERFBPTAveragingTypeOptions(erf, selection):
+ elif option == '0.4,0.3,0.2,0.1':
+ erf.setParameter('Aperiodicity', MagDependentAperiodicityOptions.LOW_VALUES) # noqa: F405
+ elif option == '0.5,0.4,0.3,0.2':
+ erf.setParameter('Aperiodicity', MagDependentAperiodicityOptions.MID_VALUES) # noqa: F405
+ elif option == '0.6,0.5,0.4,0.3':
+ erf.setParameter('Aperiodicity', MagDependentAperiodicityOptions.HIGH_VALUES) # noqa: F405
+ elif option == 'All 0.1':
+ erf.setParameter(
+ 'Aperiodicity',
+ MagDependentAperiodicityOptions.ALL_PT1_VALUES, # noqa: F405
+ )
+ elif option == 'All 0.2':
+ erf.setParameter(
+ 'Aperiodicity',
+ MagDependentAperiodicityOptions.ALL_PT2_VALUES, # noqa: F405
+ )
+ elif option == 'All 0.3':
+ erf.setParameter(
+ 'Aperiodicity',
+ MagDependentAperiodicityOptions.ALL_PT3_VALUES, # noqa: F405
+ )
+ elif option == 'All 0.4':
+ erf.setParameter(
+ 'Aperiodicity',
+ MagDependentAperiodicityOptions.ALL_PT4_VALUES, # noqa: F405
+ )
+ elif option == 'All 0.5':
+ erf.setParameter(
+ 'Aperiodicity',
+ MagDependentAperiodicityOptions.ALL_PT5_VALUES, # noqa: F405
+ )
+ elif option == 'All 0.6':
+ erf.setParameter(
+ 'Aperiodicity',
+ MagDependentAperiodicityOptions.ALL_PT6_VALUES, # noqa: F405
+ )
+ elif option == 'All 0.7':
+ erf.setParameter(
+ 'Aperiodicity',
+ MagDependentAperiodicityOptions.ALL_PT7_VALUES, # noqa: F405
+ )
+ elif option == 'All 0.8':
+ erf.setParameter(
+ 'Aperiodicity',
+ MagDependentAperiodicityOptions.ALL_PT8_VALUES, # noqa: F405
+ )
+
+
+def setERFBPTAveragingTypeOptions(erf, selection): # noqa: N802, D103
option = selection.get('BPT Averaging Type', None)
if option is None:
pass
- elif option =='AveRI and AveTimeSince':
- erf.setParameter('BPT Averaging Type', BPTAveragingTypeOptions.AVE_RI_AVE_TIME_SINCE)
- elif option =='AveRI and AveNormTimeSince':
- erf.setParameter('BPT Averaging Type', BPTAveragingTypeOptions.AVE_RI_AVE_NORM_TIME_SINCE)
- elif option =='AveRate and AveNormTimeSince':
- erf.setParameter('BPT Averaging Type', BPTAveragingTypeOptions.AVE_RATE_AVE_NORM_TIME_SINCE)
+ elif option == 'AveRI and AveTimeSince':
+ erf.setParameter(
+ 'BPT Averaging Type',
+ BPTAveragingTypeOptions.AVE_RI_AVE_TIME_SINCE, # noqa: F405
+ )
+ elif option == 'AveRI and AveNormTimeSince':
+ erf.setParameter(
+ 'BPT Averaging Type',
+ BPTAveragingTypeOptions.AVE_RI_AVE_NORM_TIME_SINCE, # noqa: F405
+ )
+ elif option == 'AveRate and AveNormTimeSince':
+ erf.setParameter(
+ 'BPT Averaging Type',
+ BPTAveragingTypeOptions.AVE_RATE_AVE_NORM_TIME_SINCE, # noqa: F405
+ )
-def get_source_rupture(erf, source_index, rupture_index):
- rupSource = erf.getSource(source_index)
+def get_source_rupture(erf, source_index, rupture_index): # noqa: D103
+ rupSource = erf.getSource(source_index) # noqa: N806
ruptures = rupSource.getRuptureList()
rupture = ruptures.get(rupture_index)
return rupSource, rupture
-def get_source_distance(erf, source_index, lat, lon):
- rupSource = erf.getSource(source_index)
- sourceSurface = rupSource.getSourceSurface()
- #print(lon)
- #print(lat)
- distToSource = []
+def get_source_distance(erf, source_index, lat, lon): # noqa: D103
+ rupSource = erf.getSource(source_index) # noqa: N806
+ sourceSurface = rupSource.getSourceSurface() # noqa: N806
+ # print(lon)
+ # print(lat)
+ distToSource = [] # noqa: N806
for i in range(len(lat)):
- distToSource.append(float(sourceSurface.getDistanceRup(Location(lat[i], lon[i]))))
+ distToSource.append( # noqa: PERF401
+ float(sourceSurface.getDistanceRup(Location(lat[i], lon[i]))) # noqa: F405
+ )
return distToSource
-def get_rupture_distance(erf, source_index, rupture_index, lat, lon):
- rupSource = erf.getSource(source_index)
- rupSurface = rupSource.getRupture(rupture_index).getRuptureSurface()
- distToRupture = []
+def get_rupture_distance(erf, source_index, rupture_index, lat, lon): # noqa: D103
+ rupSource = erf.getSource(source_index) # noqa: N806
+ rupSurface = rupSource.getRupture(rupture_index).getRuptureSurface() # noqa: N806
+ distToRupture = [] # noqa: N806
for i in range(len(lat)):
- distToRupture.append(float(rupSurface.getDistanceRup(Location(lat[i], lon[i]))))
+ distToRupture.append( # noqa: PERF401
+ float(rupSurface.getDistanceRup(Location(lat[i], lon[i]))) # noqa: F405
+ )
return distToRupture
-def get_rupture_info_CY2014(erf, source_index, rupture_index, siteList):
- rupSource = erf.getSource(source_index)
- rupList = rupSource.getRuptureList()
- rupSurface = rupList.get(rupture_index).getRuptureSurface()
+
+def get_rupture_info_CY2014(erf, source_index, rupture_index, siteList): # noqa: N802, N803, D103
+ rupSource = erf.getSource(source_index) # noqa: N806
+ rupList = rupSource.getRuptureList() # noqa: N806
+ rupSurface = rupList.get(rupture_index).getRuptureSurface() # noqa: N806
if rupList.get(rupture_index).getHypocenterLocation() is None:
# https://github.com/opensha/opensha/blob/master/src/main/java/org/opensha/nshmp2/imr/ngaw2/NSHMP14_WUS_CB.java#L242
dip = float(rupSurface.getAveDip())
width = float(rupSurface.getAveWidth())
- zTop = float(rupSurface.getAveRupTopDepth())
- zHyp = zTop + np.sin(dip/180.0*np.pi) * width / 2.0
+ zTop = float(rupSurface.getAveRupTopDepth()) # noqa: N806
+ zHyp = zTop + np.sin(dip / 180.0 * np.pi) * width / 2.0 # noqa: N806
else:
- zHyp = rupList.get(rupture_index).getHypocenterLocation().getDepth()
+ zHyp = rupList.get(rupture_index).getHypocenterLocation().getDepth() # noqa: N806
for i in range(len(siteList)):
- siteList[i].update({"rRup":float(rupSurface.getDistanceRup(Location(siteList[i]['lat'], siteList[i]['lon'])))})
- siteList[i].update({"rJB":float(rupSurface.getDistanceJB(Location(siteList[i]['lat'], siteList[i]['lon'])))})
- siteList[i].update({"rX":float(rupSurface.getDistanceX(Location(siteList[i]['lat'], siteList[i]['lon'])))})
+ siteList[i].update(
+ {
+ 'rRup': float(
+ rupSurface.getDistanceRup(
+ Location(siteList[i]['lat'], siteList[i]['lon']) # noqa: F405
+ )
+ )
+ }
+ )
+ siteList[i].update(
+ {
+ 'rJB': float(
+ rupSurface.getDistanceJB(
+ Location(siteList[i]['lat'], siteList[i]['lon']) # noqa: F405
+ )
+ )
+ }
+ )
+ siteList[i].update(
+ {
+ 'rX': float(
+ rupSurface.getDistanceX(
+ Location(siteList[i]['lat'], siteList[i]['lon']) # noqa: F405
+ )
+ )
+ }
+ )
site_rup_info = {
- "dip" : float(rupSurface.getAveDip()),
- "width" : float(rupSurface.getAveWidth()),
- "zTop" : float(rupSurface.getAveRupTopDepth()),
- "aveRake" : float(rupList.get(rupture_index).getAveRake()),
- "zHyp":zHyp
- }
+ 'dip': float(rupSurface.getAveDip()),
+ 'width': float(rupSurface.getAveWidth()),
+ 'zTop': float(rupSurface.getAveRupTopDepth()),
+ 'aveRake': float(rupList.get(rupture_index).getAveRake()),
+ 'zHyp': zHyp,
+ }
return site_rup_info, siteList
-def horzDistanceFast(lat1, lon1, lat2, lon2):
- lat1 = lat1/180*np.pi
- lon1 = lon1/180*np.pi
- lat2 = lat2/180*np.pi
- lon2 = lon2/180*np.pi
- dlon = np.abs(lon2 - lon1)
+
+def horzDistanceFast(lat1, lon1, lat2, lon2): # noqa: N802, D103
+ lat1 = lat1 / 180 * np.pi
+ lon1 = lon1 / 180 * np.pi
+ lat2 = lat2 / 180 * np.pi
+ lon2 = lon2 / 180 * np.pi
+ dlon = np.abs(lon2 - lon1)
dlat = np.abs(lat2 - lat1)
- a = np.sin(dlat / 2)**2 + np.cos(lat1) * np.cos(lat2) * np.sin(dlon / 2)**2
- c = 2 * np.arcsin(np.sqrt(a))
- EARTH_RADIUS_MEAN = 6371.0072 #https://github.com/opensha/opensha/blob/master/src/main/java/org/opensha/commons/geo/GeoTools.java#L22
+ a = np.sin(dlat / 2) ** 2 + np.cos(lat1) * np.cos(lat2) * np.sin(dlon / 2) ** 2
+ c = 2 * np.arcsin(np.sqrt(a))
+ EARTH_RADIUS_MEAN = 6371.0072 # https://github.com/opensha/opensha/blob/master/src/main/java/org/opensha/commons/geo/GeoTools.java#L22 # noqa: N806
# return EARTH_RADIUS_MEAN * np.sqrt((dLat * dLat) + (dLon * dLon))
return EARTH_RADIUS_MEAN * c
-def getPtSrcDistCorr(horzDist, mag, type):
+
+def getPtSrcDistCorr(horzDist, mag, type): # noqa: A002, N802, N803, D103
# https://github.com/opensha/opensha/blob/master/src/main/java/org/opensha/sha/faultSurface/utils/PtSrcDistCorr.java#L20
- if type == "FIELD":
- rupLen = np.power(10.0,-3.22+0.69*mag)
- return 0.7071 + (1.0-0.7071)/(1 + np.power(rupLen/(horzDist*0.87),1.1))
- elif type == "NSHMP08":
- print("The NSHMP08 rJB correction has not been implemented. corr=1.0 is used instead")
+ if type == 'FIELD':
+ rupLen = np.power(10.0, -3.22 + 0.69 * mag) # noqa: N806
+ return 0.7071 + (1.0 - 0.7071) / (
+ 1 + np.power(rupLen / (horzDist * 0.87), 1.1)
+ )
+ elif type == 'NSHMP08': # noqa: RET505
+ print( # noqa: T201
+ 'The NSHMP08 rJB correction has not been implemented. corr=1.0 is used instead'
+ )
# https://github.com/opensha/opensha/blob/master/src/main/java/org/opensha/sha/faultSurface/utils/PtSrcDistCorr.java#L20
return 1.0
else:
return 1.0
-def get_PointSource_info_CY2014(source_info, siteList):
+
+
+def get_PointSource_info_CY2014(source_info, siteList): # noqa: N802, N803, D103
# https://github.com/opensha/opensha/blob/master/src/main/java/org/opensha/sha/faultSurface/PointSurface.java#L118
- sourceLat = source_info['Location']['Latitude']
- sourceLon = source_info['Location']['Longitude']
- sourceDepth = source_info['Location']['Depth']
+ sourceLat = source_info['Location']['Latitude'] # noqa: N806
+ sourceLon = source_info['Location']['Longitude'] # noqa: N806
+ sourceDepth = source_info['Location']['Depth'] # noqa: N806
for i in range(len(siteList)):
- siteLat = siteList[i]['lat']
- siteLon = siteList[i]['lon']
- horiD = horzDistanceFast(sourceLat, sourceLon, siteLat, siteLon)
- rJB = horiD * getPtSrcDistCorr (horiD, source_info['Magnitude'],'NONE')
- rRup = np.sqrt(rJB**2 + sourceDepth**2)
- rX = 0.0
- siteList[i].update({"rRup":rRup})
- siteList[i].update({"rJB":rJB})
- siteList[i].update({"rX":rX})
+ siteLat = siteList[i]['lat'] # noqa: N806
+ siteLon = siteList[i]['lon'] # noqa: N806
+ horiD = horzDistanceFast(sourceLat, sourceLon, siteLat, siteLon) # noqa: N806
+ rJB = horiD * getPtSrcDistCorr(horiD, source_info['Magnitude'], 'NONE') # noqa: N806
+ rRup = np.sqrt(rJB**2 + sourceDepth**2) # noqa: N806
+ rX = 0.0 # noqa: N806
+ siteList[i].update({'rRup': rRup})
+ siteList[i].update({'rJB': rJB})
+ siteList[i].update({'rX': rX})
site_rup_info = {
- "dip" : float(source_info['AverageDip']),
- "width" : 0.0,#https://github.com/opensha/opensha/blob/master/src/main/java/org/opensha/sha/faultSurface/PointSurface.java#L68
- "zTop" : sourceDepth,
- "aveRake" : float(source_info['AverageRake'])
- }
- return site_rup_info, siteList
-
-def export_to_json(erf, site_loc, outfile = None, EqName = None, minMag = 0.0, maxMag = 10.0, maxDistance = 1000.0):
+ 'dip': float(source_info['AverageDip']),
+ 'width': 0.0, # https://github.com/opensha/opensha/blob/master/src/main/java/org/opensha/sha/faultSurface/PointSurface.java#L68
+ 'zTop': sourceDepth,
+ 'aveRake': float(source_info['AverageRake']),
+ }
+ return site_rup_info, siteList
+
+def export_to_json( # noqa: C901, D103
+ erf,
+ site_loc,
+ outfile=None,
+ EqName=None, # noqa: N803
+ minMag=0.0, # noqa: N803
+ maxMag=10.0, # noqa: N803
+ maxDistance=1000.0, # noqa: N803
+):
# Initializing
- erf_data = {"type": "FeatureCollection"}
- site_loc = Location(site_loc[0], site_loc[1])
- site = Site(site_loc)
+ erf_data = {'type': 'FeatureCollection'}
+ site_loc = Location(site_loc[0], site_loc[1]) # noqa: F405
+ site = Site(site_loc) # noqa: F405
# Total source number
num_sources = erf.getNumSources()
source_tag = []
source_dist = []
for i in range(num_sources):
- rupSource = erf.getSource(i)
- distanceToSource = rupSource.getMinDistance(site)
+ rupSource = erf.getSource(i) # noqa: N806
+ distanceToSource = rupSource.getMinDistance(site) # noqa: N806
# sourceSurface = rupSource.getSourceSurface()
# distanceToSource = sourceSurface.getDistanceRup(site_loc)
source_tag.append(i)
source_dist.append(distanceToSource)
- df = pd.DataFrame.from_dict({
- 'sourceID': source_tag,
- 'sourceDist': source_dist
- })
+ df = pd.DataFrame.from_dict({'sourceID': source_tag, 'sourceDist': source_dist}) # noqa: PD901
# Sorting sources
- source_collection = df.sort_values(['sourceDist'], ascending = (True))
- source_collection = source_collection[source_collection["sourceDist"] maxMag):
continue
cur_dict['properties'].update({'Magnitude': Mag})
@@ -452,35 +574,44 @@ def export_to_json(erf, site_loc, outfile = None, EqName = None, minMag = 0.0, m
# these calls are time-consuming, so only run them if one needs
# detailed outputs of the sources
cur_dict['properties'].update({'Distance': float(cur_dist)})
- distanceRup = rupture.getRuptureSurface().getDistanceRup(site_loc)
+ distanceRup = rupture.getRuptureSurface().getDistanceRup(site_loc) # noqa: N806
cur_dict['properties'].update({'DistanceRup': float(distanceRup)})
- distanceSeis = rupture.getRuptureSurface().getDistanceSeis(site_loc)
+ distanceSeis = rupture.getRuptureSurface().getDistanceSeis(site_loc) # noqa: N806
cur_dict['properties'].update({'DistanceSeis': float(distanceSeis)})
- distanceJB = rupture.getRuptureSurface().getDistanceJB(site_loc)
+ distanceJB = rupture.getRuptureSurface().getDistanceJB(site_loc) # noqa: N806
cur_dict['properties'].update({'DistanceJB': float(distanceJB)})
- distanceX = rupture.getRuptureSurface().getDistanceX(site_loc)
+ distanceX = rupture.getRuptureSurface().getDistanceX(site_loc) # noqa: N806
cur_dict['properties'].update({'DistanceX': float(distanceX)})
- Prob = rupture.getProbability()
+ Prob = rupture.getProbability() # noqa: N806
cur_dict['properties'].update({'Probability': float(Prob)})
maf = rupture.getMeanAnnualRate(erf.getTimeSpan().getDuration())
cur_dict['properties'].update({'MeanAnnualRate': abs(float(maf))})
# Geometry
- cur_dict['geometry'] = dict()
- if (ruptureSurface.isPointSurface()):
+ cur_dict['geometry'] = dict() # noqa: C408
+ if ruptureSurface.isPointSurface():
# Point source
- pointSurface = ruptureSurface
+ pointSurface = ruptureSurface # noqa: N806
location = pointSurface.getLocation()
cur_dict['geometry'].update({'type': 'Point'})
- cur_dict['geometry'].update({'coordinates': [float(location.getLongitude()), float(location.getLatitude())]})
+ cur_dict['geometry'].update(
+ {
+ 'coordinates': [
+ float(location.getLongitude()),
+ float(location.getLatitude()),
+ ]
+ }
+ )
else:
# Line source
try:
trace = ruptureSurface.getUpperEdge()
- except:
+ except: # noqa: E722
trace = ruptureSurface.getEvenlyDiscritizedUpperEdge()
coordinates = []
for k in trace:
- coordinates.append([float(k.getLongitude()), float(k.getLatitude())])
+ coordinates.append( # noqa: PERF401
+ [float(k.getLongitude()), float(k.getLatitude())]
+ )
cur_dict['geometry'].update({'type': 'LineString'})
cur_dict['geometry'].update({'coordinates': coordinates})
# Appending
@@ -493,7 +624,9 @@ def export_to_json(erf, site_loc, outfile = None, EqName = None, minMag = 0.0, m
feature_collection_sorted = [feature_collection[i] for i in sort_ids]
del feature_collection
erf_data.update({'features': feature_collection_sorted})
- print('FetchOpenSHA: total {} ruptures are collected.'.format(len(feature_collection_sorted)))
+ print( # noqa: T201
+ f'FetchOpenSHA: total {len(feature_collection_sorted)} ruptures are collected.'
+ )
# num_preview = 1000
# if len(feature_collection_sorted) > num_preview:
# preview_erf_data={'features': feature_collection_sorted[0:num_preview]}
@@ -503,8 +636,10 @@ def export_to_json(erf, site_loc, outfile = None, EqName = None, minMag = 0.0, m
# import time
# startTime = time.process_time_ns()
if outfile is not None:
- print('The collected ruptures are sorted by MeanAnnualRate and saved in {}'.format(outfile))
- with open(outfile, 'w') as f:
+ print( # noqa: T201
+ f'The collected ruptures are sorted by MeanAnnualRate and saved in {outfile}'
+ )
+ with open(outfile, 'w') as f: # noqa: PLW1514, PTH123
ujson.dump(erf_data, f, indent=2)
# print(f"Time consumed by json dump is {(time.process_time_ns()-startTime)/1e9}s")
@@ -513,22 +648,27 @@ def export_to_json(erf, site_loc, outfile = None, EqName = None, minMag = 0.0, m
return erf_data
-def CreateIMRInstance(gmpe_name):
-
+def CreateIMRInstance(gmpe_name): # noqa: N802, D103
# GMPE name map
- gmpe_map = {str(ASK_2014.NAME): ASK_2014_Wrapper.class_.getName(),
- str(BSSA_2014.NAME): BSSA_2014_Wrapper.class_.getName(),
- str(CB_2014.NAME): CB_2014_Wrapper.class_.getName(),
- str(CY_2014.NAME): CY_2014_Wrapper.class_.getName(),
- str(KS_2006_AttenRel.NAME): KS_2006_AttenRel.class_.getName(),
- str(BommerEtAl_2009_AttenRel.NAME): BommerEtAl_2009_AttenRel.class_.getName(),
- str(AfshariStewart_2016_AttenRel.NAME): AfshariStewart_2016_AttenRel.class_.getName()}
+ gmpe_map = {
+ str(ASK_2014.NAME): ASK_2014_Wrapper.class_.getName(), # noqa: F405
+ str(BSSA_2014.NAME): BSSA_2014_Wrapper.class_.getName(), # noqa: F405
+ str(CB_2014.NAME): CB_2014_Wrapper.class_.getName(), # noqa: F405
+ str(CY_2014.NAME): CY_2014_Wrapper.class_.getName(), # noqa: F405
+ str(KS_2006_AttenRel.NAME): KS_2006_AttenRel.class_.getName(), # noqa: F405
+ str(
+ BommerEtAl_2009_AttenRel.NAME # noqa: F405
+ ): BommerEtAl_2009_AttenRel.class_.getName(), # noqa: F405
+ str(
+ AfshariStewart_2016_AttenRel.NAME # noqa: F405
+ ): AfshariStewart_2016_AttenRel.class_.getName(), # noqa: F405
+ }
# Mapping GMPE name
- imrClassName = gmpe_map.get(gmpe_name, None)
+ imrClassName = gmpe_map.get(gmpe_name) # noqa: N806
if imrClassName is None:
return imrClassName
# Getting the java class
- imrClass = Class.forName(imrClassName)
+ imrClass = Class.forName(imrClassName) # noqa: N806, F405
ctor = imrClass.getConstructor()
imr = ctor.newInstance()
# Setting default parameters
@@ -537,268 +677,306 @@ def CreateIMRInstance(gmpe_name):
return imr
-def get_DataSource(paramName, siteData):
- typeMap = SiteTranslator.DATA_TYPE_PARAM_NAME_MAP
- for dataType in typeMap.getTypesForParameterName(paramName):
- if dataType == SiteData.TYPE_VS30:
- for dataValue in siteData:
+def get_DataSource(paramName, siteData): # noqa: N802, N803, D103
+ typeMap = SiteTranslator.DATA_TYPE_PARAM_NAME_MAP # noqa: N806, F405
+ for dataType in typeMap.getTypesForParameterName(paramName): # noqa: N806
+ if dataType == SiteData.TYPE_VS30: # noqa: F405
+ for dataValue in siteData: # noqa: N806
if dataValue.getDataType() != dataType:
continue
- vs30 = Double(dataValue.getValue())
+ vs30 = Double(dataValue.getValue()) # noqa: F405
if (not vs30.isNaN()) and (vs30 > 0.0):
return dataValue.getSourceName()
- elif (dataType == SiteData.TYPE_DEPTH_TO_1_0) or (dataType == SiteData.TYPE_DEPTH_TO_2_5):
- for dataValue in siteData:
+ elif (dataType == SiteData.TYPE_DEPTH_TO_1_0) or ( # noqa: F405, PLR1714
+ dataType == SiteData.TYPE_DEPTH_TO_2_5 # noqa: F405
+ ):
+ for dataValue in siteData: # noqa: N806
if dataValue.getDataType() != dataType:
continue
- depth = Double(dataValue.getValue())
+ depth = Double(dataValue.getValue()) # noqa: F405
if (not depth.isNaN()) and (depth > 0.0):
return dataValue.getSourceName()
return 1
-def get_site_prop(gmpe_name, siteSpec):
-
+def get_site_prop(gmpe_name, siteSpec): # noqa: C901, N803, D103
# GMPE
try:
imr = CreateIMRInstance(gmpe_name)
- except:
- print('Please check GMPE name.')
+ except: # noqa: E722
+ print('Please check GMPE name.') # noqa: T201
return 1
# Site data
- sites = ArrayList()
+ sites = ArrayList() # noqa: F405
for cur_site in siteSpec:
- cur_loc = Location(cur_site['Location']['Latitude'], cur_site['Location']['Longitude'])
- sites.add(Site(cur_loc))
- siteDataProviders = OrderedSiteDataProviderList.createSiteDataProviderDefaults()
+ cur_loc = Location( # noqa: F405
+ cur_site['Location']['Latitude'], cur_site['Location']['Longitude']
+ )
+ sites.add(Site(cur_loc)) # noqa: F405
+ siteDataProviders = OrderedSiteDataProviderList.createSiteDataProviderDefaults() # noqa: N806, F405
try:
- availableSiteData = siteDataProviders.getAllAvailableData(sites)
- except:
- availableSiteData = []
- print('remote getAllAvailableData is not available temporarily, will use site Vs30 in the site csv file.')
- #return 1
- siteTrans = SiteTranslator()
+ availableSiteData = siteDataProviders.getAllAvailableData(sites) # noqa: N806
+ except: # noqa: E722
+ availableSiteData = [] # noqa: N806
+ print( # noqa: T201
+ 'remote getAllAvailableData is not available temporarily, will use site Vs30 in the site csv file.'
+ )
+ # return 1
+ siteTrans = SiteTranslator() # noqa: N806, F405
# Looping over all sites
site_prop = []
for i in range(len(siteSpec)):
- site_tmp = dict()
+ site_tmp = dict() # noqa: C408
# Current site
site = sites.get(i)
# Location
cur_site = siteSpec[i]
- locResults = {'Latitude': cur_site['Location']['Latitude'],
- 'Longitude': cur_site['Location']['Longitude']}
- cur_loc = Location(cur_site['Location']['Latitude'], cur_site['Location']['Longitude'])
- siteDataValues = ArrayList()
+ locResults = { # noqa: N806
+ 'Latitude': cur_site['Location']['Latitude'],
+ 'Longitude': cur_site['Location']['Longitude'],
+ }
+ cur_loc = Location( # noqa: F405
+ cur_site['Location']['Latitude'], cur_site['Location']['Longitude']
+ )
+ siteDataValues = ArrayList() # noqa: N806, F405
for j in range(len(availableSiteData)):
siteDataValues.add(availableSiteData.get(j).getValue(i))
- imrSiteParams = imr.getSiteParams()
- siteDataResults = []
+ imrSiteParams = imr.getSiteParams() # noqa: N806
+ siteDataResults = [] # noqa: N806
# Setting site parameters
for j in range(imrSiteParams.size()):
- siteParam = imrSiteParams.getByIndex(j)
- newParam = Parameter.clone(siteParam)
+ siteParam = imrSiteParams.getByIndex(j) # noqa: N806
+ newParam = Parameter.clone(siteParam) # noqa: N806, F405
if siteDataValues.size() > 0:
- siteDataFound = siteTrans.setParameterValue(newParam, siteDataValues)
+ siteDataFound = siteTrans.setParameterValue(newParam, siteDataValues) # noqa: N806
else:
- siteDataFound = False
- if (str(newParam.getName())=='Vs30' and bool(cur_site.get('Vs30', None))):
- newParam.setValue(Double(cur_site['Vs30']))
- siteDataResults.append({'Type': 'Vs30',
- 'Value': float(newParam.getValue()),
- 'Source': 'User Defined'})
- elif (str(newParam.getName())=='Vs30 Type' and bool(cur_site.get('Vs30', None))):
- newParam.setValue("Measured")
- siteDataResults.append({'Type': 'Vs30 Type',
- 'Value': 'Measured',
- 'Source': 'User Defined'})
+ siteDataFound = False # noqa: N806
+ if str(newParam.getName()) == 'Vs30' and bool(
+ cur_site.get('Vs30', None)
+ ):
+ newParam.setValue(Double(cur_site['Vs30'])) # noqa: F405
+ siteDataResults.append(
+ {
+ 'Type': 'Vs30',
+ 'Value': float(newParam.getValue()),
+ 'Source': 'User Defined',
+ }
+ )
+ elif str(newParam.getName()) == 'Vs30 Type' and bool(
+ cur_site.get('Vs30', None)
+ ):
+ newParam.setValue('Measured')
+ siteDataResults.append(
+ {
+ 'Type': 'Vs30 Type',
+ 'Value': 'Measured',
+ 'Source': 'User Defined',
+ }
+ )
elif siteDataFound:
- provider = "Unknown"
+ provider = 'Unknown'
provider = get_DataSource(newParam.getName(), siteDataValues)
if 'String' in str(type(newParam.getValue())):
tmp_value = str(newParam.getValue())
elif 'Double' in str(type(newParam.getValue())):
tmp_value = float(newParam.getValue())
- if str(newParam.getName())=='Vs30':
- cur_site.update({'Vs30': tmp_value})
+ if str(newParam.getName()) == 'Vs30':
+ cur_site.update({'Vs30': tmp_value})
else:
tmp_value = str(newParam.getValue())
- siteDataResults.append({'Type': str(newParam.getName()),
- 'Value': tmp_value,
- 'Source': str(provider)})
+ siteDataResults.append(
+ {
+ 'Type': str(newParam.getName()),
+ 'Value': tmp_value,
+ 'Source': str(provider),
+ }
+ )
else:
newParam.setValue(siteParam.getDefaultValue())
- siteDataResults.append({'Type': str(siteParam.getName()),
- 'Value': siteParam.getDefaultValue(),
- 'Source': 'Default'})
+ siteDataResults.append(
+ {
+ 'Type': str(siteParam.getName()),
+ 'Value': siteParam.getDefaultValue(),
+ 'Source': 'Default',
+ }
+ )
site.addParameter(newParam)
# End for j
# Updating site specifications
siteSpec[i] = cur_site
- site_tmp.update({'Location': locResults,
- 'SiteData': siteDataResults})
+ site_tmp.update({'Location': locResults, 'SiteData': siteDataResults})
site_prop.append(site_tmp)
# Return
return siteSpec, sites, site_prop
-def get_IM(gmpe_info, erf, sites, siteSpec, site_prop, source_info, station_info, im_info):
-
+def get_IM( # noqa: C901, N802, D103
+ gmpe_info,
+ erf,
+ sites,
+ siteSpec, # noqa: N803
+ site_prop,
+ source_info,
+ station_info,
+ im_info,
+):
# GMPE name
gmpe_name = gmpe_info['Type']
# Creating intensity measure relationship instance
try:
imr = CreateIMRInstance(gmpe_name)
- except:
- print('Please check GMPE name.')
+ except: # noqa: E722
+ print('Please check GMPE name.') # noqa: T201
return 1, station_info
# Getting supported intensity measure types
ims = imr.getSupportedIntensityMeasures()
- saParam = ims.getParameter(SA_Param.NAME)
- supportedPeriods = saParam.getPeriodParam().getPeriods()
- Arrays.sort(supportedPeriods)
+ saParam = ims.getParameter(SA_Param.NAME) # noqa: N806, F405
+ supportedPeriods = saParam.getPeriodParam().getPeriods() # noqa: N806
+ Arrays.sort(supportedPeriods) # noqa: F405
# Rupture
- eqRup = EqkRupture()
+ eqRup = EqkRupture() # noqa: N806, F405
if source_info['Type'] == 'PointSource':
eqRup.setMag(source_info['Magnitude'])
- eqRupLocation = Location(source_info['Location']['Latitude'],
- source_info['Location']['Longitude'],
- source_info['Location']['Depth'])
+ eqRupLocation = Location( # noqa: N806, F405
+ source_info['Location']['Latitude'],
+ source_info['Location']['Longitude'],
+ source_info['Location']['Depth'],
+ )
eqRup.setPointSurface(eqRupLocation, source_info['AverageDip'])
eqRup.setAveRake(source_info['AverageRake'])
magnitude = source_info['Magnitude']
- meanAnnualRate = None
+ meanAnnualRate = None # noqa: N806
elif source_info['Type'] == 'ERF':
- timeSpan = TimeSpan(TimeSpan.NONE, TimeSpan.YEARS)
- erfParams = source_info.get('Parameters', None)
+ timeSpan = TimeSpan(TimeSpan.NONE, TimeSpan.YEARS) # noqa: N806, F405
+ erfParams = source_info.get('Parameters', None) # noqa: N806
# Additional parameters (if any)
if erfParams is not None:
for k in erfParams.keys():  # noqa: SIM118
erf.setParameter(k, erfParams[k])
# Time span
- timeSpan = erf.getTimeSpan()
+ timeSpan = erf.getTimeSpan() # noqa: N806
# Source
- eqSource = erf.getSource(source_info['SourceIndex'])
+ eqSource = erf.getSource(source_info['SourceIndex']) # noqa: N806
eqSource.getName()
# Rupture
- eqRup = eqSource.getRupture(source_info['RuptureIndex'])
+ eqRup = eqSource.getRupture(source_info['RuptureIndex']) # noqa: N806
# Properties
magnitude = eqRup.getMag()
- averageDip = eqRup.getRuptureSurface().getAveDip()
- averageRake = eqRup.getAveRake()
+ averageDip = eqRup.getRuptureSurface().getAveDip() # noqa: N806, F841
+ averageRake = eqRup.getAveRake() # noqa: N806, F841
# Probability
- probEqRup = eqRup
- probability = probEqRup.getProbability()
+ probEqRup = eqRup # noqa: N806
+ probability = probEqRup.getProbability() # noqa: F841
# MAF
- meanAnnualRate = probEqRup.getMeanAnnualRate(timeSpan.getDuration())
+ meanAnnualRate = probEqRup.getMeanAnnualRate(timeSpan.getDuration()) # noqa: N806
# Rupture surface
- surface = eqRup.getRuptureSurface()
+ surface = eqRup.getRuptureSurface() # noqa: F841
# Setting up imr
imr.setEqkRupture(eqRup)
- imrParams = gmpe_info['Parameters']
+ imrParams = gmpe_info['Parameters'] # noqa: N806
if bool(imrParams):
- for k in imrParams.keys():
+ for k in imrParams.keys(): # noqa: SIM118
imr.getParameter(k).setValue(imrParams[k])
# Station
if station_info['Type'] == 'SiteList':
- siteSpec = station_info['SiteList']
+ siteSpec = station_info['SiteList'] # noqa: N806
# Intensity measure
periods = im_info.get('Periods', None)
if periods is not None:
periods = supportedPeriods
- tag_SA = False
- tag_PGA = False
- tag_PGV = False
- tag_Ds575 = False
- tag_Ds595 = False
+ tag_SA = False # noqa: N806
+ tag_PGA = False # noqa: N806
+ tag_PGV = False # noqa: N806
+ tag_Ds575 = False # noqa: N806, F841
+ tag_Ds595 = False # noqa: N806, F841
if 'SA' in im_info['Type']:
- tag_SA = True
+ tag_SA = True # noqa: N806
if 'PGA' in im_info['Type']:
- tag_PGA = True
+ tag_PGA = True # noqa: N806
if 'PGV' in im_info['Type']:
- tag_PGV = True
+ tag_PGV = True # noqa: N806
# Looping over sites
gm_collector = []
for i in range(len(siteSpec)):
- gmResults = site_prop[i]
+ gmResults = site_prop[i] # noqa: N806
# Current site
site = sites.get(i)
# Location
- cur_site = siteSpec[i]
+ cur_site = siteSpec[i] # noqa: F841
# Set up the site in the imr
imr.setSite(site)
try:
- stdDevParam = imr.getParameter(StdDevTypeParam.NAME)
- hasIEStats = stdDevParam.isAllowed(StdDevTypeParam.STD_DEV_TYPE_INTER) and \
- stdDevParam.isAllowed(StdDevTypeParam.STD_DEV_TYPE_INTRA)
- except:
- stdDevParaam = None
- hasIEStats = False
- cur_T = im_info.get('Periods', None)
+ stdDevParam = imr.getParameter(StdDevTypeParam.NAME) # noqa: N806, F405
+ hasIEStats = stdDevParam.isAllowed( # noqa: N806
+ StdDevTypeParam.STD_DEV_TYPE_INTER # noqa: F405
+ ) and stdDevParam.isAllowed(StdDevTypeParam.STD_DEV_TYPE_INTRA) # noqa: F405
+ except: # noqa: E722
+ stdDevParam = None # noqa: N806
+ hasIEStats = False # noqa: N806
+ cur_T = im_info.get('Periods', None) # noqa: N806
if tag_SA:
- saResult = {'Mean': [],
- 'TotalStdDev': []}
+ saResult = {'Mean': [], 'TotalStdDev': []} # noqa: N806
if hasIEStats:
saResult.update({'InterEvStdDev': []})
saResult.update({'IntraEvStdDev': []})
- imr.setIntensityMeasure("SA")
- imtParam = imr.getIntensityMeasure()
- for Tj in cur_T:
- imtParam.getIndependentParameter(PeriodParam.NAME).setValue(float(Tj))
+ imr.setIntensityMeasure('SA')
+ imtParam = imr.getIntensityMeasure() # noqa: N806
+ for Tj in cur_T: # noqa: N806
+ imtParam.getIndependentParameter(PeriodParam.NAME).setValue( # noqa: F405
+ float(Tj)
+ )
mean = imr.getMean()
saResult['Mean'].append(float(mean))
if stdDevParam is not None:
- stdDevParam.setValue(StdDevTypeParam.STD_DEV_TYPE_TOTAL)
- stdDev = imr.getStdDev()
+ stdDevParam.setValue(StdDevTypeParam.STD_DEV_TYPE_TOTAL) # noqa: F405
+ stdDev = imr.getStdDev() # noqa: N806
saResult['TotalStdDev'].append(float(stdDev))
if hasIEStats:
- stdDevParam.setValue(StdDevTypeParam.STD_DEV_TYPE_INTER)
- interEvStdDev = imr.getStdDev()
- stdDevParam.setValue(StdDevTypeParam.STD_DEV_TYPE_INTRA)
- intraEvStdDev = imr.getStdDev()
+ stdDevParam.setValue(StdDevTypeParam.STD_DEV_TYPE_INTER) # noqa: F405
+ interEvStdDev = imr.getStdDev() # noqa: N806
+ stdDevParam.setValue(StdDevTypeParam.STD_DEV_TYPE_INTRA) # noqa: F405
+ intraEvStdDev = imr.getStdDev() # noqa: N806
saResult['InterEvStdDev'].append(float(interEvStdDev))
saResult['IntraEvStdDev'].append(float(intraEvStdDev))
gmResults.update({'lnSA': saResult})
if tag_PGA:
# for PGV current T = 0
- cur_T = [0.00]
- pgaResult = {'Mean': [],
- 'TotalStdDev': []}
+ cur_T = [0.00] # noqa: N806
+ pgaResult = {'Mean': [], 'TotalStdDev': []} # noqa: N806
if hasIEStats:
pgaResult.update({'InterEvStdDev': []})
pgaResult.update({'IntraEvStdDev': []})
- imr.setIntensityMeasure("PGA")
+ imr.setIntensityMeasure('PGA')
mean = imr.getMean()
pgaResult['Mean'].append(float(mean))
- stdDev = imr.getStdDev()
+ stdDev = imr.getStdDev() # noqa: N806
pgaResult['TotalStdDev'].append(float(stdDev))
if hasIEStats:
- stdDevParam.setValue(StdDevTypeParam.STD_DEV_TYPE_INTER)
- interEvStdDev = imr.getStdDev()
- stdDevParam.setValue(StdDevTypeParam.STD_DEV_TYPE_INTRA)
- intraEvStdDev = imr.getStdDev()
+ stdDevParam.setValue(StdDevTypeParam.STD_DEV_TYPE_INTER) # noqa: F405
+ interEvStdDev = imr.getStdDev() # noqa: N806
+ stdDevParam.setValue(StdDevTypeParam.STD_DEV_TYPE_INTRA) # noqa: F405
+ intraEvStdDev = imr.getStdDev() # noqa: N806
pgaResult['InterEvStdDev'].append(float(interEvStdDev))
pgaResult['IntraEvStdDev'].append(float(intraEvStdDev))
gmResults.update({'lnPGA': pgaResult})
if tag_PGV:
# for PGV current T = 0
- cur_T = [0.00]
- pgvResult = {'Mean': [],
- 'TotalStdDev': []}
+ cur_T = [0.00] # noqa: N806
+ pgvResult = {'Mean': [], 'TotalStdDev': []} # noqa: N806
if hasIEStats:
pgvResult.update({'InterEvStdDev': []})
pgvResult.update({'IntraEvStdDev': []})
- imr.setIntensityMeasure("PGV")
+ imr.setIntensityMeasure('PGV')
mean = imr.getMean()
pgvResult['Mean'].append(float(mean))
- stdDev = imr.getStdDev()
+ stdDev = imr.getStdDev() # noqa: N806
pgvResult['TotalStdDev'].append(float(stdDev))
if hasIEStats:
- stdDevParam.setValue(StdDevTypeParam.STD_DEV_TYPE_INTER)
- interEvStdDev = imr.getStdDev()
- stdDevParam.setValue(StdDevTypeParam.STD_DEV_TYPE_INTRA)
- intraEvStdDev = imr.getStdDev()
+ stdDevParam.setValue(StdDevTypeParam.STD_DEV_TYPE_INTER) # noqa: F405
+ interEvStdDev = imr.getStdDev() # noqa: N806
+ stdDevParam.setValue(StdDevTypeParam.STD_DEV_TYPE_INTRA) # noqa: F405
+ intraEvStdDev = imr.getStdDev() # noqa: N806
pgvResult['InterEvStdDev'].append(float(interEvStdDev))
pgvResult['IntraEvStdDev'].append(float(intraEvStdDev))
gmResults.update({'lnPGV': pgvResult})
@@ -808,40 +986,44 @@ def get_IM(gmpe_info, erf, sites, siteSpec, site_prop, source_info, station_info
if station_info['Type'] == 'SiteList':
station_info.update({'SiteList': siteSpec})
# Final results
- res = {'Magnitude': magnitude,
- 'MeanAnnualRate': meanAnnualRate,
- 'SiteSourceDistance': source_info.get('SiteSourceDistance',None),
- 'SiteRuptureDistance': source_info.get('SiteRuptureDistance',None),
- 'Periods': cur_T,
- 'GroundMotions': gm_collector}
+ res = {
+ 'Magnitude': magnitude,
+ 'MeanAnnualRate': meanAnnualRate,
+ 'SiteSourceDistance': source_info.get('SiteSourceDistance', None),
+ 'SiteRuptureDistance': source_info.get('SiteRuptureDistance', None),
+ 'Periods': cur_T,
+ 'GroundMotions': gm_collector,
+ }
# return
return res, station_info
-def get_site_vs30_from_opensha(lat, lon, vs30model='CGS/Wills VS30 Map (2015)'):
+def get_site_vs30_from_opensha(lat, lon, vs30model='CGS/Wills VS30 Map (2015)'): # noqa: D103
# set up site java object
- sites = ArrayList()
+ sites = ArrayList() # noqa: F405
num_sites = len(lat)
for i in range(num_sites):
- sites.add(Site(Location(lat[i], lon[i])))
-
+ sites.add(Site(Location(lat[i], lon[i]))) # noqa: F405
+
# prepare site data java object
- siteDataProviders = OrderedSiteDataProviderList.createSiteDataProviderDefaults()
- siteData = siteDataProviders.getAllAvailableData(sites)
+ siteDataProviders = OrderedSiteDataProviderList.createSiteDataProviderDefaults() # noqa: N806, F405
+ siteData = siteDataProviders.getAllAvailableData(sites) # noqa: N806
# search name
vs30 = []
for i in range(int(siteData.size())):
- cur_siteData = siteData.get(i)
+ cur_siteData = siteData.get(i) # noqa: N806
if str(cur_siteData.getSourceName()) == vs30model:
- vs30 = [float(cur_siteData.getValue(x).getValue()) for x in range(num_sites)]
+ vs30 = [
+ float(cur_siteData.getValue(x).getValue()) for x in range(num_sites)
+ ]
break
- else:
+ else: # noqa: RET508
continue
# check if any nan (Wills Map return nan for offshore sites)
# Using global vs30 as default patch - 'Global Vs30 from Topographic Slope (Wald & Allen 2008)'
- if any([np.isnan(x) for x in vs30]):
+ if any([np.isnan(x) for x in vs30]): # noqa: C419
non_list = np.where(np.isnan(vs30))[0].tolist()
for i in non_list:
vs30[i] = float(siteData.get(3).getValue(i).getValue())
@@ -849,31 +1031,30 @@ def get_site_vs30_from_opensha(lat, lon, vs30model='CGS/Wills VS30 Map (2015)'):
# return
return vs30
-def get_site_z1pt0_from_opensha(lat, lon):
- sites = ArrayList()
- sites.add(Site(Location(lat, lon)))
+
+def get_site_z1pt0_from_opensha(lat, lon): # noqa: D103
+ sites = ArrayList() # noqa: F405
+ sites.add(Site(Location(lat, lon))) # noqa: F405
# prepare site data java object
- siteDataProviders = OrderedSiteDataProviderList.createSiteDataProviderDefaults()
- siteData = siteDataProviders.getAllAvailableData(sites)
+ siteDataProviders = OrderedSiteDataProviderList.createSiteDataProviderDefaults() # noqa: N806, F405
+ siteData = siteDataProviders.getAllAvailableData(sites) # noqa: N806
for data in siteData:
- if data.getValue(0).getDataType()=='Depth to Vs = 1.0 km/sec':
+ if data.getValue(0).getDataType() == 'Depth to Vs = 1.0 km/sec':
z1pt0 = float(data.getValue(0).getValue())
if not np.isnan(z1pt0):
break
- return z1pt0*1000.0
+ return z1pt0 * 1000.0
-def get_site_z2pt5_from_opensha(lat, lon):
- sites = ArrayList()
- sites.add(Site(Location(lat, lon)))
+
+def get_site_z2pt5_from_opensha(lat, lon): # noqa: D103
+ sites = ArrayList() # noqa: F405
+ sites.add(Site(Location(lat, lon))) # noqa: F405
# prepare site data java object
- siteDataProviders = OrderedSiteDataProviderList.createSiteDataProviderDefaults()
- siteData = siteDataProviders.getAllAvailableData(sites)
+ siteDataProviders = OrderedSiteDataProviderList.createSiteDataProviderDefaults() # noqa: N806, F405
+ siteData = siteDataProviders.getAllAvailableData(sites) # noqa: N806
for data in siteData:
- if data.getValue(0).getDataType()=='Depth to Vs = 2.5 km/sec':
+ if data.getValue(0).getDataType() == 'Depth to Vs = 2.5 km/sec':
z2pt5 = float(data.getValue(0).getValue())
if not np.isnan(z2pt5):
break
- return z2pt5*1000.0
-
-
-
+ return z2pt5 * 1000.0
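
A minimal usage sketch of the FetchOpenSHA helpers reformatted above, showing how getERF and export_to_json are typically chained. It assumes the JVM has already been started through jpype with the OpenSHA jar on the classpath; the 'ModelParameters' key, the site coordinates, and the output file name are illustrative assumptions, not values taken from this patch:

    # Hypothetical sketch -- not part of this patch.
    scenario_info = {
        'EqRupture': {
            'Model': 'WGCEP (2007) UCERF2 - Single Branch',  # name matched in getERF above
            'ModelParameters': {},  # assumed key for optional ERF parameter overrides
        }
    }
    erf = getERF(scenario_info)  # builds the forecast and calls updateForecast()
    # Collect ruptures with M >= 5.0 within 100 km of a site and dump them to JSON
    erf_data = export_to_json(
        erf,
        [37.87, -122.26],  # site location as [latitude, longitude]
        outfile='RupFile.json',
        minMag=5.0,
        maxDistance=100.0,
    )
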
diff --git a/modules/performRegionalEventSimulation/regionalGroundMotion/GMSimulators.py b/modules/performRegionalEventSimulation/regionalGroundMotion/GMSimulators.py
index 4ec66312f..24147eeae 100644
--- a/modules/performRegionalEventSimulation/regionalGroundMotion/GMSimulators.py
+++ b/modules/performRegionalEventSimulation/regionalGroundMotion/GMSimulators.py
@@ -1,5 +1,4 @@
-# -*- coding: utf-8 -*-
-#
+# # noqa: INP001, D100
# Copyright (c) 2018 Leland Stanford Junior University
# Copyright (c) 2018 The Regents of the University of California
#
@@ -34,425 +33,544 @@
# You should have received a copy of the BSD 3-Clause License along with
# this file. If not, see <http://www.opensource.org/licenses/>.
#
-# The computation method of ground motion intensity map using Markhivida et al. and
+# The computation method of ground motion intensity map using Markhvida et al. and
# the Baker-Jayaram correlation models is contributed by Dr. Anne Hulsey's
-# seaturtles package (https://github.com/annehulsey/seaturtles).
+# seaturtles package (https://github.com/annehulsey/seaturtles).
#
# Contributors:
# Anne Hulsey
# Kuanshi Zhong
# Jinyan Zhao
-import warnings, h5py, time, ujson, copy, sys
+import sys
+import time
+import warnings
+
+import h5py
import numpy as np
-from tqdm import tqdm
+import ujson
from gmpe import CorrelationModel
+from tqdm import tqdm
+
+IM_CORR_INTER = {
+ 'Baker & Jayaram (2008)': ['SA', 'PGA'],
+ 'Baker & Bradley (2017)': ['SA', 'PGA', 'PGV', 'DS575H', 'DS595H'],
+}
+
+IM_CORR_INTRA = {
+ 'Jayaram & Baker (2009)': ['SA', 'PGA'],
+ 'Loth & Baker (2013)': ['SA', 'PGA'],
+ 'Markhvida et al. (2017)': ['SA', 'PGA'],
+ 'Du & Ning (2021)': ['SA', 'PGA', 'PGV', 'Ia', 'CAV', 'DS575H', 'DS595H'],
+}
+
+IM_CORR = {'INTER': IM_CORR_INTER, 'INTRA': IM_CORR_INTRA}
+
+
+def simulate_ground_motion( # noqa: D103
+ stations,
+ im_raw_path,
+ im_list,
+ scenarios,
+ num_simu,
+ correlation_info,
+ im_info,
+ eq_ids,
+):
+ # create a ground motion simulator
+ ln_im_mr = []
+ mag_maf = []
+ t_start = time.time()
+ im_sampled = dict() # noqa: C408
+ if im_raw_path.endswith('.json'):
+ with open(im_raw_path) as f: # noqa: PLW1514, PTH123
+ im_raw = ujson.load(f)
+ for i in eq_ids:
+ im_sampled.update({i: im_raw[str(i)]})
+ gm_simulator = GM_Simulator(
+ site_info=stations,
+ im_list=im_list,
+ num_simu=num_simu,
+ correlation_info=correlation_info,
+ im_info=im_info,
+ )
+ elif im_raw_path.endswith('.hdf5'):
+ with h5py.File(im_raw_path, 'r') as f:
+ for i in eq_ids:
+ sample = dict() # noqa: C408
+ sample.update({'Mean': f[str(i)]['Mean'][()]})
+ sample.update({'InterEvStdDev': f[str(i)]['InterEvStdDev'][()]})
+ sample.update({'IntraEvStdDev': f[str(i)]['IntraEvStdDev'][()]})
+ im_sampled.update({i: sample})
+ gm_simulator = GM_Simulator_hdf5(
+ site_info=stations,
+ im_list=im_list,
+ num_simu=num_simu,
+ correlation_info=correlation_info,
+ im_info=im_info,
+ )
+ else:
+ SystemError(f'Unrecognized IM mean and stddev file format in {im_raw_path}') # noqa: PLW0133
+ im_raw = im_sampled
+ for scen_i in tqdm(
+ range(len(eq_ids)),
+ desc=f'ComputeIntensityMeasure for {len(eq_ids)} scenarios',
+ ):
+ # for i, cur_im_raw in enumerate(im_raw):
+ # print('ComputeIntensityMeasure: Scenario #{}/{}'.format(i+1,len(im_raw)))
+ cur_im_raw = im_raw[eq_ids[scen_i]]
+ # set im_raw
+ gm_simulator.set_im_raw(cur_im_raw, im_list)
+ # Computing inter event residuals
+ # t_start = time.time()
+ epsilon = gm_simulator.compute_inter_event_residual()
+ # print('ComputeIntensityMeasure: inter-event correlation {0} sec'.format(time.time() - t_start))
+ # Computing intra event residuals
+ # t_start = time.time()
+ eta = gm_simulator.compute_intra_event_residual()
+ # print('ComputeIntensityMeasure: intra-event correlation {0} sec'.format(time.time() - t_start))
+ ln_im_all = np.zeros((gm_simulator.num_sites, gm_simulator.num_im, num_simu))
+ for i in range(num_simu):
+ epsilon_m = np.array(
+ [epsilon[:, i] for j in range(gm_simulator.num_sites)]
+ )
+ ln_im_all[:, :, i] = (
+ gm_simulator.get_ln_im()
+ + gm_simulator.get_inter_sigma_im() * epsilon_m
+ + gm_simulator.get_intra_sigma_im() * eta[:, :, i]
+ )
+
+ ln_im_mr.append(ln_im_all)
+ scenario = scenarios[eq_ids[scen_i]]
+ mag_maf.append(
+ [
+ scenario['Magnitude'],
+ scenario.get('MeanAnnualRate', None),
+ scenario.get('SiteSourceDistance', None),
+ scenario.get('SiteRuptureDistance', None),
+ ]
+ )
+
+ print( # noqa: T201
+ f'ComputeIntensityMeasure: all inter- and intra-event correlation {time.time() - t_start} sec'
+ )
+ # return
+ return ln_im_mr, mag_maf
+
+
+class GM_Simulator: # noqa: D101
+ def __init__(
+ self,
+ site_info=[], # noqa: B006
+ im_list=[], # noqa: B006
+ im_raw=dict(), # noqa: B006, C408
+ num_simu=0,
+ correlation_info=None,
+ im_info=None,
+ ):
+ self.set_sites(site_info)
+ self.set_num_simu(num_simu)
+ self.parse_correlation_info(correlation_info, im_info)
+ self.set_im_raw(im_raw, im_list)
+ self.cross_check_im_correlation()
+
+ def set_sites(self, site_info): # noqa: D102
+ # set sites
+ self.sites = site_info.copy()
+ self.num_sites = len(self.sites)
+ if self.num_sites < 2: # noqa: PLR2004
+ self.stn_dist = None
+ print( # noqa: T201
+ 'GM_Simulator: Only one site is defined, spatial correlation models ignored.'
+ )
+ return
+ self._compute_distance_matrix()
+
+ def _compute_distance_matrix(self):
+ # site number check
+ if self.num_sites < 2: # noqa: PLR2004
+ print('GM_Simulator: error - please give at least two sites.') # noqa: T201
+ self.stn_dist = None
+ return
+ # compute the distance matrix
+ tmp = np.zeros((self.num_sites, self.num_sites))
+ for i in range(self.num_sites):
+ loc_i = np.array([self.sites[i]['lat'], self.sites[i]['lon']])
+ for j in range(self.num_sites):
+ loc_j = np.array([self.sites[j]['lat'], self.sites[j]['lon']])
+ # Computing station-wise distances
+ tmp[i, j] = CorrelationModel.get_distance_from_lat_lon(loc_i, loc_j)
+ self.stn_dist = tmp
+
+ def set_num_simu(self, num_simu): # noqa: D102
+ # set simulation number
+ self.num_simu = num_simu
+
+ def set_im_raw(self, im_raw, im_list): # noqa: D102
+ # get IM type list
+ self.im_type_list = im_raw.get('IM', [])
+ # get im_data
+ self.im_data = im_raw.get('GroundMotions', [])
+ # get period
+ self.periods = [x for x in im_raw.get('Periods', []) if x is not None]
+ # im name list
+ self.im_name_list = im_list
+ # set IM size
+ self.num_im = len(self.im_name_list)
+
+ def get_ln_im(self): # noqa: D102
+ ln_im = []
+ for i in range(self.num_sites):
+ tmp_im_data = []
+ for cur_im_type in self.im_type_list:
+ tmp_im_data = ( # noqa: PLR6104
+ tmp_im_data + self.im_data[i][f'ln{cur_im_type}']['Mean']
+ )
+ ln_im.append(tmp_im_data)
+ return ln_im
+
+ def get_inter_sigma_im(self): # noqa: D102
+ inter_sigma_im = []
+ for i in range(self.num_sites):
+ tmp_im_data = []
+ for cur_im_type in self.im_type_list:
+ tmp_im_data = ( # noqa: PLR6104
+ tmp_im_data
+ + self.im_data[i][f'ln{cur_im_type}']['InterEvStdDev']
+ )
+ inter_sigma_im.append(tmp_im_data)
+ return inter_sigma_im
+
+ def get_intra_sigma_im(self): # noqa: D102
+ intra_sigma_im = []
+ for i in range(self.num_sites):
+ tmp_im_data = []
+ for cur_im_type in self.im_type_list:
+ tmp_im_data = ( # noqa: PLR6104
+ tmp_im_data
+ + self.im_data[i][f'ln{cur_im_type}']['IntraEvStdDev']
+ )
+ intra_sigma_im.append(tmp_im_data)
+ return intra_sigma_im
+
+ def parse_correlation_info(self, correlation_info, im_info): # noqa: C901, D102
+        # default: no correlation models, so any generated motions are uncorrelated
+ self.inter_cm = None
+ self.intra_cm = None
+ # parse correlation information if any
+ if correlation_info is None:
+ print( # noqa: T201
+ 'GM_Simulator: warning - correlation information not found - results will be uncorrelated motions.'
+ )
+ return
+ if correlation_info.get('Type', None) == 'Vector':
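+            # Illustrative 'Vector' input this branch expects (hypothetical values):
+            #   correlation_info = {'Type': 'Vector'}
+            #   im_info = {'Type': 'Vector',
+            #              'SA': {'InterEventCorr': 'Baker & Jayaram (2008)',
+            #                     'IntraEventCorr': 'Jayaram & Baker (2009)'}, ...}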
+ inter_cm = dict() # noqa: C408
+ im_info.pop('Type')
+ for im, item in im_info.items():
+ # for im in self.im_type_list:
+ inter_cm.update({im: item['InterEventCorr']})
+ inter_cm_unique = list(set([item for _, item in inter_cm.items()])) # noqa: C403
+ if len(inter_cm_unique) == 1:
+ inter_cm = inter_cm_unique[0]
+ self.inter_cm = inter_cm
+ intra_cm = dict() # noqa: C408
+ for im, item in im_info.items():
+ # for im in self.im_type_list:
+ intra_cm.update({im: item['IntraEventCorr']})
+ intra_cm_unique = list(set([item for _, item in intra_cm.items()])) # noqa: C403
+ if len(intra_cm_unique) == 1:
+ intra_cm = intra_cm_unique[0]
+ self.intra_cm = intra_cm
+ return
+
+        # inter-event model
+        if correlation_info.get('InterEvent', None):
+            self.inter_cm = correlation_info['InterEvent']
+        elif correlation_info.get('SaInterEvent', None):
+            # backward compatibility
+            self.inter_cm = correlation_info['SaInterEvent']
+        else:
+            print(  # noqa: T201
+                'GM_Simulator: no inter-event correlation information found - results will be uncorrelated motions.'
+            )
+        # intra-event model
+        if correlation_info.get('IntraEvent', None):
+            self.intra_cm = correlation_info['IntraEvent']
+        elif correlation_info.get('SaIntraEvent', None):
+            # backward compatibility
+            self.intra_cm = correlation_info['SaIntraEvent']
+        else:
+            print(  # noqa: T201
+                'GM_Simulator: no intra-event correlation information found - results will be uncorrelated motions.'
+            )
+
+ def cross_check_im_correlation(self): # noqa: C901, D102
+        # Each correlation model applies only to certain intensity measures,
+        # so here we check whether the selected models cover the required IMs.
+ self.im_cm_inter_flag = True
+ self.im_cm_intra_flag = True
+ if type(self.inter_cm) == dict: # noqa: E721
+ for cur_im in self.im_type_list:
+ avail_im_inter_cm = IM_CORR_INTER.get(self.inter_cm[cur_im])
+ if cur_im not in avail_im_inter_cm:
+ print( # noqa: T201
+ f'GM_Simulator.cross_check_im_correlation: warning - {cur_im} is not available in {self.inter_cm}'
+ )
+ self.im_cm_inter_flag = False
+ continue
+ else:
+ avail_im_inter_cm = IM_CORR_INTER.get(self.inter_cm)
+ if avail_im_inter_cm is not None:
+ for cur_im in self.im_type_list:
+ if cur_im not in avail_im_inter_cm:
+ print( # noqa: T201
+ f'GM_Simulator.cross_check_im_correlation: warning - {cur_im} is not available in {self.inter_cm}'
+ )
+ self.im_cm_inter_flag = False
+ continue
+ if type(self.intra_cm) == dict: # noqa: E721
+ for cur_im in self.im_type_list:
+ avail_im_intra_cm = IM_CORR_INTRA.get(self.intra_cm[cur_im])
+ if cur_im not in avail_im_intra_cm:
+ print( # noqa: T201
+ f'GM_Simulator.cross_check_im_correlation: warning - {cur_im} is not available in {self.intra_cm}'
+ )
+ self.im_cm_intra_flag = False
+ continue
+ else:
+ avail_im_intra_cm = IM_CORR_INTRA.get(self.intra_cm)
+ if avail_im_intra_cm is not None:
+ for cur_im in self.im_type_list:
+ if cur_im not in avail_im_intra_cm:
+ print( # noqa: T201
+ f'GM_Simulator.cross_check_im_correlation: warning - {cur_im} is not available in {self.intra_cm}'
+ )
+ self.im_cm_intra_flag = False
+ continue
+
+ def compute_inter_event_residual_ij(self, cm, im_name_list_1, im_name_list_2): # noqa: D102, PLR6301
+ if cm == 'Baker & Jayaram (2008)':
+ rho = np.array(
+ [
+ CorrelationModel.baker_jayaram_correlation_2008(im1, im2)
+ for im1 in im_name_list_1
+ for im2 in im_name_list_2
+ ]
+ ).reshape([len(im_name_list_1), len(im_name_list_2)])
+ elif cm == 'Baker & Bradley (2017)':
+ rho = np.array(
+ [
+ CorrelationModel.baker_bradley_correlation_2017(im1, im2)
+ for im1 in im_name_list_1
+ for im2 in im_name_list_2
+ ]
+ ).reshape([len(im_name_list_1), len(im_name_list_2)])
+ else:
+            # TODO: extend this to more inter-event correlation models  # noqa: TD002
+ sys.exit(
+ 'GM_Simulator.compute_inter_event_residual: currently supporting Baker & Jayaram (2008), Baker & Bradley (2017)'
+ )
+ return rho
+
+ def replace_submatrix(self, mat, ind1, ind2, mat_replace): # noqa: D102, PLR6301
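+        # Writes mat_replace into the rows listed in ind1 and the columns listed in
+        # ind2 of mat (in place) and returns mat; e.g. ind1=[0, 1], ind2=[2, 3]
+        # fills the 2x2 block mat[0:2, 2:4] with mat_replace.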
+ for i, index in enumerate(ind1):
+ mat[index, ind2] = mat_replace[i, :]
+ return mat
+
+ def compute_inter_event_residual(self): # noqa: D102
+ if type(self.inter_cm) == dict: # noqa: E721
+ rho = np.zeros([self.num_im, self.num_im])
+ im_types = list(self.inter_cm.keys())
+ for i in range(len(im_types)):
+ for j in range(i + 1):
+ im_type_i = im_types[i]
+ im_type_j = im_types[j]
+ im_name_list_i = [
+ im_name
+ for im_name in self.im_name_list
+ if im_name.startswith(im_type_i)
+ ]
+ im_indices_i = [
+ index
+ for index, element in enumerate(self.im_name_list)
+ if element.startswith(im_type_i)
+ ]
+ im_name_list_j = [
+ im_name
+ for im_name in self.im_name_list
+ if im_name.startswith(im_type_j)
+ ]
+ im_indices_j = [
+ index
+ for index, element in enumerate(self.im_name_list)
+ if element.startswith(im_type_j)
+ ]
+ # In R2D, use SA(0.01) to approximate PGA
+ im_name_list_i = [
+ 'SA(0.01)' if x == 'PGA' else x for x in im_name_list_i
+ ]
+ im_name_list_j = [
+ 'SA(0.01)' if x == 'PGA' else x for x in im_name_list_j
+ ]
+ rho_ij = self.compute_inter_event_residual_ij(
+ self.inter_cm[im_types[i]], im_name_list_i, im_name_list_j
+ )
+ rho = self.replace_submatrix(
+ rho, im_indices_i, im_indices_j, rho_ij
+ )
+ if i != j:
+ rho = self.replace_submatrix(
+ rho, im_indices_j, im_indices_i, rho_ij.T
+ )
+ else:
+ rho = self.compute_inter_event_residual_ij(
+ self.inter_cm, self.im_name_list, self.im_name_list
+ )
+ # Simulating residuals
+ with warnings.catch_warnings():
+            # The correlation matrix rho can have tiny negative eigenvalues from
+            # numerical round-off; the resulting non-positive-semidefinite warning is suppressed
+ warnings.filterwarnings(
+ 'ignore',
+ message='covariance is not symmetric positive-semidefinite.',
+ )
+ residuals = np.random.multivariate_normal(
+ np.zeros(self.num_im), rho, self.num_simu
+ ).T
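+        # residuals has shape (num_im, num_simu): one inter-event epsilon vector per
+        # realization, later broadcast to all sites in simulate_ground_motion.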
+ # return
+ return residuals # noqa: RET504
+
+ def compute_intra_event_residual_i(self, cm, im_name_list, num_simu): # noqa: D102
+ if cm == 'Jayaram & Baker (2009)':
+ rho = np.zeros((self.num_sites, self.num_sites, len(im_name_list)))
+ for i in range(self.num_sites):
+ for j in range(self.num_sites):
+ cur_stn_dist = self.stn_dist[i, j]
+ for k in range(len(im_name_list)):
+ rho[i, j, k] = (
+ CorrelationModel.jayaram_baker_correlation_2009(
+ im_name_list[k], cur_stn_dist, flag_clustering=False
+ )
+ )
+ # Simulating residuals
+ residuals = np.zeros((self.num_sites, len(im_name_list), num_simu))
+ for k in range(self.num_im):
+ residuals[:, k, :] = np.random.multivariate_normal(
+ np.zeros(self.num_sites), rho[:, :, k], num_simu
+ ).T
+ elif cm == 'Loth & Baker (2013)':
+ residuals = CorrelationModel.loth_baker_correlation_2013(
+ self.sites, im_name_list, num_simu
+ )
+ elif cm == 'Markhvida et al. (2017)':
+ num_pc = 19
+ residuals = CorrelationModel.markhvida_ceferino_baker_correlation_2017(
+ self.sites, im_name_list, num_simu, num_pc
+ )
+ elif cm == 'Du & Ning (2021)':
+ num_pc = 23
+ residuals = CorrelationModel.du_ning_correlation_2021(
+ self.sites, im_name_list, num_simu, num_pc
+ )
+ else:
+            # TODO: extend this to more intra-event correlation models  # noqa: TD002
+            sys.exit(
+                'GM_Simulator.compute_intra_event_residual: currently supporting Jayaram & Baker (2009), Loth & Baker (2013), Markhvida et al. (2017), Du & Ning (2021)'
+            )
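+        # Every branch above is expected to return residuals with shape
+        # (num_sites, len(im_name_list), num_simu).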
+ return residuals
+
+ def compute_intra_event_residual(self): # noqa: D102
+ if type(self.intra_cm) == dict: # noqa: E721
+ cm_groups = dict() # noqa: C408
+ # Group the IMs using the same cm
+ for key, item in self.intra_cm.items():
+ if item not in cm_groups:
+ cm_groups.update({item: [key]})
+ else:
+ cm_groups[item].append(key)
+ residuals = np.zeros((self.num_sites, self.num_im, self.num_simu))
+ for cm, im_types in cm_groups.items():
+ # im_type_list = [im_name.split('(')[0] for im_name in self.im_name_list]
+ im_name_list = [
+ im_name
+ for im_name in self.im_name_list
+ if im_name.split('(')[0] in im_types
+ ]
+ im_indices = [
+ index
+ for index, element in enumerate(self.im_name_list)
+ if element.split('(')[0] in im_types
+ ]
+ residuals_i = self.compute_intra_event_residual_i(
+ cm, im_name_list, self.num_simu
+ )
+ for i, ind in enumerate(im_indices):
+ residuals[:, ind, :] = residuals_i[:, i, :]
+ else:
+ residuals = self.compute_intra_event_residual_i(
+ self.intra_cm, self.im_name_list, self.num_simu
+ )
+ # return
+ return residuals
+
+
+class GM_Simulator_hdf5(GM_Simulator): # noqa: D101
+ def __init__(
+ self,
+ site_info=[], # noqa: B006
+ im_list=[], # noqa: B006
+ num_simu=0,
+ correlation_info=None,
+ im_info=None,
+ ):
+ self.set_im_type(im_list)
+ self.set_sites(site_info)
+ self.set_num_simu(num_simu)
+ self.parse_correlation_info(correlation_info, im_info)
+ self.cross_check_im_correlation()
+
+ def set_im_type(self, im_list): # noqa: D102
+ self.im_name_list = im_list
+ im_types = set()
+ for im in im_list:
+ if im.startswith('PGA'):
+ im_types.add('PGA')
+ elif im.startswith('SA'):
+ im_types.add('SA')
+ elif im.startswith('PGV'):
+ im_types.add('PGV')
+ else:
+                raise SyntaxError(f'Unrecognized im type: {im}')
+ # Add ims one by one because the order is important
+ self.im_type_list = []
+ if ('PGA') in im_types:
+ self.im_type_list.append('PGA')
+ if ('SA') in im_types:
+ self.im_type_list.append('SA')
+ if ('PGV') in im_types:
+ self.im_type_list.append('PGV')
+
+ def set_im_raw(self, im_raw, im_list): # noqa: D102
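+        # In the HDF5 workflow im_raw is assumed to be a dict of (num_sites x num_im)
+        # arrays keyed 'Mean', 'InterEvStdDev' and 'IntraEvStdDev', indexed directly
+        # by the getters below.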
+ self.im_name_list = im_list
+ self.num_im = len(im_list)
+ self.im_data = im_raw
+
+ def get_ln_im(self): # noqa: D102
+ ln_im = []
+ for i in range(self.num_sites):
+ tmp_im_data = self.im_data['Mean'][i, :].tolist()
+ ln_im.append(tmp_im_data)
+ return ln_im
+
+ def get_inter_sigma_im(self): # noqa: D102
+ inter_sigma_im = []
+ for i in range(self.num_sites):
+ tmp_im_data = self.im_data['InterEvStdDev'][i, :].tolist()
+ inter_sigma_im.append(tmp_im_data)
+ return inter_sigma_im
-IM_CORR_INTER = {"Baker & Jayaram (2008)": ["SA", "PGA"],
- "Baker & Bradley (2017)": ["SA", "PGA", "PGV", "DS575H", "DS595H"]}
-
-IM_CORR_INTRA = {"Jayaram & Baker (2009)": ["SA", "PGA"],
- "Loth & Baker (2013)": ["SA", "PGA"],
- "Markhvida et al. (2017)": ["SA", "PGA"],
- "Du & Ning (2021)": ["SA", "PGA", "PGV", "Ia", "CAV", "DS575H", "DS595H"]}
-
-IM_CORR = {"INTER": IM_CORR_INTER,
- "INTRA": IM_CORR_INTRA}
-
-def simulate_ground_motion(stations, im_raw_path, im_list, scenarios,\
- num_simu, correlation_info, im_info, eq_ids):
-
- # create a ground motion simulator
- ln_im_mr = []
- mag_maf = []
- t_start = time.time()
- im_sampled = dict()
- if im_raw_path.endswith('.json'):
- with open(im_raw_path, 'r') as f:
- im_raw = ujson.load(f)
- for i in eq_ids:
- im_sampled.update({i: im_raw[str(i)]})
- gm_simulator = GM_Simulator(site_info=stations, im_list = im_list,
- num_simu=num_simu,
- correlation_info=correlation_info,im_info=im_info)
- elif im_raw_path.endswith('.hdf5'):
- with h5py.File(im_raw_path, 'r') as f:
- for i in eq_ids:
- sample = dict()
- sample.update({'Mean': f[str(i)]['Mean'][()]})
- sample.update({'InterEvStdDev': f[str(i)]['InterEvStdDev'][()]})
- sample.update({'IntraEvStdDev': f[str(i)]['IntraEvStdDev'][()]})
- im_sampled.update({i: sample})
- gm_simulator = GM_Simulator_hdf5(site_info=stations, im_list = im_list,
- num_simu=num_simu,
- correlation_info=correlation_info,im_info=im_info)
- else:
- SystemError(f'Unrecognized IM mean and stddev file format in {im_raw_path}')
- im_raw = im_sampled
- for scen_i in tqdm(range(len(eq_ids)), desc=f"ComputeIntensityMeasure for {len(eq_ids)} scenarios"):
- # for i, cur_im_raw in enumerate(im_raw):
- # print('ComputeIntensityMeasure: Scenario #{}/{}'.format(i+1,len(im_raw)))
- cur_im_raw = im_raw[eq_ids[scen_i]]
- # set im_raw
- gm_simulator.set_im_raw(cur_im_raw, im_list)
- # Computing inter event residuals
- #t_start = time.time()
- epsilon = gm_simulator.compute_inter_event_residual()
- #print('ComputeIntensityMeasure: inter-event correlation {0} sec'.format(time.time() - t_start))
- # Computing intra event residuals
- #t_start = time.time()
- eta = gm_simulator.compute_intra_event_residual()
- #print('ComputeIntensityMeasure: intra-event correlation {0} sec'.format(time.time() - t_start))
- ln_im_all = np.zeros((gm_simulator.num_sites, gm_simulator.num_im, num_simu))
- for i in range(num_simu):
- epsilon_m = np.array([epsilon[:, i] for j in range(gm_simulator.num_sites)])
- ln_im_all[:, :, i] = gm_simulator.get_ln_im() + \
- gm_simulator.get_inter_sigma_im() * epsilon_m + \
- gm_simulator.get_intra_sigma_im() * eta[:, :, i]
-
- ln_im_mr.append(ln_im_all)
- scenario = scenarios[eq_ids[scen_i]]
- mag_maf.append([scenario['Magnitude'], scenario.get('MeanAnnualRate',None),
- scenario.get('SiteSourceDistance',None),
- scenario.get('SiteRuptureDistance',None)])
-
- print('ComputeIntensityMeasure: all inter- and intra-event correlation {0} sec'.format(time.time() - t_start))
- # return
- return ln_im_mr, mag_maf
-
-class GM_Simulator:
-
- def __init__(self, site_info = [], im_list = [], im_raw=dict(), num_simu=0,\
- correlation_info=None,im_info = None):
-
- self.set_sites(site_info)
- self.set_num_simu(num_simu)
- self.parse_correlation_info(correlation_info, im_info)
- self.set_im_raw(im_raw, im_list)
- self.cross_check_im_correlation()
-
- def set_sites(self, site_info):
- # set sites
- self.sites = site_info.copy()
- self.num_sites = len(self.sites)
- if self.num_sites < 2:
- self.stn_dist = None
- print('GM_Simulator: Only one site is defined, spatial correlation models ignored.')
- return
- self._compute_distance_matrix()
-
- def _compute_distance_matrix(self):
-
- # site number check
- if self.num_sites < 2:
- print('GM_Simulator: error - please give at least two sites.')
- self.stn_dist = None
- return
- # compute the distance matrix
- tmp = np.zeros((self.num_sites, self.num_sites))
- for i in range(self.num_sites):
- loc_i = np.array([self.sites[i]['lat'],
- self.sites[i]['lon']])
- for j in range(self.num_sites):
- loc_j = np.array([self.sites[j]['lat'],
- self.sites[j]['lon']])
- # Computing station-wise distances
- tmp[i,j] = CorrelationModel.get_distance_from_lat_lon(loc_i, loc_j)
- self.stn_dist = tmp
-
- def set_num_simu(self, num_simu):
- # set simulation number
- self.num_simu = num_simu
-
- def set_im_raw(self, im_raw, im_list):
- # get IM type list
- self.im_type_list = im_raw.get('IM',[])
- # get im_data
- self.im_data = im_raw.get('GroundMotions',[])
- # get period
- self.periods = [x for x in im_raw.get('Periods',[]) if x is not None]
- # im name list
- self.im_name_list = im_list
- # set IM size
- self.num_im = len(self.im_name_list)
-
- def get_ln_im(self):
- ln_im = []
- for i in range(self.num_sites):
- tmp_im_data = []
- for cur_im_type in self.im_type_list:
- tmp_im_data = tmp_im_data+self.im_data[i]['ln{}'.format(cur_im_type)]['Mean']
- ln_im.append(tmp_im_data)
- return ln_im
-
- def get_inter_sigma_im(self):
- inter_sigma_im = []
- for i in range(self.num_sites):
- tmp_im_data = []
- for cur_im_type in self.im_type_list:
- tmp_im_data = tmp_im_data+self.im_data[i]['ln{}'.format(cur_im_type)]['InterEvStdDev']
- inter_sigma_im.append(tmp_im_data)
- return inter_sigma_im
-
- def get_intra_sigma_im(self):
- intra_sigma_im = []
- for i in range(self.num_sites):
- tmp_im_data = []
- for cur_im_type in self.im_type_list:
- tmp_im_data = tmp_im_data+self.im_data[i]['ln{}'.format(cur_im_type)]['IntraEvStdDev']
- intra_sigma_im.append(tmp_im_data)
- return intra_sigma_im
-
- def parse_correlation_info(self, correlation_info, im_info):
-
- # default is no correlation model and uncorrelated motions if generated
- self.inter_cm = None
- self.intra_cm = None
- # parse correlation infomation if any
- if correlation_info is None:
- print('GM_Simulator: warning - correlation information not found - results will be uncorrelated motions.')
- return
- if correlation_info.get('Type', None) == 'Vector':
- inter_cm = dict()
- im_info.pop('Type')
- for im, item in im_info.items():
- # for im in self.im_type_list:
- inter_cm.update({im:item['InterEventCorr']})
- inter_cm_unique = list(set([item for _, item in inter_cm.items()]))
- if len(inter_cm_unique) == 1:
- inter_cm = inter_cm_unique[0]
- self.inter_cm = inter_cm
- intra_cm = dict()
- for im, item in im_info.items():
- # for im in self.im_type_list:
- intra_cm.update({im:item['IntraEventCorr']})
- intra_cm_unique = list(set([item for _, item in intra_cm.items()]))
- if len(intra_cm_unique) == 1:
- intra_cm = intra_cm_unique[0]
- self.intra_cm = intra_cm
- return
-
- # inter-event model
- if correlation_info.get('InterEvent', None):
- self.inter_cm = correlation_info['InterEvent']
- elif correlation_info.get('SaInterEvent', None):
- # back compatibility
- self.inter_cm = correlation_info['SaInterEvent']
- else:
- print('GM_Simulator: no inter-event correlation information not found - results will be uncorrelated motions.')
- # intra-event model
- if correlation_info.get('IntraEvent', None):
- self.intra_cm = correlation_info['IntraEvent']
- if correlation_info.get('SaIntraEvent', None):
- # back compatibility
- self.intra_cm = correlation_info['SaIntraEvent']
- else:
- print('GM_Simulator: no intra-event correlation information not found - results will be uncorrelated motions.')
-
- def cross_check_im_correlation(self):
- # because each correlation model only applies to certain intensity measure
- # so hear we check if the correlation models are applicable for the required intensity measures
- self.im_cm_inter_flag = True
- self.im_cm_intra_flag = True
- if type(self.inter_cm)==dict:
- for cur_im in self.im_type_list:
- avail_im_inter_cm = IM_CORR_INTER.get(self.inter_cm[cur_im])
- if cur_im not in avail_im_inter_cm:
- print('GM_Simulator.cross_check_im_correlation: warning - {} is not available in {}'.format(cur_im, self.inter_cm))
- self.im_cm_inter_flag = False
- continue
- else:
- avail_im_inter_cm = IM_CORR_INTER.get(self.inter_cm)
- if avail_im_inter_cm is not None:
- for cur_im in self.im_type_list:
- if cur_im not in avail_im_inter_cm:
- print('GM_Simulator.cross_check_im_correlation: warning - {} is not available in {}'.format(cur_im, self.inter_cm))
- self.im_cm_inter_flag = False
- continue
- if type(self.intra_cm) ==dict:
- for cur_im in self.im_type_list:
- avail_im_intra_cm = IM_CORR_INTRA.get(self.intra_cm[cur_im])
- if cur_im not in avail_im_intra_cm:
- print('GM_Simulator.cross_check_im_correlation: warning - {} is not available in {}'.format(cur_im, self.intra_cm))
- self.im_cm_intra_flag = False
- continue
- else:
- avail_im_intra_cm = IM_CORR_INTRA.get(self.intra_cm)
- if avail_im_intra_cm is not None:
- for cur_im in self.im_type_list:
- if cur_im not in avail_im_intra_cm:
- print('GM_Simulator.cross_check_im_correlation: warning - {} is not available in {}'.format(cur_im, self.intra_cm))
- self.im_cm_intra_flag = False
- continue
-
- def compute_inter_event_residual_ij(self, cm, im_name_list_1, im_name_list_2):
- if cm == 'Baker & Jayaram (2008)':
- rho = np.array([CorrelationModel.baker_jayaram_correlation_2008(im1, im2)
- for im1 in im_name_list_1 for im2 in im_name_list_2]).\
- reshape([len(im_name_list_1),\
- len(im_name_list_2)])
- elif cm == 'Baker & Bradley (2017)':
- rho = np.array([CorrelationModel.baker_bradley_correlation_2017(im1, im2)
- for im1 in im_name_list_1 for im2 in im_name_list_2]).\
- reshape([len(im_name_list_1),\
- len(im_name_list_2)])
- else:
- # TODO: extending this to more inter-event correlation models
- sys.exit('GM_Simulator.compute_inter_event_residual: currently supporting Baker & Jayaram (2008), Baker & Bradley (2017)')
- return rho
-
- def replace_submatrix(self, mat, ind1, ind2, mat_replace):
- for i, index in enumerate(ind1):
- mat[index, ind2] = mat_replace[i, :]
- return mat
- def compute_inter_event_residual(self):
- if type(self.inter_cm) == dict:
- rho = np.zeros([self.num_im, self.num_im])
- im_types = list(self.inter_cm.keys())
- for i in range(len(im_types)):
- for j in range(0,i+1):
- im_type_i = im_types[i]
- im_type_j = im_types[j]
- im_name_list_i = [im_name for im_name in self.im_name_list\
- if im_name.startswith(im_type_i)]
- im_indices_i = [index for index, element in enumerate(self.im_name_list)\
- if element.startswith(im_type_i)]
- im_name_list_j = [im_name for im_name in self.im_name_list\
- if im_name.startswith(im_type_j)]
- im_indices_j = [index for index, element in enumerate(self.im_name_list)\
- if element.startswith(im_type_j)]
- # In R2D, use SA(0.01) to approximate PGA
- im_name_list_i = ['SA(0.01)' if x == 'PGA' else x\
- for x in im_name_list_i]
- im_name_list_j = ['SA(0.01)' if x == 'PGA' else x\
- for x in im_name_list_j]
- rho_ij = self.compute_inter_event_residual_ij(\
- self.inter_cm[im_types[i]],im_name_list_i, im_name_list_j)
- rho = self.replace_submatrix(rho, im_indices_i, im_indices_j,\
- rho_ij)
- if i!=j:
- rho = self.replace_submatrix(rho, im_indices_j, im_indices_i,\
- rho_ij.T)
- else:
- rho = self.compute_inter_event_residual_ij(\
- self.inter_cm, self.im_name_list, self.im_name_list)
- # Simulating residuals
- with warnings.catch_warnings():
- # The intra-event models produce rho with tiny negative eigen values
- # This warning is suppressed
- warnings.filterwarnings("ignore", message="covariance is not symmetric positive-semidefinite.")
- residuals = np.random.multivariate_normal(np.zeros(self.num_im), rho, self.num_simu).T
- # return
- return residuals
- def compute_intra_event_residual_i(self, cm, im_name_list, num_simu):
- if cm == 'Jayaram & Baker (2009)':
- rho = np.zeros((self.num_sites, self.num_sites, len(im_name_list)))
- for i in range(self.num_sites):
- for j in range(self.num_sites):
- cur_stn_dist = self.stn_dist[i, j]
- for k in range(len(im_name_list)):
- rho[i, j, k] = CorrelationModel.jayaram_baker_correlation_2009(im_name_list[k], cur_stn_dist,
- flag_clustering = False)
- # Simulating residuals
- residuals = np.zeros((self.num_sites, len(im_name_list), num_simu))
- for k in range(self.num_im):
- residuals[:, k, :] = np.random.multivariate_normal(np.zeros(self.num_sites), rho[:, :, k], num_simu).T
- elif cm == 'Loth & Baker (2013)':
- residuals = CorrelationModel.loth_baker_correlation_2013(self.sites,\
- im_name_list, num_simu)
- elif cm == 'Markhvida et al. (2017)':
- num_pc = 19
- residuals = CorrelationModel.markhvida_ceferino_baker_correlation_2017(\
- self.sites, im_name_list, num_simu, num_pc)
- elif cm == 'Du & Ning (2021)':
- num_pc = 23
- residuals = CorrelationModel.du_ning_correlation_2021(self.sites,\
- im_name_list, num_simu, num_pc)
- else:
- # TODO: extending this to more inter-event correlation models
- sys.exit('GM_Simulator.compute_intra_event_residual: currently supporting Jayaram & Baker (2009), Loth & Baker (2013),Markhvida et al. (2017), Du & Ning (2021)')
- return residuals
-
- def compute_intra_event_residual(self):
- if type(self.intra_cm) == dict:
- cm_groups = dict()
- # Group the IMs using the same cm
- for key, item in self.intra_cm.items():
- if item not in cm_groups.keys():
- cm_groups.update({item:[key]})
- else:
- cm_groups[item].append(key)
- residuals = np.zeros((self.num_sites, self.num_im, self.num_simu))
- for cm, im_types in cm_groups.items():
- # im_type_list = [im_name.split('(')[0] for im_name in self.im_name_list]
- im_name_list = [im_name for im_name in self.im_name_list\
- if im_name.split('(')[0] in im_types]
- im_indices = [index for index, element in enumerate(self.im_name_list)\
- if element.split('(')[0] in im_types]
- residuals_i = self.compute_intra_event_residual_i(cm,\
- im_name_list, self.num_simu)
- for i, ind in enumerate(im_indices):
- residuals[:,ind,:] = residuals_i[:,i,:]
- else:
- residuals = self.compute_intra_event_residual_i(self.intra_cm,
- self.im_name_list, self.num_simu)
- # return
- return residuals
-
-class GM_Simulator_hdf5(GM_Simulator):
-
- def __init__(self, site_info = [], im_list = [], num_simu=0,\
- correlation_info=None,im_info = None):
- self.set_im_type(im_list)
- self.set_sites(site_info)
- self.set_num_simu(num_simu)
- self.parse_correlation_info(correlation_info, im_info)
- self.cross_check_im_correlation()
-
- def set_im_type(self, im_list):
- self.im_name_list = im_list
- im_types = set()
- for im in im_list:
- if im.startswith('PGA'):
- im_types.add('PGA')
- elif im.startswith('SA'):
- im_types.add('SA')
- elif im.startswith('PGV'):
- im_types.add('PGV')
- else:
- SyntaxError(f'Unrecognized im type: {im}')
- # Add ims one by one because the order is important
- self.im_type_list = []
- if ('PGA') in im_types:
- self.im_type_list.append('PGA')
- if ('SA') in im_types:
- self.im_type_list.append('SA')
- if ('PGV') in im_types:
- self.im_type_list.append('PGV')
-
-
- def set_im_raw(self, im_raw, im_list):
- self.im_name_list = im_list
- self.num_im = len(im_list)
- self.im_data = im_raw
-
- def get_ln_im(self):
- ln_im = []
- for i in range(self.num_sites):
- tmp_im_data = self.im_data['Mean'][i, :].tolist()
- ln_im.append(tmp_im_data)
- return ln_im
-
- def get_inter_sigma_im(self):
- inter_sigma_im = []
- for i in range(self.num_sites):
- tmp_im_data = self.im_data['InterEvStdDev'][i, :].tolist()
- inter_sigma_im.append(tmp_im_data)
- return inter_sigma_im
-
- def get_intra_sigma_im(self):
- intra_sigma_im = []
- for i in range(self.num_sites):
- tmp_im_data = self.im_data['IntraEvStdDev'][i, :].tolist()
- intra_sigma_im.append(tmp_im_data)
- return intra_sigma_im
-
\ No newline at end of file
+ def get_intra_sigma_im(self): # noqa: D102
+ intra_sigma_im = []
+ for i in range(self.num_sites):
+ tmp_im_data = self.im_data['IntraEvStdDev'][i, :].tolist()
+ intra_sigma_im.append(tmp_im_data)
+ return intra_sigma_im
diff --git a/modules/performRegionalEventSimulation/regionalGroundMotion/HazardOccurrence.py b/modules/performRegionalEventSimulation/regionalGroundMotion/HazardOccurrence.py
index 924a11e6d..54bf7a752 100644
--- a/modules/performRegionalEventSimulation/regionalGroundMotion/HazardOccurrence.py
+++ b/modules/performRegionalEventSimulation/regionalGroundMotion/HazardOccurrence.py
@@ -1,5 +1,4 @@
-# -*- coding: utf-8 -*-
-#
+# # noqa: INP001, D100
# Copyright (c) 2022 Leland Stanford Junior University
# Copyright (c) 2022 The Regents of the University of California
#
@@ -38,90 +37,108 @@
# Kuanshi Zhong
#
+import collections
+import itertools
+import json
+import os
+import sys
+import threading
+import time
+
+import h5py
import numpy as np
-import pulp, h5py, sys
import pandas as pd
-import json, os, itertools, threading, collections, time
+import pulp
from scipy.stats import norm
-from USGS_API import *
-from tqdm import tqdm
from sklearn.linear_model import lasso_path
-
-
-def configure_hazard_occurrence(input_dir,
- output_dir,
- IMfile,
- im_list,
- scenarios,
- hzo_config=None,
- site_config=None,
- mth_flag=True):
-
+from tqdm import tqdm
+from USGS_API import * # noqa: F403
+
+
+def configure_hazard_occurrence( # noqa: C901, D103
+ input_dir,
+ output_dir,
+ IMfile, # noqa: N803
+ im_list,
+ scenarios,
+ hzo_config=None,
+ site_config=None,
+ mth_flag=True, # noqa: FBT002
+):
if hzo_config is None or site_config is None:
# no model is defined
return {}
# model type
model_type = hzo_config.get('Model')
# number of earthquake in the subset
- num_target_eqs = hzo_config.get('EarthquakeSampleSize',10)
+ num_target_eqs = hzo_config.get('EarthquakeSampleSize', 10)
# number of ground motion maps
- num_target_gmms = hzo_config.get('GroundMotionMapSize',num_target_eqs*10)
+ num_target_gmms = hzo_config.get('GroundMotionMapSize', num_target_eqs * 10)
# return periods
- return_periods = hzo_config.get('ReturnPeriods',None)
+ return_periods = hzo_config.get('ReturnPeriods', None)
if return_periods is None:
return {}
# im type
- im_type = hzo_config.get('IntensityMeasure',None)
+ im_type = hzo_config.get('IntensityMeasure', None)
if im_type is None:
return {}
# get hazard curve input
- hc_input = hzo_config.get('HazardCurveInput',None)
+ hc_input = hzo_config.get('HazardCurveInput', None)
# return periods
if hc_input is None:
return {}
- elif hc_input == 'Inferred_NSHMP':
- period = hzo_config.get('Period',0.0)
+ elif hc_input == 'Inferred_NSHMP': # noqa: RET505
+ period = hzo_config.get('Period', 0.0)
if im_type == 'SA':
- cur_imt = im_type+"{:.1f}".format(period).replace('.','P')
+ cur_imt = im_type + f'{period:.1f}'.replace('.', 'P')
else:
cur_imt = im_type
- # fecthing hazard curve from usgs
- cur_edition = hzo_config.get('Edition','E2014')
+ # fetching hazard curve from usgs
+ cur_edition = hzo_config.get('Edition', 'E2014')
hazard_curve_collector = []
for site_id in range(len(site_config)):
cur_site = site_config[site_id]
cur_lon = cur_site.get('lon')
cur_lat = cur_site.get('lat')
- cur_vs30 = cur_site.get('vs30',760)
- hazard_curve_collector.append(USGS_HazardCurve(longitude=cur_lon,
- latitude=cur_lat,
- vs30=cur_vs30,
- edition=cur_edition,
- imt=cur_imt,
- tag=site_id))
+ cur_vs30 = cur_site.get('vs30', 760)
+ hazard_curve_collector.append(
+ USGS_HazardCurve( # noqa: F405
+ longitude=cur_lon,
+ latitude=cur_lat,
+ vs30=cur_vs30,
+ edition=cur_edition,
+ imt=cur_imt,
+ tag=site_id,
+ )
+ )
hc_data = []
- print('HazardOCcurrence: fetching USGS hazard curve for individual sites - this may take a while.')
+ print( # noqa: T201
+        'HazardOccurrence: fetching USGS hazard curve for individual sites - this may take a while.'
+ )
t_start = time.time()
if mth_flag:
num_bins = 100
- bin_size = int(np.ceil(len(hazard_curve_collector)/num_bins))
+ bin_size = int(np.ceil(len(hazard_curve_collector) / num_bins))
ids_list = []
collector_list = []
sub_ths = []
hc_dict = {}
for k in range(0, len(hazard_curve_collector), bin_size):
- ids_list.append(list(range(k,k+bin_size)))
- collector_list.append(hazard_curve_collector[k:k+bin_size])
- #print(ids_list)
+ ids_list.append(list(range(k, k + bin_size)))
+ collector_list.append(hazard_curve_collector[k : k + bin_size])
+ # print(ids_list)
for i in range(len(ids_list)):
- th = threading.Thread(target=fetch_usgs_hazard_curve_para, args=(ids_list[i], collector_list[i], hc_dict))
+ th = threading.Thread(
+ target=fetch_usgs_hazard_curve_para,
+ args=(ids_list[i], collector_list[i], hc_dict),
+ )
sub_ths.append(th)
th.start()
for th in sub_ths:
th.join()
# order the res_dict by id
res_ordered = collections.OrderedDict(sorted(hc_dict.items()))
- for i, cur_res in res_ordered.items():
+ for i, cur_res in res_ordered.items(): # noqa: B007
hc_data.append(cur_res)
else:
for i in range(len(hazard_curve_collector)):
@@ -129,26 +146,32 @@ def configure_hazard_occurrence(input_dir,
if cur_collector.fetch_url():
hc_data.append(cur_collector.get_hazard_curve())
else:
- print('HazardOCcurrence: error in fetching hazard curve for site {}.'.format(i))
- return
-
- print('HazardOCcurrence: all hazard curves fetched {0} sec.'.format(time.time()-t_start))
+ print( # noqa: T201
+                    f'HazardOccurrence: error in fetching hazard curve for site {i}.'
+ )
+ return None
+
+ print( # noqa: T201
+        f'HazardOccurrence: all hazard curves fetched {time.time() - t_start} sec.'
+ )
elif hc_input == 'Inferred_sourceFile':
- period = hzo_config.get('Period',0.0)
+ period = hzo_config.get('Period', 0.0)
if im_type == 'SA':
- cur_imt = 'SA({})'.format(str(period))
+ cur_imt = f'SA({period!s})'
else:
cur_imt = im_type
if IMfile.lower().endswith('.json'):
- with open(IMfile, 'r') as f:
- IMdata = json.load(f)
- hc_data = calc_hazard_curves(IMdata,site_config, cur_imt)
+ with open(IMfile) as f: # noqa: PLW1514, PTH123
+ IMdata = json.load(f) # noqa: N806
+ hc_data = calc_hazard_curves(IMdata, site_config, cur_imt)
elif IMfile.lower().endswith('.hdf5'):
- hc_data = calc_hazard_curves_hdf5(IMfile, im_list, site_config, cur_imt, scenarios)
+ hc_data = calc_hazard_curves_hdf5(
+ IMfile, im_list, site_config, cur_imt, scenarios
+ )
# c_vect = calc_hazard_contribution(IMdata, site_config,
# return_periods, hc_data, cur_imt)
else:
- hc_input = os.path.join(input_dir,hc_input)
+ hc_input = os.path.join(input_dir, hc_input) # noqa: PTH118
if hc_input.endswith('.csv'):
hc_data = get_hazard_curves(input_csv=hc_input)
elif hc_input.endswith('.json'):
@@ -158,282 +181,312 @@ def configure_hazard_occurrence(input_dir,
# interpolate the hazard curve with the return periods
num_sites = len(hc_data)
num_rps = len(return_periods)
- hc_interp = np.zeros((num_sites,num_rps))
+ hc_interp = np.zeros((num_sites, num_rps))
ln_maf = [np.log(x) for x in return_periods]
for i in range(num_sites):
ln_cur_maf = [np.log(x) for x in hc_data[i].get('ReturnPeriod')]
ln_cur_sa = np.log(hc_data[i].get('IM')).tolist()
- hc_interp[i,:] = np.exp(np.interp(ln_maf,ln_cur_maf,ln_cur_sa,left=ln_cur_sa[0],right=ln_cur_sa[-1]))
+ hc_interp[i, :] = np.exp(
+ np.interp(
+ ln_maf, ln_cur_maf, ln_cur_sa, left=ln_cur_sa[0], right=ln_cur_sa[-1]
+ )
+ )
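+    # hc_interp[i, r] is the IM level at site i whose return period equals
+    # return_periods[r], obtained by log-log interpolation of the fetched hazard
+    # curve and clamped to its end points.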
hc_interp_list = hc_interp.tolist()
# summary
occ_dict = {
- "Model": model_type,
- "NumTargetEQs": num_target_eqs,
- "NumTargetGMMs": num_target_gmms,
- "ReturnPeriods": return_periods,
- "IntensityMeasure": im_type,
- "Period": period,
- "HazardCurves": hc_interp_list
+ 'Model': model_type,
+ 'NumTargetEQs': num_target_eqs,
+ 'NumTargetGMMs': num_target_gmms,
+ 'ReturnPeriods': return_periods,
+ 'IntensityMeasure': im_type,
+ 'Period': period,
+ 'HazardCurves': hc_interp_list,
}
# output the hazard occurrence information file
- with open(os.path.join(output_dir,"HazardCurves.json"),"w") as f:
+ with open(os.path.join(output_dir, 'HazardCurves.json'), 'w') as f: # noqa: PLW1514, PTH118, PTH123
json.dump(occ_dict, f, indent=2)
occ_dict = {
- "Model": model_type,
- "NumTargetEQs": num_target_eqs,
- "NumTargetGMMs": num_target_gmms,
- "ReturnPeriods": return_periods,
- "IntensityMeasure": im_type,
- "Period": period,
- "HazardCurves": hc_interp
+ 'Model': model_type,
+ 'NumTargetEQs': num_target_eqs,
+ 'NumTargetGMMs': num_target_gmms,
+ 'ReturnPeriods': return_periods,
+ 'IntensityMeasure': im_type,
+ 'Period': period,
+ 'HazardCurves': hc_interp,
}
# return
- return occ_dict
+ return occ_dict # noqa: RET504
-def fetch_usgs_hazard_curve_para(ids, hc_collectors, hc_dict):
-
+def fetch_usgs_hazard_curve_para(ids, hc_collectors, hc_dict): # noqa: D103
for cur_id, cur_collector in zip(ids, hc_collectors):
if cur_collector.fetch_url():
hc_dict[cur_id] = cur_collector.get_hazard_curve()
else:
- print('HazardOCcurrence: error in fetching hazard curve for site {}.'.format(cur_id))
+ print( # noqa: T201
+            f'HazardOccurrence: error in fetching hazard curve for site {cur_id}.'
+ )
# return
- return
-def calc_hazard_curve_and_contri(IMdata, site_config, im, targetReturnPeriods):
+
+def calc_hazard_curve_and_contri(IMdata, site_config, im, targetReturnPeriods): # noqa: ARG001, N803, D103
if im[0:2] == 'SA':
- period = float(im[2:].replace('P','.'))
+ period = float(im[2:].replace('P', '.'))
im_name = 'lnSA'
- periods = IMdata[list(IMdata.keys())[0]]['Periods']
- im_ind = np.where(np.array(periods)==period)[0][0]
+ periods = IMdata[list(IMdata.keys())[0]]['Periods'] # noqa: RUF015
+ im_ind = np.where(np.array(periods) == period)[0][0]
else:
- im_name = 'lnPGA'
- im_ind = 0
+ im_name = 'lnPGA' # noqa: F841
+ im_ind = 0 # noqa: F841
-def calc_hazard_contribution(IMdata, site_config, targetReturnPeriods, hc_data, im):
+
+def calc_hazard_contribution(IMdata, site_config, targetReturnPeriods, hc_data, im): # noqa: N803, D103
if im[0:2] == 'SA':
- period = float(im[2:].replace('P','.'))
+ period = float(im[2:].replace('P', '.'))
im_name = 'lnSA'
- periods = IMdata[list(IMdata.keys())[0]]['Periods']
- im_ind = np.where(np.array(periods)==period)[0][0]
+ periods = IMdata[list(IMdata.keys())[0]]['Periods'] # noqa: RUF015
+ im_ind = np.where(np.array(periods) == period)[0][0]
else:
im_name = 'lnPGA'
im_ind = 0
c_vect = np.zeros(len(IMdata))
- for j in tqdm(range(len(IMdata)), desc="Calculate "\
- f"Hazard Contribution of {len(IMdata)} scenarios"):
+ for j in tqdm(
+ range(len(IMdata)),
+ desc='Calculate ' f'Hazard Contribution of {len(IMdata)} scenarios',
+ ):
c_j = 0
scenario = IMdata[list(IMdata.keys())[j]]
- mar = scenario['MeanAnnualRate']
+ mar = scenario['MeanAnnualRate'] # noqa: F841
for r in range(len(targetReturnPeriods)):
for i in range(len(site_config)):
- lnIM = scenario['GroundMotions'][i][im_name]
- lnIM_mean = lnIM['Mean'][im_ind]
- lnIM_std = lnIM['TotalStdDev'][im_ind]
- y_ir = np.interp(targetReturnPeriods[r],
- np.array(hc_data[i]['ReturnPeriod']),
- np.array(hc_data[i]['IM']),
- left=hc_data[i]['ReturnPeriod'][0],
- right=hc_data[i]['ReturnPeriod'][-1])
+ lnIM = scenario['GroundMotions'][i][im_name] # noqa: N806
+ lnIM_mean = lnIM['Mean'][im_ind] # noqa: N806
+ lnIM_std = lnIM['TotalStdDev'][im_ind] # noqa: N806
+ y_ir = np.interp(
+ targetReturnPeriods[r],
+ np.array(hc_data[i]['ReturnPeriod']),
+ np.array(hc_data[i]['IM']),
+ left=hc_data[i]['ReturnPeriod'][0],
+ right=hc_data[i]['ReturnPeriod'][-1],
+ )
p_exceed = 1 - norm.cdf(np.log(y_ir), lnIM_mean, lnIM_std)
- normConstant = 0
+ normConstant = 0 # noqa: N806
for j2 in range(len(IMdata)):
pj = IMdata[list(IMdata.keys())[j2]]['MeanAnnualRate']
- lnIM2 = IMdata[list(IMdata.keys())[j2]]['GroundMotions'][i][im_name]
- lnIM_mean2 = lnIM2['Mean'][im_ind]
- lnIM_std2 = lnIM2['TotalStdDev'][im_ind]
+ lnIM2 = IMdata[list(IMdata.keys())[j2]]['GroundMotions'][i][ # noqa: N806
+ im_name
+ ]
+ lnIM_mean2 = lnIM2['Mean'][im_ind] # noqa: N806
+ lnIM_std2 = lnIM2['TotalStdDev'][im_ind] # noqa: N806
p_exceed2 = 1 - norm.cdf(np.log(y_ir), lnIM_mean2, lnIM_std2)
- normConstant += p_exceed2
+ normConstant += p_exceed2 # noqa: N806
c_j += pj * p_exceed / normConstant
c_vect[j] = c_j
return c_vect
-
-def calc_hazard_curves(IMdata, site_config, im):
+
+def calc_hazard_curves(IMdata, site_config, im): # noqa: N803, D103
if im[0:2] == 'SA':
- period = float(im[2:].replace('P','.'))
+ period = float(im[2:].replace('P', '.'))
im_name = 'lnSA'
- periods = IMdata[list(IMdata.keys())[0]]['Periods']
- im_ind = np.where(np.array(periods)==period)[0][0]
+ periods = IMdata[list(IMdata.keys())[0]]['Periods'] # noqa: RUF015
+ im_ind = np.where(np.array(periods) == period)[0][0]
else:
im_name = 'lnPGA'
im_ind = 0
- IMRange = np.power(10, np.linspace(-4, 2, 60))
- exceedRate = np.zeros((len(IMRange), len(site_config)))
- hc_data = [{'siteID':0,
- 'ReturnPeriod':list(exceedRate),
- 'IM':list(exceedRate)}]*len(site_config)
+ IMRange = np.power(10, np.linspace(-4, 2, 60)) # noqa: N806
+ exceedRate = np.zeros((len(IMRange), len(site_config))) # noqa: N806
+ hc_data = [
+ {'siteID': 0, 'ReturnPeriod': list(exceedRate), 'IM': list(exceedRate)}
+ ] * len(site_config)
scenario_idx = list(IMdata.keys())
- for scenario_ind in tqdm(range(len(scenario_idx)), desc="Calculate "\
- f"Hazard Curves from {len(scenario_idx)} scenarios"):
+ for scenario_ind in tqdm(
+ range(len(scenario_idx)),
+ desc='Calculate ' f'Hazard Curves from {len(scenario_idx)} scenarios',
+ ):
scenario = IMdata[scenario_idx[scenario_ind]]
mar = scenario['MeanAnnualRate']
for site_ind in range(len(site_config)):
- lnIM = scenario['GroundMotions'][site_ind][im_name]
- lnIM_mean = lnIM['Mean'][im_ind]
- lnIM_std = lnIM['TotalStdDev'][im_ind]
+ lnIM = scenario['GroundMotions'][site_ind][im_name] # noqa: N806
+ lnIM_mean = lnIM['Mean'][im_ind] # noqa: N806
+ lnIM_std = lnIM['TotalStdDev'][im_ind] # noqa: N806
p_exceed = 1 - norm.cdf(np.log(IMRange), lnIM_mean, lnIM_std)
rate_exceed = mar * p_exceed
- exceedRate[:, site_ind] = exceedRate[:, site_ind] + rate_exceed
- exceedRate[exceedRate<1e-20] = 1e-20
+ exceedRate[:, site_ind] = exceedRate[:, site_ind] + rate_exceed # noqa: PLR6104
+ exceedRate[exceedRate < 1e-20] = 1e-20 # noqa: PLR2004
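+    # Total exceedance rate nu(IM > x) = sum_j MAR_j * P(IM > x | scenario j); the
+    # hazard curve reports ReturnPeriod = 1 / nu, with nu floored at 1e-20 to avoid
+    # division by zero.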
for site_ind, site in enumerate(site_config):
- hc_data[site_ind] = {'SiteID': site['ID'],
- "ReturnPeriod":list(1/exceedRate[:,site_ind]),
- "IM":list(IMRange)
+ hc_data[site_ind] = {
+ 'SiteID': site['ID'],
+ 'ReturnPeriod': list(1 / exceedRate[:, site_ind]),
+ 'IM': list(IMRange),
}
return hc_data
-def calc_hazard_curves_hdf5(IMfile, im_list, site_config, im, scenarios):
+
+def calc_hazard_curves_hdf5(IMfile, im_list, site_config, im, scenarios): # noqa: N803, D103
im_ind = im_list.index(im)
- IMRange = np.power(10, np.linspace(-4, 2, 60))
- exceedRate = np.zeros((len(IMRange), len(site_config)))
- hc_data = [{'siteID':0,
- 'ReturnPeriod':list(exceedRate),
- 'IM':list(exceedRate)}]*len(site_config)
+ IMRange = np.power(10, np.linspace(-4, 2, 60)) # noqa: N806
+ exceedRate = np.zeros((len(IMRange), len(site_config))) # noqa: N806
+ hc_data = [
+ {'siteID': 0, 'ReturnPeriod': list(exceedRate), 'IM': list(exceedRate)}
+ ] * len(site_config)
scenario_idx = list(scenarios.keys())
- with h5py.File(IMfile, 'r') as IMdata:
- for scenario_ind in tqdm(range(len(scenario_idx)), desc="Calculate "\
- f"Hazard Curves from {len(scenario_idx)} scenarios"):
+ with h5py.File(IMfile, 'r') as IMdata: # noqa: N806
+ for scenario_ind in tqdm(
+ range(len(scenario_idx)),
+ desc='Calculate ' f'Hazard Curves from {len(scenario_idx)} scenarios',
+ ):
scenario_im = IMdata[str(scenario_idx[scenario_ind])]
mar = scenarios[scenario_idx[scenario_ind]]['MeanAnnualRate']
- lnIM_mean = scenario_im['Mean'][:,im_ind]
- lnIM_interStd = scenario_im['InterEvStdDev'][:,im_ind]
- lnIM_intraStd = scenario_im['IntraEvStdDev'][:,im_ind]
- lnIM_std = np.sqrt(lnIM_intraStd**2 + lnIM_interStd**2)
+ lnIM_mean = scenario_im['Mean'][:, im_ind] # noqa: N806
+ lnIM_interStd = scenario_im['InterEvStdDev'][:, im_ind] # noqa: N806
+ lnIM_intraStd = scenario_im['IntraEvStdDev'][:, im_ind] # noqa: N806
+ lnIM_std = np.sqrt(lnIM_intraStd**2 + lnIM_interStd**2) # noqa: N806
for site_ind in range(len(site_config)):
- p_exceed = 1 - norm.cdf(np.log(IMRange), lnIM_mean[site_ind],\
- lnIM_std[site_ind])
+ p_exceed = 1 - norm.cdf(
+ np.log(IMRange), lnIM_mean[site_ind], lnIM_std[site_ind]
+ )
rate_exceed = mar * p_exceed
- exceedRate[:, site_ind] = exceedRate[:, site_ind] + rate_exceed
- exceedRate[exceedRate<1e-20] = 1e-20
+ exceedRate[:, site_ind] = exceedRate[:, site_ind] + rate_exceed # noqa: PLR6104
+ exceedRate[exceedRate < 1e-20] = 1e-20 # noqa: PLR2004
for site_ind, site in enumerate(site_config):
- hc_data[site_ind] = {'SiteID': site['ID'],
- "ReturnPeriod":list(1/exceedRate[:,site_ind]),
- "IM":list(IMRange)
+ hc_data[site_ind] = {
+ 'SiteID': site['ID'],
+ 'ReturnPeriod': list(1 / exceedRate[:, site_ind]),
+ 'IM': list(IMRange),
}
return hc_data
-
-def get_hazard_curves(input_dir=None,
- input_csv=None,
- input_json=None):
+def get_hazard_curves(input_dir=None, input_csv=None, input_json=None): # noqa: D103
if input_dir is not None:
- return
+ return None
if input_csv is not None:
- df_hc = pd.read_csv(input_csv,header=None)
- num_sites = df_hc.shape[0]-1
- return_periods = df_hc.iloc[0,1:].to_numpy().tolist()
+ df_hc = pd.read_csv(input_csv, header=None)
+ num_sites = df_hc.shape[0] - 1
+ return_periods = df_hc.iloc[0, 1:].to_numpy().tolist()
hc_data = []
for i in range(num_sites):
- hc_data.append({
- "SiteID": i,
- "ReturnPeriod": return_periods,
- "IM": df_hc.iloc[i+1,1:].to_numpy().tolist()
- })
+ hc_data.append( # noqa: PERF401
+ {
+ 'SiteID': i,
+ 'ReturnPeriod': return_periods,
+ 'IM': df_hc.iloc[i + 1, 1:].to_numpy().tolist(),
+ }
+ )
return hc_data
- if input_json is not None:
- with open(input_json, 'r') as f:
+ if input_json is not None: # noqa: RET503
+ with open(input_json) as f: # noqa: PLW1514, PTH123
hc_data = json.load(f)
- return hc_data
+ return hc_data # noqa: RET504
# KZ-08/23/22: adding a function for computing exceeding probability at an im level
-def get_im_exceedance_probility(IMfile,
- im_list,
- im_type,
- period,
- im_level,
- scenario_idx):
-
+def get_im_exceedance_probility( # noqa: C901, D103
+ IMfile, # noqa: N803
+ im_list,
+ im_type,
+ period,
+ im_level,
+ scenario_idx,
+):
# number of scenarios
num_scen = len(scenario_idx)
# number of intensity levels
num_rps = im_level.shape[1]
-
+
# initialize output
if IMfile.lower().endswith('.json'):
- with open(IMfile, 'r') as f:
+ with open(IMfile) as f: # noqa: PLW1514, PTH123
im_raw = json.load(f)
num_sites = len(im_raw[scenario_idx[0]].get('GroundMotions'))
elif IMfile.lower().endswith('.hdf5'):
with h5py.File(IMfile, 'r') as f:
num_sites = f[str(scenario_idx[0])]['Mean'].shape[0]
-
- im_exceedance_prob = np.zeros((num_sites,num_scen,num_rps))
-
+
+ im_exceedance_prob = np.zeros((num_sites, num_scen, num_rps))
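+    # im_exceedance_prob[i, k, r] = P(IM at site i > im_level[i, r] | scenario k),
+    # computed below assuming a lognormal IM with the reported mean and total
+    # standard deviation.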
+
if IMfile.lower().endswith('.json'):
if im_type == 'PGA':
if 'PGA' not in im_raw[scenario_idx[0]]['IM']:
- print('IM_Calculator.get_im_exceedance_probility: error - IM {} does not match to {}.'.\
- format(period,im_raw[scenario_idx[0]].get('IM')))
+ print( # noqa: T201
+ 'IM_Calculator.get_im_exceedance_probility: error - IM {} does not match to {}.'.format(
+ period, im_raw[scenario_idx[0]].get('IM')
+ )
+ )
return im_exceedance_prob
- else:
- periodID = 0
+ else: # noqa: RET505
+ periodID = 0 # noqa: N806
+ elif period not in im_raw[scenario_idx[0]].get('Periods'):
+ print( # noqa: T201
+ 'IM_Calculator.get_im_exceedance_probility: error - period {} does not match to {}.'.format(
+ period, im_raw[scenario_idx[0]].get('Periods')
+ )
+ )
+ return im_exceedance_prob
else:
- if period not in im_raw[scenario_idx[0]].get('Periods'):
- print('IM_Calculator.get_im_exceedance_probility: error - period {} does not match to {}.'.\
- format(period,im_raw[scenario_idx[0]].get('Periods')))
- return im_exceedance_prob
- else:
- periodID = im_raw[scenario_idx[0]].get('Periods').index(period)
+ periodID = im_raw[scenario_idx[0]].get('Periods').index(period) # noqa: N806
# start to compute the exceedance probability
for k in range(num_scen):
- allGM = im_raw[scenario_idx[k]].get('GroundMotions')
+ allGM = im_raw[scenario_idx[k]].get('GroundMotions') # noqa: N806
for i in range(num_sites):
- curIM = allGM[i].get('ln{}'.format(im_type))
- curMean = curIM.get('Mean')[periodID]
- curStd = curIM.get('TotalStdDev')[periodID]
- im_exceedance_prob[i,k,:] = 1.0-norm.cdf(np.log(im_level[i,:]),loc=curMean,scale=curStd)
+ curIM = allGM[i].get(f'ln{im_type}') # noqa: N806
+ curMean = curIM.get('Mean')[periodID] # noqa: N806
+ curStd = curIM.get('TotalStdDev')[periodID] # noqa: N806
+ im_exceedance_prob[i, k, :] = 1.0 - norm.cdf(
+ np.log(im_level[i, :]), loc=curMean, scale=curStd
+ )
elif IMfile.lower().endswith('.hdf5'):
if im_type == 'PGA':
im_name = 'PGA'
elif im_type == 'SA':
if isinstance(period, int) or period.is_integer():
- im_name = 'SA({})'.format(str(int(period)))
+ im_name = f'SA({int(period)!s})'
else:
- im_name = 'SA({})'.format(str(period))
+ im_name = f'SA({period!s})'
else:
- SystemExit(f'{im_type} is not supported in hazard downsampling')
+            raise SystemExit(f'{im_type} is not supported in hazard downsampling')
if im_name not in im_list:
- print('IM_Calculator.get_im_exceedance_probility: error - intensity measure {} does not match to {}.'.\
- format(im_name,im_list))
+ print( # noqa: T201
+ f'IM_Calculator.get_im_exceedance_probility: error - intensity measure {im_name} does not match to {im_list}.'
+ )
return im_exceedance_prob
im_ind = im_list.index(im_name)
with h5py.File(IMfile, 'r') as im_raw:
for k in range(num_scen):
- curIM = im_raw[str(scenario_idx[k])]
+ curIM = im_raw[str(scenario_idx[k])] # noqa: N806
for i in range(num_sites):
- curMean = curIM['Mean'][i, im_ind]
- curInterStd = curIM['InterEvStdDev'][i, im_ind]
- curIntraStd = curIM['IntraEvStdDev'][i, im_ind]
- curStd = np.sqrt(curInterStd**2 + curIntraStd**2)
- im_exceedance_prob[i,k,:] = 1.0-norm.cdf(
- np.log(im_level[i,:]),loc=curMean,scale=curStd)
+ curMean = curIM['Mean'][i, im_ind] # noqa: N806
+ curInterStd = curIM['InterEvStdDev'][i, im_ind] # noqa: N806
+ curIntraStd = curIM['IntraEvStdDev'][i, im_ind] # noqa: N806
+ curStd = np.sqrt(curInterStd**2 + curIntraStd**2) # noqa: N806
+ im_exceedance_prob[i, k, :] = 1.0 - norm.cdf(
+ np.log(im_level[i, :]), loc=curMean, scale=curStd
+ )
# return
return im_exceedance_prob
-def get_im_exceedance_probability_gm(im_raw,
- im_list,
- im_type,
- period,
- im_level,
- mar_scen):
-
+def get_im_exceedance_probability_gm( # noqa: D103
+ im_raw,
+ im_list,
+ im_type,
+ period,
+ im_level,
+ mar_scen,
+):
# get periodID
for i in range(len(im_list)):
if im_type in im_list[i]:
if im_type == 'SA' and float(im_list[i].split('(')[1][:-1]) == period:
- periodID = i
+ periodID = i # noqa: N806
break
- else:
- periodID = i
+ else: # noqa: RET508
+ periodID = i # noqa: N806
# number of intensity levels
num_rps = im_level.shape[1]
@@ -442,69 +495,81 @@ def get_im_exceedance_probability_gm(im_raw,
num_scen = len(im_raw)
num_site = im_raw[0].shape[0]
num_simu = im_raw[0].shape[-1]
- im_exceedance_prob = np.zeros((num_site,num_simu*num_scen,num_rps))
- #print('im_exceedance_prob_gm.shape=',im_exceedance_prob)
- occurrence_rate = [None] * num_simu*num_scen
+ im_exceedance_prob = np.zeros((num_site, num_simu * num_scen, num_rps))
+ # print('im_exceedance_prob_gm.shape=',im_exceedance_prob)
+ occurrence_rate = [None] * num_simu * num_scen
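+    # For sampled ground-motion maps the exceedance entries are 0/1 indicators per
+    # realization, and each of the num_simu maps of scenario i inherits an
+    # occurrence rate of mar_scen[i] / num_simu.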
for i in range(num_scen):
for j in range(num_site):
- curIM = im_raw[i][j,periodID,:]
+ curIM = im_raw[i][j, periodID, :] # noqa: N806
for k in range(num_simu):
- im_exceedance_prob[j,i*num_simu+k,:] = [int(x) for x in curIM[k]>im_level[j,:]]
- occurrence_rate[i*num_simu+k] = mar_scen[i]/num_simu
+ im_exceedance_prob[j, i * num_simu + k, :] = [
+ int(x) for x in curIM[k] > im_level[j, :]
+ ]
+ occurrence_rate[i * num_simu + k] = mar_scen[i] / num_simu
# return
return im_exceedance_prob, occurrence_rate
-def sample_earthquake_occurrence(model_type,
- num_target_eqs,
- return_periods,
- im_exceedance_prob,
- reweight_only,
- occurence_rate_origin,
- hzo_config):
-
+def sample_earthquake_occurrence( # noqa: D103
+ model_type,
+ num_target_eqs,
+ return_periods,
+ im_exceedance_prob,
+ reweight_only,
+ occurence_rate_origin,
+ hzo_config,
+):
# model type
if model_type == 'Manzour & Davidson (2016)':
# create occurrence model
- om = OccurrenceModel_ManzourDavidson2016(return_periods=return_periods,
- im_exceedance_probs=im_exceedance_prob,
- num_scenarios=num_target_eqs,
- reweight_only=reweight_only,
- occurence_rate_origin=occurence_rate_origin)
+ om = OccurrenceModel_ManzourDavidson2016(
+ return_periods=return_periods,
+ im_exceedance_probs=im_exceedance_prob,
+ num_scenarios=num_target_eqs,
+ reweight_only=reweight_only,
+ occurence_rate_origin=occurence_rate_origin,
+ )
# solve the optimiation
om.solve_opt()
elif model_type == 'Wang et al. (2023)':
# create occurrence model
- om = OccurrenceModel_Wangetal2023(return_periods=return_periods,
- im_exceedance_probs=im_exceedance_prob,
- num_scenarios=num_target_eqs,
- reweight_only=reweight_only,
- occurence_rate_origin=occurence_rate_origin,
- hzo_config = hzo_config)
+ om = OccurrenceModel_Wangetal2023(
+ return_periods=return_periods,
+ im_exceedance_probs=im_exceedance_prob,
+ num_scenarios=num_target_eqs,
+ reweight_only=reweight_only,
+ occurence_rate_origin=occurence_rate_origin,
+ hzo_config=hzo_config,
+ )
# solve the optimiation
om.solve_opt()
else:
- print('HazardOccurrence.get_im_exceedance_probility: {} is not available yet.')
+ print( # noqa: T201
+            f'HazardOccurrence.sample_earthquake_occurrence: {model_type} is not available yet.'
+ )
return None
return om
-def export_sampled_earthquakes(error, id_selected_eqs, eqdata, P, output_dir=None):
- probabilityWeight = [P[x] for x in id_selected_eqs]
+
+
+def export_sampled_earthquakes(error, id_selected_eqs, eqdata, P, output_dir=None): # noqa: N803, D103
+ probabilityWeight = [P[x] for x in id_selected_eqs] # noqa: N806
selected_eqs = []
for i in id_selected_eqs:
- selected_eqs.append(eqdata[i])
+ selected_eqs.append(eqdata[i]) # noqa: PERF401
dict_selected_eqs = {
'EarthquakeNumber': len(id_selected_eqs),
'EarthquakeID': id_selected_eqs,
'EarthquakeInfo': selected_eqs,
'ProbabilityWeight': probabilityWeight,
- 'MeanSquareError':error.tolist()
+ 'MeanSquareError': error.tolist(),
}
if output_dir is not None:
- with open(os.path.join(output_dir,'RupSampled.json'), 'w') as f:
+ with open(os.path.join(output_dir, 'RupSampled.json'), 'w') as f: # noqa: PLW1514, PTH118, PTH123
json.dump(dict_selected_eqs, f, indent=2)
+
# def export_sampled_earthquakes(occ_dict, im_raw, site_config, id_selected_eqs, eqdata, P, output_dir=None):
# probabilityWeight = [P[x] for x in id_selected_eqs]
# period = occ_dict.get('Period',0.0)
@@ -545,23 +610,23 @@ def export_sampled_earthquakes(error, id_selected_eqs, eqdata, P, output_dir=Non
# if output_dir is not None:
# with open(os.path.join(output_dir,'RupSampled.json'), 'w') as f:
# json.dump(dict_selected_eqs, f, indent=2)
-
-
-class OccurrenceModel_ManzourDavidson2016:
-
- def __init__(self,
- return_periods = [],
- im_exceedance_probs = [],
- num_scenarios = -1,
- reweight_only = False,
- occurence_rate_origin = None):
- """
- __init__: initialization a hazard occurrence optimizer
+
+
+class OccurrenceModel_ManzourDavidson2016: # noqa: D101
+ def __init__(
+ self,
+ return_periods=[], # noqa: B006
+ im_exceedance_probs=[], # noqa: B006
+ num_scenarios=-1,
+ reweight_only=False, # noqa: FBT002
+ occurence_rate_origin=None,
+ ):
+ """__init__: initialization a hazard occurrence optimizer
:param return_periods: 1-D array of return periods, RP(r)
:param earthquake_mafs: 1-D array of annual occurrence probability, MAF(j)
:param im_exceedance_probs: 3-D array of exceedance probability of Sa, EP(i,j,r) for site #i, earthquake #j, return period #r
:param num_scenarios: integer for number of target scenarios
- """
+ """ # noqa: D205, D400
# read input parameters
self.return_periods = return_periods
self.im_exceedance_probs = im_exceedance_probs
@@ -572,52 +637,65 @@ def __init__(self,
# check input parameters
self.input_valid = self._input_check()
if not self.input_valid:
- print('OccurrenceModel_ManzourDavidson2016.__init__: at least one input parameter invalid.')
+ print( # noqa: T201
+ 'OccurrenceModel_ManzourDavidson2016.__init__: at least one input parameter invalid.'
+ )
return
def _input_check(self):
- """
- _input_check: check of input parameters
- """
+ """_input_check: check of input parameters""" # noqa: D400
# number of return periods
if len(self.return_periods) > 0:
self.num_return_periods = len(self.return_periods)
- print('OccurrenceModel_ManzourDavidson2016._input_check: number of return periods = {}.'.format(self.num_return_periods))
+ print( # noqa: T201
+ f'OccurrenceModel_ManzourDavidson2016._input_check: number of return periods = {self.num_return_periods}.'
+ )
else:
- print('OccurrenceModel_ManzourDavidson2016._input_check: no return period is defined.')
+ print( # noqa: T201
+ 'OccurrenceModel_ManzourDavidson2016._input_check: no return period is defined.'
+ )
return False
# shape of exceedance probability
- if len(self.im_exceedance_probs.shape) != 3:
- print('OccurrenceModel_ManzourDavidson2016._input_check: exceedance probability array should be 3-D.')
+ if len(self.im_exceedance_probs.shape) != 3: # noqa: PLR2004
+ print( # noqa: T201
+ 'OccurrenceModel_ManzourDavidson2016._input_check: exceedance probability array should be 3-D.'
+ )
+ return False
+ elif self.im_exceedance_probs.shape[-1] != len(self.return_periods): # noqa: RET505
+ print( # noqa: T201
+ 'OccurrenceModel_ManzourDavidson2016._input_check: exceedance probability array should have dimensions of (#site, #eq, #return_period).'
+ )
return False
else:
- if self.im_exceedance_probs.shape[-1] != len(self.return_periods):
- print('OccurrenceModel_ManzourDavidson2016._input_check: exceedance probability array should have dimensions of (#site, #eq, #return_period).')
- return False
- else:
- self.num_sites = self.im_exceedance_probs.shape[0]
- print('OccurrenceModel_ManzourDavidson2016._input_check: number of sites = {}.'.format(self.num_sites))
+ self.num_sites = self.im_exceedance_probs.shape[0]
+ print( # noqa: T201
+ f'OccurrenceModel_ManzourDavidson2016._input_check: number of sites = {self.num_sites}.'
+ )
# number of target scenarios
if self.num_scenarios <= 0:
- print('OccurrenceModel_ManzourDavidson2016._input_check: number of target scenarios should be positive.')
+ print( # noqa: T201
+ 'OccurrenceModel_ManzourDavidson2016._input_check: number of target scenarios should be positive.'
+ )
return False
- else:
+ else: # noqa: RET505
# initialize outputs
init_flag = False
init_flag = self._opt_initialization()
if init_flag:
- print('OccurrenceModel_ManzourDavidson2016._input_check: initialization completed.')
+ print( # noqa: T201
+ 'OccurrenceModel_ManzourDavidson2016._input_check: initialization completed.'
+ )
return True
- else:
- print('OccurrenceModel_ManzourDavidson2016._input_check: initialization errors.')
+ else: # noqa: RET505
+ print( # noqa: T201
+ 'OccurrenceModel_ManzourDavidson2016._input_check: initialization errors.'
+ )
return False
def _opt_initialization(self):
- """
- _opt_initialization: intialization of optimization problem
- """
+ """_opt_initialization: initialization of optimization problem""" # noqa: D400
# the problem is mixed integer program
- self.prob = pulp.LpProblem("MIP", pulp.LpMinimize)
+ self.prob = pulp.LpProblem('MIP', pulp.LpMinimize)
# variables
self.e_plus = {}
@@ -626,100 +704,124 @@ def _opt_initialization(self):
self.e_minus_name = {}
for i in range(self.num_sites):
for j in range(self.num_return_periods):
- self.e_plus_name[i,j] = 'ep-{}-{}'.format(i,j)
- self.e_minus_name[i,j] = 'en-{}-{}'.format(i,j)
- self.e_plus[i,j] = pulp.LpVariable(self.e_plus_name[i,j], 0, None)
- self.e_minus[i,j] = pulp.LpVariable(self.e_minus_name[i,j], 0, None)
+ self.e_plus_name[i, j] = f'ep-{i}-{j}'
+ self.e_minus_name[i, j] = f'en-{i}-{j}'
+ self.e_plus[i, j] = pulp.LpVariable(self.e_plus_name[i, j], 0, None)
+ self.e_minus[i, j] = pulp.LpVariable(
+ self.e_minus_name[i, j], 0, None
+ )
self.P = {}
self.Z = {}
self.P_name = {}
self.Z_name = {}
for i in range(self.num_eqs):
- self.P_name[i] = 'p-{}'.format(i)
- self.Z_name[i] = 'z-{}'.format(i)
+ self.P_name[i] = f'p-{i}'
+ self.Z_name[i] = f'z-{i}'
if self.reweight_only:
- self.P[i] = pulp.LpVariable(self.P_name[i], self.occurence_rate_origin[i], 1)
+ self.P[i] = pulp.LpVariable(
+ self.P_name[i], self.occurence_rate_origin[i], 1
+ )
else:
self.P[i] = pulp.LpVariable(self.P_name[i], 0, 1)
self.Z[i] = pulp.LpVariable(self.Z_name[i], 0, 1, pulp.LpBinary)
# objective function
- comb_sites_rps = list(itertools.product(range(self.num_sites),range(self.num_return_periods)))
- self.prob += pulp.lpSum(self.return_periods[j]*self.e_plus[(i,j)]+self.return_periods[j]*self.e_minus[(i,j)] for (i,j) in comb_sites_rps)
+ comb_sites_rps = list(
+ itertools.product(range(self.num_sites), range(self.num_return_periods))
+ )
+ self.prob += pulp.lpSum(
+ self.return_periods[j] * self.e_plus[(i, j)]
+ + self.return_periods[j] * self.e_minus[(i, j)]
+ for (i, j) in comb_sites_rps
+ )
# constraints
for i in range(self.num_sites):
for j in range(self.num_return_periods):
- self.prob += pulp.lpSum(self.P[k]*self.im_exceedance_probs[i,k,j] for k in range(self.num_eqs))+self.e_minus[i,j]-self.e_plus[i,j] == 1.0/self.return_periods[j]
+ self.prob += (
+ pulp.lpSum(
+ self.P[k] * self.im_exceedance_probs[i, k, j]
+ for k in range(self.num_eqs)
+ )
+ + self.e_minus[i, j]
+ - self.e_plus[i, j]
+ == 1.0 / self.return_periods[j]
+ )
if not self.reweight_only:
for i in range(self.num_eqs):
- self.prob += self.P[i]-self.Z[i] <= 0
+ self.prob += self.P[i] - self.Z[i] <= 0
- self.prob += pulp.lpSum(self.Z[i] for i in range(self.num_eqs)) <= self.num_scenarios
+ self.prob += (
+ pulp.lpSum(self.Z[i] for i in range(self.num_eqs))
+ <= self.num_scenarios
+ )
return True
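For readability, the mixed-integer program assembled by this method can be restated compactly (read directly from the code above; indices follow the loops: i sites, j return periods, k events; P_k is the occurrence rate assigned to event k, Z_k its binary selection flag, e+/e- the slack errors, EP the exceedance probabilities, RP the return periods):

\begin{aligned}
\min\ & \sum_{i,j} RP_j \left( e^{+}_{i,j} + e^{-}_{i,j} \right) \\
\text{s.t.}\ & \sum_{k} P_k \, EP(i,k,j) + e^{-}_{i,j} - e^{+}_{i,j} = 1/RP_j \quad \forall\, i, j \\
& P_k \le Z_k, \quad \sum_k Z_k \le \text{num\_scenarios}, \quad Z_k \in \{0,1\}, \quad 0 \le P_k \le 1, \quad e^{\pm}_{i,j} \ge 0
\end{aligned}

When reweight_only is set, the selection constraints in the last row are omitted and each P_k is bounded below by its original occurrence rate, matching the branches above.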
-
def solve_opt(self):
- """
- target_function: compute the target function to be minimized
+ """target_function: compute the target function to be minimized
:param X: 2-D array of annual occurrence probability of earthquakes and corresponding binary variables (many values are reduced to zeros)
- """
- maximum_runtime = 1*60*60 # 1 hours maximum
+ """ # noqa: D205, D400
+ maximum_runtime = 1 * 60 * 60 # 1 hour maximum
self.prob.solve(pulp.PULP_CBC_CMD(timeLimit=maximum_runtime, gapRel=0.001))
- print("Status:", pulp.LpStatus[self.prob.status])
+ print('Status:', pulp.LpStatus[self.prob.status]) # noqa: T201
- def get_selected_earthquake(self):
-
- P_selected = [self.P[i].varValue for i in range(self.num_eqs)]
+ def get_selected_earthquake(self): # noqa: D102
+ P_selected = [self.P[i].varValue for i in range(self.num_eqs)] # noqa: N806
if self.reweight_only:
- Z_selected = [1 for i in range(self.num_eqs)]
+ Z_selected = [1 for i in range(self.num_eqs)] # noqa: N806
else:
- Z_selected = [self.Z[i].varValue for i in range(self.num_eqs)]
+ Z_selected = [self.Z[i].varValue for i in range(self.num_eqs)] # noqa: N806
return P_selected, Z_selected
- def get_error_vector(self):
-
+ def get_error_vector(self): # noqa: D102
e_plus_selected = np.zeros([self.num_sites, self.num_return_periods])
e_minus_selected = np.zeros([self.num_sites, self.num_return_periods])
for i in range(self.num_sites):
for j in range(self.num_return_periods):
- e_plus_selected[i, j] = self.e_plus[i,j].varValue
- e_minus_selected[i, j] = self.e_minus[i,j].varValue
- error = ((e_plus_selected-e_minus_selected)**2).sum(axis = 1)/self.num_return_periods
- return error
-
- def export_sampled_gmms(self, id_selected_gmms, id_selected_scens, P, output_dir=None):
-
+ e_plus_selected[i, j] = self.e_plus[i, j].varValue
+ e_minus_selected[i, j] = self.e_minus[i, j].varValue
+ error = ((e_plus_selected - e_minus_selected) ** 2).sum(
+ axis=1
+ ) / self.num_return_periods
+ return error # noqa: RET504
+
+ def export_sampled_gmms( # noqa: D102
+ self,
+ id_selected_gmms,
+ id_selected_scens,
+ P, # noqa: N803
+ output_dir=None,
+ ):
dict_selected_gmms = {
'EarthquakeID': id_selected_scens.astype(int).tolist(),
'ProbabilityWeight': [P[x] for x in id_selected_gmms],
- 'MeanSquareError': self.get_error_vector().tolist()
+ 'MeanSquareError': self.get_error_vector().tolist(),
}
if output_dir is not None:
- with open(os.path.join(output_dir,'InfoSampledGM.json'), 'w') as f:
+ with open(os.path.join(output_dir, 'InfoSampledGM.json'), 'w') as f: # noqa: PLW1514, PTH118, PTH123
json.dump(dict_selected_gmms, f, indent=2)
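For reference, a small sketch of reading the exported file back (illustrative only; the key names follow the dict built in export_sampled_gmms above, and output_dir is a placeholder for whatever directory was passed to that method):

import json
import os

output_dir = '.'  # placeholder: wherever export_sampled_gmms wrote the file
with open(os.path.join(output_dir, 'InfoSampledGM.json')) as f:
    info = json.load(f)
eq_ids = info['EarthquakeID']        # selected earthquake indices
weights = info['ProbabilityWeight']  # occurrence weights of the selection
mse = info['MeanSquareError']        # per-site mean-square hazard-curve error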
-class OccurrenceModel_Wangetal2023:
-
- def __init__(self,
- return_periods = [],
- im_exceedance_probs = [],
- num_scenarios = -1,
- reweight_only = False,
- occurence_rate_origin = None,
- hzo_config = None):
- """
- __init__: initialization a hazard occurrence optimizer
+class OccurrenceModel_Wangetal2023: # noqa: D101
+ def __init__(
+ self,
+ return_periods=[], # noqa: B006
+ im_exceedance_probs=[], # noqa: B006
+ num_scenarios=-1,
+ reweight_only=False, # noqa: FBT002
+ occurence_rate_origin=None,
+ hzo_config=None,
+ ):
+ """__init__: initialization a hazard occurrence optimizer
:param return_periods: 1-D array of return periods, RP(r)
:param earthquake_mafs: 1-D array of annual occurrence probability, MAF(j)
:param im_exceedance_probs: 3-D array of exceedance probability of Sa, EP(i,j,r) for site #i, earthquake #j, return period #r
:param num_scenarios: integer for number of target scenarios
- """
+ """ # noqa: D205, D400
# read input parameters
self.return_periods = return_periods
self.im_exceedance_probs = im_exceedance_probs
@@ -734,53 +836,70 @@ def __init__(self,
# check input parameters
self.input_valid = self._input_check()
if not self.input_valid:
- print('OccurrenceModel_Wangetal2023.__init__: at least one input parameter invalid.')
+ print( # noqa: T201
+ 'OccurrenceModel_Wangetal2023.__init__: at least one input parameter invalid.'
+ )
return
def _input_check(self):
- """
- _input_check: check of input parameters
- """
+ """_input_check: check of input parameters""" # noqa: D400
# number of return periods
if len(self.return_periods) > 0:
self.num_return_periods = len(self.return_periods)
- print('OccurrenceModel_Wangetal2023._input_check: number of return periods = {}.'.format(self.num_return_periods))
+ print( # noqa: T201
+ f'OccurrenceModel_Wangetal2023._input_check: number of return periods = {self.num_return_periods}.'
+ )
else:
- print('OccurrenceModel_Wangetal2023._input_check: no return period is defined.')
+ print( # noqa: T201
+ 'OccurrenceModel_Wangetal2023._input_check: no return period is defined.'
+ )
return False
# shape of exceedance probability
- if len(self.im_exceedance_probs.shape) != 3:
- print('OccurrenceModel_Wangetal2023._input_check: exceedance probability array should be 3-D.')
+ if len(self.im_exceedance_probs.shape) != 3: # noqa: PLR2004
+ print( # noqa: T201
+ 'OccurrenceModel_Wangetal2023._input_check: exceedance probability array should be 3-D.'
+ )
+ return False
+ elif self.im_exceedance_probs.shape[-1] != len(self.return_periods): # noqa: RET505
+ print( # noqa: T201
+ 'OccurrenceModel_Wangetal2023._input_check: exceedance probability array should have dimensions of (#site, #eq, #return_period).'
+ )
return False
else:
- if self.im_exceedance_probs.shape[-1] != len(self.return_periods):
- print('OccurrenceModel_Wangetal2023._input_check: exceedance probability array should have dimensions of (#site, #eq, #return_period).')
- return False
- else:
- self.num_sites = self.im_exceedance_probs.shape[0]
- print('OccurrenceModel_Wangetal2023._input_check: number of sites = {}.'.format(self.num_sites))
+ self.num_sites = self.im_exceedance_probs.shape[0]
+ print( # noqa: T201
+ f'OccurrenceModel_Wangetal2023._input_check: number of sites = {self.num_sites}.'
+ )
# number of target scenarios
if self.num_scenarios <= 0:
- print('OccurrenceModel_Wangetal2023._input_check: number of target scenarios should be positive.')
+ print( # noqa: T201
+ 'OccurrenceModel_Wangetal2023._input_check: number of target scenarios should be positive.'
+ )
return False
- else:
+ else: # noqa: RET505
# initialize outputs
init_flag = False
init_flag = self._opt_initialization()
if init_flag:
- print('OccurrenceModel_Wangetal2023._input_check: initialization completed.')
+ print( # noqa: T201
+ 'OccurrenceModel_Wangetal2023._input_check: initialization completed.'
+ )
return True
- else:
- print('OccurrenceModel_Wangetal2023._input_check: initialization errors.')
+ else: # noqa: RET505
+ print( # noqa: T201
+ 'OccurrenceModel_Wangetal2023._input_check: initialization errors.'
+ )
return False
def _opt_initialization(self):
- """
- _opt_initialization: intialization of LASSO regression
- """
+ """_opt_initialization: initialization of LASSO regression""" # noqa: D400
# define X
- self.X_P = self.im_exceedance_probs.transpose(1,0,2).reshape(self.im_exceedance_probs.shape[1],-1).T
- self.y = 1/np.tile(self.return_periods,self.im_exceedance_probs.shape[0])
+ self.X_P = (
+ self.im_exceedance_probs.transpose(1, 0, 2)
+ .reshape(self.im_exceedance_probs.shape[1], -1)
+ .T
+ )
+ self.y = 1 / np.tile(self.return_periods, self.im_exceedance_probs.shape[0])
# define weights
self.W = np.diag(np.sqrt(1 / self.y))
@@ -788,7 +907,8 @@ def _opt_initialization(self):
# rate matrix for events
# self.occurence_rate_origin_mat = np.repeat(self.occurence_rate_origin, self.X_P.shape[0]).reshape(self.X_P.shape[0], -1)
self.occurence_rate_origin_mat = np.vstack(
- [np.array(self.occurence_rate_origin)]*self.X_P.shape[0])
+ [np.array(self.occurence_rate_origin)] * self.X_P.shape[0]
+ )
# hazard by each event
self.X = self.X_P * self.occurence_rate_origin_mat
@@ -799,50 +919,73 @@ def _opt_initialization(self):
return True
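In short, this initialization casts scenario selection as a positivity-constrained LASSO. With X_w and y_w denoting the W-weighted hazard matrix and target exceedance rates assembled from the quantities above, the path solver in solve_opt below searches

\min_{w \ge 0} \ \frac{1}{2n} \lVert y_w - X_w\, w \rVert_2^2 + \alpha \lVert w \rVert_1

over a sequence of penalties \alpha (the 1/(2n) factor is scikit-learn's lasso_path convention); the recovered occurrence rates are the nonzero coefficients scaled by the original event rates, as done in get_selected_earthquake.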
def solve_opt(self):
- """
- LASSO regression
- """
+ """LASSO regression""" # noqa: D400
if self.alpha_path:
- self.alphas,self.coefs,_ = lasso_path(X=self.X_weighted, y=self.y_weighted, alphas=self.alpha_path, positive=True)
- else:
- self.alphas,self.coefs,_ = lasso_path(X=self.X_weighted, y=self.y_weighted, eps=1e-4, n_alphas=1000, alphas=None, positive=True)
-
- # re-regression may be needed here !!!
+ self.alphas, self.coefs, _ = lasso_path(
+ X=self.X_weighted,
+ y=self.y_weighted,
+ alphas=self.alpha_path,
+ positive=True,
+ )
+ else:
+ self.alphas, self.coefs, _ = lasso_path(
+ X=self.X_weighted,
+ y=self.y_weighted,
+ eps=1e-4,
+ n_alphas=1000,
+ alphas=None,
+ positive=True,
+ )
- def get_selected_earthquake(self):
+ # re-regression may be needed here !!!
+ def get_selected_earthquake(self): # noqa: D102
# calculate the number of selected events for each step
- self.num_selected = [sum(x > 0 for x in self.coefs[:,i]) for i in range(self.coefs.shape[1])]
+ self.num_selected = [
+ sum(x > 0 for x in self.coefs[:, i]) for i in range(self.coefs.shape[1])
+ ]
# find the selection such that the number of selected events is closest to the user defined target number of scenarios
# the flip() is used to find the last one which has the closest number of selected events to the target value.
- self.selected_alpha_ind = self.num_selected.__len__() - 1 - np.abs(np.flip(self.num_selected) - self.num_scenarios).argmin()
+ self.selected_alpha_ind = (
+ self.num_selected.__len__() # noqa: PLC2801
+ - 1
+ - np.abs(np.flip(self.num_selected) - self.num_scenarios).argmin()
+ )
if self.num_selected[self.selected_alpha_ind] == 0:
- sys.exit("ERROR: Zero scenarios/ground motions are selected in Wang et al. (2023).\n"+
- f"The tunnling parameter used is {self.alphas[self.selected_alpha_ind]}.\n"+
- "Try using a smaller tunning parameter.")
- self.Rate_selected = self.coefs[:,self.selected_alpha_ind] * self.occurence_rate_origin
- self.Z_selected = self.coefs[:,self.selected_alpha_ind]>0
+ sys.exit(
+ 'ERROR: Zero scenarios/ground motions are selected in Wang et al. (2023).\n' # noqa: ISC003
+ + f'The tuning parameter used is {self.alphas[self.selected_alpha_ind]}.\n'
+ + 'Try using a smaller tuning parameter.'
+ )
+ self.Rate_selected = (
+ self.coefs[:, self.selected_alpha_ind] * self.occurence_rate_origin
+ )
+ self.Z_selected = self.coefs[:, self.selected_alpha_ind] > 0
return self.Rate_selected, self.Z_selected
- def get_error_vector(self):
-
+ def get_error_vector(self): # noqa: D102
# self.e_selected = self.y - np.dot(self.X, self.coefs[:,self.selected_alpha_ind])
- error = self.y - self.X.sum(axis = 1)
+ error = self.y - self.X.sum(axis=1)
error = error.reshape(self.num_sites, self.num_return_periods)
- error = (error**2).sum(axis = 1)/self.num_return_periods
- return error
-
- def export_sampled_gmms(self, id_selected_gmms, id_selected_scens, P, output_dir=None):
-
+ error = (error**2).sum(axis=1) / self.num_return_periods
+ return error # noqa: RET504
+
+ def export_sampled_gmms( # noqa: D102
+ self,
+ id_selected_gmms,
+ id_selected_scens,
+ P, # noqa: N803
+ output_dir=None,
+ ):
dict_selected_gmms = {
'EarthquakeID': id_selected_scens.astype(int).tolist(),
'ProbabilityWeight': [P[x] for x in id_selected_gmms],
- 'LassoTuningParameter':self.alphas[self.selected_alpha_ind],
- 'MeanSquareError': self.get_error_vector().tolist()
+ 'LassoTuningParameter': self.alphas[self.selected_alpha_ind],
+ 'MeanSquareError': self.get_error_vector().tolist(),
}
if output_dir is not None:
- with open(os.path.join(output_dir,'InfoSampledGM.json'), 'w') as f:
- json.dump(dict_selected_gmms, f, indent=2)
\ No newline at end of file
+ with open(os.path.join(output_dir, 'InfoSampledGM.json'), 'w') as f: # noqa: PLW1514, PTH118, PTH123
+ json.dump(dict_selected_gmms, f, indent=2)
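A minimal usage sketch of the MILP-based occurrence model defined above, on synthetic inputs (illustrative only: the array shapes follow the __init__ docstring, the values and names such as exceed_probs are placeholders, and numpy and pulp are assumed importable as in this module):

import numpy as np

rng = np.random.default_rng(0)
return_periods = [72, 475, 2475]
# exceedance probabilities with shape (#site, #eq, #return_period)
exceed_probs = rng.uniform(1e-4, 1e-2, size=(5, 20, len(return_periods)))

model = OccurrenceModel_ManzourDavidson2016(
    return_periods=return_periods,
    im_exceedance_probs=exceed_probs,
    num_scenarios=4,
)
if model.input_valid:
    model.solve_opt()  # solves the MIP with the CBC solver bundled with pulp
    P, Z = model.get_selected_earthquake()
    kept = [k for k, z in enumerate(Z) if z and z > 0]
    print(f'{len(kept)} of {len(Z)} candidate events retained')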
diff --git a/modules/performRegionalEventSimulation/regionalGroundMotion/HazardSimulation.py b/modules/performRegionalEventSimulation/regionalGroundMotion/HazardSimulation.py
index a7438e39a..607167ca6 100644
--- a/modules/performRegionalEventSimulation/regionalGroundMotion/HazardSimulation.py
+++ b/modules/performRegionalEventSimulation/regionalGroundMotion/HazardSimulation.py
@@ -1,5 +1,4 @@
-# -*- coding: utf-8 -*-
-#
+# # noqa: INP001, D100
# Copyright (c) 2022 Leland Stanford Junior University
# Copyright (c) 2022 The Regents of the University of California
#
@@ -38,35 +37,38 @@
# Kuanshi Zhong
#
-import os, shutil, psutil
+import argparse
+import importlib
+import json
+import os
+import shutil
+import subprocess # noqa: S404
import sys
-import subprocess
-import argparse, posixpath, json
-import numpy as np
-import pandas as pd
import time
-import importlib
+
+import numpy as np
+import psutil
R2D = True
-def site_job(hazard_info):
+def site_job(hazard_info): # noqa: C901, D103
# Sites and stations
- print('HazardSimulation: creating stations.')
+ print('HazardSimulation: creating stations.') # noqa: T201
site_info = hazard_info['Site']
if site_info['Type'] == 'From_CSV':
- input_file = os.path.join(input_dir,site_info['input_file'])
- output_file = site_info.get('output_file',False)
+ input_file = os.path.join(input_dir, site_info['input_file']) # noqa: PTH118
+ output_file = site_info.get('output_file', False)
if output_file:
- output_file = os.path.join(output_dir, output_file)
- min_ID = site_info['min_ID']
- max_ID = site_info['max_ID']
+ output_file = os.path.join(output_dir, output_file) # noqa: PTH118
+ min_ID = site_info['min_ID'] # noqa: N806
+ max_ID = site_info['max_ID'] # noqa: N806
# forward compatibility
if minID:
- min_ID = minID
+ min_ID = minID # noqa: N806
site_info['min_ID'] = minID
if maxID:
- max_ID = maxID
+ max_ID = maxID # noqa: N806
site_info['max_ID'] = maxID
# Creating stations from the csv input file
z1_tag = 0
@@ -81,11 +83,11 @@ def site_job(hazard_info):
else:
vs30_tag = 0
# Bedrock depth
- zTR_tag = 0
+ zTR_tag = 0 # noqa: N806
if 'SoilGrid250' in site_info['BedrockDepth']['Type']:
- zTR_tag = 0
+ zTR_tag = 0 # noqa: N806
elif 'National Crustal Model' in site_info['BedrockDepth']['Type']:
- zTR_tag = 1
+ zTR_tag = 1 # noqa: N806
# soil model if any
if site_info.get('SoilModel', None) is not None:
soil_model_type = site_info['SoilModel'].get('Type', 'EI')
@@ -96,33 +98,45 @@ def site_job(hazard_info):
if soil_model_type == 'User':
soil_user_fun = site_info['SoilModel'].get('Parameters', None)
if soil_user_fun is not None:
- soil_user_fun = os.path.join(input_dir, soil_user_fun)
+ soil_user_fun = os.path.join(input_dir, soil_user_fun) # noqa: PTH118
# Creating stations from the csv input file
- stations = create_stations(input_file, output_file, min_ID, max_ID, vs30_tag, z1_tag, z25_tag, zTR_tag=zTR_tag,
- soil_flag=True, soil_model_type=soil_model_type, soil_user_fun=soil_user_fun)
+ stations = create_stations( # noqa: F405
+ input_file,
+ output_file,
+ min_ID,
+ max_ID,
+ vs30_tag,
+ z1_tag,
+ z25_tag,
+ zTR_tag=zTR_tag,
+ soil_flag=True,
+ soil_model_type=soil_model_type,
+ soil_user_fun=soil_user_fun,
+ )
if stations:
- print('HazardSimulation: site data are fetched and saved in {}.'.format(output_file))
+ print(f'HazardSimulation: site data are fetched and saved in {output_file}.') # noqa: T201
else:
- print('HazardSimulation: please check the "Input" directory in the configuration json file.')
- exit()
-
+ print( # noqa: T201
+ 'HazardSimulation: please check the "Input" directory in the configuration json file.'
+ )
+ exit() # noqa: PLR1722
-def hazard_job(hazard_info):
+def hazard_job(hazard_info): # noqa: C901, D103, PLR0914, PLR0915
# Sites and stations
- print('HazardSimulation: creating stations.')
+ print('HazardSimulation: creating stations.') # noqa: T201
site_info = hazard_info['Site']
if site_info['Type'] == 'From_CSV':
- input_file = os.path.join(input_dir,site_info['input_file'])
- output_file = site_info.get('output_file',False)
+ input_file = os.path.join(input_dir, site_info['input_file']) # noqa: PTH118
+ output_file = site_info.get('output_file', False)
if output_file:
- output_file = os.path.join(input_dir, output_file)
- min_ID = site_info.get('min_ID',None)
- max_ID = site_info.get('max_ID',None)
- filterIDs = site_info.get('filterIDs',None)
- # backward compability. Deleter after new frontend releases
+ output_file = os.path.join(input_dir, output_file) # noqa: PTH118
+ min_ID = site_info.get('min_ID', None) # noqa: N806
+ max_ID = site_info.get('max_ID', None) # noqa: N806
+ filterIDs = site_info.get('filterIDs', None) # noqa: N806
+ # backward compatibility. Delete after new frontend releases
if min_ID is not None and max_ID is not None:
- filterIDs = str(min_ID)+"-"+str(max_ID)
+ filterIDs = str(min_ID) + '-' + str(max_ID) # noqa: N806
# Creating stations from the csv input file
z1_tag = 0
z25_tag = 0
@@ -138,226 +152,321 @@ def hazard_job(hazard_info):
else:
vs30_tag = 0
# Creating stations from the csv input file
- stations = create_stations(input_file, output_file, filterIDs, vs30_tag, z1_tag, z25_tag)
+ stations = create_stations( # noqa: F405
+ input_file, output_file, filterIDs, vs30_tag, z1_tag, z25_tag
+ )
if stations:
- print('HazardSimulation: stations created.')
+ print('HazardSimulation: stations created.') # noqa: T201
else:
- print('HazardSimulation: please check the "Input" directory in the configuration json file.')
- exit()
- #print(stations)
+ print( # noqa: T201
+ 'HazardSimulation: please check the "Input" directory in the configuration json file.'
+ )
+ exit() # noqa: PLR1722
+ # print(stations)
# Scenarios
- print('HazardSimulation: creating scenarios.')
+ print('HazardSimulation: creating scenarios.') # noqa: T201
scenario_info = hazard_info['Scenario']
if scenario_info['Type'] == 'Earthquake':
# KZ-10/31/2022: checking user-provided scenarios
- user_scenarios = scenario_info.get('EqRupture').get('UserScenarioFile', False)
+ user_scenarios = scenario_info.get('EqRupture').get(
+ 'UserScenarioFile', False
+ )
if user_scenarios:
- scenarios = load_earthquake_scenarios(scenario_info, stations, dir_info)
+ scenarios = load_earthquake_scenarios(scenario_info, stations, dir_info) # noqa: F405
# Creating earthquake scenarios
- elif scenario_info['EqRupture']['Type'] in ['PointSource', 'ERF']:
- scenarios = create_earthquake_scenarios(scenario_info, stations, dir_info)
+ elif scenario_info['EqRupture']['Type'] in ['PointSource', 'ERF']: # noqa: PLR6201
+ scenarios = create_earthquake_scenarios( # noqa: F405
+ scenario_info, stations, dir_info
+ )
elif scenario_info['Type'] == 'Wind':
# Creating wind scenarios
- scenarios = create_wind_scenarios(scenario_info, stations, input_dir)
+ scenarios = create_wind_scenarios(scenario_info, stations, input_dir) # noqa: F405
else:
- # TODO: extending this to other hazards
- print('HazardSimulation: currently only supports EQ and Wind simulations.')
- #print(scenarios)
- print('HazardSimulation: scenarios created.')
+ # TODO: extending this to other hazards # noqa: TD002
+ print('HazardSimulation: currently only supports EQ and Wind simulations.') # noqa: T201
+ # print(scenarios)
+ print('HazardSimulation: scenarios created.') # noqa: T201
# Computing intensity measures
- print('HazardSimulation: computing intensity measures.')
+ print('HazardSimulation: computing intensity measures.') # noqa: T201
if scenario_info['Type'] == 'Earthquake':
# Computing uncorrelated Sa
event_info = hazard_info['Event']
if opensha_flag:
- im_raw, im_info = compute_im(scenarios, stations['Stations'],
- event_info['GMPE'], event_info['IntensityMeasure'],
- scenario_info.get('EqRupture').get('HazardOccurrence',None), output_dir, mth_flag=False)
+ im_raw, im_info = compute_im( # noqa: F405
+ scenarios,
+ stations['Stations'],
+ event_info['GMPE'],
+ event_info['IntensityMeasure'],
+ scenario_info.get('EqRupture').get('HazardOccurrence', None),
+ output_dir,
+ mth_flag=False,
+ )
# update the im_info
event_info['IntensityMeasure'] = im_info
elif oq_flag:
# Preparing config ini for OpenQuake
- filePath_ini, oq_ver_loaded, event_info = openquake_config(site_info, scenario_info, event_info, dir_info)
+ filePath_ini, oq_ver_loaded, event_info = openquake_config( # noqa: N806, F405
+ site_info, scenario_info, event_info, dir_info
+ )
if not filePath_ini:
# Error in ini file
- sys.exit('HazardSimulation: errors in preparing the OpenQuake configuration file.')
- if scenario_info['EqRupture']['Type'] in ['OpenQuakeClassicalPSHA','OpenQuakeUserConfig', 'OpenQuakeClassicalPSHA-User']:
+ sys.exit(
+ 'HazardSimulation: errors in preparing the OpenQuake configuration file.'
+ )
+ if scenario_info['EqRupture']['Type'] in [ # noqa: PLR6201
+ 'OpenQuakeClassicalPSHA',
+ 'OpenQuakeUserConfig',
+ 'OpenQuakeClassicalPSHA-User',
+ ]:
# Calling openquake to run classical PSHA
- #oq_version = scenario_info['EqRupture'].get('OQVersion',default_oq_version)
- oq_run_flag = oq_run_classical_psha(filePath_ini, exports='csv', oq_version=oq_ver_loaded, dir_info=dir_info)
+ # oq_version = scenario_info['EqRupture'].get('OQVersion',default_oq_version)
+ oq_run_flag = oq_run_classical_psha( # noqa: F405
+ filePath_ini,
+ exports='csv',
+ oq_version=oq_ver_loaded,
+ dir_info=dir_info,
+ )
if oq_run_flag:
err_msg = 'HazardSimulation: OpenQuake Classical PSHA failed.'
if not new_db_sqlite3:
- err_msg = err_msg + ' Please see if there is leaked python threads in background still occupying {}.'.format(os.path.expanduser('~/oqdata/db.sqlite3'))
- print(err_msg)
+ err_msg = ( # noqa: PLR6104
+ err_msg
+ + ' Please see if there are leaked Python threads in the background still occupying {}.'.format(
+ os.path.expanduser('~/oqdata/db.sqlite3') # noqa: PTH111
+ )
+ )
+ print(err_msg) # noqa: T201
sys.exit(err_msg)
else:
- print('HazardSimulation: OpenQuake Classical PSHA completed.')
+ print('HazardSimulation: OpenQuake Classical PSHA completed.') # noqa: T201
if scenario_info['EqRupture'].get('UHS', False):
- ln_im_mr, mag_maf, im_list = oq_read_uhs_classical_psha(scenario_info, event_info, dir_info)
+ ln_im_mr, mag_maf, im_list = oq_read_uhs_classical_psha( # noqa: F405
+ scenario_info, event_info, dir_info
+ )
else:
ln_im_mr = []
mag_maf = []
im_list = []
- #stn_new = stations['Stations']
+ # stn_new = stations['Stations']
elif scenario_info['EqRupture']['Type'] == 'OpenQuakeScenario':
# Creating and conducting OpenQuake calculations
- oq_calc = OpenQuakeHazardCalc(filePath_ini, event_info, oq_ver_loaded, dir_info=dir_info)
+ oq_calc = OpenQuakeHazardCalc( # noqa: F405
+ filePath_ini, event_info, oq_ver_loaded, dir_info=dir_info
+ )
oq_calc.run_calc()
im_raw = [oq_calc.eval_calc()]
- #stn_new = stations['Stations']
- print('HazardSimulation: OpenQuake Scenario calculation completed.')
+ # stn_new = stations['Stations']
+ print('HazardSimulation: OpenQuake Scenario calculation completed.') # noqa: T201
+
+ else:
+ sys.exit(
+ 'HazardSimulation: OpenQuakeClassicalPSHA, OpenQuakeUserConfig and OpenQuakeScenario are supported.'
+ )
- else:
- sys.exit('HazardSimulation: OpenQuakeClassicalPSHA, OpenQuakeUserConfig and OpenQuakeScenario are supported.')
-
# KZ-08/23/22: adding method to do hazard occurrence model
- #im_type = 'SA'
- #period = 1.0
- #im_level = 0.2*np.ones((len(im_raw[0].get('GroundMotions')),1))
- occurrence_sampling = scenario_info.get('EqRupture').get('OccurrenceSampling',False)
+ # im_type = 'SA'
+ # period = 1.0
+ # im_level = 0.2*np.ones((len(im_raw[0].get('GroundMotions')),1))
+ occurrence_sampling = scenario_info.get('EqRupture').get(
+ 'OccurrenceSampling', False
+ )
if occurrence_sampling:
# read all configurations
occurrence_info = scenario_info.get('EqRupture').get('HazardOccurrence')
- reweight_only = occurrence_info.get('ReweightOnly',False)
+ reweight_only = occurrence_info.get('ReweightOnly', False)
# KZ-10/31/22: adding a flag for whether to re-sample ground motion maps or just monte-carlo
sampling_gmms = occurrence_info.get('SamplingGMMs', True)
- occ_dict = configure_hazard_occurrence(input_dir, output_dir, hzo_config=occurrence_info, site_config=stations['Stations'])
+ occ_dict = configure_hazard_occurrence( # noqa: F405
+ input_dir,
+ output_dir,
+ hzo_config=occurrence_info,
+ site_config=stations['Stations'],
+ )
model_type = occ_dict.get('Model')
num_target_eqs = occ_dict.get('NumTargetEQs')
num_target_gmms = occ_dict.get('NumTargetGMMs')
- num_per_eq_avg = int(np.ceil(num_target_gmms/num_target_eqs))
+ num_per_eq_avg = int(np.ceil(num_target_gmms / num_target_eqs))
return_periods = occ_dict.get('ReturnPeriods')
im_type = occ_dict.get('IntensityMeasure')
period = occ_dict.get('Period')
hc_curves = occ_dict.get('HazardCurves')
# get im exceedance probabilities
- im_exceedance_prob = get_im_exceedance_probility(im_raw, im_type, period, hc_curves)
+ im_exceedance_prob = get_im_exceedance_probility( # noqa: F405
+ im_raw, im_type, period, hc_curves
+ )
# sample the earthquake scenario occurrence
if reweight_only:
- occurrence_rate_origin = [scenarios[i].get('MeanAnnualRate') for i in range(len(scenarios))]
+ occurrence_rate_origin = [
+ scenarios[i].get('MeanAnnualRate') for i in range(len(scenarios))
+ ]
else:
occurrence_rate_origin = None
- occurrence_model = sample_earthquake_occurrence(model_type,num_target_eqs,return_periods,im_exceedance_prob,reweight_only,occurrence_rate_origin)
- #print(occurrence_model)
- P, Z = occurrence_model.get_selected_earthquake()
+ occurrence_model = sample_earthquake_occurrence( # noqa: F405
+ model_type,
+ num_target_eqs,
+ return_periods,
+ im_exceedance_prob,
+ reweight_only,
+ occurrence_rate_origin,
+ )
+ # print(occurrence_model)
+ P, Z = occurrence_model.get_selected_earthquake() # noqa: N806
# now update the im_raw with selected eqs with Z > 0
id_selected_eqs = []
for i in range(len(Z)):
if P[i] > 0:
- id_selected_eqs.append(i)
+ id_selected_eqs.append(i) # noqa: PERF401
im_raw_sampled = [im_raw[i] for i in id_selected_eqs]
im_raw = im_raw_sampled
- num_per_eq_avg = int(np.ceil(num_target_gmms/len(id_selected_eqs)))
+ num_per_eq_avg = int(np.ceil(num_target_gmms / len(id_selected_eqs)))
# export sampled earthquakes
- _ = export_sampled_earthquakes(id_selected_eqs, scenarios, P, output_dir)
-
+ _ = export_sampled_earthquakes(id_selected_eqs, scenarios, P, output_dir) # noqa: F405
+
# Updating station information
- #stations['Stations'] = stn_new
- print('HazardSimulation: uncorrelated response spectra computed.')
- #print(im_raw)
+ # stations['Stations'] = stn_new
+ print('HazardSimulation: uncorrelated response spectra computed.') # noqa: T201
+ # print(im_raw)
# KZ-08/23/22: adding method to do hazard occurrence model
if occurrence_sampling and sampling_gmms:
num_gm_per_site = num_per_eq_avg
else:
num_gm_per_site = event_info['NumberPerSite']
- print('num_gm_per_site = ',num_gm_per_site)
- if not scenario_info['EqRupture']['Type'] in ['OpenQuakeClassicalPSHA','OpenQuakeUserConfig','OpenQuakeClassicalPSHA-User']:
+ print('num_gm_per_site = ', num_gm_per_site) # noqa: T201
+ if scenario_info['EqRupture']['Type'] not in [ # noqa: PLR6201
+ 'OpenQuakeClassicalPSHA',
+ 'OpenQuakeUserConfig',
+ 'OpenQuakeClassicalPSHA-User',
+ ]:
# Computing correlated IMs
- ln_im_mr, mag_maf, im_list = simulate_ground_motion(stations['Stations'], im_raw,
- num_gm_per_site,
- event_info['CorrelationModel'],
- event_info['IntensityMeasure'])
- print('HazardSimulation: correlated response spectra computed.')
+ ln_im_mr, mag_maf, im_list = simulate_ground_motion( # noqa: F405
+ stations['Stations'],
+ im_raw,
+ num_gm_per_site,
+ event_info['CorrelationModel'],
+ event_info['IntensityMeasure'],
+ )
+ print('HazardSimulation: correlated response spectra computed.') # noqa: T201
# KZ-08/23/22: adding method to do hazard occurrence model
if occurrence_sampling and sampling_gmms:
# get im exceedance probabilities for individual ground motions
- #print('im_list = ',im_list)
- im_exceedance_prob_gmm = get_im_exceedance_probability_gm(np.exp(ln_im_mr), im_list, im_type, period, hc_curves)
+ # print('im_list = ',im_list)
+ im_exceedance_prob_gmm = get_im_exceedance_probability_gm( # noqa: F405
+ np.exp(ln_im_mr), im_list, im_type, period, hc_curves
+ )
# sample the earthquake scenario occurrence
- occurrence_model_gmm = sample_earthquake_occurrence(model_type,num_target_gmms,return_periods,im_exceedance_prob_gmm)
- #print(occurrence_model)
- P_gmm, Z_gmm = occurrence_model_gmm.get_selected_earthquake()
+ occurrence_model_gmm = sample_earthquake_occurrence( # noqa: F405
+ model_type, num_target_gmms, return_periods, im_exceedance_prob_gmm
+ )
+ # print(occurrence_model)
+ P_gmm, Z_gmm = occurrence_model_gmm.get_selected_earthquake() # noqa: N806
# now update the im_raw with selected eqs with Z > 0
id_selected_gmms = []
for i in range(len(Z_gmm)):
if P_gmm[i] > 0:
- id_selected_gmms.append(i)
- id_selected_scens = [int(x/num_gm_per_site) for x in id_selected_gmms]
- id_selected_simus = [x%num_gm_per_site for x in id_selected_gmms]
+ id_selected_gmms.append(i) # noqa: PERF401
+ id_selected_scens = [int(x / num_gm_per_site) for x in id_selected_gmms]
+ id_selected_simus = [x % num_gm_per_site for x in id_selected_gmms]
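The two comprehensions above split a flat ground-motion-map index into a (scenario, realization) pair; a tiny worked example of that arithmetic, purely illustrative, with num_gm_per_site assumed to be 3:

num_gm_per_site = 3                      # assumed value for illustration
id_selected_gmms = [0, 4, 7]             # flat indices into all simulated maps
scens = [int(x / num_gm_per_site) for x in id_selected_gmms]  # -> [0, 1, 2]
simus = [x % num_gm_per_site for x in id_selected_gmms]       # -> [0, 1, 1]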
# export sampled earthquakes
- _ = export_sampled_gmms(id_selected_gmms, id_selected_scens, P_gmm, output_dir)
+ _ = export_sampled_gmms( # noqa: F405
+ id_selected_gmms, id_selected_scens, P_gmm, output_dir
+ )
num_site = ln_im_mr[0].shape[0]
num_im = ln_im_mr[0].shape[1]
- sampled_im_gmms = np.zeros((num_site,num_im,len(id_selected_gmms)))
+ sampled_im_gmms = np.zeros((num_site, num_im, len(id_selected_gmms)))
count = 0
for i in range(len(id_selected_gmms)):
- sampled_im_gmms[:,:,count]=ln_im_mr[id_selected_scens[i]][:,:,id_selected_simus[i]].tolist()
- count = count+1
+ sampled_im_gmms[:, :, count] = ln_im_mr[id_selected_scens[i]][
+ :, :, id_selected_simus[i]
+ ].tolist()
+ count = count + 1 # noqa: PLR6104
ln_im_mr_sampled = [sampled_im_gmms]
ln_im_mr = ln_im_mr_sampled
- mag_maf = [[0,0,0,0]]
-
+ mag_maf = [[0, 0, 0, 0]]
+
if event_info['SaveIM'] and ln_im_mr:
- print('HazardSimulation: saving simulated intensity measures.')
- _ = export_im(stations['Stations'], im_list,
- ln_im_mr, mag_maf, output_dir, 'SiteIM.json', 1)
- print('HazardSimulation: simulated intensity measures saved.')
+ print('HazardSimulation: saving simulated intensity measures.') # noqa: T201
+ _ = export_im( # noqa: F405
+ stations['Stations'],
+ im_list,
+ ln_im_mr,
+ mag_maf,
+ output_dir,
+ 'SiteIM.json',
+ 1,
+ )
+ print('HazardSimulation: simulated intensity measures saved.') # noqa: T201
else:
- print('HazardSimulation: IM is not required to saved or no IM is found.')
- #print(np.exp(ln_im_mr[0][0, :, 1]))
- #print(np.exp(ln_im_mr[0][1, :, 1]))
+ print('HazardSimulation: IM is not required to be saved or no IM is found.') # noqa: T201
+ # print(np.exp(ln_im_mr[0][0, :, 1]))
+ # print(np.exp(ln_im_mr[0][1, :, 1]))
else:
- # TODO: extending this to other hazards
- print('HazardSimulation currently only supports earthquake simulations.')
- print('HazardSimulation: intensity measures computed.')
+ # TODO: extending this to other hazards # noqa: TD002
+ print('HazardSimulation currently only supports earthquake simulations.') # noqa: T201
+ print('HazardSimulation: intensity measures computed.') # noqa: T201
# Selecting ground motion records
if scenario_info['Type'] == 'Earthquake':
# Selecting records
- data_source = event_info.get('Database',0)
+ data_source = event_info.get('Database', 0)
if data_source:
- print('HazardSimulation: selecting ground motion records.')
+ print('HazardSimulation: selecting ground motion records.') # noqa: T201
sf_max = event_info['ScalingFactor']['Maximum']
sf_min = event_info['ScalingFactor']['Minimum']
start_time = time.time()
- gm_id, gm_file = select_ground_motion(im_list, ln_im_mr, data_source,
- sf_max, sf_min, output_dir, 'EventGrid.csv',
- stations['Stations'])
- print('HazardSimulation: ground motion records selected ({0} s).'.format(time.time() - start_time))
- #print(gm_id)
+ gm_id, gm_file = select_ground_motion( # noqa: F405
+ im_list,
+ ln_im_mr,
+ data_source,
+ sf_max,
+ sf_min,
+ output_dir,
+ 'EventGrid.csv',
+ stations['Stations'],
+ )
+ print( # noqa: T201
+ f'HazardSimulation: ground motion records selected ({time.time() - start_time} s).'
+ )
+ # print(gm_id)
gm_id = [int(i) for i in np.unique(gm_id)]
- gm_file = [i for i in np.unique(gm_file)]
- runtag = output_all_ground_motion_info(gm_id, gm_file, output_dir, 'RecordsList.csv')
+ gm_file = [i for i in np.unique(gm_file)] # noqa: C416
+ runtag = output_all_ground_motion_info( # noqa: F405
+ gm_id, gm_file, output_dir, 'RecordsList.csv'
+ )
if runtag:
- print('HazardSimulation: the ground motion list saved.')
+ print('HazardSimulation: the ground motion list saved.') # noqa: T201
else:
- sys.exit('HazardSimulation: warning - issues with saving the ground motion list.')
+ sys.exit(
+ 'HazardSimulation: warning - issues with saving the ground motion list.'
+ )
# Downloading records
user_name = event_info.get('UserName', None)
user_password = event_info.get('UserPassword', None)
if (user_name is not None) and (user_password is not None) and (not R2D):
- print('HazardSimulation: downloading ground motion records.')
- raw_dir = download_ground_motion(gm_id, user_name,
- user_password, output_dir)
+ print('HazardSimulation: downloading ground motion records.') # noqa: T201
+ raw_dir = download_ground_motion( # noqa: F405
+ gm_id, user_name, user_password, output_dir
+ )
if raw_dir:
- print('HazardSimulation: ground motion records downloaded.')
+ print('HazardSimulation: ground motion records downloaded.') # noqa: T201
# Parsing records
- print('HazardSimulation: parsing records.')
- record_dir = parse_record(gm_file, raw_dir, output_dir,
- event_info['Database'],
- event_info['OutputFormat'])
- print('HazardSimulation: records parsed.')
+ print('HazardSimulation: parsing records.') # noqa: T201
+ record_dir = parse_record( # noqa: F405, F841
+ gm_file,
+ raw_dir,
+ output_dir,
+ event_info['Database'],
+ event_info['OutputFormat'],
+ )
+ print('HazardSimulation: records parsed.') # noqa: T201
else:
- print('HazardSimulation: No records to be parsed.')
+ print('HazardSimulation: No records to be parsed.') # noqa: T201
else:
- print('HazardSimulation: ground motion selection is not requested.')
+ print('HazardSimulation: ground motion selection is not requested.') # noqa: T201
if __name__ == '__main__':
-
# parse arguments
parser = argparse.ArgumentParser()
parser.add_argument('--hazard_config')
@@ -369,7 +478,7 @@ def hazard_job(hazard_info):
args = parser.parse_args()
# read the hazard configuration file
- with open(args.hazard_config) as f:
+ with open(args.hazard_config) as f: # noqa: PLW1514, PTH123
hazard_info = json.load(f)
# directory (back compatibility here)
@@ -385,29 +494,32 @@ def hazard_job(hazard_info):
dir_info['Output'] = output_dir
dir_info['Work'] = output_dir
try:
- os.mkdir(f"{output_dir}")
- except:
- print('HazardSimulation: output folder already exists.')
+ os.mkdir(f'{output_dir}') # noqa: PTH102
+ except: # noqa: E722
+ print('HazardSimulation: output folder already exists.') # noqa: T201
# site filter (if explicitly defined)
- minID = None
- maxID = None
+ minID = None # noqa: N816
+ maxID = None # noqa: N816
if args.filter:
tmp = [int(x) for x in args.filter.split('-')]
if len(tmp) == 1:
- minID = tmp[0]
- maxID = minID
+ minID = tmp[0] # noqa: N816
+ maxID = minID # noqa: N816
else:
- [minID, maxID] = tmp
+ [minID, maxID] = tmp # noqa: N816
# parse job type for set up environment and constants
try:
- opensha_flag = hazard_info['Scenario']['EqRupture']['Type'] in ['PointSource', 'ERF']
- except:
+ opensha_flag = hazard_info['Scenario']['EqRupture']['Type'] in [ # noqa: PLR6201
+ 'PointSource',
+ 'ERF',
+ ]
+ except: # noqa: E722
opensha_flag = False
try:
oq_flag = 'OpenQuake' in hazard_info['Scenario']['EqRupture']['Type']
- except:
+ except: # noqa: E722
oq_flag = False
# dependencies
@@ -417,54 +529,68 @@ def hazard_job(hazard_info):
packages = ['selenium', 'tqdm', 'psutil', 'PuLP', 'requests']
for p in packages:
if importlib.util.find_spec(p) is None:
- subprocess.check_call([sys.executable, "-m", "pip", "install", "-q", p])
+ subprocess.check_call([sys.executable, '-m', 'pip', 'install', '-q', p]) # noqa: S603
# set up environment
import socket
+
if 'stampede2' not in socket.gethostname():
if importlib.util.find_spec('jpype') is None:
- subprocess.check_call([sys.executable, "-m", "pip", "install", "JPype1"])
+ subprocess.check_call([sys.executable, '-m', 'pip', 'install', 'JPype1']) # noqa: S603
import jpype
- from jpype import imports
- from jpype.types import *
- memory_total = psutil.virtual_memory().total/(1024.**3)
- memory_request = int(memory_total*0.75)
+ from jpype.types import * # noqa: F403
+
+ memory_total = psutil.virtual_memory().total / (1024.0**3)
+ memory_request = int(memory_total * 0.75)
jpype.addClassPath('./lib/OpenSHA-1.5.2.jar')
try:
- jpype.startJVM("-Xmx{}G".format(memory_request), convertStrings=False)
- except:
- print(f"StartJVM of ./lib/OpenSHA-1.5.2.jar with {memory_request} GB Memory fails. Try again after releasing some memory")
+ jpype.startJVM(f'-Xmx{memory_request}G', convertStrings=False)
+ except: # noqa: E722
+ print( # noqa: T201
+ f'StartJVM of ./lib/OpenSHA-1.5.2.jar with {memory_request} GB memory failed. Try again after releasing some memory.'
+ )
if oq_flag:
# clear up old db.sqlite3 if any
- if os.path.isfile(os.path.expanduser('~/oqdata/db.sqlite3')):
+ if os.path.isfile(os.path.expanduser('~/oqdata/db.sqlite3')): # noqa: PTH111, PTH113
new_db_sqlite3 = True
try:
- os.remove(os.path.expanduser('~/oqdata/db.sqlite3'))
- except:
+ os.remove(os.path.expanduser('~/oqdata/db.sqlite3')) # noqa: PTH107, PTH111
+ except: # noqa: E722
new_db_sqlite3 = False
# data dir
- os.environ['OQ_DATADIR'] = os.path.join(os.path.abspath(output_dir), 'oqdata')
- print('HazardSimulation: local OQ_DATADIR = '+os.environ.get('OQ_DATADIR'))
- if os.path.exists(os.environ.get('OQ_DATADIR')):
- print('HazardSimulation: local OQ folder already exists, overwiting it now...')
+ os.environ['OQ_DATADIR'] = os.path.join( # noqa: PTH118
+ os.path.abspath(output_dir), # noqa: PTH100
+ 'oqdata',
+ )
+ print('HazardSimulation: local OQ_DATADIR = ' + os.environ.get('OQ_DATADIR')) # noqa: T201
+ if os.path.exists(os.environ.get('OQ_DATADIR')): # noqa: PTH110
+ print( # noqa: T201
+ 'HazardSimulation: local OQ folder already exists, overwriting it now...'
+ )
shutil.rmtree(os.environ.get('OQ_DATADIR'))
- os.makedirs(f"{os.environ.get('OQ_DATADIR')}")
+ os.makedirs(f"{os.environ.get('OQ_DATADIR')}") # noqa: PTH103
# import modules
- from CreateStation import *
- from CreateScenario import *
- from ComputeIntensityMeasure import *
- from SelectGroundMotion import *
+ from ComputeIntensityMeasure import * # noqa: F403
+ from CreateScenario import * # noqa: F403
+ from CreateStation import * # noqa: F403
+
# KZ-08/23/22: adding hazard occurrence model
- from HazardOccurrence import *
+ from HazardOccurrence import * # noqa: F403
+ from SelectGroundMotion import * # noqa: F403
+
if oq_flag:
# import FetchOpenQuake
- from FetchOpenQuake import *
-
+ from FetchOpenQuake import * # noqa: F403
# Initial process list
import psutil
- proc_list_init = [p.info for p in psutil.process_iter(attrs=['pid', 'name']) if 'python' in p.info['name']]
+
+ proc_list_init = [
+ p.info
+ for p in psutil.process_iter(attrs=['pid', 'name'])
+ if 'python' in p.info['name']
+ ]
# run the job
if args.job_type == 'Hazard':
@@ -472,7 +598,7 @@ def hazard_job(hazard_info):
elif args.job_type == 'Site':
site_job(hazard_info)
else:
- print('HazardSimulation: --job_type = Hazard or Site (please check).')
+ print('HazardSimulation: --job_type = Hazard or Site (please check).') # noqa: T201
# Closing the current process
- sys.exit(0)
\ No newline at end of file
+ sys.exit(0)
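For orientation, a hypothetical minimal hazard_config sketch restricted to keys this script reads; all values are placeholders and the real schema has additional fields not reproduced here:

# Hypothetical minimal configuration covering only keys read above; placeholder values.
hazard_config_sketch = {
    'Site': {
        'Type': 'From_CSV',
        'input_file': 'SiteFile.csv',    # placeholder name
        'output_file': 'SiteModel.csv',  # optional
    },
    'Scenario': {
        'Type': 'Earthquake',
        'EqRupture': {
            'Type': 'ERF',               # or 'PointSource' / OpenQuake variants
            'OccurrenceSampling': False,
        },
    },
    'Event': {
        'GMPE': {},                      # placeholder
        'IntensityMeasure': {},          # placeholder
        'NumberPerSite': 1,
        'SaveIM': True,
        'Database': 0,                   # 0 skips ground-motion record selection
    },
}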
diff --git a/modules/performRegionalEventSimulation/regionalGroundMotion/HazardSimulationEQ.py b/modules/performRegionalEventSimulation/regionalGroundMotion/HazardSimulationEQ.py
index 76719b201..e1c3f4ab1 100644
--- a/modules/performRegionalEventSimulation/regionalGroundMotion/HazardSimulationEQ.py
+++ b/modules/performRegionalEventSimulation/regionalGroundMotion/HazardSimulationEQ.py
@@ -1,5 +1,4 @@
-# -*- coding: utf-8 -*-
-#
+# # noqa: INP001, D100
# Copyright (c) 2022 Leland Stanford Junior University
# Copyright (c) 2022 The Regents of the University of California
#
@@ -38,219 +37,316 @@
# Kuanshi Zhong
#
-import os, shutil, psutil
+import argparse
+import importlib
+import json
+import os
+import shutil
+import subprocess # noqa: S404
import sys
-import subprocess
-import argparse, posixpath, json
+import time
+
import numpy as np
import pandas as pd
-import time
-import importlib
+import psutil
R2D = True
-def hazard_job(hazard_info):
- from CreateScenario import load_ruptures_openquake
- from GMSimulators import simulate_ground_motion
+
+def hazard_job(hazard_info): # noqa: C901, D103, PLR0914, PLR0915
+ from CreateScenario import load_ruptures_openquake # noqa: PLC0415
+ from GMSimulators import simulate_ground_motion # noqa: PLC0415
+
try:
# oq_flag = hazard_info['Scenario']['EqRupture']['Type'] in ['oqSourceXML']
- oq_flag = 'OpenQuake' in hazard_info['Scenario']['EqRupture']['Type']
- except:
+ oq_flag = 'OpenQuake' in hazard_info['Scenario']['EqRupture']['Type']
+ except: # noqa: E722
oq_flag = False
# Read Site .csv
- site_file = hazard_info['Site']["siteFile"]
+ site_file = hazard_info['Site']['siteFile']
try:
stations = pd.read_csv(site_file).to_dict(orient='records')
- print('HazardSimulation: stations loaded.')
- except:
- print('HazardSimulation: please check the station file {}'.format(site_file))
- exit()
- #print(stations)
+ print('HazardSimulation: stations loaded.') # noqa: T201
+ except: # noqa: E722
+ print(f'HazardSimulation: please check the station file {site_file}') # noqa: T201
+ exit() # noqa: PLR1722
+ # print(stations)
# Scenarios
- print('HazardSimulation: loading scenarios.')
+ print('HazardSimulation: loading scenarios.') # noqa: T201
scenario_info = hazard_info['Scenario']
if scenario_info['Type'] == 'Earthquake':
# KZ-10/31/2022: checking user-provided scenarios
if scenario_info['EqRupture']['Type'] == 'oqSourceXML':
- #The rup file is not enough for oq erf, so the rupture needs to be recalculated
- rupFile = scenario_info['sourceFile']
- scenarios = load_ruptures_openquake(scenario_info, stations,
- work_dir, site_file, rupFile)
+ # The rupture file alone is not enough for the OpenQuake ERF, so the ruptures need to be recalculated
+ rupFile = scenario_info['sourceFile'] # noqa: N806
+ scenarios = load_ruptures_openquake(
+ scenario_info, stations, work_dir, site_file, rupFile
+ )
else:
- rupFile = scenario_info['sourceFile']
- scenarios = load_earthquake_rupFile(scenario_info, rupFile)
+ rupFile = scenario_info['sourceFile'] # noqa: N806
+ scenarios = load_earthquake_rupFile(scenario_info, rupFile) # noqa: F405
else:
- # TODO: extending this to other hazards
- print('HazardSimulation: currently only supports EQ and Wind simulations.')
- #print(scenarios)
- print('HazardSimulation: scenarios loaded.')
- selected_scen_ids = sorted(list(scenarios.keys()))
+ # TODO: extending this to other hazards # noqa: TD002
+ print('HazardSimulation: currently only supports EQ and Wind simulations.') # noqa: T201
+ # print(scenarios)
+ print('HazardSimulation: scenarios loaded.') # noqa: T201
+ selected_scen_ids = sorted(list(scenarios.keys())) # noqa: C414
# Computing intensity measures
- print('HazardSimulation: computing intensity measures.')
+ print('HazardSimulation: computing intensity measures.') # noqa: T201
if scenario_info['Type'] == 'Earthquake':
# Computing uncorrelated Sa
event_info = hazard_info['Event']
# When vector IM is used. The PGA/SA needs to be computed before PGV
im_info = event_info['IntensityMeasure']
- if im_info['Type']=='Vector' and 'PGV' in im_info.keys():
- PGV_info = im_info.pop('PGV')
+ if im_info['Type'] == 'Vector' and 'PGV' in im_info.keys(): # noqa: SIM118
+ PGV_info = im_info.pop('PGV') # noqa: N806
im_info.update({'PGV': PGV_info})
event_info['IntensityMeasure'] = im_info
- if opensha_flag or hazard_info['Scenario']['EqRupture']['Type'] == 'oqSourceXML':
- im_raw_path, im_list = compute_im(scenarios, stations, scenario_info,
- event_info.get('GMPE',None), event_info['IntensityMeasure'],
- scenario_info['Generator'], output_dir, mth_flag=False)
+ if (
+ opensha_flag
+ or hazard_info['Scenario']['EqRupture']['Type'] == 'oqSourceXML'
+ ):
+ im_raw_path, im_list = compute_im( # noqa: F405
+ scenarios,
+ stations,
+ scenario_info,
+ event_info.get('GMPE', None),
+ event_info['IntensityMeasure'],
+ scenario_info['Generator'],
+ output_dir,
+ mth_flag=False,
+ )
# update the im_info
event_info['IntensityMeasure'] = im_info
elif oq_flag:
# Preparing config ini for OpenQuake
- filePath_ini, oq_ver_loaded, event_info = openquake_config(hazard_info['Site'], scenario_info, event_info, hazard_info['Directory'])
+ filePath_ini, oq_ver_loaded, event_info = openquake_config( # noqa: N806, F405
+ hazard_info['Site'],
+ scenario_info,
+ event_info,
+ hazard_info['Directory'],
+ )
if not filePath_ini:
# Error in ini file
- sys.exit('HazardSimulation: errors in preparing the OpenQuake configuration file.')
- if scenario_info['EqRupture']['Type'] in ['OpenQuakeClassicalPSHA','OpenQuakeUserConfig', 'OpenQuakeClassicalPSHA-User']:
+ sys.exit(
+ 'HazardSimulation: errors in preparing the OpenQuake configuration file.'
+ )
+ if scenario_info['EqRupture']['Type'] in [ # noqa: PLR6201
+ 'OpenQuakeClassicalPSHA',
+ 'OpenQuakeUserConfig',
+ 'OpenQuakeClassicalPSHA-User',
+ ]:
# Calling openquake to run classical PSHA
- #oq_version = scenario_info['EqRupture'].get('OQVersion',default_oq_version)
- oq_run_flag = oq_run_classical_psha(filePath_ini, exports='csv', oq_version=oq_ver_loaded, dir_info=dir_info)
+ # oq_version = scenario_info['EqRupture'].get('OQVersion',default_oq_version)
+ oq_run_flag = oq_run_classical_psha( # noqa: F405
+ filePath_ini,
+ exports='csv',
+ oq_version=oq_ver_loaded,
+ dir_info=dir_info, # noqa: F405
+ )
if oq_run_flag:
err_msg = 'HazardSimulation: OpenQuake Classical PSHA failed.'
if not new_db_sqlite3:
- err_msg = err_msg + ' Please see if there is leaked python threads in background still occupying {}.'.format(os.path.expanduser('~/oqdata/db.sqlite3'))
- print(err_msg)
+ err_msg = ( # noqa: PLR6104
+ err_msg
+ + ' Please see if there are leaked Python threads in the background still occupying {}.'.format(
+ os.path.expanduser('~/oqdata/db.sqlite3') # noqa: PTH111
+ )
+ )
+ print(err_msg) # noqa: T201
sys.exit(err_msg)
else:
- print('HazardSimulation: OpenQuake Classical PSHA completed.')
+ print('HazardSimulation: OpenQuake Classical PSHA completed.') # noqa: T201
if scenario_info['EqRupture'].get('UHS', False):
- ln_im_mr, mag_maf, im_list = oq_read_uhs_classical_psha(scenario_info, event_info, dir_info)
+ ln_im_mr, mag_maf, im_list = oq_read_uhs_classical_psha( # noqa: F405
+ scenario_info,
+ event_info,
+ dir_info, # noqa: F405
+ )
else:
ln_im_mr = []
mag_maf = []
im_list = []
- #stn_new = stations['Stations']
+ # stn_new = stations['Stations']
elif scenario_info['EqRupture']['Type'] == 'oqSourceXML':
# Creating and conducting OpenQuake calculations
- oq_calc = OpenQuakeHazardCalc(filePath_ini, event_info, oq_ver_loaded, dir_info=hazard_info['Directory'])
+ oq_calc = OpenQuakeHazardCalc( # noqa: F405
+ filePath_ini,
+ event_info,
+ oq_ver_loaded,
+ dir_info=hazard_info['Directory'],
+ )
oq_calc.run_calc()
- im_raw = [oq_calc.eval_calc()]
- #stn_new = stations['Stations']
- print('HazardSimulation: OpenQuake Scenario calculation completed.')
+ im_raw = [oq_calc.eval_calc()] # noqa: F841
+ # stn_new = stations['Stations']
+ print('HazardSimulation: OpenQuake Scenario calculation completed.') # noqa: T201
+
+ else:
+ sys.exit(
+ 'HazardSimulation: OpenQuakeClassicalPSHA, OpenQuakeUserConfig and OpenQuakeScenario are supported.'
+ )
- else:
- sys.exit('HazardSimulation: OpenQuakeClassicalPSHA, OpenQuakeUserConfig and OpenQuakeScenario are supported.')
-
# KZ-08/23/22: adding method to do hazard occurrence model
- #im_type = 'SA'
- #period = 1.0
- #im_level = 0.2*np.ones((len(im_raw[0].get('GroundMotions')),1))
+ # im_type = 'SA'
+ # period = 1.0
+ # im_level = 0.2*np.ones((len(im_raw[0].get('GroundMotions')),1))
hc_curves = None
- occurrence_sampling = scenario_info['Generator']["method"]=='Subsampling'
+ occurrence_sampling = scenario_info['Generator']['method'] == 'Subsampling'
if occurrence_sampling:
# read all configurations
occurrence_info = scenario_info['Generator']['Parameters']
- reweight_only = occurrence_info.get('ReweightOnly',False)
+ reweight_only = occurrence_info.get('ReweightOnly', False)
# KZ-10/31/22: adding a flag for whether to re-sample ground motion maps or just monte-carlo
sampling_gmms = occurrence_info.get('SamplingGMMs', True)
- occ_dict = configure_hazard_occurrence(input_dir, output_dir, im_raw_path, \
- im_list, scenarios, hzo_config=occurrence_info,site_config=stations)
+ occ_dict = configure_hazard_occurrence( # noqa: F405
+ input_dir,
+ output_dir,
+ im_raw_path,
+ im_list,
+ scenarios,
+ hzo_config=occurrence_info,
+ site_config=stations,
+ )
model_type = occ_dict.get('Model')
num_target_eqs = occ_dict.get('NumTargetEQs')
num_target_gmms = occ_dict.get('NumTargetGMMs')
- num_per_eq_avg = int(np.ceil(num_target_gmms/num_target_eqs))
+ num_per_eq_avg = int(np.ceil(num_target_gmms / num_target_eqs))
return_periods = occ_dict.get('ReturnPeriods')
im_type = occ_dict.get('IntensityMeasure')
period = occ_dict.get('Period')
hc_curves = occ_dict.get('HazardCurves')
# get im exceedance probabilities
- im_exceedance_prob = get_im_exceedance_probility(im_raw_path, im_list,
- im_type, period, hc_curves, selected_scen_ids)
+ im_exceedance_prob = get_im_exceedance_probility( # noqa: F405
+ im_raw_path, im_list, im_type, period, hc_curves, selected_scen_ids
+ )
# sample the earthquake scenario occurrence
# if reweight_only:
# occurrence_rate_origin = [scenarios[i].get('MeanAnnualRate') for i in range(len(scenarios))]
# else:
# occurrence_rate_origin = None
- occurrence_rate_origin = [scenarios[i].get('MeanAnnualRate') for i in selected_scen_ids]
- occurrence_model = sample_earthquake_occurrence(model_type,num_target_eqs,
- return_periods,im_exceedance_prob,reweight_only,occurrence_rate_origin,
- occurrence_info)
- #print(occurrence_model)
- P, Z = occurrence_model.get_selected_earthquake()
+ occurrence_rate_origin = [
+ scenarios[i].get('MeanAnnualRate') for i in selected_scen_ids
+ ]
+ occurrence_model = sample_earthquake_occurrence( # noqa: F405
+ model_type,
+ num_target_eqs,
+ return_periods,
+ im_exceedance_prob,
+ reweight_only,
+ occurrence_rate_origin,
+ occurrence_info,
+ )
+ # print(occurrence_model)
+ P, Z = occurrence_model.get_selected_earthquake() # noqa: N806
# now update the im_raw with selected eqs with Z > 0
id_selected_eqs = []
for i in range(len(Z)):
if P[i] > 0:
- id_selected_eqs.append(selected_scen_ids[i])
+ id_selected_eqs.append(selected_scen_ids[i]) # noqa: PERF401
selected_scen_ids = id_selected_eqs
- num_per_eq_avg = int(np.ceil(num_target_gmms/len(selected_scen_ids)))
+ num_per_eq_avg = int(np.ceil(num_target_gmms / len(selected_scen_ids)))
# compute error from optimization residual
error = occurrence_model.get_error_vector()
# export sampled earthquakes
- _ = export_sampled_earthquakes(error, selected_scen_ids, scenarios, P, output_dir)
-
+ _ = export_sampled_earthquakes( # noqa: F405
+ error, selected_scen_ids, scenarios, P, output_dir
+ )
+
# Updating station information
- #stations['Stations'] = stn_new
- print('HazardSimulation: uncorrelated response spectra computed.')
- #print(im_raw)
+ # stations['Stations'] = stn_new
+ print('HazardSimulation: uncorrelated response spectra computed.') # noqa: T201
+ # print(im_raw)
# KZ-08/23/22: adding method to do hazard occurrence model
if occurrence_sampling and sampling_gmms:
num_gm_per_site = num_per_eq_avg
else:
num_gm_per_site = event_info['NumberPerSite']
- print('num_gm_per_site = ',num_gm_per_site)
- if not scenario_info['EqRupture']['Type'] in ['OpenQuakeClassicalPSHA','OpenQuakeUserConfig','OpenQuakeClassicalPSHA-User']:
+ print('num_gm_per_site = ', num_gm_per_site) # noqa: T201
+ if scenario_info['EqRupture']['Type'] not in [ # noqa: PLR6201
+ 'OpenQuakeClassicalPSHA',
+ 'OpenQuakeUserConfig',
+ 'OpenQuakeClassicalPSHA-User',
+ ]:
# Computing correlated IMs
- ln_im_mr, mag_maf = simulate_ground_motion(stations, im_raw_path,
- im_list, scenarios,
- num_gm_per_site,
- event_info['CorrelationModel'],
- event_info['IntensityMeasure'],
- selected_scen_ids)
- print('HazardSimulation: correlated response spectra computed.')
+ ln_im_mr, mag_maf = simulate_ground_motion(
+ stations,
+ im_raw_path,
+ im_list,
+ scenarios,
+ num_gm_per_site,
+ event_info['CorrelationModel'],
+ event_info['IntensityMeasure'],
+ selected_scen_ids,
+ )
+ print('HazardSimulation: correlated response spectra computed.') # noqa: T201
# KZ-08/23/22: adding method to do hazard occurrence model
if occurrence_sampling and sampling_gmms:
# get im exceedance probabilities for individual ground motions
- #print('im_list = ',im_list)
- im_exceedance_prob_gmm, occur_rate_origin = get_im_exceedance_probability_gm(\
- np.exp(ln_im_mr), im_list, im_type, period, hc_curves,\
- np.array(mag_maf)[:,1])
+ # print('im_list = ',im_list)
+ im_exceedance_prob_gmm, occur_rate_origin = (
+ get_im_exceedance_probability_gm( # noqa: F405
+ np.exp(ln_im_mr),
+ im_list,
+ im_type,
+ period,
+ hc_curves,
+ np.array(mag_maf)[:, 1],
+ )
+ )
# sample the earthquake scenario occurrence
# if reweight_only:
# occurrence_rate_origin = [scenarios[i].get('MeanAnnualRate') for i in range(len(scenarios))]
# else:
# occurrence_rate_origin = None
- occurrence_model_gmm = sample_earthquake_occurrence(model_type,\
- num_target_gmms,return_periods,im_exceedance_prob_gmm,\
- reweight_only, occur_rate_origin, occurrence_info)
- #print(occurrence_model)
- P_gmm, Z_gmm = occurrence_model_gmm.get_selected_earthquake()
+ occurrence_model_gmm = sample_earthquake_occurrence( # noqa: F405
+ model_type,
+ num_target_gmms,
+ return_periods,
+ im_exceedance_prob_gmm,
+ reweight_only,
+ occur_rate_origin,
+ occurrence_info,
+ )
+ # print(occurrence_model)
+ P_gmm, Z_gmm = occurrence_model_gmm.get_selected_earthquake() # noqa: N806
# now update the im_raw with selected eqs with Z > 0
id_selected_gmms = []
for i in range(len(Z_gmm)):
if P_gmm[i] > 0:
- id_selected_gmms.append(i)
- id_selected_scens = np.array([selected_scen_ids[int(x/num_gm_per_site)] for x in id_selected_gmms])
- id_selected_simus = np.array([x%num_gm_per_site for x in id_selected_gmms])
+ id_selected_gmms.append(i) # noqa: PERF401
+ id_selected_scens = np.array(
+ [
+ selected_scen_ids[int(x / num_gm_per_site)]
+ for x in id_selected_gmms
+ ]
+ )
+ id_selected_simus = np.array(
+ [x % num_gm_per_site for x in id_selected_gmms]
+ )
# export sampled earthquakes
- occurrence_model_gmm.export_sampled_gmms(id_selected_gmms, id_selected_scens, P_gmm, output_dir)
+ occurrence_model_gmm.export_sampled_gmms(
+ id_selected_gmms, id_selected_scens, P_gmm, output_dir
+ )
- selected_scen_ids_step2 = sorted(list(set(id_selected_scens)))
- sampled_ln_im_mr = [None]*len(selected_scen_ids_step2)
- sampled_mag_maf = [None]*len(selected_scen_ids_step2)
+ selected_scen_ids_step2 = sorted(list(set(id_selected_scens))) # noqa: C414
+ sampled_ln_im_mr = [None] * len(selected_scen_ids_step2)
+ sampled_mag_maf = [None] * len(selected_scen_ids_step2)
for i, selected_scen in enumerate(selected_scen_ids_step2):
scen_ind = selected_scen_ids.index(selected_scen)
- selected_simus_in_scen_i = sorted(list(set(
- id_selected_simus[id_selected_scens==selected_scen])))
- sampled_ln_im_mr[i] = ln_im_mr[scen_ind]\
- [:,:,selected_simus_in_scen_i]
+ selected_simus_in_scen_i = sorted( # noqa: C414
+ list(set(id_selected_simus[id_selected_scens == selected_scen]))
+ )
+ sampled_ln_im_mr[i] = ln_im_mr[scen_ind][
+ :, :, selected_simus_in_scen_i
+ ]
sampled_mag_maf[i] = mag_maf[scen_ind]
ln_im_mr = sampled_ln_im_mr
mag_maf = sampled_mag_maf
-
-
# if event_info['SaveIM'] and ln_im_mr:
# print('HazardSimulation: saving simulated intensity measures.')
# _ = export_im(stations, im_list,
@@ -258,89 +354,121 @@ def hazard_job(hazard_info):
# print('HazardSimulation: simulated intensity measures saved.')
# else:
# print('HazardSimulation: IM is not required to saved or no IM is found.')
- #print(np.exp(ln_im_mr[0][0, :, 1]))
- #print(np.exp(ln_im_mr[0][1, :, 1]))
+ # print(np.exp(ln_im_mr[0][0, :, 1]))
+ # print(np.exp(ln_im_mr[0][1, :, 1]))
else:
- # TODO: extending this to other hazards
- print('HazardSimulation currently only supports earthquake simulations.')
- print('HazardSimulation: intensity measures computed.')
+ # TODO: extending this to other hazards # noqa: TD002
+ print('HazardSimulation currently only supports earthquake simulations.') # noqa: T201
+ print('HazardSimulation: intensity measures computed.') # noqa: T201
# Selecting ground motion records
if scenario_info['Type'] == 'Earthquake':
# Selecting records
- data_source = event_info.get('Database',0)
+ data_source = event_info.get('Database', 0)
if data_source:
- print('HazardSimulation: selecting ground motion records.')
+ print('HazardSimulation: selecting ground motion records.') # noqa: T201
sf_max = event_info['ScalingFactor']['Maximum']
sf_min = event_info['ScalingFactor']['Minimum']
start_time = time.time()
- gm_id, gm_file = select_ground_motion(im_list, ln_im_mr, data_source,
- sf_max, sf_min, output_dir, 'EventGrid.csv',
- stations, selected_scen_ids)
- print('HazardSimulation: ground motion records selected ({0} s).'.format(time.time() - start_time))
- #print(gm_id)
+ gm_id, gm_file = select_ground_motion( # noqa: F405
+ im_list,
+ ln_im_mr,
+ data_source,
+ sf_max,
+ sf_min,
+ output_dir,
+ 'EventGrid.csv',
+ stations,
+ selected_scen_ids,
+ )
+ print( # noqa: T201
+ f'HazardSimulation: ground motion records selected ({time.time() - start_time} s).'
+ )
+ # print(gm_id)
gm_id = [int(i) for i in np.unique(gm_id)]
- gm_file = [i for i in np.unique(gm_file)]
- runtag = output_all_ground_motion_info(gm_id, gm_file, output_dir, 'RecordsList.csv')
+ gm_file = [i for i in np.unique(gm_file)] # noqa: C416
+ runtag = output_all_ground_motion_info( # noqa: F405
+ gm_id, gm_file, output_dir, 'RecordsList.csv'
+ )
if runtag:
- print('HazardSimulation: the ground motion list saved.')
+ print('HazardSimulation: the ground motion list saved.') # noqa: T201
else:
- sys.exit('HazardSimulation: warning - issues with saving the ground motion list.')
+ sys.exit(
+ 'HazardSimulation: warning - issues with saving the ground motion list.'
+ )
# Downloading records
user_name = event_info.get('UserName', None)
user_password = event_info.get('UserPassword', None)
if (user_name is not None) and (user_password is not None) and (not R2D):
- print('HazardSimulation: downloading ground motion records.')
- raw_dir = download_ground_motion(gm_id, user_name,
- user_password, output_dir)
+ print('HazardSimulation: downloading ground motion records.') # noqa: T201
+ raw_dir = download_ground_motion( # noqa: F405
+ gm_id, user_name, user_password, output_dir
+ )
if raw_dir:
- print('HazardSimulation: ground motion records downloaded.')
+ print('HazardSimulation: ground motion records downloaded.') # noqa: T201
# Parsing records
- print('HazardSimulation: parsing records.')
- record_dir = parse_record(gm_file, raw_dir, output_dir,
- event_info['Database'],
- event_info['OutputFormat'])
- print('HazardSimulation: records parsed.')
+ print('HazardSimulation: parsing records.') # noqa: T201
+ record_dir = parse_record( # noqa: F405, F841
+ gm_file,
+ raw_dir,
+ output_dir,
+ event_info['Database'],
+ event_info['OutputFormat'],
+ )
+ print('HazardSimulation: records parsed.') # noqa: T201
else:
- print('HazardSimulation: No records to be parsed.')
+ print('HazardSimulation: No records to be parsed.') # noqa: T201
else:
- print('HazardSimulation: ground motion selection is not requested.')
+ print('HazardSimulation: ground motion selection is not requested.') # noqa: T201
gf_im_list = []
- if "GroundFailure" in hazard_info['Event'].keys():
- ground_failure_info = hazard_info['Event']["GroundFailure"]
- if "Liquefaction" in ground_failure_info.keys():
- import liquefaction
+ if 'GroundFailure' in hazard_info['Event'].keys(): # noqa: SIM118
+ ground_failure_info = hazard_info['Event']['GroundFailure']
+ if 'Liquefaction' in ground_failure_info.keys(): # noqa: SIM118
+ import liquefaction # noqa: PLC0415
+
trigging_info = ground_failure_info['Liquefaction']['Triggering']
- trigging_model = getattr(liquefaction, trigging_info['Model'])(\
- trigging_info["Parameters"], stations)
- trigging_output_keys = ["liq_prob", "liq_susc"]
- additional_output_required_keys = liquefaction.find_additional_output_req(
- ground_failure_info['Liquefaction'], "Triggering"
+ trigging_model = getattr(liquefaction, trigging_info['Model'])(
+ trigging_info['Parameters'], stations
+ )
+ trigging_output_keys = ['liq_prob', 'liq_susc']
+ additional_output_required_keys = (
+ liquefaction.find_additional_output_req(
+ ground_failure_info['Liquefaction'], 'Triggering'
+ )
)
ln_im_mr, mag_maf, im_list, addtional_output = trigging_model.run(
- ln_im_mr, mag_maf, im_list,
- trigging_output_keys, additional_output_required_keys)
+ ln_im_mr,
+ mag_maf,
+ im_list,
+ trigging_output_keys,
+ additional_output_required_keys,
+ )
del trigging_model
gf_im_list += trigging_info['Output']
- if 'LateralSpreading' in ground_failure_info['Liquefaction'].keys():
- lat_spread_info = ground_failure_info['Liquefaction']['LateralSpreading']
+ if 'LateralSpreading' in ground_failure_info['Liquefaction'].keys(): # noqa: SIM118
+ lat_spread_info = ground_failure_info['Liquefaction'][
+ 'LateralSpreading'
+ ]
lat_spread_para = lat_spread_info['Parameters']
- if (lat_spread_info['Model'] == 'Hazus2020Lateral') and \
- addtional_output.get('dist_to_water', None) is not None:
- lat_spread_para.update("DistWater", addtional_output["dist_to_water"])
+ if (
+ lat_spread_info['Model'] == 'Hazus2020Lateral'
+ ) and addtional_output.get('dist_to_water', None) is not None:
+ # dict.update expects a mapping, not two positional arguments
+ lat_spread_para.update(
+ {'DistWater': addtional_output['dist_to_water']}
+ )
lat_spread_model = getattr(liquefaction, lat_spread_info['Model'])(
stations, lat_spread_para
)
ln_im_mr, mag_maf, im_list = lat_spread_model.run(
- ln_im_mr, mag_maf, im_list
- )
+ ln_im_mr, mag_maf, im_list
+ )
gf_im_list += lat_spread_info['Output']
- if 'Settlement' in ground_failure_info['Liquefaction'].keys():
+ if 'Settlement' in ground_failure_info['Liquefaction'].keys(): # noqa: SIM118
settlement_info = ground_failure_info['Liquefaction']['Settlement']
settlement_model = getattr(liquefaction, settlement_info['Model'])()
ln_im_mr, mag_maf, im_list = settlement_model.run(
- ln_im_mr, mag_maf, im_list
- )
+ ln_im_mr, mag_maf, im_list
+ )
gf_im_list += settlement_info['Output']
if "Landslide" in ground_failure_info.keys():
import landslide
@@ -354,46 +482,56 @@ def hazard_job(hazard_info):
gf_im_list += lsld_info['Output']
-
-
if event_info['SaveIM'] and ln_im_mr:
- print('HazardSimulation: saving simulated intensity measures.')
- _ = export_im(stations, im_list, ln_im_mr, mag_maf, output_dir,\
- 'SiteIM.json', 1, gf_im_list, selected_scen_ids)
- print('HazardSimulation: simulated intensity measures saved.')
+ print('HazardSimulation: saving simulated intensity measures.') # noqa: T201
+ _ = export_im( # noqa: F405
+ stations,
+ im_list,
+ ln_im_mr,
+ mag_maf,
+ output_dir,
+ 'SiteIM.json',
+ 1,
+ gf_im_list,
+ selected_scen_ids,
+ )
+ print('HazardSimulation: simulated intensity measures saved.') # noqa: T201
else:
- print('HazardSimulation: IM is not required to saved or no IM is found.')
+ print('HazardSimulation: IM is not required to be saved or no IM is found.') # noqa: T201
# If hazard downsampling algorithm is used. Save the errors.
-if __name__ == '__main__':
+if __name__ == '__main__':
# parse arguments
parser = argparse.ArgumentParser()
parser.add_argument('--hazard_config')
args = parser.parse_args()
# read the hazard configuration file
- with open(args.hazard_config) as f:
+ with open(args.hazard_config) as f: # noqa: PLW1514, PTH123
hazard_info = json.load(f)
# directory (back compatibility here)
work_dir = hazard_info['Directory']
- input_dir = os.path.join(work_dir, "Input")
- output_dir = os.path.join(work_dir, "Output")
+ input_dir = os.path.join(work_dir, 'Input') # noqa: PTH118
+ output_dir = os.path.join(work_dir, 'Output') # noqa: PTH118
try:
- os.mkdir(f"{output_dir}")
- except:
- print('HazardSimulation: output folder already exists.')
+ os.mkdir(f'{output_dir}') # noqa: PTH102
+ except: # noqa: E722
+ print('HazardSimulation: output folder already exists.') # noqa: T201
# parse job type for set up environment and constants
try:
- opensha_flag = hazard_info['Scenario']['EqRupture']['Type'] in ['PointSource', 'ERF']
- except:
+ opensha_flag = hazard_info['Scenario']['EqRupture']['Type'] in [ # noqa: PLR6201
+ 'PointSource',
+ 'ERF',
+ ]
+ except: # noqa: E722
opensha_flag = False
try:
- oq_flag = hazard_info['Scenario']['EqRupture']['Type'] in ['oqSourceXML']
- except:
+ oq_flag = hazard_info['Scenario']['EqRupture']['Type'] == 'oqSourceXML'
+ except: # noqa: E722
oq_flag = False
# dependencies
@@ -403,49 +541,59 @@ def hazard_job(hazard_info):
packages = ['selenium', 'tqdm', 'psutil', 'PuLP', 'requests']
for p in packages:
if importlib.util.find_spec(p) is None:
- subprocess.check_call([sys.executable, "-m", "pip", "install", "-q", p])
+ subprocess.check_call([sys.executable, '-m', 'pip', 'install', '-q', p]) # noqa: S603
# set up environment
import socket
+
if 'stampede2' not in socket.gethostname():
if importlib.util.find_spec('jpype') is None:
- subprocess.check_call([sys.executable, "-m", "pip", "install", "JPype1"])
+ subprocess.check_call([sys.executable, '-m', 'pip', 'install', 'JPype1']) # noqa: S603
import jpype
- from jpype import imports
- from jpype.types import *
- memory_total = psutil.virtual_memory().total/(1024.**3)
- memory_request = int(memory_total*0.75)
+ from jpype.types import * # noqa: F403
+
+ memory_total = psutil.virtual_memory().total / (1024.0**3)
+ memory_request = int(memory_total * 0.75)
jpype.addClassPath('./lib/OpenSHA-1.5.2.jar')
try:
- jpype.startJVM("-Xmx{}G".format(memory_request), convertStrings=False)
- except:
- print(f"StartJVM of ./lib/OpenSHA-1.5.2.jar with {memory_request} GB Memory fails. Try again after releasing some memory")
+ jpype.startJVM(f'-Xmx{memory_request}G', convertStrings=False)
+ except: # noqa: E722
+ print( # noqa: T201
+ f'StartJVM of ./lib/OpenSHA-1.5.2.jar with {memory_request} GB Memory fails. Try again after releasing some memory'
+ )
if oq_flag:
# clear up old db.sqlite3 if any
- if os.path.isfile(os.path.expanduser('~/oqdata/db.sqlite3')):
+ if os.path.isfile(os.path.expanduser('~/oqdata/db.sqlite3')): # noqa: PTH111, PTH113
new_db_sqlite3 = True
try:
- os.remove(os.path.expanduser('~/oqdata/db.sqlite3'))
- except:
+ os.remove(os.path.expanduser('~/oqdata/db.sqlite3')) # noqa: PTH107, PTH111
+ except: # noqa: E722
new_db_sqlite3 = False
# data dir
- os.environ['OQ_DATADIR'] = os.path.join(os.path.abspath(output_dir), 'oqdata')
- print('HazardSimulation: local OQ_DATADIR = '+os.environ.get('OQ_DATADIR'))
- if os.path.exists(os.environ.get('OQ_DATADIR')):
- print('HazardSimulation: local OQ folder already exists, overwiting it now...')
+ os.environ['OQ_DATADIR'] = os.path.join( # noqa: PTH118
+ os.path.abspath(output_dir), # noqa: PTH100
+ 'oqdata',
+ )
+ print('HazardSimulation: local OQ_DATADIR = ' + os.environ.get('OQ_DATADIR')) # noqa: T201
+ if os.path.exists(os.environ.get('OQ_DATADIR')): # noqa: PTH110
+ print( # noqa: T201
+ 'HazardSimulation: local OQ folder already exists, overwriting it now...'
+ )
shutil.rmtree(os.environ.get('OQ_DATADIR'))
- os.makedirs(f"{os.environ.get('OQ_DATADIR')}")
+ os.makedirs(f"{os.environ.get('OQ_DATADIR')}") # noqa: PTH103
# import modules
- from CreateStation import *
- from CreateScenario import *
- from ComputeIntensityMeasure import *
- from SelectGroundMotion import *
+ from ComputeIntensityMeasure import * # noqa: F403
+ from CreateScenario import * # noqa: F403
+ from CreateStation import * # noqa: F403
+
# KZ-08/23/22: adding hazard occurrence model
- from HazardOccurrence import *
+ from HazardOccurrence import * # noqa: F403
+ from SelectGroundMotion import * # noqa: F403
+
if oq_flag:
# import FetchOpenQuake
- from FetchOpenQuake import *
+ from FetchOpenQuake import * # noqa: F403
# untar site databases
# site_database = ['global_vs30_4km.tar.gz','global_zTR_4km.tar.gz','thompson_vs30_4km.tar.gz']
@@ -456,9 +604,14 @@ def hazard_job(hazard_info):
# Initial process list
import psutil
- proc_list_init = [p.info for p in psutil.process_iter(attrs=['pid', 'name']) if 'python' in p.info['name']]
+
+ proc_list_init = [
+ p.info
+ for p in psutil.process_iter(attrs=['pid', 'name'])
+ if 'python' in p.info['name']
+ ]
hazard_job(hazard_info)
# Closing the current process
- sys.exit(0)
\ No newline at end of file
+ sys.exit(0)
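For readers tracing the hazard-occurrence downsampling above: the sampled ground motions are indexed flat, num_gm_per_site realizations per selected scenario, and the flat index is split back into a (scenario, realization) pair by integer division and remainder. A minimal sketch with hypothetical values (the ids and counts below are illustrative only, not taken from any configuration):

    import numpy as np

    selected_scen_ids = [3, 7, 12]    # hypothetical scenario ids kept in step 1
    num_gm_per_site = 4               # hypothetical realizations per scenario
    id_selected_gmms = [1, 5, 6, 10]  # hypothetical flat indices with P_gmm > 0

    # scenario of each kept ground motion: integer division by the per-scenario count
    id_selected_scens = np.array(
        [selected_scen_ids[x // num_gm_per_site] for x in id_selected_gmms]
    )
    # realization index within its scenario: the remainder
    id_selected_simus = np.array([x % num_gm_per_site for x in id_selected_gmms])
    # -> scenarios 3, 7, 7, 12 and realizations 1, 1, 2, 2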
diff --git a/modules/performRegionalEventSimulation/regionalGroundMotion/ScenarioForecast.py b/modules/performRegionalEventSimulation/regionalGroundMotion/ScenarioForecast.py
index a5a2d9faf..94fbca023 100644
--- a/modules/performRegionalEventSimulation/regionalGroundMotion/ScenarioForecast.py
+++ b/modules/performRegionalEventSimulation/regionalGroundMotion/ScenarioForecast.py
@@ -1,5 +1,4 @@
-# -*- coding: utf-8 -*-
-#
+# # noqa: INP001, D100
# Copyright (c) 2022 Leland Stanford Junior University
# Copyright (c) 2022 The Regents of the University of California
#
@@ -38,44 +37,49 @@
# Kuanshi Zhong
# Jinyan Zhao
-import os, shutil, psutil
-import sys
-import subprocess
-import argparse, posixpath, json
-import numpy as np
-import pandas as pd
-import time
+import argparse
import importlib
+import json
+import os
+import subprocess # noqa: S404
+import sys
import tarfile
-if __name__ == '__main__':
+import psutil
+if __name__ == '__main__':
# parse arguments
parser = argparse.ArgumentParser()
parser.add_argument('--hazard_config')
args = parser.parse_args()
# read the hazard configuration file
- with open(args.hazard_config) as f:
+ with open(args.hazard_config) as f: # noqa: PLW1514, PTH123
hazard_info = json.load(f)
# directory (back compatibility here)
work_dir = hazard_info['Directory']
- input_dir = os.path.join(work_dir, "Input")
- output_dir = os.path.join(work_dir, "Output")
+ input_dir = os.path.join(work_dir, 'Input') # noqa: PTH118
+ output_dir = os.path.join(work_dir, 'Output') # noqa: PTH118
try:
- os.mkdir(f"{output_dir}")
- except:
- print('HazardSimulation: output folder already exists.')
+ os.mkdir(f'{output_dir}') # noqa: PTH102
+ except: # noqa: E722
+ print('HazardSimulation: output folder already exists.') # noqa: T201
# parse job type for set up environment and constants
try:
- opensha_flag = hazard_info['Scenario']['EqRupture']['Type'] in ['PointSource', 'ERF']
- except:
+ opensha_flag = hazard_info['Scenario']['EqRupture']['Type'] in [ # noqa: PLR6201
+ 'PointSource',
+ 'ERF',
+ ]
+ except: # noqa: E722
opensha_flag = False
try:
- oq_flag = 'OpenQuake' in hazard_info['Scenario']['EqRupture']['Type'] or 'oqSourceXML' in hazard_info['Scenario']['EqRupture']['Type']
- except:
+ oq_flag = (
+ 'OpenQuake' in hazard_info['Scenario']['EqRupture']['Type']
+ or 'oqSourceXML' in hazard_info['Scenario']['EqRupture']['Type']
+ )
+ except: # noqa: E722
oq_flag = False
# dependencies
@@ -83,26 +87,30 @@
for p in packages:
if importlib.util.find_spec(p) is None:
# print(f"""The Python package {p} is required but not found.
- # Please install it by running
+ # Please install it by running
# "{sys.executable} -m pip install -q {p}"
# in your terminal or command prompt""")
- subprocess.check_call([sys.executable, "-m", "pip", "install", "-q", p])
+ subprocess.check_call([sys.executable, '-m', 'pip', 'install', '-q', p]) # noqa: S603
# set up environment
import socket
+
if 'stampede2' not in socket.gethostname():
if importlib.util.find_spec('jpype') is None:
- subprocess.check_call([sys.executable, "-m", "pip", "install", "JPype1"])
+ subprocess.check_call([sys.executable, '-m', 'pip', 'install', 'JPype1']) # noqa: S603
import jpype
- from jpype import imports
- from jpype.types import *
- memory_total = psutil.virtual_memory().total/(1024.**3)
- memory_request = int(memory_total*0.75)
+ from jpype.types import * # noqa: F403
+
+ memory_total = psutil.virtual_memory().total / (1024.0**3)
+ memory_request = int(memory_total * 0.75)
jpype.addClassPath('./lib/OpenSHA-1.5.2.jar')
- jpype.startJVM("-Xmx{}G".format(memory_request), convertStrings=False)
+ jpype.startJVM(f'-Xmx{memory_request}G', convertStrings=False)
+ from CreateScenario import (
+ create_earthquake_scenarios,
+ create_wind_scenarios,
+ load_earthquake_scenarios,
+ )
from CreateStation import create_stations
- from CreateScenario import load_earthquake_scenarios, create_earthquake_scenarios,\
- create_wind_scenarios
# if oq_flag:
# # clear up old db.sqlite3 if any
# if os.path.isfile(os.path.expanduser('~/oqdata/db.sqlite3')):
@@ -115,22 +123,26 @@
# os.environ['OQ_DATADIR'] = os.path.join(os.path.abspath(output_dir), 'oqdata')
# print('HazardSimulation: local OQ_DATADIR = '+os.environ.get('OQ_DATADIR'))
# if os.path.exists(os.environ.get('OQ_DATADIR')):
- # print('HazardSimulation: local OQ folder already exists, overwiting it now...')
+ # print('HazardSimulation: local OQ folder already exists, overwriting it now...')
# shutil.rmtree(os.environ.get('OQ_DATADIR'))
# os.makedirs(f"{os.environ.get('OQ_DATADIR')}")
if oq_flag:
# import FetchOpenQuake
- from FetchOpenQuake import *
+ from FetchOpenQuake import * # noqa: F403
# untar site databases
- site_database = ['global_vs30_4km.tar.gz','global_zTR_4km.tar.gz','thompson_vs30_4km.tar.gz']
- print('HazardSimulation: Extracting site databases.')
- cwd = os.path.dirname(os.path.realpath(__file__))
+ site_database = [
+ 'global_vs30_4km.tar.gz',
+ 'global_zTR_4km.tar.gz',
+ 'thompson_vs30_4km.tar.gz',
+ ]
+ print('HazardSimulation: Extracting site databases.') # noqa: T201
+ cwd = os.path.dirname(os.path.realpath(__file__)) # noqa: PTH120
for cur_database in site_database:
# subprocess.run(["tar","-xvzf",cwd+"/database/site/"+cur_database,"-C",cwd+"/database/site/"])
- tar = tarfile.open(cwd+"/database/site/"+cur_database, "r:gz")
- tar.extractall(cwd+"/database/site/")
+ tar = tarfile.open(cwd + '/database/site/' + cur_database, 'r:gz')
+ tar.extractall(cwd + '/database/site/') # noqa: S202
tar.close()
# # Initial process list
@@ -138,7 +150,7 @@
# proc_list_init = [p.info for p in psutil.process_iter(attrs=['pid', 'name']) if 'python' in p.info['name']]
# Sites and stations
- print('HazardSimulation: creating stations.')
+ print('HazardSimulation: creating stations.') # noqa: T201
site_info = hazard_info['Site']
z1_tag = 0
z25_tag = 0
@@ -146,47 +158,63 @@
z1_tag = 1
z25_tag = 1
if opensha_flag:
- z1_tag = 2 # interpolate from openSHA default database
- z25_tag = 2 # interpolate from openSHA default database
+ z1_tag = 2 # interpolate from openSHA default database
+ z25_tag = 2 # interpolate from openSHA default database
# openSHA database: https://github.com/opensha/opensha/blob/16aaf6892fe2a31b5e497270429b8d899098361a/src/main/java/org/opensha/commons/data/siteData/OrderedSiteDataProviderList.java
- site_info['Z1pt0'].update({'z1_tag':z1_tag})
- site_info['Z2pt5'].update({'z25_tag':z25_tag})
+ site_info['Z1pt0'].update({'z1_tag': z1_tag})
+ site_info['Z2pt5'].update({'z25_tag': z25_tag})
if site_info['Type'] == 'From_CSV':
- input_file = os.path.join(input_dir,site_info['input_file'])
- output_file = site_info.get('output_file',False)
+ input_file = os.path.join(input_dir, site_info['input_file']) # noqa: PTH118
+ output_file = site_info.get('output_file', False)
if output_file:
- output_file = os.path.join(input_dir, output_file)
- filter = site_info['filter']
+ output_file = os.path.join(input_dir, output_file) # noqa: PTH118
+ filter = site_info['filter'] # noqa: A001
# Creating stations from the csv input file
- stations = create_stations(input_file, output_file, filter,
- site_info['Vs30'], site_info['Z1pt0'], site_info['Z2pt5'])
+ stations = create_stations(
+ input_file,
+ output_file,
+ filter,
+ site_info['Vs30'],
+ site_info['Z1pt0'],
+ site_info['Z2pt5'],
+ )
else:
- print("""Only From_CSV site_info['Type'] is supported now""")
+ print("""Only From_CSV site_info['Type'] is supported now""") # noqa: T201
if stations:
- print('ScenarioForecast: stations created.')
+ print('ScenarioForecast: stations created.') # noqa: T201
else:
- print('HazardSimulation: please check the "Input" directory in the configuration json file.')
- exit()
-
+ print( # noqa: T201
+ 'HazardSimulation: please check the "Input" directory in the configuration json file.'
+ )
+ exit() # noqa: PLR1722
+
# Scenarios
- print('HazardSimulation: creating scenarios.')
+ print('HazardSimulation: creating scenarios.') # noqa: T201
scenario_info = hazard_info['Scenario']
if scenario_info['Type'] == 'Earthquake':
# KZ-10/31/2022: checking user-provided scenarios
- user_scenarios = scenario_info.get('EqRupture').get('UserScenarioFile', False)
+ user_scenarios = scenario_info.get('EqRupture').get(
+ 'UserScenarioFile', False
+ )
if user_scenarios:
load_earthquake_scenarios(scenario_info, stations, input_dir)
# Creating earthquake scenarios
- elif scenario_info['EqRupture']['Type'] in ['PointSource', 'ERF', 'oqSourceXML']:
- create_earthquake_scenarios(scenario_info, stations, work_dir, hazard_info['Site']['output_file'])
+ elif scenario_info['EqRupture']['Type'] in [ # noqa: PLR6201
+ 'PointSource',
+ 'ERF',
+ 'oqSourceXML',
+ ]:
+ create_earthquake_scenarios(
+ scenario_info, stations, work_dir, hazard_info['Site']['output_file']
+ )
elif scenario_info['Type'] == 'Wind':
# Creating wind scenarios
create_wind_scenarios(scenario_info, stations, input_dir)
else:
- # TODO: extending this to other hazards
- print('HazardSimulation: currently only supports EQ and Wind simulations.')
- #print(scenarios)
- print('HazardSimulation: scenarios created.')
+ # TODO: extending this to other hazards # noqa: TD002
+ print('HazardSimulation: currently only supports EQ and Wind simulations.') # noqa: T201
+ # print(scenarios)
+ print('HazardSimulation: scenarios created.') # noqa: T201
# Closing the current process
- sys.exit(0)
\ No newline at end of file
+ sys.exit(0)
diff --git a/modules/performRegionalEventSimulation/regionalGroundMotion/SelectGroundMotion.py b/modules/performRegionalEventSimulation/regionalGroundMotion/SelectGroundMotion.py
index 5180e788d..c7f5f79a6 100644
--- a/modules/performRegionalEventSimulation/regionalGroundMotion/SelectGroundMotion.py
+++ b/modules/performRegionalEventSimulation/regionalGroundMotion/SelectGroundMotion.py
@@ -1,5 +1,4 @@
-# -*- coding: utf-8 -*-
-#
+# # noqa: INP001, D100
# Copyright (c) 2018 Leland Stanford Junior University
# Copyright (c) 2018 The Regents of the University of California
#
@@ -39,35 +38,34 @@
#
import os
-import subprocess
-import time
-import glob
-import re
-import shutil
import sys
from pathlib import Path
+
R2D = True
if not R2D:
- from selenium import webdriver
-import json
-import random
-import numpy as np
-import pandas as pd
-import zipfile
-import csv
-import copy
+ pass
+import copy # noqa: E402
+import csv # noqa: E402
+import numpy as np # noqa: E402
+import pandas as pd # noqa: E402
-class GM_Selector:
-
- def __init__(self, gmdb_im_df=dict(), num_records=1, sf_min=None, sf_max=None, target_im=None):
+class GM_Selector: # noqa: D101
+ def __init__(
+ self,
+ gmdb_im_df=dict(), # noqa: B006, C408
+ num_records=1,
+ sf_min=None,
+ sf_max=None,
+ target_im=None,
+ ):
self.set_gmdb_im_df(gmdb_im_df)
self.set_num_records(num_records)
self.set_sf_range(sf_min, sf_max)
self.set_target_im(target_im)
- def set_gmdb_im_df(self, gmdb_im_df):
+ def set_gmdb_im_df(self, gmdb_im_df): # noqa: D102
self.gmdb_im_df = gmdb_im_df
self.num_gm = len(gmdb_im_df['RSN'])
tmp_list = list(gmdb_im_df.keys())
@@ -81,10 +79,10 @@ def set_gmdb_im_df(self, gmdb_im_df):
tmp_scalable.append(1)
self.scalable = tmp_scalable
- def set_num_records(self, num_records):
+ def set_num_records(self, num_records): # noqa: D102
self.num_records = num_records
- def set_sf_range(self, sf_min, sf_max):
+ def set_sf_range(self, sf_min, sf_max): # noqa: D102
if sf_min is None:
self.sf_min = 0.0001
else:
@@ -95,19 +93,20 @@ def set_sf_range(self, sf_min, sf_max):
self.sf_max = sf_max
self.sf_range = np.linspace(self.sf_min, self.sf_max, 100)
- def set_target_im(self, target_im):
+ def set_target_im(self, target_im): # noqa: D102
self.target_im = [target_im for k in range(self.num_gm)]
- def select_records(self):
-
- im_table = self.gmdb_im_df.iloc[:,1:]
+ def select_records(self): # noqa: D102
+ im_table = self.gmdb_im_df.iloc[:, 1:]
min_err = 1000000.0
for s in self.sf_range:
cur_im_table = copy.copy(im_table)
for i in range(cur_im_table.shape[1]):
if self.scalable[i]:
- cur_im_table.iloc[:,i] = cur_im_table.iloc[:,i]*s
- err = np.linalg.norm(np.exp(self.target_im) - cur_im_table.to_numpy(), axis = 1)
+ cur_im_table.iloc[:, i] = cur_im_table.iloc[:, i] * s # noqa: PLR6104
+ err = np.linalg.norm(
+ np.exp(self.target_im) - cur_im_table.to_numpy(), axis=1
+ )
if np.min(err) < min_err:
min_err = np.min(err)
tmp_tag = err.argmin()
@@ -115,142 +114,185 @@ def select_records(self):
self.loc_tag = tmp_tag
self.min_err = min_err
- self.rsn_tag = self.gmdb_im_df['RSN'].values.tolist()[tmp_tag]
+ self.rsn_tag = self.gmdb_im_df['RSN'].values.tolist()[tmp_tag] # noqa: PD011
self.sf = sf
-def select_ground_motion(im_list, target_ln_im, gmdb_file, sf_max, sf_min,
- output_dir, output_file, stations, eq_ids):
-
+def select_ground_motion( # noqa: C901, D103
+ im_list,
+ target_ln_im,
+ gmdb_file,
+ sf_max,
+ sf_min,
+ output_dir,
+ output_file,
+ stations,
+ eq_ids,
+):
# Loading gmdb
if gmdb_file == 'PEER NGA West 2':
- cwd = os.path.dirname(os.path.realpath(__file__))
- gmdb = pd.read_csv(cwd+'/database/gmdb/NGAWest2.csv', header = 0, index_col = None, low_memory=False)
+ cwd = os.path.dirname(os.path.realpath(__file__)) # noqa: PTH120
+ gmdb = pd.read_csv(
+ cwd + '/database/gmdb/NGAWest2.csv',
+ header=0,
+ index_col=None,
+ low_memory=False,
+ )
# Parsing spectral data
num_gm = len(gmdb['RecId'])
tmp = gmdb.keys()[37:147]
- T_db = [float(a.replace('T','').replace('S','')) for a in tmp]
+ T_db = [float(a.replace('T', '').replace('S', '')) for a in tmp] # noqa: N806
psa_db = gmdb.iloc[:, 37:147]
- pga = gmdb.iloc[:, 34]
- pgv = gmdb.iloc[:, 35]
- pgd = gmdb.iloc[:, 36]
+ pga = gmdb.iloc[:, 34] # noqa: F841
+ pgv = gmdb.iloc[:, 35] # noqa: F841
+ pgd = gmdb.iloc[:, 36] # noqa: F841
# Scaling factors
- sf_range = np.linspace(sf_min, sf_max, 100)
+ sf_range = np.linspace(sf_min, sf_max, 100) # noqa: F841
# Selected ground motion ID
gm_id = []
sf_data = []
filename = []
# get available key names
# Parese im_list
- target_period = []
- im_map = {"PGA": 34,
- "PGV": 35,
- "PGD": 36,
- "DS575H": 151,
- "DS595H": 152}
+ target_period = [] # noqa: F841
+ im_map = {'PGA': 34, 'PGV': 35, 'PGD': 36, 'DS575H': 151, 'DS595H': 152}
im_loc_tag = []
- gmdb_im_dict = dict()
- gmdb_im_dict.update({'RSN':gmdb['RecId'].values.tolist()})
+ gmdb_im_dict = dict() # noqa: C408
+ gmdb_im_dict.update({'RSN': gmdb['RecId'].values.tolist()}) # noqa: PD011
for cur_im in im_list:
if cur_im.startswith('SA'):
cur_period = float(cur_im[3:-1])
- gmdb_im_dict.update({cur_im:[np.interp(cur_period, T_db, psa_db.iloc[k, :]) for k in range(num_gm)]})
+ gmdb_im_dict.update(
+ {
+ cur_im: [
+ np.interp(cur_period, T_db, psa_db.iloc[k, :])
+ for k in range(num_gm)
+ ]
+ }
+ )
else:
- im_loc_tag.append(im_map.get(cur_im, None))
- gmdb_im_dict.update({cur_im:[x[0] for x in gmdb.iloc[:, im_loc_tag].values.tolist()]})
+ im_loc_tag.append(im_map.get(cur_im))
+ gmdb_im_dict.update(
+ {
+ cur_im: [
+ x[0]
+ for x in gmdb.iloc[:, im_loc_tag].values.tolist() # noqa: PD011
+ ]
+ }
+ )
# ground motion database intensity measure data frame
gmdb_im_df = pd.DataFrame.from_dict(gmdb_im_dict)
count = 0
# Looping over all scenarios
for cur_target in target_ln_im:
tmp_scen = eq_ids[count] + 1
- count = count + 1
- print('-Scenario #'+str(tmp_scen))
- num_stations, num_periods, num_simu = cur_target.shape
+ count = count + 1 # noqa: PLR6104
+ print('-Scenario #' + str(tmp_scen)) # noqa: T201
+ num_stations, num_periods, num_simu = cur_target.shape # noqa: F841
tmp_id = np.zeros((num_stations, num_simu))
tmp_sf = np.zeros((num_stations, num_simu))
tmp_min_err = np.zeros((num_stations, num_simu))
tmp_filename = []
for i in range(num_simu):
- print('--Realization #'+str(i+1))
+ print('--Realization #' + str(i + 1)) # noqa: T201
for j in range(num_stations):
# create a ground motion selector
- gm_selector = GM_Selector(gmdb_im_df=gmdb_im_df, num_records=1, sf_min=sf_min, sf_max=sf_max, target_im=cur_target[j,:,i])
+ gm_selector = GM_Selector(
+ gmdb_im_df=gmdb_im_df,
+ num_records=1,
+ sf_min=sf_min,
+ sf_max=sf_max,
+ target_im=cur_target[j, :, i],
+ )
# select records
gm_selector.select_records()
# collect results
tmp_min_err[j, i] = gm_selector.min_err
tmp_id[j, i] = int(gmdb['RecId'][gm_selector.loc_tag])
tmp_sf[j, i] = gm_selector.sf
- tmp_filename.append('RSN'+str(int(tmp_id[j,i]))+'_'+gmdb['FileNameHorizontal1'][gm_selector.loc_tag].replace("\\","_").replace("/","_"))
- tmp_filename.append('RSN'+str(int(tmp_id[j,i]))+'_'+gmdb['FileNameHorizontal2'][gm_selector.loc_tag].replace("\\","_").replace("/","_"))
- #print('---Station #'+str(j+1))
+ tmp_filename.append( # noqa: FURB113
+ 'RSN'
+ + str(int(tmp_id[j, i]))
+ + '_'
+ + gmdb['FileNameHorizontal1'][gm_selector.loc_tag]
+ .replace('\\', '_')
+ .replace('/', '_')
+ )
+ tmp_filename.append(
+ 'RSN'
+ + str(int(tmp_id[j, i]))
+ + '_'
+ + gmdb['FileNameHorizontal2'][gm_selector.loc_tag]
+ .replace('\\', '_')
+ .replace('/', '_')
+ )
+ # print('---Station #'+str(j+1))
# Collecting results in one scenario
gm_id.append(tmp_id)
sf_data.append(tmp_sf)
filename.extend(tmp_filename)
- #print(tmp_min_err)
+ # print(tmp_min_err)
else:
sys.exit('SelectGroundMotion: currently only supporting NGAWest2.')
-
# output data
- station_name = ['site'+str(j)+'.csv' for j in range(len(stations))]
+ station_name = ['site' + str(j) + '.csv' for j in range(len(stations))]
lat = [stations[j]['lat'] for j in range(len(stations))]
lon = [stations[j]['lon'] for j in range(len(stations))]
# vs30 = [stations[j]['vs30'] for j in range(len(stations))]
# DepthToRock is not used in NGA-West2 GMPEs and is not saved
# zTR = [stations[j]['DepthToRock'] for j in range(len(stations))]
- df = pd.DataFrame({
- 'GP_file': station_name,
- 'Longitude': lon,
- 'Latitude': lat
- # 'Vs30': vs30,
- # DepthToRock is not used in NGA-West2 GMPEs and is not saved
- # 'DepthToRock': zTR
- })
- output_dir = os.path.join(os.path.dirname(Path(output_dir)),
- os.path.basename(Path(output_dir)))
- df.to_csv(os.path.join(output_dir, output_file), index = False)
+ df = pd.DataFrame( # noqa: PD901
+ {
+ 'GP_file': station_name,
+ 'Longitude': lon,
+ 'Latitude': lat,
+ # 'Vs30': vs30,
+ # DepthToRock is not used in NGA-West2 GMPEs and is not saved
+ # 'DepthToRock': zTR
+ }
+ )
+ output_dir = os.path.join( # noqa: PTH118
+ os.path.dirname(Path(output_dir)), # noqa: PTH120
+ os.path.basename(Path(output_dir)), # noqa: PTH119
+ )
+ df.to_csv(os.path.join(output_dir, output_file), index=False) # noqa: PTH118
for cur_scen in range(len(gm_id)):
if len(gm_id) > 1:
- cur_scen_folder = 'scenario'+str(eq_ids[cur_scen]+1)
+ cur_scen_folder = 'scenario' + str(eq_ids[cur_scen] + 1)
try:
- os.mkdir(os.path.join(output_dir, cur_scen_folder))
- except:
- print('SelectGroundMotion: scenario folder already exists.')
- cur_output_dir = os.path.join(output_dir, cur_scen_folder)
+ os.mkdir(os.path.join(output_dir, cur_scen_folder)) # noqa: PTH102, PTH118
+ except: # noqa: E722
+ print('SelectGroundMotion: scenario folder already exists.') # noqa: T201
+ cur_output_dir = os.path.join(output_dir, cur_scen_folder) # noqa: PTH118
else:
cur_output_dir = output_dir
for i, site_id in enumerate(station_name):
- gm_file = ['RSN'+str(int(j)) for j in gm_id[cur_scen][i]]
- factor = [j for j in sf_data[cur_scen][i]]
- df = pd.DataFrame({
- 'TH_file': gm_file,
- 'factor': factor
- })
- df.to_csv(os.path.join(cur_output_dir, site_id), index = False)
+ gm_file = ['RSN' + str(int(j)) for j in gm_id[cur_scen][i]]
+ factor = [j for j in sf_data[cur_scen][i]] # noqa: C416
+ df = pd.DataFrame({'TH_file': gm_file, 'factor': factor}) # noqa: PD901
+ df.to_csv(os.path.join(cur_output_dir, site_id), index=False) # noqa: PTH118
# return
return gm_id, filename
-def output_all_ground_motion_info(gm_id, gm_file, output_dir, filename):
-
+def output_all_ground_motion_info(gm_id, gm_file, output_dir, filename): # noqa: D103
# Writing all record names to a csv file
- print(gm_file)
+ print(gm_file) # noqa: T201
try:
- with open(os.path.join(output_dir, filename), 'w') as f:
+ with open(os.path.join(output_dir, filename), 'w') as f: # noqa: PLW1514, PTH118, PTH123
w = csv.writer(f)
if gm_file:
w.writerow(gm_file)
- with open(os.path.join(output_dir, 'RSN.csv'), 'w') as f:
+ with open(os.path.join(output_dir, 'RSN.csv'), 'w') as f: # noqa: PLW1514, PTH118, PTH123
w = csv.writer(f)
if gm_id:
w.writerow(gm_id)
- return 1
- except:
+ return 1 # noqa: TRY300
+ except: # noqa: E722
return 0
+
""" Uncommenting below if use this tool alone to download records from PEER
def download_ground_motion(gm_id, user_name, user_password, output_dir, spectra_only=False):
@@ -265,7 +307,7 @@ def download_ground_motion(gm_id, user_name, user_password, output_dir, spectra_
chromedriver = os.path.dirname(__file__) + '/bin/chromedriver/chromedriver_mac'
os.chmod(chromedriver, 755)
else:
- print('Currently supoorting win32, linux, and mac.')
+ print('Currently supporting win32, linux, and mac.')
chromeOptions = webdriver.ChromeOptions()
output_dir = os.path.join(os.path.dirname(Path(output_dir)),
os.path.basename(Path(output_dir)))
@@ -336,7 +378,7 @@ def readNGAWest2record(ngaW2FilePath):
series.extend([float(value) for value in line.split()])
elif("NPTS=" in line):
# sampling rate
- dt = float(re.match(r"NPTS=.+, DT=\s+([0-9\.]+)\s+SEC", line).group(1))
+ dt = float(re.match(r"NPTS=.+, DT=\\s+([0-9\\.]+)\\s+SEC", line).group(1))
data_flag = True
# return
return series, dt
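The record selection in this file is a brute-force grid search over candidate scaling factors: each scalable IM column of the database is scaled, the L2 distance to the target exp(ln IM) vector is computed, and the record/factor pair with the smallest error is kept. A rough, self-contained sketch of that search (the array values are made up; the real GM_Selector scales only columns flagged as scalable and reads its table from NGAWest2.csv):

    import numpy as np

    target_ln_im = np.array([0.1, -0.2])      # hypothetical ln(IM) target at one site
    db_im = np.array([[0.8, 0.6],             # hypothetical database IM table
                      [1.2, 0.9],             # (rows = records, cols = IMs)
                      [0.4, 0.3]])
    sf_range = np.linspace(0.5, 2.0, 100)     # candidate scaling factors

    best_err, best_rec, best_sf = np.inf, None, None
    for s in sf_range:
        err = np.linalg.norm(np.exp(target_ln_im) - db_im * s, axis=1)
        if err.min() < best_err:
            best_err, best_rec, best_sf = err.min(), int(err.argmin()), s
    # best_rec indexes the selected record; best_sf is its scaling factor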
diff --git a/modules/performRegionalEventSimulation/regionalGroundMotion/USGS_API.py b/modules/performRegionalEventSimulation/regionalGroundMotion/USGS_API.py
index 5b7d496e9..6afe4846c 100644
--- a/modules/performRegionalEventSimulation/regionalGroundMotion/USGS_API.py
+++ b/modules/performRegionalEventSimulation/regionalGroundMotion/USGS_API.py
@@ -1,5 +1,4 @@
-# -*- coding: utf-8 -*-
-#
+# # noqa: INP001, D100
# Copyright (c) 2022 Leland Stanford Junior University
# Copyright (c) 2022 The Regents of the University of California
#
@@ -38,227 +37,256 @@
# Kuanshi Zhong
#
-import os, requests, json
-import numpy as np
-
-class USGS_HazardCurve:
-
- def __init__(self,
- longitude=None,
- latitude=None,
- vs30=None,
- edition='E2014',
- imt='PGA',
- tag=None):
+import json
+import os
+import numpy as np
+import requests
+
+
+class USGS_HazardCurve: # noqa: D101
+ def __init__(
+ self,
+ longitude=None,
+ latitude=None,
+ vs30=None,
+ edition='E2014',
+ imt='PGA',
+ tag=None,
+ ):
if self._load_config():
- print('USGS_HazardCurve.__init__: configuration loaded.')
+ print('USGS_HazardCurve.__init__: configuration loaded.') # noqa: T201
else:
- print('USGS_HazardCurve.__init__: error in loading configuration file.')
+ print('USGS_HazardCurve.__init__: error in loading configuration file.') # noqa: T201
return
if self._check_edition(edition):
self.edition = self._check_edition(edition)
else:
- print('USGS_HazardCurve.__init__: edition {} is not supported by USGS.'.format(edition))
+ print( # noqa: T201
+ f'USGS_HazardCurve.__init__: edition {edition} is not supported by USGS.'
+ )
return
- query_region = self._get_region(longitude,latitude)
+ query_region = self._get_region(longitude, latitude)
if query_region is None:
- print('USGS_HazardCurve.__init__: site (lon, lat) = ({},{}) is not supported.'.format(longitude,latitude))
+ print( # noqa: T201
+ f'USGS_HazardCurve.__init__: site (lon, lat) = ({longitude},{latitude}) is not supported.'
+ )
return
- else:
+ else: # noqa: RET505
self.longitude = longitude
self.latitude = latitude
self.region = query_region
- print('USGS_HazardCurve.__init__: site (lon, lat) = ({},{}) is found in USGS region {}.'.format(longitude,latitude,self.region))
-
+ print( # noqa: T201
+ f'USGS_HazardCurve.__init__: site (lon, lat) = ({longitude},{latitude}) is found in USGS region {self.region}.'
+ )
+
if self._check_region(self.region):
- print('USGS_HazardCurve.__init__: region {} is set up.'.format(self.region))
+ print(f'USGS_HazardCurve.__init__: region {self.region} is set up.') # noqa: T201
else:
- print('USGS_HazardCurve.__init__: region {} is not supported by edition {}.'.format(self.region,self.edition))
+ print( # noqa: T201
+ f'USGS_HazardCurve.__init__: region {self.region} is not supported by edition {self.edition}.'
+ )
return
if self._check_vs30(vs30):
self.vs30 = self._check_vs30(vs30)
else:
- print('USGS_HazardCurve.__init__: vs30 {} is not supported by edition {} and reigon {}.'.format(vs30,self.edition,self.region))
+ print( # noqa: T201
+ f'USGS_HazardCurve.__init__: vs30 {vs30} is not supported by edition {self.edition} and region {self.region}.'
+ )
return
if self._check_imt(imt):
self.imt = imt
else:
- print('USGS_HazardCurve.__init__: imt {} is not supported.'.format(imt))
+ print(f'USGS_HazardCurve.__init__: imt {imt} is not supported.') # noqa: T201
return
-
+
self.tag = tag
# return
- print('USGS_HazardCurve.__init__: configuration done.')
+ print('USGS_HazardCurve.__init__: configuration done.') # noqa: T201
return
def _load_config(self):
-
- cur_path = os.path.dirname(os.path.abspath(__file__))
- config_file = os.path.join(cur_path,'lib','USGS_HazardCurveConfig.json')
+ cur_path = os.path.dirname(os.path.abspath(__file__)) # noqa: PTH100, PTH120
+ config_file = os.path.join(cur_path, 'lib', 'USGS_HazardCurveConfig.json') # noqa: PTH118
try:
- with open(config_file,'r') as f:
+ with open(config_file) as f: # noqa: PLW1514, PTH123
self.config = json.load(f)
- return True
- except:
+ return True # noqa: TRY300
+ except: # noqa: E722
self.config = {}
return False
- def _check_edition(self, edition, auto_correction=True):
-
+ def _check_edition(self, edition, auto_correction=True): # noqa: FBT002
# available editions
ed_list = self.config.get('parameters').get('edition').get('values')
self.avail_editions = [x.get('value') for x in ed_list]
- print('USGS_HazardCurve._check_edition: available editions: {}'.format(self.avail_editions))
+ print( # noqa: T201
+ f'USGS_HazardCurve._check_edition: available editions: {self.avail_editions}'
+ )
# check
if edition in self.avail_editions:
return edition
+ elif auto_correction: # noqa: RET505
+ edition = self.avail_editions[0]
+ return edition # noqa: RET504
else:
- if auto_correction:
- edition = self.avail_editions[0]
- return edition
- else:
- return False
+ return False
def _get_region(self, long, lat):
-
- self.all_regions = [x['value'] for x in self.config.get('parameters').get('region').get('values')]
- for i in range(len(self.config.get('parameters').get('region').get('values'))):
+ self.all_regions = [
+ x['value']
+ for x in self.config.get('parameters').get('region').get('values')
+ ]
+ for i in range(
+ len(self.config.get('parameters').get('region').get('values'))
+ ):
cur_region = self.config.get('parameters').get('region').get('values')[i]
- if long >= cur_region.get('minlongitude') and long <= cur_region.get('maxlongitude'):
- if lat >= cur_region.get('minlatitude') and lat <= cur_region.get('maxlatitude'):
+ if long >= cur_region.get('minlongitude') and long <= cur_region.get(
+ 'maxlongitude'
+ ):
+ if lat >= cur_region.get('minlatitude') and lat <= cur_region.get(
+ 'maxlatitude'
+ ):
return self.all_regions[i]
# return empty
return None
def _check_region(self, region):
-
# available regions
- self.avail_regions = self.config.get('parameters').get('edition').get('values')[self.avail_editions.index(self.edition)].get('supports').get('region')
-
+ self.avail_regions = (
+ self.config.get('parameters')
+ .get('edition')
+ .get('values')[self.avail_editions.index(self.edition)]
+ .get('supports')
+ .get('region')
+ )
+
# check
- if region in self.avail_regions:
+ if region in self.avail_regions: # noqa: SIM103
return True
- else:
+ else: # noqa: RET505
return False
def _check_vs30(self, vs30):
-
# get edition supported vs30
- vs30_avail_ed = [int(x) for x in self.config.get('parameters').get('edition').get('values')[self.avail_editions.index(self.edition)].get('supports').get('vs30')]
+ vs30_avail_ed = [
+ int(x)
+ for x in self.config.get('parameters')
+ .get('edition')
+ .get('values')[self.avail_editions.index(self.edition)]
+ .get('supports')
+ .get('vs30')
+ ]
# get region supported vs30
- #vs30_avail_rg = [int(x) for x in self.config.get('parameters').get('region').get('values')[self.avail_regions.index(self.region)].get('supports').get('vs30')]
+ # vs30_avail_rg = [int(x) for x in self.config.get('parameters').get('region').get('values')[self.avail_regions.index(self.region)].get('supports').get('vs30')]
vs30_avail_all = vs30_avail_ed
- vs30_id = np.argmin(np.abs([vs30-x for x in vs30_avail_all]))
+ vs30_id = np.argmin(np.abs([vs30 - x for x in vs30_avail_all]))
return str(vs30_avail_all[vs30_id])
return False
def _check_imt(self, imt):
-
# get supported imt:
- imt_available = self.config.get('parameters').get('region').get('values')[self.avail_regions.index(self.region)].get('supports').get('imt')
+ imt_available = (
+ self.config.get('parameters')
+ .get('region')
+ .get('values')[self.avail_regions.index(self.region)]
+ .get('supports')
+ .get('imt')
+ )
# get period in a double list:
- period_available = [float(x.replace('P','.')[2:]) for x in imt_available if x.startswith('SA')]
- print('Periods available = ',period_available)
+ period_available = [
+ float(x.replace('P', '.')[2:])
+ for x in imt_available
+ if x.startswith('SA')
+ ]
+ print('Periods available = ', period_available) # noqa: T201
if imt in imt_available:
self.imt_list = [imt]
return True
- else:
- cur_period = float(imt.replace('P','.')[2:])
- if cur_period < np.min(period_available) or cur_period > np.max(period_available):
+ else: # noqa: RET505
+ cur_period = float(imt.replace('P', '.')[2:])
+ if cur_period < np.min(period_available) or cur_period > np.max(
+ period_available
+ ):
return False
- else:
+ else: # noqa: RET505
# interpolate periods
self.period_list = []
for i, p in enumerate(period_available):
if p > cur_period:
- self.period_list.append(period_available[i-1])
+ self.period_list.append(period_available[i - 1])
self.period_list.append(p)
break
- self.imt_list = ['SA'+str(x).replace('.','P') for x in self.period_list]
- #print('self.imt_list = ',self.imt_list)
+ self.imt_list = [
+ 'SA' + str(x).replace('.', 'P') for x in self.period_list
+ ]
+ # print('self.imt_list = ',self.imt_list)
return True
-
-
- def fetch_url(self):
+ def fetch_url(self): # noqa: D102
self.res_json = []
-
- for cur_imt in self.imt_list:
+ for cur_imt in self.imt_list:
# set url
- usgs_url = 'https://earthquake.usgs.gov/nshmp-haz-ws/hazard/{}/{}/{}/{}/{}/{}'.format(self.edition,
- self.region,
- self.longitude,
- self.latitude,
- cur_imt,
- self.vs30)
-
- print('USGS_HazardCurve.fetch_url: {}.\n'.format(usgs_url))
+ usgs_url = f'https://earthquake.usgs.gov/nshmp-haz-ws/hazard/{self.edition}/{self.region}/{self.longitude}/{self.latitude}/{cur_imt}/{self.vs30}'
+
+ print(f'USGS_HazardCurve.fetch_url: {usgs_url}.\n') # noqa: T201
# request
- res = requests.get(usgs_url)
- if res.status_code == 200:
+ res = requests.get(usgs_url) # noqa: S113
+ if res.status_code == 200: # noqa: PLR2004
self.res_json.append(res.json())
- #print('USGS_HazardCurve.fetch_url: {}'.format(self.res_json))
+ # print('USGS_HazardCurve.fetch_url: {}'.format(self.res_json))
else:
# try 10 more times to overcome the api traffic issue
- for i in range(10):
- res = requests.get(usgs_url)
- if res.status_code == 200:
+ for i in range(10): # noqa: B007
+ res = requests.get(usgs_url) # noqa: S113
+ if res.status_code == 200: # noqa: PLR2004
self.res_json.append(res.json())
return True
- else:
- self.res_json.append(None)
- print('USGS_HazardCurve.fetch_url: cannot get the data')
- return False
-
- return True
-
-
+ self.res_json.append(None)
+ print('USGS_HazardCurve.fetch_url: cannot get the data') # noqa: T201
+ return False
- def get_hazard_curve(self):
+ return True
+ def get_hazard_curve(self): # noqa: D102
cur_ims = []
cur_mafs = []
cur_rps = []
-
+
for cur_res_json in self.res_json:
-
tmp_x = cur_res_json.get('response')[0].get('metadata').get('xvalues')
tmp_y = cur_res_json.get('response')[0].get('data')[0].get('yvalues')
- cur_ims.append([tmp_x[i] for i in range(len(tmp_x)) if tmp_y[i]>0])
+ cur_ims.append([tmp_x[i] for i in range(len(tmp_x)) if tmp_y[i] > 0])
cur_mafs.append([x for x in tmp_y if x > 0])
- cur_rps.append([1.0/x for x in cur_mafs[-1]])
+ cur_rps.append([1.0 / x for x in cur_mafs[-1]])
- if len(self.res_json)==1:
+ if len(self.res_json) == 1:
self.ims = cur_ims[0]
self.mafs = cur_mafs[0]
self.rps = cur_rps[0]
else:
- num_levels = np.min([len(cur_mafs[0]),len(cur_mafs[1])])
+ num_levels = np.min([len(cur_mafs[0]), len(cur_mafs[1])])
self.ims = cur_ims[0][0:num_levels]
- self.mafs = [np.interp(self.imt.replace('P','.')[2:],self.period_list,[cur_mafs[0][x],cur_mafs[1][x]]) for x in range(num_levels)]
+ self.mafs = [
+ np.interp(
+ self.imt.replace('P', '.')[2:],
+ self.period_list,
+ [cur_mafs[0][x], cur_mafs[1][x]],
+ )
+ for x in range(num_levels)
+ ]
self.mafs = [x for x in self.mafs if x > 0]
- self.rps = [1.0/x for x in self.mafs]
-
- dict_hc = {
- "SiteID": self.tag,
- "ReturnPeriod": self.rps,
- "IM": self.ims
- }
-
- return dict_hc
-
-
-
-
+ self.rps = [1.0 / x for x in self.mafs]
+ dict_hc = {'SiteID': self.tag, 'ReturnPeriod': self.rps, 'IM': self.ims}
+ return dict_hc # noqa: RET504
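The class above builds its queries against the legacy nshmp-haz-ws hazard endpoint and reads the xvalues/yvalues arrays out of the JSON response. A minimal stand-alone sketch of that request, keeping the same URL pattern and response fields used in fetch_url and get_hazard_curve; the endpoint's continued availability, and the parameter values below, are assumptions for illustration:

    import requests

    edition, region = 'E2014', 'WUS'          # illustrative choices
    lon, lat, imt, vs30 = -122.25, 37.87, 'PGA', '760'
    url = (f'https://earthquake.usgs.gov/nshmp-haz-ws/hazard/'
           f'{edition}/{region}/{lon}/{lat}/{imt}/{vs30}')
    res = requests.get(url, timeout=30)
    if res.status_code == 200:
        data = res.json()
        ims = data['response'][0]['metadata']['xvalues']   # IM levels
        mafs = data['response'][0]['data'][0]['yvalues']   # mean annual frequencies
        rps = [1.0 / y for y in mafs if y > 0]             # return periods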
diff --git a/modules/performRegionalEventSimulation/regionalGroundMotion/gmpe/CorrelationModel.py b/modules/performRegionalEventSimulation/regionalGroundMotion/gmpe/CorrelationModel.py
index f90cd56ca..5c2ea75c3 100644
--- a/modules/performRegionalEventSimulation/regionalGroundMotion/gmpe/CorrelationModel.py
+++ b/modules/performRegionalEventSimulation/regionalGroundMotion/gmpe/CorrelationModel.py
@@ -1,5 +1,4 @@
-# -*- coding: utf-8 -*-
-#
+# # noqa: N999, D100
# Copyright (c) 2018 Leland Stanford Junior University
# Copyright (c) 2018 The Regents of the University of California
#
@@ -39,13 +38,14 @@
#
import os
+
import numpy as np
import pandas as pd
from scipy.interpolate import interp1d, interp2d
-def baker_jayaram_correlation_2008(im1, im2, flag_orth = False):
- """
- Computing inter-event correlation coeffcieint between Sa of two periods
+
+def baker_jayaram_correlation_2008(im1, im2, flag_orth=False): # noqa: FBT002, C901
+ """Computing inter-event correlation coeffcieint between Sa of two periods
Reference:
Baker and Jayaram (2008) Correlation of Spectral Acceleration
Values from NGA Ground Motion Models
@@ -58,59 +58,58 @@ def baker_jayaram_correlation_2008(im1, im2, flag_orth = False):
rho: correlation coefficient
Note:
The valid range of T1 and T2 is 0.01s ~ 10.0s
- """
-
+ """ # noqa: D205, D400, D401
# Parse periods from im1 and im2
if im1.startswith('SA'):
- T1 = float(im1[3:-1])
+ T1 = float(im1[3:-1]) # noqa: N806
elif im1.startswith('PGA'):
- T1 = 0.0
+ T1 = 0.0 # noqa: N806
else:
return 0.0
if im2.startswith('SA'):
- T2 = float(im2[3:-1])
+ T2 = float(im2[3:-1]) # noqa: N806
elif im2.startswith('PGA'):
- T2 = 0.0
+ T2 = 0.0 # noqa: N806
else:
return 0.0
# Compute Tmin and Tmax (lower bounds 0.01 for T < 0.01)
- Tmin = max(min([T1, T2]), 0.01)
- Tmax = max(max([T1, T2]), 0.01)
- # Cofficient C1
- C1 = 1.0 - np.cos(np.pi / 2.0 - 0.366 * np.log(Tmax / max([Tmin, 0.109])))
- # Cofficient C2
- if Tmax < 0.2:
- C2 = 1.0 - 0.105 * (1.0 - 1.0 / (1.0 + np.exp(100.0 * Tmax - 5.0))) * \
- (Tmax - Tmin) / (Tmax - 0.0099)
+ Tmin = max(min([T1, T2]), 0.01) # noqa: N806
+ Tmax = max(max([T1, T2]), 0.01) # noqa: N806, PLW3301
+ # Coefficient C1
+ C1 = 1.0 - np.cos(np.pi / 2.0 - 0.366 * np.log(Tmax / max([Tmin, 0.109]))) # noqa: N806
+ # Coefficient C2
+ if Tmax < 0.2: # noqa: PLR2004
+ C2 = 1.0 - 0.105 * (1.0 - 1.0 / (1.0 + np.exp(100.0 * Tmax - 5.0))) * ( # noqa: N806
+ Tmax - Tmin
+ ) / (Tmax - 0.0099)
else:
- C2 = 0.0
- # Cofficient C3
- if Tmax < 0.109:
- C3 = C2
+ C2 = 0.0 # noqa: N806
+ # Coefficient C3
+ if Tmax < 0.109: # noqa: PLR2004
+ C3 = C2 # noqa: N806
else:
- C3 = C1
- # Cofficient C4
- C4 = C1 + 0.5 * (np.sqrt(C3) - C3) * (1.0 + np.cos(np.pi * Tmin / 0.109))
- # rho for a singe component
- if Tmax <= 0.109:
+ C3 = C1 # noqa: N806
+ # Coefficient C4
+ C4 = C1 + 0.5 * (np.sqrt(C3) - C3) * (1.0 + np.cos(np.pi * Tmin / 0.109)) # noqa: N806
+ # rho for a single component
+ if Tmax <= 0.109: # noqa: PLR2004
rho = C2
- elif Tmin > 0.109:
+ elif Tmin > 0.109: # noqa: PLR2004
rho = C1
- elif Tmax < 0.2:
+ elif Tmax < 0.2: # noqa: PLR2004
rho = min([C2, C4])
else:
rho = C4
- # rho for orthogonal components Cofficient C1
+ # rho for orthogonal components
if flag_orth:
- rho = rho * (0.79 - 0.023 * np.log(np.sqrt(Tmin * Tmax)))
+ rho = rho * (0.79 - 0.023 * np.log(np.sqrt(Tmin * Tmax))) # noqa: PLR6104
return rho
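As a usage note, the IM names passed to this function follow the 'SA(T)'/'PGA' string convention parsed by im[3:-1] above. A hedged sketch of calling it (the bare import path is an assumption based on the file location in this diff):

    # assumed import path; the module lives under .../regionalGroundMotion/gmpe/
    from CorrelationModel import baker_jayaram_correlation_2008

    rho_near = baker_jayaram_correlation_2008('SA(0.3)', 'SA(0.5)')  # close periods, high rho
    rho_far = baker_jayaram_correlation_2008('SA(0.1)', 'SA(5.0)')   # distant periods, lower rho
    rho_pga = baker_jayaram_correlation_2008('PGA', 'SA(1.0)')       # PGA is treated as T = 0.0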
-def bradley_correlation_2011(IM, T = None, flag_Ds = True):
- """
- Computing inter-event correlation coeffcieint between Sa(T) and Ds575/D595
+def bradley_correlation_2011(IM, T=None, flag_Ds=True): # noqa: FBT002, C901, N803, PLR0911
+ """Computing inter-event correlation coeffcieint between Sa(T) and Ds575/D595
Reference:
Bradley (2011) Correlation of Significant Duration with Amplitude and
Cumulative Intensity Measures and Its Use in Ground Motion Selection
@@ -123,75 +122,106 @@ def bradley_correlation_2011(IM, T = None, flag_Ds = True):
rho: correlation coefficient
Note:
The valid range of T is 0.01s ~ 10.0s
- """
+ """ # noqa: D205, D400, D401
# PGA
- if IM == 'PGA':
+ if IM == 'PGA': # noqa: RET503
if flag_Ds:
return -0.442
- else:
+ else: # noqa: RET505
return -0.305
elif IM == 'PGV':
if flag_Ds:
return -0.259
- else:
+ else: # noqa: RET505
return -0.211
elif IM == 'ASI':
if flag_Ds:
return -0.411
- else:
+ else: # noqa: RET505
return -0.370
elif IM == 'SI':
if flag_Ds:
return -0.131
- else:
+ else: # noqa: RET505
return -0.079
elif IM == 'DSI':
if flag_Ds:
return 0.074
- else:
+ else: # noqa: RET505
return 0.163
elif IM == 'CAV':
if flag_Ds:
return 0.077
- else:
+ else: # noqa: RET505
return 0.122
elif IM == 'Ds595':
if flag_Ds:
return 0.843
- else:
+ else: # noqa: RET505
return None
elif IM == 'Sa':
if flag_Ds:
- if T < 0.09:
- a_p = -0.45; a_c = -0.39; b_p = 0.01; b_c = 0.09
- elif T < 0.30:
- a_p = -0.39; a_c = -0.39; b_p = 0.09; b_c = 0.30
- elif T < 1.40:
- a_p = -0.39; a_c = -0.06; b_p = 0.30; b_c = 1.40
- elif T < 6.50:
- a_p = -0.06; a_c = 0.16; b_p = 1.40; b_c = 6.50
- elif T <= 10.0:
- a_p = 0.16; a_c = 0.00; b_p = 6.50; b_c = 10.00
- else:
- if T < 0.04:
- a_p = -0.41; a_c = -0.41; b_p = 0.01; b_c = 0.04
- elif T < 0.08:
- a_p = -0.41; a_c = -0.38; b_p = 0.04; b_c = 0.08
- elif T < 0.26:
- a_p = -0.38; a_c = -0.35; b_p = 0.08; b_c = 0.26
- elif T < 1.40:
- a_p = -0.35; a_c = -0.02; b_p = 0.26; b_c = 1.40
- elif T <= 6.00:
- a_p = -0.02; a_c = 0.23; b_p = 1.40; b_c = 6.00
- elif T <= 10.00:
- a_p = 0.23; a_c = 0.02; b_p = 6.00; b_c = 10.0
+ if T < 0.09: # noqa: PLR2004
+ a_p = -0.45
+ a_c = -0.39
+ b_p = 0.01
+ b_c = 0.09
+ elif T < 0.30: # noqa: PLR2004
+ a_p = -0.39
+ a_c = -0.39
+ b_p = 0.09
+ b_c = 0.30
+ elif T < 1.40: # noqa: PLR2004
+ a_p = -0.39
+ a_c = -0.06
+ b_p = 0.30
+ b_c = 1.40
+ elif T < 6.50: # noqa: PLR2004
+ a_p = -0.06
+ a_c = 0.16
+ b_p = 1.40
+ b_c = 6.50
+ elif T <= 10.0: # noqa: PLR2004
+ a_p = 0.16
+ a_c = 0.00
+ b_p = 6.50
+ b_c = 10.00
+ elif T < 0.04: # noqa: PLR2004
+ a_p = -0.41
+ a_c = -0.41
+ b_p = 0.01
+ b_c = 0.04
+ elif T < 0.08: # noqa: PLR2004
+ a_p = -0.41
+ a_c = -0.38
+ b_p = 0.04
+ b_c = 0.08
+ elif T < 0.26: # noqa: PLR2004
+ a_p = -0.38
+ a_c = -0.35
+ b_p = 0.08
+ b_c = 0.26
+ elif T < 1.40: # noqa: PLR2004
+ a_p = -0.35
+ a_c = -0.02
+ b_p = 0.26
+ b_c = 1.40
+ elif T <= 6.00: # noqa: PLR2004
+ a_p = -0.02
+ a_c = 0.23
+ b_p = 1.40
+ b_c = 6.00
+ elif T <= 10.00: # noqa: PLR2004
+ a_p = 0.23
+ a_c = 0.02
+ b_p = 6.00
+ b_c = 10.0
rho = a_p + np.log(T / b_p) / np.log(b_c / b_p) * (a_c - a_p)
- return rho
+ return rho # noqa: RET504
-def jayaram_baker_correlation_2009(im, h, flag_clustering = False):
- """
- Computing intra-event correlation coeffcieint between Sa(T) at two sites
+def jayaram_baker_correlation_2009(im, h, flag_clustering=False): # noqa: FBT002
+ """Computing intra-event correlation coeffcieint between Sa(T) at two sites
Reference:
Jayaram and Baker (2009) Correlation model for spatially distributed
ground-motion intensities
@@ -202,32 +232,31 @@ def jayaram_baker_correlation_2009(im, h, flag_clustering = False):
the region (default: false)
Output:
rho: correlation between normalized intra-event residuals
- """
-
+ """ # noqa: D205, D400, D401
# parse period form im
try:
# for Sa
if im.startswith('SA'):
- T = float(im[3:-1])
+ T = float(im[3:-1]) # noqa: N806
elif im.startswith('PGA'):
- T = 0.0
+ T = 0.0 # noqa: N806
except ValueError:
- print('CorrelationModel.jayaram_baker_correlation_2009: error - cannot handle {}'.format(im))
+ print( # noqa: T201
+ f'CorrelationModel.jayaram_baker_correlation_2009: error - cannot handle {im}'
+ )
if T >= 1.0:
b = 22.0 + 3.7 * T
+ elif flag_clustering:
+ b = 8.5 + 17.2 * T
else:
- if flag_clustering:
- b = 8.5 + 17.2 * T
- else:
- b = 40.7 - 15.0 * T
+ b = 40.7 - 15.0 * T
rho = np.exp(-3.0 * h / b)
- return rho
+ return rho # noqa: RET504
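A quick worked instance of the spatial decay implemented above, assuming T = 0.5 s, no clustering, and a separation of h = 10 km (values chosen only to illustrate the formula):

    import numpy as np

    T, h = 0.5, 10.0             # illustrative period (s) and site separation (km)
    b = 40.7 - 15.0 * T          # branch for T < 1.0 s without clustering
    rho = np.exp(-3.0 * h / b)   # approximately 0.40: correlation decays with distance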
def load_loth_baker_correlation_2013(datapath):
- """
- Loading the three matrices in the Loth-Baker correaltion model (2013)
+ """Loading the three matrices in the Loth-Baker correaltion model (2013)
Reference:
Loth and Baker (2013) A spatial cross-correlation model of spectral
accelerations at multiple periods (with the Erratum)
@@ -237,16 +266,15 @@ def load_loth_baker_correlation_2013(datapath):
B1: short-range coregionalization matrix
B2: long-range coregionalization matrix
B3: Nugget effect coregionalization matrix
- """
- B2 = pd.read_csv(datapath + 'loth_baker_correlation_2013_B2.csv', header = 0)
- B1 = pd.read_csv(datapath + 'loth_baker_correlation_2013_B1.csv', header = 0)
- B3 = pd.read_csv(datapath + 'loth_baker_correlation_2013_B3.csv', header = 0)
+ """ # noqa: D205, D400, D401
+ B2 = pd.read_csv(datapath + 'loth_baker_correlation_2013_B2.csv', header=0) # noqa: N806
+ B1 = pd.read_csv(datapath + 'loth_baker_correlation_2013_B1.csv', header=0) # noqa: N806
+ B3 = pd.read_csv(datapath + 'loth_baker_correlation_2013_B3.csv', header=0) # noqa: N806
return B1, B2, B3
-def compute_rho_loth_baker_correlation_2013(T1, T2, h, B1, B2, B3):
- """
- Computing intra-event correlation coeffcieint between Sa(Ti) and Sa(Tj)
+def compute_rho_loth_baker_correlation_2013(T1, T2, h, B1, B2, B3): # noqa: N803
+ """Computing intra-event correlation coeffcieint between Sa(Ti) and Sa(Tj)
at two sites
Reference:
Loth and Baker (2013) A spatial cross-correlation model of spectral
@@ -262,7 +290,7 @@ def compute_rho_loth_baker_correlation_2013(T1, T2, h, B1, B2, B3):
rho: correlation between Sa(Ti) and Sa(Tj) at two sites
Note:
The valid range for T1 and T2 is 0.01s ~ 10.0s
- """
+ """ # noqa: D205, D400, D401
# Interpolation functions
f1 = interp2d(B1['Period (s)'], B1['Period (s)'], B1.iloc[:, 1:])
f2 = interp2d(B2['Period (s)'], B2['Period (s)'], B2.iloc[:, 1:])
@@ -272,15 +300,14 @@ def compute_rho_loth_baker_correlation_2013(T1, T2, h, B1, B2, B3):
b2 = f2(T1, T2)
b3 = f3(T1, T2)
# Covariance functions
- Ch = b1 * np.exp(-3.0 * h / 20.0) + b2 * np.exp(-3.0 * h / 70.0) + b3 * (h == 0)
+ Ch = b1 * np.exp(-3.0 * h / 20.0) + b2 * np.exp(-3.0 * h / 70.0) + b3 * (h == 0) # noqa: N806
# Correlation coefficient
rho = Ch
- return rho
+ return rho # noqa: RET504
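+# Note (comment added for clarity): the covariance above is the Loth-Baker (2013)
+# two-range exponential model with a nugget,
+#   C(h) = B1(Ti, Tj) exp(-3h/20) + B2(Ti, Tj) exp(-3h/70) + B3(Ti, Tj) I(h == 0),
+# with h in km and the coregionalization matrices interpolated at (Ti, Tj).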
-def loth_baker_correlation_2013(stations, im_name_list, num_simu):
- """
- Simulating intra-event residuals
+def loth_baker_correlation_2013(stations, im_name_list, num_simu): # noqa: C901
+ """Simulating intra-event residuals
Reference:
Loth and Baker (2013) A spatial cross-correlation model of spectral
accelerations at multiple periods (with the Erratum)
@@ -292,7 +319,7 @@ def loth_baker_correlation_2013(stations, im_name_list, num_simu):
residuals: intra-event residuals
Note:
The valid range for T1 and T2 is 0.01s ~ 10.0s
- """
+ """ # noqa: D205, D400, D401
# Parse periods from intensity measure list
periods = []
for cur_im in im_name_list:
@@ -301,27 +328,33 @@ def loth_baker_correlation_2013(stations, im_name_list, num_simu):
periods.append(float(cur_im[3:-1]))
elif cur_im.startswith('PGA'):
periods.append(0.0)
- except ValueError:
- print('CorrelationModel.loth_baker_correlation_2013: error - cannot handle {}'.format(cur_im))
+ except ValueError: # noqa: PERF203
+ print( # noqa: T201
+ f'CorrelationModel.loth_baker_correlation_2013: error - cannot handle {cur_im}'
+ )
# Loading modeling coefficients
- B1, B2, B3 = load_loth_baker_correlation_2013(os.path.dirname(__file__) + '/data/')
+ B1, B2, B3 = load_loth_baker_correlation_2013( # noqa: N806
+ os.path.dirname(__file__) + '/data/' # noqa: PTH120
+ )
# Computing distance matrix
num_stations = len(stations)
stn_dist = np.zeros((num_stations, num_stations))
for i in range(num_stations):
- loc_i = np.array([stations[i]['Latitude'],
- stations[i]['Longitude']])
+ loc_i = np.array([stations[i]['Latitude'], stations[i]['Longitude']])
for j in range(num_stations):
- loc_j = np.array([stations[j]['Latitude'],
- stations[j]['Longitude']])
+ loc_j = np.array([stations[j]['Latitude'], stations[j]['Longitude']])
stn_dist[i, j] = get_distance_from_lat_lon(loc_i, loc_j)
# Creating a covariance matrices for each of the principal components
num_periods = len(periods)
- covMatrix = np.zeros((num_stations * num_periods, num_stations * num_periods))
+ covMatrix = np.zeros((num_stations * num_periods, num_stations * num_periods)) # noqa: N806
for i in range(num_periods):
for j in range(num_periods):
- covMatrix[num_stations * i:num_stations * (i + 1), num_stations * j:num_stations * (j + 1)] = \
- compute_rho_loth_baker_correlation_2013(periods[i], periods[j], stn_dist, B1, B2, B3)
+ covMatrix[
+ num_stations * i : num_stations * (i + 1),
+ num_stations * j : num_stations * (j + 1),
+ ] = compute_rho_loth_baker_correlation_2013(
+ periods[i], periods[j], stn_dist, B1, B2, B3
+ )
mu = np.zeros(num_stations * num_periods)
residuals_raw = np.random.multivariate_normal(mu, covMatrix, num_simu)
@@ -331,17 +364,20 @@ def loth_baker_correlation_2013(stations, im_name_list, num_simu):
tmp = []
for j in range(num_stations):
for k in range(num_periods):
- tmp.append(residuals_raw[i,j+k*num_stations])
+ tmp.append(residuals_raw[i, j + k * num_stations]) # noqa: PERF401
residuals_reorder.append(tmp)
residuals_reorder = np.array(residuals_reorder)
- residuals = residuals_reorder.reshape(num_simu, num_stations, num_periods).swapaxes(0,1).swapaxes(1,2)
+ residuals = (
+ residuals_reorder.reshape(num_simu, num_stations, num_periods)
+ .swapaxes(0, 1)
+ .swapaxes(1, 2)
+ )
# return
- return residuals
+ return residuals # noqa: RET504
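+# Minimal usage sketch (comment only; station coordinates are hypothetical):
+#   stations = [{'Latitude': 37.87, 'Longitude': -122.26},
+#               {'Latitude': 37.88, 'Longitude': -122.25}]
+#   res = loth_baker_correlation_2013(stations, ['SA(0.3)', 'SA(1.0)'], num_simu=100)
+#   # res has shape (num_stations, num_periods, num_simu)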
def load_markhvida_ceferino_baker_correlation_2017(datapath):
- """
- Loading the three matrices in the Markhivida et al. correaltion model (2017)
+ """Loading the three matrices in the Markhivida et al. correaltion model (2017)
Reference:
Markhvida et al. (2017) Modeling spatially correlated spectral
accelerations at multiple periods using principal component analysis
@@ -352,19 +388,32 @@ def load_markhvida_ceferino_baker_correlation_2017(datapath):
B1: short-range coregionalization matrix
B2: long-range coregionalization matrix
B3: Nugget effect coregionalization matrix
- """
- MCB_model = pd.read_csv(datapath + 'markhvida_ceferino_baker_correlation_2017_model_coeff.csv',
- index_col = None, header = 0)
- MCB_pca = pd.read_csv(datapath + 'markhvida_ceferino_baker_correlation_2017_pca_coeff.csv',
- index_col = None, header = 0)
- MCB_var = pd.read_csv(datapath + 'markhvida_ceferino_baker_correlation_2017_var_scale.csv',
- index_col = None, header = 0)
+ """ # noqa: D205, D400, D401
+ MCB_model = pd.read_csv( # noqa: N806
+ datapath + 'markhvida_ceferino_baker_correlation_2017_model_coeff.csv',
+ index_col=None,
+ header=0,
+ )
+ MCB_pca = pd.read_csv( # noqa: N806
+ datapath + 'markhvida_ceferino_baker_correlation_2017_pca_coeff.csv',
+ index_col=None,
+ header=0,
+ )
+ MCB_var = pd.read_csv( # noqa: N806
+ datapath + 'markhvida_ceferino_baker_correlation_2017_var_scale.csv',
+ index_col=None,
+ header=0,
+ )
return MCB_model, MCB_pca, MCB_var
-def markhvida_ceferino_baker_correlation_2017(stations, im_name_list, num_simu, num_pc=19):
- """
- Simulating intra-event residuals
+def markhvida_ceferino_baker_correlation_2017( # noqa: C901
+ stations,
+ im_name_list,
+ num_simu,
+ num_pc=19,
+):
+ """Simulating intra-event residuals
Reference:
Markhvida et al. (2017) Modeling spatially correlated spectral
accelerations at multiple periods using principal component analysis
@@ -378,7 +427,7 @@ def markhvida_ceferino_baker_correlation_2017(stations, im_name_list, num_simu,
residuals: intra-event residuals
Note:
The valid range for T1 and T2 is 0.01s ~ 5.0s
- """
+ """ # noqa: D205, D400, D401
# Parse periods from intensity measure list
periods = []
for cur_im in im_name_list:
@@ -388,12 +437,17 @@ def markhvida_ceferino_baker_correlation_2017(stations, im_name_list, num_simu,
elif cur_im.startswith('PGA'):
periods.append(0.0)
else:
- raise ValueError('CorrelationModel Markhvida et al. (2017): error - cannot handle {}'.format(cur_im))
- except ValueError:
- print('CorrelationModel.loth_baker_correlation_2013: error - cannot handle {}'.format(cur_im))
+ raise ValueError( # noqa: DOC501, TRY003, TRY301
+ f'CorrelationModel Markhvida et al. (2017): error - cannot handle {cur_im}' # noqa: EM102
+ )
+ except ValueError: # noqa: PERF203
+ print( # noqa: T201
+ f'CorrelationModel.markhvida_ceferino_baker_correlation_2017: error - cannot handle {cur_im}'
+ )
# Loading factors
- MCB_model, MCB_pca, MCB_var = \
- load_markhvida_ceferino_baker_correlation_2017(os.path.dirname(__file__) + '/data/')
+ MCB_model, MCB_pca, MCB_var = load_markhvida_ceferino_baker_correlation_2017( # noqa: N806
+ os.path.dirname(__file__) + '/data/' # noqa: PTH120
+ )
c0 = MCB_model.loc[MCB_model['Type'] == 'c0']
c0 = c0[c0.keys()[1:]]
c1 = MCB_model.loc[MCB_model['Type'] == 'c1']
@@ -405,40 +459,42 @@ def markhvida_ceferino_baker_correlation_2017(stations, im_name_list, num_simu,
a2 = MCB_model.loc[MCB_model['Type'] == 'a2']
a2 = a2[a2.keys()[1:]]
model_periods = MCB_pca['Period (s)']
- model_coef = MCB_pca.iloc[:, 1:num_pc + 1]
+ model_coef = MCB_pca.iloc[:, 1 : num_pc + 1]
# Computing distance matrix
num_stations = len(stations)
stn_dist = np.zeros((num_stations, num_stations))
for i in range(num_stations):
- loc_i = np.array([stations[i]['lat'],
- stations[i]['lon']])
+ loc_i = np.array([stations[i]['lat'], stations[i]['lon']])
for j in range(num_stations):
- loc_j = np.array([stations[j]['lat'],
- stations[j]['lon']])
+ loc_j = np.array([stations[j]['lat'], stations[j]['lon']])
stn_dist[i, j] = get_distance_from_lat_lon(loc_i, loc_j)
# Scaling variance if less than 19 principal components are used
- c0 = c0 / MCB_var.iloc[0, num_pc - 1]
- c1 = c1 / MCB_var.iloc[0, num_pc - 1]
- c2 = c2 / MCB_var.iloc[0, num_pc - 1]
+ c0 = c0 / MCB_var.iloc[0, num_pc - 1] # noqa: PLR6104
+ c1 = c1 / MCB_var.iloc[0, num_pc - 1] # noqa: PLR6104
+ c2 = c2 / MCB_var.iloc[0, num_pc - 1] # noqa: PLR6104
# Creating a covariance matrices for each of the principal components
- covMatrix = np.zeros((num_stations, num_stations, num_pc))
+ covMatrix = np.zeros((num_stations, num_stations, num_pc)) # noqa: N806
for i in range(num_pc):
if c1.iloc[0, i] == 0:
# nug
covMatrix[:, :, i] = np.eye(num_stations) * c0.iloc[0, i]
else:
# iso nest
- covMatrix[:, :, i] = c0.iloc[0, i] * (stn_dist == 0)+ \
- c1.iloc[0, i] * np.exp(-3.0 * stn_dist / a1.iloc[0, i]) + \
- c2.iloc[0, i] * np.exp(-3.0 * stn_dist / a2.iloc[0, i])
+ covMatrix[:, :, i] = (
+ c0.iloc[0, i] * (stn_dist == 0)
+ + c1.iloc[0, i] * np.exp(-3.0 * stn_dist / a1.iloc[0, i])
+ + c2.iloc[0, i] * np.exp(-3.0 * stn_dist / a2.iloc[0, i])
+ )
# Simulating residuals
residuals_pca = np.zeros((num_stations, num_simu, num_pc))
mu = np.zeros(num_stations)
for i in range(num_pc):
- residuals_pca[:, :, i] = np.random.multivariate_normal(mu, covMatrix[:, :, i], num_simu).T
+ residuals_pca[:, :, i] = np.random.multivariate_normal(
+ mu, covMatrix[:, :, i], num_simu
+ ).T
# Interpolating model_coef by periods
- interp_fun = interp1d(model_periods, model_coef, axis = 0)
- model_Tmax = 5.0
+ interp_fun = interp1d(model_periods, model_coef, axis=0)
+ model_Tmax = 5.0 # noqa: N806
simu_periods = [i for i in periods if i <= model_Tmax]
if (len(simu_periods) == 1) and (simu_periods[0] == 0):
# for PGA only (using 0.01 sec as the approximate)
@@ -450,26 +506,29 @@ def markhvida_ceferino_baker_correlation_2017(stations, im_name_list, num_simu,
num_periods = len(simu_periods)
residuals = np.empty([num_stations, num_periods, num_simu])
for i in range(num_simu):
- residuals[:, :, i] = np.reshape(np.matmul(residuals_pca[:, i, :], simu_coef.T), residuals[:, :, i].shape)
+ residuals[:, :, i] = np.reshape(
+ np.matmul(residuals_pca[:, i, :], simu_coef.T), residuals[:, :, i].shape
+ )
# Appending residuals for periods greater than model_Tmax (fixing at 5.0)
if max(periods) > model_Tmax:
- Tmax_coef = interp_fun(model_Tmax)
- Tmax_residuals = np.empty([num_stations, 1, num_simu])
+ Tmax_coef = interp_fun(model_Tmax) # noqa: N806
+ Tmax_residuals = np.empty([num_stations, 1, num_simu]) # noqa: N806
for i in range(num_simu):
- Tmax_residuals[:, :, i] = np.matmul(residuals_pca[:, i, :], np.matrix(Tmax_coef).T)
+ Tmax_residuals[:, :, i] = np.matmul(
+ residuals_pca[:, i, :], np.matrix(Tmax_coef).T
+ )
for tmp_periods in periods:
if tmp_periods > model_Tmax:
- residuals = np.concatenate((residuals, Tmax_residuals), axis = 1)
+ residuals = np.concatenate((residuals, Tmax_residuals), axis=1)
# return
return residuals
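+# Usage sketch (comment only; inputs are hypothetical). Unlike the Loth-Baker
+# routine above, this model simulates principal-component scores per station and
+# maps them to periods through the interpolated PCA coefficients; station dicts are
+# keyed by 'lat'/'lon' here:
+#   stations = [{'lat': 37.87, 'lon': -122.26}, {'lat': 37.88, 'lon': -122.25}]
+#   res = markhvida_ceferino_baker_correlation_2017(stations, ['SA(0.3)', 'PGA'], 50)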
def load_du_ning_correlation_2021(datapath):
- """
- Loading the three matrices in the Du and Ning correlation model (2021)
+ """Loading the three matrices in the Du and Ning correlation model (2021)
Reference:
- Du and Ning (2021) Modeling spatial cross-correlation of multiple
- ground motion intensity measures (SAs, PGA, PGV, Ia, CAV, and significant
+ Du and Ning (2021) Modeling spatial cross-correlation of multiple
+ ground motion intensity measures (SAs, PGA, PGV, Ia, CAV, and significant
durations) based on principal component and geostatistical analyses
Input:
datapath: the path to the files (optional)
@@ -477,22 +536,26 @@ def load_du_ning_correlation_2021(datapath):
DN_model: model coeff.
DN_pca: pca coeff.
DN_var: var of pca
- """
- DN_model = pd.read_csv(datapath + 'du_ning_correlation_2021_model_coeff.csv',
- index_col = None, header = 0)
- DN_pca = pd.read_csv(datapath + 'du_ning_correlation_2021_pca_coeff.csv',
- index_col = None, header = 0)
- DN_var = pd.read_csv(datapath + 'du_ning_correlation_2021_var_scale.csv',
- index_col = None, header = 0)
+ """ # noqa: D205, D400, D401
+ DN_model = pd.read_csv( # noqa: N806
+ datapath + 'du_ning_correlation_2021_model_coeff.csv',
+ index_col=None,
+ header=0,
+ )
+ DN_pca = pd.read_csv( # noqa: N806
+ datapath + 'du_ning_correlation_2021_pca_coeff.csv', index_col=None, header=0
+ )
+ DN_var = pd.read_csv( # noqa: N806
+ datapath + 'du_ning_correlation_2021_var_scale.csv', index_col=None, header=0
+ )
return DN_model, DN_pca, DN_var
def du_ning_correlation_2021(stations, im_name_list, num_simu, num_pc=23):
- """
- Simulating intra-event residuals
+ """Simulating intra-event residuals
Reference:
- Du and Ning (2021) Modeling spatial cross-correlation of multiple
- ground motion intensity measures (SAs, PGA, PGV, Ia, CAV, and significant
+ Du and Ning (2021) Modeling spatial cross-correlation of multiple
+ ground motion intensity measures (SAs, PGA, PGV, Ia, CAV, and significant
durations) based on principal component and geostatistical analyses
Input:
stations: stations coordinates
@@ -503,7 +566,7 @@ def du_ning_correlation_2021(stations, im_name_list, num_simu, num_pc=23):
residuals: intra-event residuals
Note:
The valid range for T1 and T2 is 0.01s ~ 5.0s
- """
+ """ # noqa: D205, D400, D401
# Parse periods_ims from intensity measure list
periods_ims = []
for cur_im in im_name_list:
@@ -512,8 +575,9 @@ def du_ning_correlation_2021(stations, im_name_list, num_simu, num_pc=23):
else:
periods_ims.append(cur_im)
# Loading factors
- DN_model, DN_pca, DN_var = \
- load_du_ning_correlation_2021(os.path.dirname(__file__) + '/data/')
+ DN_model, DN_pca, DN_var = load_du_ning_correlation_2021( # noqa: N806
+ os.path.dirname(__file__) + '/data/' # noqa: PTH120
+ )
c1 = DN_model.loc[DN_model['Type'] == 'c1']
c1 = c1[c1.keys()[1:]]
a1 = DN_model.loc[DN_model['Type'] == 'a1']
@@ -524,51 +588,60 @@ def du_ning_correlation_2021(stations, im_name_list, num_simu, num_pc=23):
a2 = a2[a2.keys()[1:]]
b2 = DN_model.loc[DN_model['Type'] == 'b2']
b2 = b2[b2.keys()[1:]]
- # model_periods is pseduo periods and PGA, PGV, Ia, CAV, DS575H, DS595H
+ # model_periods is pseudo periods and PGA, PGV, Ia, CAV, DS575H, DS595H
model_periods = DN_pca['Period&IM']
model_ims_list = ['PGA', 'PGV', 'Ia', 'CAV', 'DS575H', 'DS595H']
- ims_map = {'PGA':11, 'PGV':12, 'Ia':13, 'CAV':14, 'DS575H':15, 'DS595H':16}
+ ims_map = {'PGA': 11, 'PGV': 12, 'Ia': 13, 'CAV': 14, 'DS575H': 15, 'DS595H': 16}
# convert periods to float
- model_periods = [float(x) for x in model_periods if x not in model_ims_list]+ \
- [x for x in model_periods if x in model_ims_list]
- model_coef = DN_pca.iloc[:, 1:num_pc + 1]
+ model_periods = [float(x) for x in model_periods if x not in model_ims_list] + [
+ x for x in model_periods if x in model_ims_list
+ ]
+ model_coef = DN_pca.iloc[:, 1 : num_pc + 1]
# Computing distance matrix
num_stations = len(stations)
stn_dist = np.zeros((num_stations, num_stations))
for i in range(num_stations):
- loc_i = np.array([stations[i]['lat'],
- stations[i]['lon']])
+ loc_i = np.array([stations[i]['lat'], stations[i]['lon']])
for j in range(num_stations):
- loc_j = np.array([stations[j]['lat'],
- stations[j]['lon']])
+ loc_j = np.array([stations[j]['lat'], stations[j]['lon']])
stn_dist[i, j] = get_distance_from_lat_lon(loc_i, loc_j)
# Scaling variance if less than 23 principal components are used
- c1 = c1 / DN_var.iloc[0, num_pc - 1]
- a1 = a1 / DN_var.iloc[0, num_pc - 1]
- a2 = a2 / DN_var.iloc[0, num_pc - 1]
+ c1 = c1 / DN_var.iloc[0, num_pc - 1] # noqa: PLR6104
+ a1 = a1 / DN_var.iloc[0, num_pc - 1] # noqa: PLR6104
+ a2 = a2 / DN_var.iloc[0, num_pc - 1] # noqa: PLR6104
# Creating a covariance matrices for each of the principal components
- covMatrix = np.zeros((num_stations, num_stations, num_pc))
+ covMatrix = np.zeros((num_stations, num_stations, num_pc)) # noqa: N806
for i in range(num_pc):
if a1.iloc[0, i] == 0:
# nug
covMatrix[:, :, i] = np.eye(num_stations) * c1.iloc[0, i]
else:
# iso nest
- covMatrix[:, :, i] = c1.iloc[0, i] * (stn_dist == 0) + \
- a1.iloc[0, i] * np.exp(-3.0 * stn_dist / b1.iloc[0, i]) + \
- a2.iloc[0, i] * np.exp(-3.0 * stn_dist / b2.iloc[0, i])
+ covMatrix[:, :, i] = (
+ c1.iloc[0, i] * (stn_dist == 0)
+ + a1.iloc[0, i] * np.exp(-3.0 * stn_dist / b1.iloc[0, i])
+ + a2.iloc[0, i] * np.exp(-3.0 * stn_dist / b2.iloc[0, i])
+ )
# Simulating residuals
residuals_pca = np.zeros((num_stations, num_simu, num_pc))
mu = np.zeros(num_stations)
for i in range(num_pc):
- residuals_pca[:, :, i] = np.random.multivariate_normal(mu, covMatrix[:, :, i], num_simu).T
+ residuals_pca[:, :, i] = np.random.multivariate_normal(
+ mu, covMatrix[:, :, i], num_simu
+ ).T
# Interpolating model_coef by periods
- pseudo_periods = [x for x in model_periods if type(x)==float]+ \
- [ims_map[x] for x in model_periods if type(x)==str]
- interp_fun = interp1d(pseudo_periods, model_coef, axis = 0)
- model_Tmax = 10.0
- simu_periods = [min(i,model_Tmax) for i in periods_ims if type(i)==float]+ \
- [ims_map[i] for i in periods_ims if type(i)==str]
+ pseudo_periods = [x for x in model_periods if type(x) == float] + [ # noqa: E721
+ ims_map[x]
+ for x in model_periods
+ if type(x) == str # noqa: E721
+ ]
+ interp_fun = interp1d(pseudo_periods, model_coef, axis=0)
+ model_Tmax = 10.0 # noqa: N806
+ simu_periods = [min(i, model_Tmax) for i in periods_ims if type(i) == float] + [ # noqa: E721
+ ims_map[i]
+ for i in periods_ims
+ if type(i) == str # noqa: E721
+ ]
if (len(simu_periods) == 1) and (simu_periods[0] == 0):
# for PGA only (using 0.01 sec as the approximate)
simu_coef = model_coef.iloc[0, :]
@@ -579,15 +652,16 @@ def du_ning_correlation_2021(stations, im_name_list, num_simu, num_pc=23):
num_periods = len(simu_periods)
residuals = np.empty([num_stations, num_periods, num_simu])
for i in range(num_simu):
- residuals[:, :, i] = np.reshape(np.matmul(residuals_pca[:, i, :], simu_coef.T), residuals[:, :, i].shape)
-
+ residuals[:, :, i] = np.reshape(
+ np.matmul(residuals_pca[:, i, :], simu_coef.T), residuals[:, :, i].shape
+ )
+
# return
return residuals
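+# Usage sketch (comment only; inputs are hypothetical). Same PCA-based workflow as
+# the Markhvida et al. model, extended to non-Sa measures that are mapped onto
+# integer pseudo-periods (PGA -> 11, PGV -> 12, Ia -> 13, CAV -> 14, DS575H -> 15,
+# DS595H -> 16) before interpolating the PCA coefficients:
+#   res = du_ning_correlation_2021(stations, ['SA(1.0)', 'PGV', 'DS575H'], num_simu=50)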
-def baker_bradley_correlation_2017(im1=None, im2=None):
- """
- Correlation between Sa and other IMs
+def baker_bradley_correlation_2017(im1=None, im2=None): # noqa: C901
+ """Correlation between Sa and other IMs
Baker, J. W., and Bradley, B. A. (2017). “Intensity measure correlations observed in
the NGA-West2 database, and dependence of correlations on rupture and site parameters.”
Based on the script: https://github.com/bakerjw/NGAW2_correlations/blob/master/corrPredictions.m
@@ -597,10 +671,9 @@ def baker_bradley_correlation_2017(im1=None, im2=None):
im2: 2nd intensity measure name
Output:
rho: correlation coefficient
- """
-
+ """ # noqa: D205, D400
# im map:
- im_map = {'DS575H': 0, 'DS595H':1, 'PGA': 2, 'PGV': 3}
+ im_map = {'DS575H': 0, 'DS595H': 1, 'PGA': 2, 'PGV': 3}
period_list = []
im_list = []
@@ -610,7 +683,9 @@ def baker_bradley_correlation_2017(im1=None, im2=None):
else:
tmp_tag = im_map.get(im1.upper(), None)
if tmp_tag is None:
- print("CorrelationModel.baker_bradley_correlation_2017: warning - return 0.0 for unknown {}".format(im1))
+ print( # noqa: T201
+ f'CorrelationModel.baker_bradley_correlation_2017: warning - return 0.0 for unknown {im1}'
+ )
return 0.0
im_list.append(tmp_tag)
period_list.append(None)
@@ -620,71 +695,81 @@ def baker_bradley_correlation_2017(im1=None, im2=None):
else:
tmp_tag = im_map.get(im2.upper(), None)
if tmp_tag is None:
- print("CorrelationModel.baker_bradley_correlation_2017: warning - return 0.0 for unknown {}".format(im2))
+ print( # noqa: T201
+ f'CorrelationModel.baker_bradley_correlation_2017: warning - return 0.0 for unknown {im2}'
+ )
return 0.0
im_list.append(tmp_tag)
if im1.startswith('SA') and im2.startswith('SA'):
# two Sa intensities
return baker_jayaram_correlation_2008(im1, im2)
-
+
if 'SA' not in im_list:
# two non-Sa intensities
# rho matrix
- rho_mat = [[1.000, 0.843, -0.442, -0.259],
- [0.843, 1.000, -0.405, -0.211],
- [-0.442, -0.405, 1.000, 0.733],
- [-0.259, -0.211, 0.733, 1.000]]
+ rho_mat = [
+ [1.000, 0.843, -0.442, -0.259],
+ [0.843, 1.000, -0.405, -0.211],
+ [-0.442, -0.405, 1.000, 0.733],
+ [-0.259, -0.211, 0.733, 1.000],
+ ]
# return
return rho_mat[im_list[0]][im_list[1]]
# one Sa + one non-Sa
im_list.remove('SA')
im_tag = im_list[0]
- T = [x for x in period_list if x is not None][0]
+ T = [x for x in period_list if x is not None][0] # noqa: N806, RUF015
# modeling coefficients
- a = [[0.00, -0.45, -0.39, -0.39, -0.06, 0.16],
- [0.00, -0.41, -0.41, -0.38, -0.35, 0.02, 0.23],
- [1.00, 0.97],
- [0.73, 0.54, 0.80, 0.76]]
- b = [[0.00, -0.39, -0.39, -0.06, 0.16, 0.00],
- [0.00, -0.41, -0.38, -0.35, -0.02, 0.23, 0.02],
- [0.895, 0.25],
- [0.54, 0.81, 0.76, 0.70]]
- c = [[],[],
- [0.06, 0.80],
- [0.045, 0.28, 1.10, 5.00]]
- d = [[],[],
- [1.6, 0.8],
- [1.8, 1.5, 3.0, 3.2]]
- e = [[0.01, 0.09, 0.30, 1.40, 6.50 ,10.00],
- [0.01, 0.04, 0.08, 0.26, 1.40, 6.00, 10.00],
- [0.20, 10.00],
- [0.10, 0.75, 2.50, 10.00]]
+ a = [
+ [0.00, -0.45, -0.39, -0.39, -0.06, 0.16],
+ [0.00, -0.41, -0.41, -0.38, -0.35, 0.02, 0.23],
+ [1.00, 0.97],
+ [0.73, 0.54, 0.80, 0.76],
+ ]
+ b = [
+ [0.00, -0.39, -0.39, -0.06, 0.16, 0.00],
+ [0.00, -0.41, -0.38, -0.35, -0.02, 0.23, 0.02],
+ [0.895, 0.25],
+ [0.54, 0.81, 0.76, 0.70],
+ ]
+ c = [[], [], [0.06, 0.80], [0.045, 0.28, 1.10, 5.00]]
+ d = [[], [], [1.6, 0.8], [1.8, 1.5, 3.0, 3.2]]
+ e = [
+ [0.01, 0.09, 0.30, 1.40, 6.50, 10.00],
+ [0.01, 0.04, 0.08, 0.26, 1.40, 6.00, 10.00],
+ [0.20, 10.00],
+ [0.10, 0.75, 2.50, 10.00],
+ ]
# rho
- if im_tag < 2:
- for j in range(1,len(e[im_tag])):
- if T <= e[im_tag][j]:
- rho = a[im_tag][j]+(b[im_tag][j]-a[im_tag][j])/np.log(e[im_tag][j]/e[im_tag][j-1])*np.log(T/e[im_tag][j-1])
+ if im_tag < 2: # noqa: PLR2004
+ for j in range(1, len(e[im_tag])):
+ if e[im_tag][j] >= T:
+ rho = a[im_tag][j] + (b[im_tag][j] - a[im_tag][j]) / np.log(
+ e[im_tag][j] / e[im_tag][j - 1]
+ ) * np.log(T / e[im_tag][j - 1])
break
else:
for j in range(len(e[im_tag])):
- if T <= e[im_tag][j]:
- rho = (a[im_tag][j]+b[im_tag][j])/2-(a[im_tag][j]-b[im_tag][j])/2*np.tanh(d[im_tag][j]*np.log(T/c[im_tag][j]))
+ if e[im_tag][j] >= T:
+ rho = (a[im_tag][j] + b[im_tag][j]) / 2 - (
+ a[im_tag][j] - b[im_tag][j]
+ ) / 2 * np.tanh(d[im_tag][j] * np.log(T / c[im_tag][j]))
break
-
+
# return
return rho
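+# Note (comment added for clarity): for duration measures (im_tag < 2) the correlation
+# with Sa(T) is interpolated log-linearly in period between the breakpoints in e;
+# for PGA and PGV it follows the tanh form with coefficients a, b, c, d, following
+# Baker and Bradley (2017).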
-def get_distance_from_lat_lon(site_loc1, site_loc2):
+def get_distance_from_lat_lon(site_loc1, site_loc2): # noqa: D103
# earth radius (km)
earth_radius_avg = 6371.0
# site lat and lon
lat1, lon1 = site_loc1
lat2, lon2 = site_loc2
- # covert to radians
+ # convert to radians
lat1 = np.radians(lat1)
lon1 = np.radians(lon1)
lat2 = np.radians(lat2)
@@ -692,6 +777,15 @@ def get_distance_from_lat_lon(site_loc1, site_loc2):
# calculate haversine
dlat = lat2 - lat1
dlon = lon2 - lon1
- dist = 2.0*earth_radius_avg*np.arcsin(np.sqrt(np.sin(0.5*dlat)**2+np.cos(lat1)*np.cos(lat2)*np.sin(0.5*dlon)**2))
+ dist = (
+ 2.0
+ * earth_radius_avg
+ * np.arcsin(
+ np.sqrt(
+ np.sin(0.5 * dlat) ** 2
+ + np.cos(lat1) * np.cos(lat2) * np.sin(0.5 * dlon) ** 2
+ )
+ )
+ )
# return
- return dist
\ No newline at end of file
+ return dist # noqa: RET504
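+# Note (comment added for clarity): this is the haversine great-circle distance in
+# km on a spherical Earth of radius 6371 km, e.g.
+#   get_distance_from_lat_lon((37.87, -122.26), (34.05, -118.24))  # roughly 560 km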
diff --git a/modules/performRegionalEventSimulation/regionalGroundMotion/gmpe/SignificantDurationModel.py b/modules/performRegionalEventSimulation/regionalGroundMotion/gmpe/SignificantDurationModel.py
index 6e136d1d2..57f02c96b 100644
--- a/modules/performRegionalEventSimulation/regionalGroundMotion/gmpe/SignificantDurationModel.py
+++ b/modules/performRegionalEventSimulation/regionalGroundMotion/gmpe/SignificantDurationModel.py
@@ -1,5 +1,4 @@
-# -*- coding: utf-8 -*-
-#
+# # noqa: N999, D100
# Copyright (c) 2018 Leland Stanford Junior University
# Copyright (c) 2018 The Regents of the University of California
#
@@ -36,14 +35,18 @@
#
# Contributors:
# Kuanshi Zhong
-# Based on the script by Baker Research Group: https://www.jackwbaker.com/GMMs_archive.html
+# Based on the script by Baker Research Group: https://www.jackwbaker.com/GMMs_archive.html
import numpy as np
-def abrahamson_silva_ds_1999(magnitude=7.0,distance=10.0,soil=True,duration_type='DS575H'):
-
- """
- Significant duration model by Abrahamson and Silva (1996) Empirical ground motion
+
+def abrahamson_silva_ds_1999(
+ magnitude=7.0,
+ distance=10.0,
+ soil=True, # noqa: FBT002
+ duration_type='DS575H',
+):
+ """Significant duration model by Abrahamson and Silva (1996) Empirical ground motion
models, report prepared for Brookhaven National Laboratory.
Input
magnitude: earthquake magnitude
@@ -54,17 +57,14 @@ def abrahamson_silva_ds_1999(magnitude=7.0,distance=10.0,soil=True,duration_type
Output:
log(ds_median): log(median) significant duration prediction
ds_sigma: logarithmic standard deviation of the prediction
- """
-
-
+ """ # noqa: D205, D400
# map the duration_type to integer key
- dur_map = {'DS575H': 0,
- 'DS575V': 1,
- 'DS595H': 2,
- 'DS595V': 3}
- dur_tag = dur_map.get(duration_type.upper(),None)
+ dur_map = {'DS575H': 0, 'DS575V': 1, 'DS595H': 2, 'DS595V': 3}
+ dur_tag = dur_map.get(duration_type.upper(), None)
if dur_tag is None:
- print("SignificantDurationModel.abrahamson_silva_ds_1999: duration_type='DS575H','DS575V','DS595H','DS595V'?")
+ print( # noqa: T201
+ "SignificantDurationModel.abrahamson_silva_ds_1999: duration_type='DS575H','DS575V','DS595H','DS595V'?"
+ )
return None, None
# modeling coefficients
beta = [3.2, 3.2, 3.2, 3.2]
@@ -74,19 +74,36 @@ def abrahamson_silva_ds_1999(magnitude=7.0,distance=10.0,soil=True,duration_type
c1 = [0.805, 1.076, 0.805, 1.076]
c2 = [0.063, 0.107, 0.063, 0.107]
rc = [10, 10, 10, 10]
- Drat = [0.000, 0.000, 0.845, 0.646]
+ Drat = [0.000, 0.000, 0.845, 0.646] # noqa: N806
sigma = [0.55, 0.46, 0.49, 0.45]
# median
if distance > rc[dur_tag]:
- ds_median = np.exp(np.log((np.exp(b1[dur_tag]+b2[dur_tag]* \
- (magnitude-m_star[dur_tag]))/(10**(1.5*magnitude+ \
- 16.05)))**(-1/3)/(4.9e6*beta[dur_tag])+soil* \
- c1[dur_tag]+c2[dur_tag]*(distance-rc[dur_tag]))+Drat[dur_tag])
+ ds_median = np.exp(
+ np.log(
+ (
+ np.exp(b1[dur_tag] + b2[dur_tag] * (magnitude - m_star[dur_tag]))
+ / (10 ** (1.5 * magnitude + 16.05))
+ )
+ ** (-1 / 3)
+ / (4.9e6 * beta[dur_tag])
+ + soil * c1[dur_tag]
+ + c2[dur_tag] * (distance - rc[dur_tag])
+ )
+ + Drat[dur_tag]
+ )
else:
- ds_median = np.exp(np.log((np.exp(b1[dur_tag]+b2[dur_tag]* \
- (magnitude-m_star[dur_tag]))/(10**(1.5*magnitude+ \
- 16.05)))**(-1/3)/(4.9e6*beta[dur_tag])+soil* \
- c1[dur_tag])+Drat[dur_tag])
+ ds_median = np.exp(
+ np.log(
+ (
+ np.exp(b1[dur_tag] + b2[dur_tag] * (magnitude - m_star[dur_tag]))
+ / (10 ** (1.5 * magnitude + 16.05))
+ )
+ ** (-1 / 3)
+ / (4.9e6 * beta[dur_tag])
+ + soil * c1[dur_tag]
+ )
+ + Drat[dur_tag]
+ )
# sigma
ds_sigma = sigma[dur_tag]
@@ -94,10 +111,15 @@ def abrahamson_silva_ds_1999(magnitude=7.0,distance=10.0,soil=True,duration_type
return np.log(ds_median), ds_sigma
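+# Usage sketch (comment only; values are illustrative): the function returns the
+# natural-log median and its logarithmic standard deviation, so the median duration
+# in seconds is np.exp(lnds):
+#   lnds, sig = abrahamson_silva_ds_1999(magnitude=7.0, distance=10.0,
+#                                        soil=True, duration_type='DS595H')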
-def bommer_stafford_alarcon_ds_2009(magnitude=7.0, distance=10.0, vs30=760.0, ztor=0.0, duration_type='DS575H'):
- """
- Singificant duration model by Bommer, Stafford, Alarcon (2009) Empirical
- Equations for the Prediction of the Significant, Bracketed, and Uniform
+def bommer_stafford_alarcon_ds_2009(
+ magnitude=7.0,
+ distance=10.0,
+ vs30=760.0,
+ ztor=0.0,
+ duration_type='DS575H',
+):
+ """Significant duration model by Bommer, Stafford, Alarcon (2009) Empirical
+ Equations for the Prediction of the Significant, Bracketed, and Uniform
Duration of Earthquake Ground Motion
Input
magnitude: earthquake magnitude
@@ -110,15 +132,16 @@ def bommer_stafford_alarcon_ds_2009(magnitude=7.0, distance=10.0, vs30=760.0, zt
ds_sigma: logarithmic standard deviation of the prediction
ds_tau: within-event logarithmic standard deviation
ds_phi: between-event logarithmic standard deviation
- """
-
+ """ # noqa: D205, D400
# duration type map
- dur_map = {'DS575H':0, 'DS595H': 1}
+ dur_map = {'DS575H': 0, 'DS595H': 1}
dur_tag = dur_map.get(duration_type.upper(), None)
if dur_tag is None:
- print("SignificantDurationModel.bommer_stafford_alarcon_ds_2009: duration_type='DS575H','DS595H'?")
+ print( # noqa: T201
+ "SignificantDurationModel.bommer_stafford_alarcon_ds_2009: duration_type='DS575H','DS595H'?"
+ )
return None, None, None, None
-
+
# modeling coefficients
c0 = [-5.6298, -2.2393]
m1 = [1.2619, 0.9368]
@@ -127,15 +150,20 @@ def bommer_stafford_alarcon_ds_2009(magnitude=7.0, distance=10.0, vs30=760.0, zt
h1 = [-2.3316, 2.5000]
v1 = [-0.2900, -0.3478]
z1 = [-0.0522, -0.0365]
- tauCoeff = [0.3527, 0.3252]
- phiCoeff = [0.4304, 0.3460]
- sigma_c = [0.1729, 0.1114]
- sigma_Tgm = [0.5289, 0.4616]
+ tauCoeff = [0.3527, 0.3252] # noqa: N806
+ phiCoeff = [0.4304, 0.3460] # noqa: N806
+ sigma_c = [0.1729, 0.1114] # noqa: F841
+ sigma_Tgm = [0.5289, 0.4616] # noqa: N806
# median
- ds_median = np.exp(c0[dur_tag]+m1[dur_tag]*magnitude+(r1[dur_tag]+ \
- r2[dur_tag]*magnitude)*np.log(np.sqrt(distance**2+h1[dur_tag]**2))+ \
- v1[dur_tag]*np.log(vs30)+z1[dur_tag]*ztor)
+ ds_median = np.exp(
+ c0[dur_tag]
+ + m1[dur_tag] * magnitude
+ + (r1[dur_tag] + r2[dur_tag] * magnitude)
+ * np.log(np.sqrt(distance**2 + h1[dur_tag] ** 2))
+ + v1[dur_tag] * np.log(vs30)
+ + z1[dur_tag] * ztor
+ )
# standard deviations
ds_sigma = sigma_Tgm[dur_tag]
ds_tau = tauCoeff[dur_tag]
@@ -145,10 +173,16 @@ def bommer_stafford_alarcon_ds_2009(magnitude=7.0, distance=10.0, vs30=760.0, zt
return np.log(ds_median), ds_sigma, ds_tau, ds_phi
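+# Usage sketch (comment only; values are illustrative):
+#   lnds, sig, tau, phi = bommer_stafford_alarcon_ds_2009(magnitude=6.5, distance=20.0,
+#                                                         vs30=400.0, ztor=2.0,
+#                                                         duration_type='DS595H')
+#   # returns the log-median duration, total sigma, and the tau/phi components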
-def afshari_stewart_ds_2016(magnitude=7.0, distance=10.0, vs30=760.0, mechanism='unknown',
- z1=None, region='california', duration_type='DS575H'):
- """
- Significant duration model by Afshari and Stewart (2016) hysically Parameterized
+def afshari_stewart_ds_2016( # noqa: C901
+ magnitude=7.0,
+ distance=10.0,
+ vs30=760.0,
+ mechanism='unknown',
+ z1=None,
+ region='california',
+ duration_type='DS575H',
+):
+ """Significant duration model by Afshari and Stewart (2016) hysically Parameterized
Prediction Equations for Significant Duration in Active Crustal Regions
Input
magnitude: earthquake magnitude
@@ -163,51 +197,60 @@ def afshari_stewart_ds_2016(magnitude=7.0, distance=10.0, vs30=760.0, mechanism=
ds_sigma: logarithmic standard deviation of the prediction
ds_tau: within-event logarithmic standard deviation
ds_phi: between-event logarithmic standard deviation
- """
-
+ """ # noqa: D205, D400
# mechanism map
- mech_map = {'unknown':0, 'normal': 1, 'reverse': 2, 'strike-slip': 3}
+ mech_map = {'unknown': 0, 'normal': 1, 'reverse': 2, 'strike-slip': 3}
mech_tag = mech_map.get(mechanism.lower(), None)
if mech_tag is None:
- print("SignificantDurationModel.afshari_stewart_ds_2016: mechanism='unknown','normal','reverse','strike-slip'?")
+ print( # noqa: T201
+ "SignificantDurationModel.afshari_stewart_ds_2016: mechanism='unknown','normal','reverse','strike-slip'?"
+ )
return None, None, None, None
# region map
- reg_map = {'california':0, 'japan': 1, 'other': 2}
+ reg_map = {'california': 0, 'japan': 1, 'other': 2}
reg_tag = reg_map.get(region.lower(), None)
if reg_tag is None:
- print("SignificantDurationModel.afshari_stewart_ds_2016: region='california', 'japan', 'other'?")
+ print( # noqa: T201
+ "SignificantDurationModel.afshari_stewart_ds_2016: region='california', 'japan', 'other'?"
+ )
return None, None, None, None
# duration type map
- dur_map = {'DS575H':0, 'DS595H': 1, 'DS2080H': 2}
+ dur_map = {'DS575H': 0, 'DS595H': 1, 'DS2080H': 2}
dur_tag = dur_map.get(duration_type.upper(), None)
if dur_tag is None:
- print("SignificantDurationModel.afshari_stewart_ds_2016: duration_type='DS575H','DS595H','DS2080H'?")
+ print( # noqa: T201
+ "SignificantDurationModel.afshari_stewart_ds_2016: duration_type='DS575H','DS595H','DS2080H'?"
+ )
return None, None, None, None
-
+
# source coefficients
- M1 = [5.35, 5.20, 5.20]
- M2 = [7.15, 7.40, 7.40]
- b0 = [[1.2800, 2.1820, 0.8822],
- [1.5550, 2.5410, 1.4090],
- [0.7806, 1.6120, 0.7729],
- [1.2790, 2.3020, 0.8804]]
- b1 = [[5.576, 3.628, 6.182],
- [4.992, 3.170, 4.778],
- [7.061, 4.536, 6.579],
- [5.578, 3.467, 6.188]]
+ M1 = [5.35, 5.20, 5.20] # noqa: N806
+ M2 = [7.15, 7.40, 7.40] # noqa: N806
+ b0 = [
+ [1.2800, 2.1820, 0.8822],
+ [1.5550, 2.5410, 1.4090],
+ [0.7806, 1.6120, 0.7729],
+ [1.2790, 2.3020, 0.8804],
+ ]
+ b1 = [
+ [5.576, 3.628, 6.182],
+ [4.992, 3.170, 4.778],
+ [7.061, 4.536, 6.579],
+ [5.578, 3.467, 6.188],
+ ]
b2 = [0.9011, 0.9443, 0.7414]
b3 = [-1.684, -3.911, -3.164]
- Mstar = [6, 6, 6]
+ Mstar = [6, 6, 6] # noqa: N806
# path coefficients
c1 = [0.1159, 0.3165, 0.0646]
- RR1 = [10, 10, 10]
- RR2 = [50, 50, 50]
+ RR1 = [10, 10, 10] # noqa: N806
+ RR2 = [50, 50, 50] # noqa: N806
c2 = [0.1065, 0.2539, 0.0865]
c3 = [0.0682, 0.0932, 0.0373]
# site coefficients
c4 = [-0.2246, -0.3183, -0.4237]
- Vref = [368.2, 369.9, 369.6]
- V1 = [600, 600, 600]
+ Vref = [368.2, 369.9, 369.6] # noqa: N806
+ V1 = [600, 600, 600] # noqa: N806
c5 = [0.0006, 0.0006, 0.0005]
dz1ref = [200, 200, 200]
# standard deviation coefficients
@@ -218,69 +261,82 @@ def afshari_stewart_ds_2016(magnitude=7.0, distance=10.0, vs30=760.0, mechanism=
# basin depth
if reg_tag == 0:
- mu_z1 = np.exp(-7.15/4*np.log((vs30**4+570.94**4)/(1360**4+570.94**4)))
+ mu_z1 = np.exp(
+ -7.15 / 4 * np.log((vs30**4 + 570.94**4) / (1360**4 + 570.94**4))
+ )
else:
- mu_z1 = np.exp(-5.23/4*np.log((vs30**4+412.39**4)/(1360**4+412.39**4)))
+ mu_z1 = np.exp(
+ -5.23 / 4 * np.log((vs30**4 + 412.39**4) / (1360**4 + 412.39**4))
+ )
# differential basin depth
- if z1 is None or z1 < 0 or reg_tag == 2:
+ if z1 is None or z1 < 0 or reg_tag == 2: # noqa: PLR2004
dz1 = 0
else:
- dz1 = z1-mu_z1
+ dz1 = z1 - mu_z1
# source term
if magnitude < M1[dur_tag]:
- F_E = b0[mech_tag][dur_tag]
+ F_E = b0[mech_tag][dur_tag] # noqa: N806
else:
if magnitude < M2[dur_tag]:
- deltaSigma = np.exp(b1[mech_tag][dur_tag]+b2[dur_tag]*(magnitude-Mstar[dur_tag]))
+ deltaSigma = np.exp( # noqa: N806
+ b1[mech_tag][dur_tag] + b2[dur_tag] * (magnitude - Mstar[dur_tag])
+ )
else:
- deltaSigma = np.exp(b1[mech_tag][dur_tag]+b2[dur_tag]*(M2[dur_tag]-Mstar[dur_tag])+ \
- b3[dur_tag]*(magnitude-M2[dur_tag]))
-
- M_0 = 10**(1.5*magnitude+16.05)
- f_0 = 4.9e6*3.2*(deltaSigma/M_0)**(1/3)
- F_E = 1/f_0
+ deltaSigma = np.exp( # noqa: N806
+ b1[mech_tag][dur_tag]
+ + b2[dur_tag] * (M2[dur_tag] - Mstar[dur_tag])
+ + b3[dur_tag] * (magnitude - M2[dur_tag])
+ )
+
+ M_0 = 10 ** (1.5 * magnitude + 16.05) # noqa: N806
+ f_0 = 4.9e6 * 3.2 * (deltaSigma / M_0) ** (1 / 3)
+ F_E = 1 / f_0 # noqa: N806
# path term
if distance < RR1[dur_tag]:
- F_P = c1[dur_tag]*distance
+ F_P = c1[dur_tag] * distance # noqa: N806
elif distance < RR2[dur_tag]:
- F_P = c1[dur_tag]*RR1[dur_tag]+c2[dur_tag]*(distance-RR1[dur_tag])
+ F_P = c1[dur_tag] * RR1[dur_tag] + c2[dur_tag] * (distance - RR1[dur_tag]) # noqa: N806
else:
- F_P = c1[dur_tag]*RR1[dur_tag]+c2[dur_tag]*(RR2[dur_tag]-RR1[dur_tag])+c3[dur_tag]*(distance-RR2[dur_tag])
+ F_P = ( # noqa: N806
+ c1[dur_tag] * RR1[dur_tag]
+ + c2[dur_tag] * (RR2[dur_tag] - RR1[dur_tag])
+ + c3[dur_tag] * (distance - RR2[dur_tag])
+ )
# F_deltaz term
if dz1 <= dz1ref[dur_tag]:
- F_deltaz = c5[dur_tag]*dz1
+ F_deltaz = c5[dur_tag] * dz1 # noqa: N806
else:
- F_deltaz = c5[dur_tag]*dz1ref[dur_tag]
+ F_deltaz = c5[dur_tag] * dz1ref[dur_tag] # noqa: N806
# site term
if vs30 < V1[dur_tag]:
- F_S = c4[dur_tag]*np.log(vs30/Vref[dur_tag])+F_deltaz
+ F_S = c4[dur_tag] * np.log(vs30 / Vref[dur_tag]) + F_deltaz # noqa: N806
else:
- F_S = c4[dur_tag]*np.log(V1[dur_tag]/Vref[dur_tag])+F_deltaz
+ F_S = c4[dur_tag] * np.log(V1[dur_tag] / Vref[dur_tag]) + F_deltaz # noqa: N806
# median
- ds_median = np.exp(np.log(F_E+F_P)+F_S)
+ ds_median = np.exp(np.log(F_E + F_P) + F_S)
# standard deviations
# between event
- if magnitude < 5.5:
+ if magnitude < 5.5: # noqa: PLR2004
ds_phi = phi1[dur_tag]
- elif magnitude < 5.75:
- ds_phi = phi1[dur_tag]+(phi2[dur_tag]-phi1[dur_tag])*(magnitude-5.5)/(5.75-5.5)
+ elif magnitude < 5.75: # noqa: PLR2004
+ ds_phi = phi1[dur_tag] + (phi2[dur_tag] - phi1[dur_tag]) * (
+ magnitude - 5.5
+ ) / (5.75 - 5.5)
else:
ds_phi = phi2[dur_tag]
# within event
- if magnitude < 6.5:
+ if magnitude < 6.5: # noqa: PLR2004
ds_tau = tau1[dur_tag]
- elif magnitude < 7:
- ds_tau = tau1[dur_tag]+(tau2[dur_tag]-tau1[dur_tag])*(magnitude-6.5)/(7-6.5)
+ elif magnitude < 7: # noqa: PLR2004
+ ds_tau = tau1[dur_tag] + (tau2[dur_tag] - tau1[dur_tag]) * (
+ magnitude - 6.5
+ ) / (7 - 6.5)
else:
ds_tau = tau2[dur_tag]
# total
- ds_sigma = np.sqrt(ds_phi**2+ds_tau**2)
+ ds_sigma = np.sqrt(ds_phi**2 + ds_tau**2)
# return
return np.log(ds_median), ds_sigma, ds_tau, ds_phi
-
-
-
-
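+# Usage sketch (comment only; values are illustrative):
+#   lnds, sig, tau, phi = afshari_stewart_ds_2016(magnitude=7.0, distance=30.0,
+#                                                 vs30=500.0, mechanism='strike-slip',
+#                                                 region='california',
+#                                                 duration_type='DS575H')
+#   # the total sigma combines the two components in quadrature, sqrt(phi**2 + tau**2)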
diff --git a/modules/performRegionalEventSimulation/regionalGroundMotion/gmpe/__init__.py b/modules/performRegionalEventSimulation/regionalGroundMotion/gmpe/__init__.py
index 49dc6cfa6..0915621dd 100644
--- a/modules/performRegionalEventSimulation/regionalGroundMotion/gmpe/__init__.py
+++ b/modules/performRegionalEventSimulation/regionalGroundMotion/gmpe/__init__.py
@@ -1,5 +1,4 @@
-# -*- coding: utf-8 -*-
-#
+# # noqa: D104
# Copyright (c) 2018 Leland Stanford Junior University
# Copyright (c) 2018 The Regents of the University of California
#
@@ -38,4 +37,4 @@
# Kuanshi Zhong
#
-__all__ = ['CorrelationModel','SignificantDurationModel']
+__all__ = ['CorrelationModel', 'SignificantDurationModel'] # noqa: F822
diff --git a/modules/performRegionalEventSimulation/regionalGroundMotion/gmpe/openSHAGMPE.py b/modules/performRegionalEventSimulation/regionalGroundMotion/gmpe/openSHAGMPE.py
index 702cf693a..61ce5c1ed 100644
--- a/modules/performRegionalEventSimulation/regionalGroundMotion/gmpe/openSHAGMPE.py
+++ b/modules/performRegionalEventSimulation/regionalGroundMotion/gmpe/openSHAGMPE.py
@@ -1,5 +1,4 @@
-# -*- coding: utf-8 -*-
-#
+# # noqa: N999, D100
# Copyright (c) 2018 Leland Stanford Junior University
# Copyright (c) 2018 The Regents of the University of California
#
@@ -36,159 +35,177 @@
#
# Contributors:
# Jinyan Zhao
-# Transfered from openSHA to achieve better performance in r2d
+# Transferred from openSHA to achieve better performance in r2d
-import numpy as np
-import pandas as pd
import os
-import time
import sys
-############### Chiou and Young (2014)
-class chiou_youngs_2013():
- timeSetImt = 0
- timeCalc = 0
- supportedImt = None
+import time
+
+import numpy as np
+import pandas as pd
+
+
+# Chiou and Youngs (2014)
+class chiou_youngs_2013: # noqa: D101
+ timeSetImt = 0 # noqa: N815
+ timeCalc = 0 # noqa: N815
+ supportedImt = None # noqa: N815
+
def __init__(self):
- self.coeff = pd.read_csv(os.path.join(os.path.dirname(__file__),'data','CY14.csv'))
- self.coeff.iloc[:-2,0] = self.coeff.iloc[:-2,0].apply(lambda x: float(x))
+ self.coeff = pd.read_csv(
+ os.path.join(os.path.dirname(__file__), 'data', 'CY14.csv') # noqa: PTH118, PTH120
+ )
+ self.coeff.iloc[:-2, 0] = self.coeff.iloc[:-2, 0].apply(lambda x: float(x)) # noqa: PLW0108
self.coeff = self.coeff.set_index('T')
self.supportedImt = list(self.coeff.index)
self.coeff = self.coeff.to_dict()
-
+
# Constants same for all periods
- self.C2 = self.coeff["c2"]["PGA"]
- self.C4 = self.coeff["c4"]["PGA"]
- self.C4a = self.coeff["c4a"]["PGA"]
+ self.C2 = self.coeff['c2']['PGA']
+ self.C4 = self.coeff['c4']['PGA']
+ self.C4a = self.coeff['c4a']['PGA']
self.dC4 = self.C4a - self.C4
- self.C11 = self.coeff["c11"]["PGA"]
- self.CRB = self.coeff["cRB"]["PGA"]
- self.PHI6 = self.coeff["phi6"]["PGA"]
+ self.C11 = self.coeff['c11']['PGA']
+ self.CRB = self.coeff['cRB']['PGA']
+ self.PHI6 = self.coeff['phi6']['PGA']
self.A = np.power(571, 4)
self.B = np.power(1360, 4) + self.A
self.CRBsq = self.CRB * self.CRB
-
- def setIMT(self, imt):
+
+ def setIMT(self, imt): # noqa: N802, D102
if imt not in self.supportedImt:
- sys.exit(f"The imt {imt} is not supported by Chiou and Young (2014)")
+ sys.exit(f'The imt {imt} is not supported by Chiou and Youngs (2014)')
return False
- self.c1 = self.coeff["c1"][imt]
- self.c1a = self.coeff["c1a"][imt]
- self.c1b = self.coeff["c1b"][imt]
- self.c1c = self.coeff["c1c"][imt]
- self.c1d = self.coeff["c1d"][imt]
- self.c3 = self.coeff["c3"][imt]
- self.c5 = self.coeff["c5"][imt]
- self.c6 = self.coeff["c6"][imt]
- self.c7 = self.coeff["c7"][imt]
- self.c7b = self.coeff["c7b"][imt]
- self.c8b = self.coeff["c8b"][imt]
- self.c9 = self.coeff["c9"][imt]
- self.c9a = self.coeff["c9a"][imt]
- self.c9b = self.coeff["c9b"][imt]
- self.c11b = self.coeff["c11b"][imt]
- self.cn = self.coeff["cn"][imt]
- self.cM = self.coeff["cM"][imt]
- self.cHM = self.coeff["cHM"][imt]
- self.cgamma1 = self.coeff["cgamma1"][imt]
- self.cgamma2 = self.coeff["cgamma2"][imt]
- self.cgamma3 = self.coeff["cgamma3"][imt]
- self.phi1 = self.coeff["phi1"][imt]
- self.phi2 = self.coeff["phi2"][imt]
- self.phi3 = self.coeff["phi3"][imt]
- self.phi4 = self.coeff["phi4"][imt]
- self.phi5 = self.coeff["phi5"][imt]
- self.tau1 = self.coeff["tau1"][imt]
- self.tau2 = self.coeff["tau2"][imt]
- self.sigma1 = self.coeff["sigma1"][imt]
- self.sigma2 = self.coeff["sigma2"][imt]
- self.sigma3 = self.coeff["sigma3"][imt]
+ self.c1 = self.coeff['c1'][imt]
+ self.c1a = self.coeff['c1a'][imt]
+ self.c1b = self.coeff['c1b'][imt]
+ self.c1c = self.coeff['c1c'][imt]
+ self.c1d = self.coeff['c1d'][imt]
+ self.c3 = self.coeff['c3'][imt]
+ self.c5 = self.coeff['c5'][imt]
+ self.c6 = self.coeff['c6'][imt]
+ self.c7 = self.coeff['c7'][imt]
+ self.c7b = self.coeff['c7b'][imt]
+ self.c8b = self.coeff['c8b'][imt]
+ self.c9 = self.coeff['c9'][imt]
+ self.c9a = self.coeff['c9a'][imt]
+ self.c9b = self.coeff['c9b'][imt]
+ self.c11b = self.coeff['c11b'][imt]
+ self.cn = self.coeff['cn'][imt]
+ self.cM = self.coeff['cM'][imt]
+ self.cHM = self.coeff['cHM'][imt]
+ self.cgamma1 = self.coeff['cgamma1'][imt]
+ self.cgamma2 = self.coeff['cgamma2'][imt]
+ self.cgamma3 = self.coeff['cgamma3'][imt]
+ self.phi1 = self.coeff['phi1'][imt]
+ self.phi2 = self.coeff['phi2'][imt]
+ self.phi3 = self.coeff['phi3'][imt]
+ self.phi4 = self.coeff['phi4'][imt]
+ self.phi5 = self.coeff['phi5'][imt]
+ self.tau1 = self.coeff['tau1'][imt]
+ self.tau2 = self.coeff['tau2'][imt]
+ self.sigma1 = self.coeff['sigma1'][imt]
+ self.sigma2 = self.coeff['sigma2'][imt]
+ self.sigma3 = self.coeff['sigma3'][imt]
return True
# Center zTop on the zTop-M relation -- Equations 4, 5
- def calcMwZtop(self,style, Mw):
- mzTop = 0.0
- if style == "REVERSE":
- if Mw<=5.849:
- mzTop = 2.704
+ def calcMwZtop(self, style, Mw): # noqa: D102, N802, N803, PLR6301
+ mzTop = 0.0 # noqa: N806
+ if style == 'REVERSE':
+ if Mw <= 5.849: # noqa: PLR2004
+ mzTop = 2.704 # noqa: N806
else:
- mzTop = max(2.704 - 1.226 * (Mw - 5.849), 0)
+ mzTop = max(2.704 - 1.226 * (Mw - 5.849), 0) # noqa: N806
else:
- mzTop = 2.673 if (Mw <= 4.970) else max(2.673 - 1.136 * (Mw - 4.970), 0)
+ mzTop = 2.673 if (Mw <= 4.970) else max(2.673 - 1.136 * (Mw - 4.970), 0) # noqa: N806, PLR2004
return mzTop * mzTop
-
- def calcSAref(self, Mw, rJB, rRup, rX, dip, zTop, style):
+
+ def calcSAref(self, Mw, rJB, rRup, rX, dip, zTop, style): # noqa: N802, N803, D102
# Magnitude scaling
- r1 = self.c1 + self.C2 * (Mw - 6.0) + ((self.C2 - self.c3) / self.cn) * np.log(1.0 + np.exp(self.cn * (self.cM - Mw)))
+ r1 = (
+ self.c1
+ + self.C2 * (Mw - 6.0)
+ + ((self.C2 - self.c3) / self.cn)
+ * np.log(1.0 + np.exp(self.cn * (self.cM - Mw)))
+ )
# Near-field magnitude and distance scaling
- r2 = self.C4 * np.log(rRup + self.c5 * np.cosh(self.c6 * max(Mw - self.cHM, 0.0)))
+ r2 = self.C4 * np.log(
+ rRup + self.c5 * np.cosh(self.c6 * max(Mw - self.cHM, 0.0))
+ )
# Far-field distance scaling
- gamma = (self.cgamma1 + self.cgamma2 / np.cosh(max(Mw - self.cgamma3, 0.0)))
+ gamma = self.cgamma1 + self.cgamma2 / np.cosh(max(Mw - self.cgamma3, 0.0))
r3 = self.dC4 * np.log(np.sqrt(rRup * rRup + self.CRBsq)) + rRup * gamma
# Scaling with other source variables
- coshM = np.cosh(2 * max(Mw - 4.5, 0))
- cosDelta = np.cos(dip * np.pi/180.0)
+ coshM = np.cosh(2 * max(Mw - 4.5, 0)) # noqa: N806
+ cosDelta = np.cos(dip * np.pi / 180.0) # noqa: N806
# Center zTop on the zTop-M relation
- deltaZtop = zTop - self.calcMwZtop(style, Mw)
- r4 = (self.c7 + self.c7b / coshM) * deltaZtop + (self.C11 + self.c11b / coshM) * cosDelta * cosDelta
- if style == "REVERSE":
+ deltaZtop = zTop - self.calcMwZtop(style, Mw) # noqa: N806
+ r4 = (self.c7 + self.c7b / coshM) * deltaZtop + (
+ self.C11 + self.c11b / coshM
+ ) * cosDelta * cosDelta
+ if style == 'REVERSE':
r4 += self.c1a + self.c1c / coshM
- elif style == "NORMAL":
+ elif style == 'NORMAL':
r4 += self.c1b + self.c1d / coshM
else:
r4 += 0.0
# Hanging-wall effect
r5 = 0.0
- if rX>=0.0:
- r5 = self.c9 * np.cos(dip * np.pi/180.0) * (self.c9a + (1.0 - self.c9a) * np.tanh(rX / self.c9b)) * (1 - np.sqrt(rJB * rJB + zTop * zTop) / (rRup + 1.0))
+ if rX >= 0.0:
+ r5 = (
+ self.c9
+ * np.cos(dip * np.pi / 180.0)
+ * (self.c9a + (1.0 - self.c9a) * np.tanh(rX / self.c9b))
+ * (1 - np.sqrt(rJB * rJB + zTop * zTop) / (rRup + 1.0))
+ )
return np.exp(r1 + r2 + r3 + r4 + r5)
-
- def calcSoilNonLin(self, vs30):
+
+ def calcSoilNonLin(self, vs30): # noqa: N802, D102
exp1 = np.exp(self.phi3 * (min(vs30, 1130.0) - 360.0))
exp2 = np.exp(self.phi3 * (1130.0 - 360.0))
return self.phi2 * (exp1 - exp2)
-
- def calcZ1ref(self, vs30):
- # -- Equation 18
- vsPow4 = vs30 * vs30 * vs30 * vs30
- return np.exp(-7.15 / 4 * np.log((vsPow4 + self.A) / self.B)) / 1000.0 # km
-
- def calcDeltaZ1(self, z1p0, vs30):
- if (np.isnan(z1p0)):
+
+ def calcZ1ref(self, vs30): # noqa: N802, D102
+ # -- Equation 18
+ vsPow4 = vs30 * vs30 * vs30 * vs30 # noqa: N806
+ return np.exp(-7.15 / 4 * np.log((vsPow4 + self.A) / self.B)) / 1000.0 # km
+
+ def calcDeltaZ1(self, z1p0, vs30): # noqa: N802, D102
+ if np.isnan(z1p0):
return 0.0
return 1000.0 * (z1p0 - self.calcZ1ref(vs30))
# Mean ground motion model -- Equation 12
- def calcMean(self, vs30, z1p0, snl, saRef):
+ def calcMean(self, vs30, z1p0, snl, saRef): # noqa: N802, N803, D102
# Soil effect: linear response
sl = self.phi1 * min(np.log(vs30 / 1130.0), 0.0)
# Soil effect: nonlinear response (base passed in)
snl *= np.log((saRef + self.phi4) / self.phi4)
# Soil effect: sediment thickness
- dZ1 = self.calcDeltaZ1(z1p0, vs30)
+ dZ1 = self.calcDeltaZ1(z1p0, vs30) # noqa: N806
rkdepth = self.phi5 * (1.0 - np.exp(-dZ1 / self.PHI6))
return np.log(saRef) + sl + snl + rkdepth
-
- def calcNLOsq(self, snl, saRef):
- NL0 = snl * saRef / (saRef + self.phi4)
- NL0sq = (1 + NL0) * (1 + NL0)
- return NL0sq
-
- def calcTauSq(self, NL0sq, mTest):
+
+ def calcNLOsq(self, snl, saRef): # noqa: N802, N803, D102
+ NL0 = snl * saRef / (saRef + self.phi4) # noqa: N806
+ NL0sq = (1 + NL0) * (1 + NL0) # noqa: N806
+ return NL0sq # noqa: RET504
+
+ def calcTauSq(self, NL0sq, mTest): # noqa: N802, N803, D102
tau = self.tau1 + (self.tau2 - self.tau1) / 1.5 * mTest
- tauSq = tau * tau * NL0sq
- return tauSq
-
- def calcPhiSq(self, vsInf, NL0sq, mTest):
- sigmaNL0 = self.sigma1 + (self.sigma2 - self.sigma1) / 1.5 * mTest
- vsTerm = self.sigma3 if vsInf else 0.7
- sigmaNL0 *= np.sqrt(vsTerm + NL0sq)
- phiSq = sigmaNL0 * sigmaNL0
- return phiSq
-
- def calc(self, Mw, rJB, rRup, rX, dip, zTop, vs30, vsInf,
- z1p0, style):
- """
- Preliminary implementation of the Chiou & Youngs (2013) next generation
+ tauSq = tau * tau * NL0sq # noqa: N806
+ return tauSq # noqa: RET504
+
+ def calcPhiSq(self, vsInf, NL0sq, mTest): # noqa: N802, N803, D102
+ sigmaNL0 = self.sigma1 + (self.sigma2 - self.sigma1) / 1.5 * mTest # noqa: N806
+ vsTerm = self.sigma3 if vsInf else 0.7 # noqa: N806
+ sigmaNL0 *= np.sqrt(vsTerm + NL0sq) # noqa: N806
+ phiSq = sigmaNL0 * sigmaNL0 # noqa: N806
+ return phiSq # noqa: RET504
+
+ def calc(self, Mw, rJB, rRup, rX, dip, zTop, vs30, vsInf, z1p0, style): # noqa: N803
+ """Preliminary implementation of the Chiou & Youngs (2013) next generation
attenuation relationship developed as part of NGA West II.
Input
imt intensity measure type ['PGA', 'PGV',1.0]
@@ -207,91 +224,107 @@ def calc(self, Mw, rJB, rRup, rX, dip, zTop, vs30, vsInf,
TotalStdDev
InterEvStdDev
IntraEvStdDev
- """
- saRef = self.calcSAref(Mw, rJB, rRup, rX, dip, zTop, style)
- soilNonLin = self.calcSoilNonLin(vs30)
+ """ # noqa: D205, D400
+ saRef = self.calcSAref(Mw, rJB, rRup, rX, dip, zTop, style) # noqa: N806
+ soilNonLin = self.calcSoilNonLin(vs30) # noqa: N806
mean = self.calcMean(vs30, z1p0, soilNonLin, saRef)
# Aleatory uncertainty model -- Equation 3.9
# Response Term - linear vs. non-linear
- NL0sq = self.calcNLOsq(soilNonLin, saRef)
+ NL0sq = self.calcNLOsq(soilNonLin, saRef) # noqa: N806
# Magnitude thresholds
- mTest = min(max(Mw, 5.0), 6.5) - 5.0
+ mTest = min(max(Mw, 5.0), 6.5) - 5.0 # noqa: N806
# Inter-event Term
- tauSq = self.calcTauSq(NL0sq, mTest)
+ tauSq = self.calcTauSq(NL0sq, mTest) # noqa: N806
# Intra-event term
- phiSq = self.calcPhiSq(vsInf, NL0sq, mTest)
+ phiSq = self.calcPhiSq(vsInf, NL0sq, mTest) # noqa: N806
- stdDev = np.sqrt(tauSq + phiSq)
+ stdDev = np.sqrt(tauSq + phiSq) # noqa: N806
return mean, stdDev, np.sqrt(tauSq), np.sqrt(phiSq)
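+    # Note (comment added for clarity): calc() returns (mean ln IM, total sigma, tau,
+    # phi), where the total sigma combines the inter- and intra-event terms in
+    # quadrature, sigma = sqrt(tau**2 + phi**2); get_IM() below evaluates calc() for
+    # each requested period.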
- #https://github.com/opensha/opensha/blob/master/src/main/java/org/opensha/sha/imr/attenRelImpl/ngaw2/NGAW2_Wrapper.java#L220
- def getFaultFromRake(self,rake):
- if(rake >= 135 or rake <= -135):
- return "STRIKE_SLIP"
- elif rake>=-45 and rake <=45:
- return "STRIKE_SLIP"
- elif rake>=45 and rake <=135:
- return "REVERSE"
+
+ # https://github.com/opensha/opensha/blob/master/src/main/java/org/opensha/sha/imr/attenRelImpl/ngaw2/NGAW2_Wrapper.java#L220
+ def getFaultFromRake(self, rake): # noqa: D102, N802, PLR6301
+ if rake >= 135 or rake <= -135 or (rake >= -45 and rake <= 45): # noqa: PLR2004
+ return 'STRIKE_SLIP'
+ elif rake >= 45 and rake <= 135: # noqa: RET505, PLR2004
+ return 'REVERSE'
else:
- return "NORMAL"
-
- def get_IM(self, Mw, site_rup_dict, site_info, im_info):
- vsInf = bool(site_info["vsInferred"])
- style = self.getFaultFromRake(site_rup_dict["aveRake"])
+ return 'NORMAL'
+
+ def get_IM(self, Mw, site_rup_dict, site_info, im_info): # noqa: N802, N803, D102
+ vsInf = bool(site_info['vsInferred']) # noqa: N806
+ style = self.getFaultFromRake(site_rup_dict['aveRake'])
if 'SA' in im_info['Type']:
- cur_T = im_info.get('Periods', None)
+ cur_T = im_info.get('Periods', None) # noqa: N806
elif im_info['Type'] == 'PGA':
- cur_T = ['PGA']
+ cur_T = ['PGA'] # noqa: N806
elif im_info['Type'] == 'PGV':
- cur_T = ['PGV']
+ cur_T = ['PGV'] # noqa: N806
else:
- print(f'The IM type {im_info["Type"]} is not supported')
- meanList = []
- stdDevList = []
- InterEvStdDevList = []
- IntraEvStdDevList = []
- for Tj in cur_T:
+ print(f'The IM type {im_info["Type"]} is not supported') # noqa: T201
+ meanList = [] # noqa: N806
+ stdDevList = [] # noqa: N806
+ InterEvStdDevList = [] # noqa: N806
+ IntraEvStdDevList = [] # noqa: N806
+ for Tj in cur_T: # noqa: N806
start = time.process_time_ns()
self.setIMT(Tj)
self.timeSetImt += time.process_time_ns() - start
start = time.process_time_ns()
- mean, stdDev, InterEvStdDev, IntraEvStdDev = self.calc(Mw, site_info["rJB"], site_info["rRup"], site_info["rX"], site_rup_dict["dip"], site_rup_dict["zTop"], site_info["vs30"], vsInf, site_info["z1pt0"]/1000.0, style)
+ mean, stdDev, InterEvStdDev, IntraEvStdDev = self.calc( # noqa: N806
+ Mw,
+ site_info['rJB'],
+ site_info['rRup'],
+ site_info['rX'],
+ site_rup_dict['dip'],
+ site_rup_dict['zTop'],
+ site_info['vs30'],
+ vsInf,
+ site_info['z1pt0'] / 1000.0,
+ style,
+ )
self.timeCalc += time.process_time_ns() - start
meanList.append(mean)
stdDevList.append(stdDev)
InterEvStdDevList.append(InterEvStdDev)
IntraEvStdDevList.append(IntraEvStdDev)
- saResult = {'Mean': meanList,
- 'TotalStdDev': stdDevList,
- 'InterEvStdDev': InterEvStdDevList,
- 'IntraEvStdDev': IntraEvStdDevList}
- return saResult
-
+ saResult = { # noqa: N806
+ 'Mean': meanList,
+ 'TotalStdDev': stdDevList,
+ 'InterEvStdDev': InterEvStdDevList,
+ 'IntraEvStdDev': IntraEvStdDevList,
+ }
+ return saResult # noqa: RET504
+
# Station
# if station_info['Type'] == 'SiteList':
# siteSpec = station_info['SiteList']
# for i in range(len(site_list)):
-############## Abrahamson, Silva, and Kamai (2014)
-class abrahamson_silva_kamai_2014():
- timeSetImt = 0
- timeCalc = 0
- supportedImt = None
+
+# Abrahamson, Silva, and Kamai (2014)
+class abrahamson_silva_kamai_2014: # noqa: D101
+ timeSetImt = 0 # noqa: N815
+ timeCalc = 0 # noqa: N815
+ supportedImt = None # noqa: N815
+
def __init__(self):
- self.coeff = pd.read_csv(os.path.join(os.path.dirname(__file__),'data','ASK14.csv'))
- self.coeff.iloc[:-2,0] = self.coeff.iloc[:-2,0].apply(lambda x: float(x))
+ self.coeff = pd.read_csv(
+ os.path.join(os.path.dirname(__file__), 'data', 'ASK14.csv') # noqa: PTH118, PTH120
+ )
+ self.coeff.iloc[:-2, 0] = self.coeff.iloc[:-2, 0].apply(lambda x: float(x)) # noqa: PLW0108
self.coeff = self.coeff.set_index('T')
self.supportedImt = list(self.coeff.index)
self.coeff = self.coeff.to_dict()
-
+
# Authors declared constants
self.A3 = 0.275
self.A4 = -0.1
self.A5 = -0.41
self.M2 = 5.0
- self.N = 1.5
+ self.N = 1.5
self.C4 = 4.5
-
+
# implementation constants
self.A = np.power(610, 4)
self.B = np.power(1360, 4) + self.A
@@ -302,134 +335,169 @@ def __init__(self):
self.H3 = -0.75
self.PHI_AMP_SQ = 0.16
- def setIMT(self, imt):
+ def setIMT(self, imt): # noqa: N802, D102
if imt not in self.supportedImt:
- sys.exit(f"The imt {imt} is not supported by Abrahamson, Silva, and Kamai (2014)")
- return None
+ sys.exit(
+ f'The imt {imt} is not supported by Abrahamson, Silva, and Kamai (2014)'
+ )
+ return
self.imt = imt
- self.a1 = self.coeff['a1'][imt]
- self.a2 = self.coeff['a2'][imt]
- self.a6 = self.coeff['a6'][imt]
- self.a8 = self.coeff['a8'][imt]
- self.a10 = self.coeff['a10'][imt]
+ self.a1 = self.coeff['a1'][imt]
+ self.a2 = self.coeff['a2'][imt]
+ self.a6 = self.coeff['a6'][imt]
+ self.a8 = self.coeff['a8'][imt]
+ self.a10 = self.coeff['a10'][imt]
self.a12 = self.coeff['a12'][imt]
self.a13 = self.coeff['a13'][imt]
self.a14 = self.coeff['a14'][imt]
- self.a15 = self.coeff['a15'][imt]
+ self.a15 = self.coeff['a15'][imt]
self.a17 = self.coeff['a17'][imt]
self.a43 = self.coeff['a43'][imt]
self.a44 = self.coeff['a44'][imt]
- self.a45 = self.coeff['a45'][imt]
+ self.a45 = self.coeff['a45'][imt]
self.a46 = self.coeff['a46'][imt]
- self.b = self.coeff['b'][imt]
- self.c = self.coeff['c'][imt]
+ self.b = self.coeff['b'][imt]
+ self.c = self.coeff['c'][imt]
self.s1e = self.coeff['s1e'][imt]
self.s2e = self.coeff['s2e'][imt]
- self.s3 = self.coeff['s3'][imt]
- self.s4 = self.coeff['s4'][imt]
+ self.s3 = self.coeff['s3'][imt]
+ self.s4 = self.coeff['s4'][imt]
self.s1m = self.coeff['s1m'][imt]
self.s2m = self.coeff['s2m'][imt]
- self.s5 = self.coeff['s5'][imt]
- self.s6 = self.coeff['s6'][imt]
- self.M1 = self.coeff['M1'][imt]
- self.Vlin= self.coeff['Vlin'][imt]
+ self.s5 = self.coeff['s5'][imt]
+ self.s6 = self.coeff['s6'][imt]
+ self.M1 = self.coeff['M1'][imt]
+ self.Vlin = self.coeff['Vlin'][imt]
- def getV1(self):
+ def getV1(self): # noqa: N802, D102
try:
- if self.imt == "PGA" or self.imt == "PGV":
+ if self.imt == 'PGA' or self.imt == 'PGV': # noqa: PLR1714
return 1500.0
- if self.imt >= 3.0:
+ if self.imt >= 3.0: # noqa: PLR2004
return 800.0
- if self.imt > 0.5:
- return np.exp( -0.35 * np.log(self.imt / 0.5) + np.log(1500.0))
- return 1500.0
- except:
+ if self.imt > 0.5: # noqa: PLR2004
+ return np.exp(-0.35 * np.log(self.imt / 0.5) + np.log(1500.0))
+ return 1500.0 # noqa: TRY300
+ except: # noqa: E722
return 1500.0
- def calcZ1ref(self, vs30):
- vsPow4 = vs30 * vs30 * vs30 * vs30
+
+ def calcZ1ref(self, vs30): # noqa: N802, D102
+ vsPow4 = vs30 * vs30 * vs30 * vs30 # noqa: N806
return np.exp(-7.67 / 4.0 * np.log((vsPow4 + self.A) / self.B)) / 1000.0
- def calcSoilTerm(self, vs30, z1p0):
+ def calcSoilTerm(self, vs30, z1p0): # noqa: N802, D102
if np.isnan(z1p0):
return 0.0
z1ref = self.calcZ1ref(vs30)
- vsCoeff = np.array([self.a43, self.a44, self.a45, self.a46, self.a46])
- VS_BINS = np.array([150.0, 250.0, 400.0, 700.0, 1000.0])
+ vsCoeff = np.array([self.a43, self.a44, self.a45, self.a46, self.a46]) # noqa: N806
+ VS_BINS = np.array([150.0, 250.0, 400.0, 700.0, 1000.0]) # noqa: N806
z1c = np.interp(vs30, VS_BINS, vsCoeff)
return z1c * np.log((z1p0 + 0.01) / (z1ref + 0.01))
- def getPhiA(self, Mw, s1, s2):
- if Mw < 4.0:
+ def getPhiA(self, Mw, s1, s2): # noqa: D102, N802, N803, PLR6301
+ if Mw < 4.0: # noqa: PLR2004
return s1
- if Mw > 6.0:
+ if Mw > 6.0: # noqa: PLR2004
return s2
- else:
+ else: # noqa: RET505
return s1 + ((s2 - s1) / 2) * (Mw - 4.0)
- def getTauA(self, Mw, s3, s4):
- if Mw < 5.0:
+ def getTauA(self, Mw, s3, s4): # noqa: D102, N802, N803, PLR6301
+ if Mw < 5.0: # noqa: PLR2004
return s3
- if Mw > 7.0:
+ if Mw > 7.0: # noqa: PLR2004
return s4
return s3 + ((s4 - s3) / 2) * (Mw - 5.0)
-
- def get_dAmp(self, b, c, vLin, vs30, saRock):
- if (vs30 >= vLin):
+
+ def get_dAmp(self, b, c, vLin, vs30, saRock): # noqa: N802, N803, D102
+ if vs30 >= vLin:
return 0.0
- return (-b * saRock) / (saRock + c) + (b * saRock) / (saRock + c * np.power(vs30 / vLin, self.N))
-
- def calcValues(self, Mw, rJB, rRup, rX, rY0, dip, width, zTop, vs30, vsInferred, z1p0, style):
- if Mw > 5:
+ return (-b * saRock) / (saRock + c) + (b * saRock) / (
+ saRock + c * np.power(vs30 / vLin, self.N)
+ )
+
+ def calcValues( # noqa: C901, N802, D102
+ self,
+ Mw, # noqa: N803
+ rJB, # noqa: N803
+ rRup, # noqa: N803
+ rX, # noqa: N803
+ rY0, # noqa: ARG002, N803
+ dip,
+ width,
+ zTop, # noqa: N803
+ vs30,
+ vsInferred, # noqa: N803
+ z1p0,
+ style,
+ ):
+ if Mw > 5: # noqa: PLR2004
c4mag = self.C4
- elif Mw > 4:
+ elif Mw > 4: # noqa: PLR2004
c4mag = self.C4 - (self.C4 - 1.0) * (5.0 - Mw)
else:
c4mag = 1.0
# -- Equation 3
- R = np.sqrt(rRup * rRup + c4mag * c4mag)
+ R = np.sqrt(rRup * rRup + c4mag * c4mag) # noqa: N806
# -- Equation 2
- MaxMwSq = (8.5 - Mw) * (8.5 - Mw)
- MwM1 = Mw - self.M1
+ MaxMwSq = (8.5 - Mw) * (8.5 - Mw) # noqa: N806
+ MwM1 = Mw - self.M1 # noqa: N806
f1 = self.a1 + self.a17 * rRup
if Mw > self.M1:
- f1 += self.A5 * MwM1 + self.a8 * MaxMwSq + (self.a2 + self.A3 * MwM1) * np.log(R)
+ f1 += (
+ self.A5 * MwM1
+ + self.a8 * MaxMwSq
+ + (self.a2 + self.A3 * MwM1) * np.log(R)
+ )
elif Mw >= self.M2:
- f1 += self.A4 * MwM1 + self.a8 * MaxMwSq + (self.a2 + self.A3 * MwM1) * np.log(R)
+ f1 += (
+ self.A4 * MwM1
+ + self.a8 * MaxMwSq
+ + (self.a2 + self.A3 * MwM1) * np.log(R)
+ )
else:
- M2M1 = self.M2 - self.M1
- MaxM2Sq = (8.5 - self.M2) * (8.5 - self.M2)
- MwM2 = Mw - self.M2
- f1 += self.A4 * M2M1 + self.a8 * MaxM2Sq + self.a6 * MwM2 + (self.a2 + self.A3 * M2M1) * np.log(R)
-
+ M2M1 = self.M2 - self.M1 # noqa: N806
+ MaxM2Sq = (8.5 - self.M2) * (8.5 - self.M2) # noqa: N806
+ MwM2 = Mw - self.M2 # noqa: N806
+ f1 += (
+ self.A4 * M2M1
+ + self.a8 * MaxM2Sq
+ + self.a6 * MwM2
+ + (self.a2 + self.A3 * M2M1) * np.log(R)
+ )
+
# Hanging Wall Model
f4 = 0.0
- if rJB < 30 and rX >= 0.0 and Mw > 5.5 and zTop <= 10.0:
- T1 = ((90.0 - dip) / 45 if (dip > 30.0) else 1.33333333)
- dM = Mw - 6.5
- T2 = (1 + self.A2_HW * dM if Mw>=6.5 else 1 + self.A2_HW * dM - (1 - self.A2_HW) * dM * dM)
- T3 = 0.0
- r1 = width * np.cos(dip * np.pi/180.0)
+ if rJB < 30 and rX >= 0.0 and Mw > 5.5 and zTop <= 10.0: # noqa: PLR2004
+ T1 = (90.0 - dip) / 45 if (dip > 30.0) else 1.33333333 # noqa: N806, PLR2004
+ dM = Mw - 6.5 # noqa: N806
+ T2 = ( # noqa: N806
+ 1 + self.A2_HW * dM
+ if Mw >= 6.5 # noqa: PLR2004
+ else 1 + self.A2_HW * dM - (1 - self.A2_HW) * dM * dM
+ )
+ T3 = 0.0 # noqa: N806
+ r1 = width * np.cos(dip * np.pi / 180.0)
r2 = 3 * r1
if rX <= r1:
- rXr1 = rX / r1
- T3 = self.H1 + self.H2 * rXr1 + self.H3 * rXr1 * rXr1
+ rXr1 = rX / r1 # noqa: N806
+ T3 = self.H1 + self.H2 * rXr1 + self.H3 * rXr1 * rXr1 # noqa: N806
elif rX <= r2:
- T3 = 1-(rX-r1)/(r2-r1)
- T4 = 1 - (zTop * zTop) / 100.0
- T5 = (1.0 if rJB == 0.0 else 1-rJB/30.0)
+ T3 = 1 - (rX - r1) / (r2 - r1) # noqa: N806
+ T4 = 1 - (zTop * zTop) / 100.0 # noqa: N806
+ T5 = 1.0 if rJB == 0.0 else 1 - rJB / 30.0 # noqa: N806
f4 = self.a13 * T1 * T2 * T3 * T4 * T5
f6 = self.a15
- if zTop < 20.0:
+ if zTop < 20.0: # noqa: PLR2004
f6 *= zTop / 20.0
- if style == "NORMAL":
- if Mw > 5.0:
+ if style == 'NORMAL':
+ if Mw > 5.0: # noqa: PLR2004
f78 = self.a12
+ elif Mw >= 4.0: # noqa: PLR2004
+ f78 = self.a12 * (Mw - 4)
else:
- if Mw >= 4.0:
- f78 = self.a12 * (Mw - 4)
- else:
- f78 = 0.0
+ f78 = 0.0
else:
f78 = 0.0
# -- Equation 17
@@ -437,19 +505,23 @@ def calcValues(self, Mw, rJB, rRup, rX, rY0, dip, width, zTop, vs30, vsInferred,
# Site Response Model
f5 = 0.0
- v1 = self.getV1() # -- Equation 9
- vs30s = (vs30 if (vs30 < v1) else v1) # -- Equation 8
+ v1 = self.getV1() # -- Equation 9
+ vs30s = min(v1, vs30) # -- Equation 8
# Site term -- Equation 7
- saRock = 0.0; # calc Sa1180 (rock reference) if necessary
+ saRock = 0.0 # calc Sa1180 (rock reference) if necessary # noqa: N806
if vs30 < self.Vlin:
- if self.VS_RK < v1:
+ if v1 > self.VS_RK:
vs30s_rk = self.VS_RK
else:
vs30s_rk = v1
f5_rk = (self.a10 + self.b * self.N) * np.log(vs30s_rk / self.Vlin)
- saRock = np.exp(f1 + f78 + f5_rk + f4 + f6)
- f5 = self.a10 * np.log(vs30s / self.Vlin) - self.b * np.log(saRock + self.c) + self.b * np.log(saRock + self.c * pow(vs30s / self.Vlin, self.N))
+ saRock = np.exp(f1 + f78 + f5_rk + f4 + f6) # noqa: N806
+ f5 = (
+ self.a10 * np.log(vs30s / self.Vlin)
+ - self.b * np.log(saRock + self.c)
+ + self.b * np.log(saRock + self.c * pow(vs30s / self.Vlin, self.N))
+ )
else:
f5 = (self.a10 + self.b * self.N) * np.log(vs30s / self.Vlin)
# total model (no aftershock f11) -- Equation 1
@@ -458,80 +530,97 @@ def calcValues(self, Mw, rJB, rRup, rX, rY0, dip, width, zTop, vs30, vsInferred,
# ****** Aleatory uncertainty model ******
# Intra-event term -- Equation 24
if vsInferred:
- phiAsq = self.getPhiA(Mw, self.s1e, self.s2e)
+ phiAsq = self.getPhiA(Mw, self.s1e, self.s2e) # noqa: N806
else:
- phiAsq = self.getPhiA(Mw, self.s1m, self.s2m)
- phiAsq *= phiAsq
+ phiAsq = self.getPhiA(Mw, self.s1m, self.s2m) # noqa: N806
+ phiAsq *= phiAsq # noqa: N806
# Inter-event term -- Equation 25
- tauB = self.getTauA(Mw, self.s3, self.s4)
+ tauB = self.getTauA(Mw, self.s3, self.s4) # noqa: N806
# Intra-event term with site amp variability removed -- Equation 27
- phiBsq = phiAsq - self.PHI_AMP_SQ
- # Parital deriv. of ln(soil amp) w.r.t. ln(SA1180) -- Equation 30
- # saRock subject to same vs30 < Vlin test as in mean model
- dAmp_p1 = self.get_dAmp(self.b, self.c, self.Vlin, vs30, saRock) + 1.0
+ phiBsq = phiAsq - self.PHI_AMP_SQ # noqa: N806
+ # Partial deriv. of ln(soil amp) w.r.t. ln(SA1180) -- Equation 30
+ # saRock subject to same vs30 < Vlin test as in mean model
+ dAmp_p1 = self.get_dAmp(self.b, self.c, self.Vlin, vs30, saRock) + 1.0 # noqa: N806
# phi squared, with non-linear effects -- Equation 28
- phiSq = phiBsq * dAmp_p1 * dAmp_p1 + self.PHI_AMP_SQ
+ phiSq = phiBsq * dAmp_p1 * dAmp_p1 + self.PHI_AMP_SQ # noqa: N806
# tau squared, with non-linear effects -- Equation 29
tau = tauB * dAmp_p1
# total std dev
- stdDev = np.sqrt(phiSq + tau * tau)
+ stdDev = np.sqrt(phiSq + tau * tau) # noqa: N806
return mean, stdDev, np.sqrt(phiSq), tau
-
- def getFaultFromRake(self,rake):
- if(rake >= 135 or rake <= -135):
- return "STRIKE_SLIP"
- elif rake>=-45 and rake <=45:
- return "STRIKE_SLIP"
- elif rake>=45 and rake <=135:
- return "REVERSE"
+
+ def getFaultFromRake(self, rake): # noqa: D102, N802, PLR6301
+ if rake >= 135 or rake <= -135 or (rake >= -45 and rake <= 45): # noqa: PLR2004
+ return 'STRIKE_SLIP'
+ elif rake >= 45 and rake <= 135: # noqa: RET505, PLR2004
+ return 'REVERSE'
else:
- return "NORMAL"
-
- def get_IM(self, Mw, site_rup_dict, site_info, im_info):
- vsInf = bool(site_info["vsInferred"])
- style = self.getFaultFromRake(site_rup_dict["aveRake"])
+ return 'NORMAL'
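The rake-to-style mapping above (repeated verbatim in the boore_etal_2014 and campbell_bozorgnia_2014 classes below) reads as: rakes within 45 degrees of 0 or 180 are strike-slip, rakes between 45 and 135 are reverse, and everything else is normal faulting. A standalone sketch with a hypothetical helper name but the same thresholds:

def fault_style_from_rake(rake):
    # Map an average rake angle (degrees) to the faulting-style string the GMPE classes expect.
    if rake >= 135 or rake <= -135 or -45 <= rake <= 45:
        return 'STRIKE_SLIP'
    if 45 <= rake <= 135:
        return 'REVERSE'
    return 'NORMAL'

assert fault_style_from_rake(0) == 'STRIKE_SLIP'   # pure strike-slip
assert fault_style_from_rake(90) == 'REVERSE'      # thrust / reverse
assert fault_style_from_rake(-90) == 'NORMAL'      # normal faulting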
+
+ def get_IM(self, Mw, site_rup_dict, site_info, im_info): # noqa: N802, N803, D102
+ vsInf = bool(site_info['vsInferred']) # noqa: N806
+ style = self.getFaultFromRake(site_rup_dict['aveRake'])
if 'SA' in im_info['Type']:
- cur_T = im_info.get('Periods', None)
+ cur_T = im_info.get('Periods', None) # noqa: N806
elif im_info['Type'] == 'PGA':
- cur_T = ['PGA']
+ cur_T = ['PGA'] # noqa: N806
elif im_info['Type'] == 'PGV':
- cur_T = ['PGV']
+ cur_T = ['PGV'] # noqa: N806
else:
- print(f'The IM type {im_info["Type"]} is not supported')
- meanList = []
- stdDevList = []
- InterEvStdDevList = []
- IntraEvStdDevList = []
- for Tj in cur_T:
+ print(f'The IM type {im_info["Type"]} is not supported') # noqa: T201
+ meanList = [] # noqa: N806
+ stdDevList = [] # noqa: N806
+ InterEvStdDevList = [] # noqa: N806
+ IntraEvStdDevList = [] # noqa: N806
+ for Tj in cur_T: # noqa: N806
start = time.process_time_ns()
self.setIMT(Tj)
self.timeSetImt += time.process_time_ns() - start
start = time.process_time_ns()
- mean, stdDev, InterEvStdDev, IntraEvStdDev = self.calcValues(Mw, site_info["rJB"], site_info["rRup"], site_info["rX"], -1, site_rup_dict["dip"], site_rup_dict["width"], site_rup_dict["zTop"], site_info["vs30"], vsInf, site_info["z1pt0"]/1000.0, style)
+ mean, stdDev, InterEvStdDev, IntraEvStdDev = self.calcValues( # noqa: N806
+ Mw,
+ site_info['rJB'],
+ site_info['rRup'],
+ site_info['rX'],
+ -1,
+ site_rup_dict['dip'],
+ site_rup_dict['width'],
+ site_rup_dict['zTop'],
+ site_info['vs30'],
+ vsInf,
+ site_info['z1pt0'] / 1000.0,
+ style,
+ )
self.timeCalc += time.process_time_ns() - start
meanList.append(mean)
stdDevList.append(stdDev)
InterEvStdDevList.append(InterEvStdDev)
IntraEvStdDevList.append(IntraEvStdDev)
- saResult = {'Mean': meanList,
- 'TotalStdDev': stdDevList,
- 'InterEvStdDev': InterEvStdDevList,
- 'IntraEvStdDev': IntraEvStdDevList}
- return saResult
-
-############### Boore, Stewart, Seyhan, Atkinson (2014)
-class boore_etal_2014():
- timeSetImt = 0
- timeCalc = 0
- supportedImt = None
+ saResult = { # noqa: N806
+ 'Mean': meanList,
+ 'TotalStdDev': stdDevList,
+ 'InterEvStdDev': InterEvStdDevList,
+ 'IntraEvStdDev': IntraEvStdDevList,
+ }
+ return saResult # noqa: RET504
+
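For orientation, the class above is driven entirely through get_IM with plain dictionaries. A minimal usage sketch, assuming data/ASK14.csv ships next to the module (as __init__ expects) and that the requested periods appear in that coefficient table; the site and rupture values below are made up for illustration:

gmpe = abrahamson_silva_kamai_2014()

site_rup_dict = {'aveRake': 0.0, 'dip': 90.0, 'width': 12.0, 'zTop': 2.0}
site_info = {
    'rJB': 10.0, 'rRup': 11.0, 'rX': 10.0,  # source-to-site distances (km)
    'vs30': 400.0, 'vsInferred': 1,
    'z1pt0': 300.0,  # depth to Vs = 1 km/s in m; get_IM divides by 1000
}
im_info = {'Type': 'SA', 'Periods': [0.1, 0.2, 1.0]}

result = gmpe.get_IM(6.5, site_rup_dict, site_info, im_info)
# result is a dict of lists aligned with im_info['Periods']:
# 'Mean' (log-space intensity), 'TotalStdDev', 'InterEvStdDev', 'IntraEvStdDev'.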
+
+# Boore, Stewart, Seyhan, Atkinson (2014)
+class boore_etal_2014: # noqa: D101
+ timeSetImt = 0 # noqa: N815
+ timeCalc = 0 # noqa: N815
+ supportedImt = None # noqa: N815
+
def __init__(self):
- self.coeff = pd.read_csv(os.path.join(os.path.dirname(__file__),'data','BSSA14.csv'))
- self.coeff.iloc[:-2,0] = self.coeff.iloc[:-2,0].apply(lambda x: float(x))
+ self.coeff = pd.read_csv(
+ os.path.join(os.path.dirname(__file__), 'data', 'BSSA14.csv') # noqa: PTH118, PTH120
+ )
+ self.coeff.iloc[:-2, 0] = self.coeff.iloc[:-2, 0].apply(lambda x: float(x)) # noqa: PLW0108
self.coeff = self.coeff.set_index('T')
self.supportedImt = list(self.coeff.index)
self.coeff = self.coeff.to_dict()
-
+
# Constants same for all periods
self.A = np.power(570.94, 4)
self.B = np.power(1360, 4) + self.A
@@ -544,442 +633,517 @@ def __init__(self):
self.V1 = 225
self.V2 = 300
self.imt = 'PGA'
-
- def setIMT(self, imt):
+
+ def setIMT(self, imt): # noqa: N802, D102
if imt not in self.supportedImt:
- sys.exit(f"The imt {imt} is not supported by Boore, Stewart, Seyhan & Atkinson (2014)")
- return None
+ sys.exit(
+ f'The imt {imt} is not supported by Boore, Stewart, Seyhan & Atkinson (2014)'
+ )
+ return
self.imt = imt
- self.e0 = self.coeff["e0"][imt]
- self.e1 = self.coeff["e1"][imt]
- self.e2 = self.coeff["e2"][imt]
- self.e3 = self.coeff["e3"][imt]
- self.e4 = self.coeff["e4"][imt]
- self.e5 = self.coeff["e5"][imt]
- self.e6 = self.coeff["e6"][imt]
- self.Mh = self.coeff["Mh"][imt]
- self.c1 = self.coeff["c1"][imt]
- self.c2 = self.coeff["c2"][imt]
- self.c3 = self.coeff["c3"][imt]
- self.h = self.coeff["h"][imt]
- self.c = self.coeff["c"][imt]
- self.Vc = self.coeff["Vc"][imt]
- self.f4 = self.coeff["f4"][imt]
- self.f5 = self.coeff["f5"][imt]
- self.f6 = self.coeff["f6"][imt]
- self.f7 = self.coeff["f7"][imt]
- self.R1 = self.coeff["R1"][imt]
- self.R2 = self.coeff["R2"][imt]
- self.dPhiR = self.coeff["dPhiR"][imt]
- self.dPhiV = self.coeff["dPhiV"][imt]
- self.phi1 = self.coeff["phi1"][imt]
- self.phi2 = self.coeff["phi2"][imt]
- self.tau1 = self.coeff["tau1"][imt]
- self.tau2 = self.coeff["tau2"][imt]
-
- def getFaultFromRake(self,rake):
- if(rake >= 135 or rake <= -135):
- return "STRIKE_SLIP"
- elif rake>=-45 and rake <=45:
- return "STRIKE_SLIP"
- elif rake>=45 and rake <=135:
- return "REVERSE"
+ self.e0 = self.coeff['e0'][imt]
+ self.e1 = self.coeff['e1'][imt]
+ self.e2 = self.coeff['e2'][imt]
+ self.e3 = self.coeff['e3'][imt]
+ self.e4 = self.coeff['e4'][imt]
+ self.e5 = self.coeff['e5'][imt]
+ self.e6 = self.coeff['e6'][imt]
+ self.Mh = self.coeff['Mh'][imt]
+ self.c1 = self.coeff['c1'][imt]
+ self.c2 = self.coeff['c2'][imt]
+ self.c3 = self.coeff['c3'][imt]
+ self.h = self.coeff['h'][imt]
+ self.c = self.coeff['c'][imt]
+ self.Vc = self.coeff['Vc'][imt]
+ self.f4 = self.coeff['f4'][imt]
+ self.f5 = self.coeff['f5'][imt]
+ self.f6 = self.coeff['f6'][imt]
+ self.f7 = self.coeff['f7'][imt]
+ self.R1 = self.coeff['R1'][imt]
+ self.R2 = self.coeff['R2'][imt]
+ self.dPhiR = self.coeff['dPhiR'][imt]
+ self.dPhiV = self.coeff['dPhiV'][imt]
+ self.phi1 = self.coeff['phi1'][imt]
+ self.phi2 = self.coeff['phi2'][imt]
+ self.tau1 = self.coeff['tau1'][imt]
+ self.tau2 = self.coeff['tau2'][imt]
+
+ def getFaultFromRake(self, rake): # noqa: D102, N802, PLR6301
+ if rake >= 135 or rake <= -135 or (rake >= -45 and rake <= 45): # noqa: PLR2004
+ return 'STRIKE_SLIP'
+ elif rake >= 45 and rake <= 135: # noqa: RET505, PLR2004
+ return 'REVERSE'
else:
- return "NORMAL"
-
- def calcSourceTerm(self, Mw, style):
- if style == "STRIKE_SLIP":
- Fe = self.e1
- elif style =="REVERSE":
- Fe = self.e3
- elif style == "NORMAL":
- Fe = self.e2
+ return 'NORMAL'
+
+ def calcSourceTerm(self, Mw, style): # noqa: N802, N803, D102
+ if style == 'STRIKE_SLIP':
+ Fe = self.e1 # noqa: N806
+ elif style == 'REVERSE':
+ Fe = self.e3 # noqa: N806
+ elif style == 'NORMAL':
+ Fe = self.e2 # noqa: N806
else:
- Fe = self.e0
- MwMh = Mw - self.Mh
+ Fe = self.e0 # noqa: N806
+ MwMh = Mw - self.Mh # noqa: N806
if Mw <= self.Mh:
- Fe = Fe + self.e4 * MwMh + self.e5 * MwMh * MwMh
+ Fe = Fe + self.e4 * MwMh + self.e5 * MwMh * MwMh # noqa: N806
else:
- Fe = Fe + self.e6 * MwMh
+ Fe = Fe + self.e6 * MwMh # noqa: N806, PLR6104
return Fe
-
- def calcPathTerm(self, Mw, R):
- return (self.c1 + self.c2 * (Mw - self.M_REF)) * np.log(R / self.R_REF)+\
- (self.c3 + self.DC3_CA_TW) * (R - self.R_REF)
+ def calcPathTerm(self, Mw, R): # noqa: N802, N803, D102
+ return (self.c1 + self.c2 * (Mw - self.M_REF)) * np.log(R / self.R_REF) + (
+ self.c3 + self.DC3_CA_TW
+ ) * (R - self.R_REF)
- def calcPGArock(self, Mw, rJB, style):
- FePGA = self.calcSourceTerm(Mw, style)
- R = np.sqrt(rJB * rJB + self.h * self.h)
- FpPGA = self.calcPathTerm(Mw, R)
+ def calcPGArock(self, Mw, rJB, style): # noqa: N802, N803, D102
+ FePGA = self.calcSourceTerm(Mw, style) # noqa: N806
+ R = np.sqrt(rJB * rJB + self.h * self.h) # noqa: N806
+ FpPGA = self.calcPathTerm(Mw, R) # noqa: N806
return np.exp(FePGA + FpPGA)
-
- def calcLnFlin(self, vs30):
- vsLin = vs30 if vs30 <= self.Vc else self.Vc
- lnFlin = self.c * np.log(vsLin / self.V_REF)
- return lnFlin
-
- def calcF2(self, vs30):
- f2 = self.f4 * (np.exp(self.f5 * (min(vs30, 760.0) - 360.0)) -
- np.exp(self.f5 * (760.0 - 360.0)))
- return f2
-
- def calcFdz1(self, vs30, z1p0):
- DZ1 = self.calcDeltaZ1(z1p0, vs30)
- if self.imt!='PGA' and self.imt!='PGV' and self.imt >= 0.65:
- if DZ1 <= (self.f7/self.f6):
- Fdz1 = self.f6*DZ1
+
+ def calcLnFlin(self, vs30): # noqa: N802, D102
+ vsLin = min(vs30, self.Vc) # noqa: N806
+ lnFlin = self.c * np.log(vsLin / self.V_REF) # noqa: N806
+ return lnFlin # noqa: RET504
+
+ def calcF2(self, vs30): # noqa: N802, D102
+ f2 = self.f4 * (
+ np.exp(self.f5 * (min(vs30, 760.0) - 360.0))
+ - np.exp(self.f5 * (760.0 - 360.0))
+ )
+ return f2 # noqa: RET504
+
+ def calcFdz1(self, vs30, z1p0): # noqa: N802, D102
+ DZ1 = self.calcDeltaZ1(z1p0, vs30) # noqa: N806
+ if self.imt != 'PGA' and self.imt != 'PGV' and self.imt >= 0.65: # noqa: PLR1714, PLR2004
+ if (self.f7 / self.f6) >= DZ1:
+ Fdz1 = self.f6 * DZ1 # noqa: N806
else:
- Fdz1 = self.f7
+ Fdz1 = self.f7 # noqa: N806
else:
- Fdz1 = 0.0
+ Fdz1 = 0.0 # noqa: N806
return Fdz1
- def calcDeltaZ1(self, z1p0, vs30):
+
+ def calcDeltaZ1(self, z1p0, vs30): # noqa: N802, D102
if np.isnan(z1p0):
return 0.0
return z1p0 - self.calcZ1ref(vs30)
- def calcZ1ref(self, vs30):
- vsPow4 = np.power(vs30, 4)
- return np.exp(-7.15/4.0*np.log((vsPow4+self.A)/self.B))/1000.0
-
- def calcMean(self, Mw, rJB, vs30, z1p0, style, pgaRock):
- Fe = self.calcSourceTerm(Mw, style)
- R = np.sqrt(rJB * rJB + self.h * self.h)
- Fp = self.calcPathTerm(Mw, R)
- lnFlin = self.calcLnFlin(vs30)
+
+ def calcZ1ref(self, vs30): # noqa: N802, D102
+ vsPow4 = np.power(vs30, 4) # noqa: N806
+ return np.exp(-7.15 / 4.0 * np.log((vsPow4 + self.A) / self.B)) / 1000.0
+
+ def calcMean(self, Mw, rJB, vs30, z1p0, style, pgaRock): # noqa: N802, N803, D102
+ Fe = self.calcSourceTerm(Mw, style) # noqa: N806
+ R = np.sqrt(rJB * rJB + self.h * self.h) # noqa: N806
+ Fp = self.calcPathTerm(Mw, R) # noqa: N806
+ lnFlin = self.calcLnFlin(vs30) # noqa: N806
f2 = self.calcF2(vs30)
- lnFnl = self.F1 + f2 * np.log((pgaRock + self.F3) / self.F3)
- Fdz1 = self.calcFdz1(vs30, z1p0)
- Fs = lnFlin + lnFnl + Fdz1
+ lnFnl = self.F1 + f2 * np.log((pgaRock + self.F3) / self.F3) # noqa: N806
+ Fdz1 = self.calcFdz1(vs30, z1p0) # noqa: N806
+ Fs = lnFlin + lnFnl + Fdz1 # noqa: N806
return Fe + Fp + Fs
-
- def calcPhi(self, Mw, rJB, vs30):
- if Mw >= 5.5:
- phiM = self.phi2
- elif Mw <=4.5:
- phiM = self.phi1
+
+ def calcPhi(self, Mw, rJB, vs30): # noqa: N802, N803, D102
+ if Mw >= 5.5: # noqa: PLR2004
+ phiM = self.phi2 # noqa: N806
+ elif Mw <= 4.5: # noqa: PLR2004
+ phiM = self.phi1 # noqa: N806
else:
- phiM = self.phi1 + (self.phi2 - self.phi1) * (Mw - 4.5)
- phiMR = phiM
- if (rJB > self.R2):
- phiMR += self.dPhiR
- elif (rJB > self.R1):
- phiMR += self.dPhiR * (np.log(rJB / self.R1) / np.log(self.R2 / self.R1))
- phiMRV = phiMR
+ phiM = self.phi1 + (self.phi2 - self.phi1) * (Mw - 4.5) # noqa: N806
+ phiMR = phiM # noqa: N806
+ if rJB > self.R2:
+ phiMR += self.dPhiR # noqa: N806
+ elif rJB > self.R1:
+ phiMR += self.dPhiR * (np.log(rJB / self.R1) / np.log(self.R2 / self.R1)) # noqa: N806
+ phiMRV = phiMR # noqa: N806
if vs30 <= self.V1:
- phiMRV -= self.dPhiV
+ phiMRV -= self.dPhiV # noqa: N806
elif vs30 < self.V2:
- phiMRV -= self.dPhiV * (np.log(self.V2 / vs30) / np.log(self.V2 / self.V1))
+ phiMRV -= self.dPhiV * ( # noqa: N806
+ np.log(self.V2 / vs30) / np.log(self.V2 / self.V1)
+ )
return phiMRV
- def calcTau(self, Mw):
- if Mw >= 5.5:
+ def calcTau(self, Mw): # noqa: N802, N803, D102
+ if Mw >= 5.5: # noqa: PLR2004
tau = self.tau2
- elif Mw <= 4.5:
+ elif Mw <= 4.5: # noqa: PLR2004
tau = self.tau1
else:
tau = self.tau1 + (self.tau2 - self.tau1) * (Mw - 4.5)
return tau
-
+
# def calcStdDev(self, Mw, rJB, vs30):
# tau = self.calcTau(Mw)
# phiMRV = self.calcPhi(Mw, rJB, vs30)
# return np.sqrt(phiMRV * phiMRV + tau * tau)
- def calcStdDev(self, phiMRV, tau):
+ def calcStdDev(self, phiMRV, tau): # noqa: D102, N802, N803, PLR6301
return np.sqrt(phiMRV * phiMRV + tau * tau)
- def calc(self, Mw, rJB, vs30, z1p0, style):
+ def calc(self, Mw, rJB, vs30, z1p0, style): # noqa: N803, D102
imt_tmp = self.imt
self.setIMT('PGA')
- pgaRock = self.calcPGArock(Mw, rJB, style)
+ pgaRock = self.calcPGArock(Mw, rJB, style) # noqa: N806
self.setIMT(imt_tmp)
mean = self.calcMean(Mw, rJB, vs30, z1p0, style, pgaRock)
phi = self.calcPhi(Mw, rJB, vs30)
tau = self.calcTau(Mw)
- stdDev = self.calcStdDev(phi, tau)
+ stdDev = self.calcStdDev(phi, tau) # noqa: N806
return mean, stdDev, tau, phi
-
- def get_IM(self, Mw, site_rup_dict, site_info, im_info):
- vsInf = bool(site_info["vsInferred"])
- style = self.getFaultFromRake(site_rup_dict["aveRake"])
+
+ def get_IM(self, Mw, site_rup_dict, site_info, im_info): # noqa: N802, N803, D102
+ vsInf = bool(site_info['vsInferred']) # noqa: N806, F841
+ style = self.getFaultFromRake(site_rup_dict['aveRake'])
if 'SA' in im_info['Type']:
- cur_T = im_info.get('Periods', None)
+ cur_T = im_info.get('Periods', None) # noqa: N806
elif im_info['Type'] == 'PGA':
- cur_T = ['PGA']
+ cur_T = ['PGA'] # noqa: N806
elif im_info['Type'] == 'PGV':
- cur_T = ['PGV']
+ cur_T = ['PGV'] # noqa: N806
else:
- print(f'The IM type {im_info["Type"]} is not supported')
- meanList = []
- stdDevList = []
- InterEvStdDevList = []
- IntraEvStdDevList = []
- for Tj in cur_T:
+ print(f'The IM type {im_info["Type"]} is not supported') # noqa: T201
+ meanList = [] # noqa: N806
+ stdDevList = [] # noqa: N806
+ InterEvStdDevList = [] # noqa: N806
+ IntraEvStdDevList = [] # noqa: N806
+ for Tj in cur_T: # noqa: N806
start = time.process_time_ns()
self.setIMT(Tj)
self.timeSetImt += time.process_time_ns() - start
start = time.process_time_ns()
- mean, stdDev, InterEvStdDev, IntraEvStdDev = \
- self.calc(Mw, site_info["rJB"], site_info["vs30"], site_info["z1pt0"]/1000.0, style)
+ mean, stdDev, InterEvStdDev, IntraEvStdDev = self.calc( # noqa: N806
+ Mw,
+ site_info['rJB'],
+ site_info['vs30'],
+ site_info['z1pt0'] / 1000.0,
+ style,
+ )
self.timeCalc += time.process_time_ns() - start
meanList.append(mean)
stdDevList.append(stdDev)
InterEvStdDevList.append(InterEvStdDev)
IntraEvStdDevList.append(IntraEvStdDev)
- saResult = {'Mean': meanList,
- 'TotalStdDev': stdDevList,
- 'InterEvStdDev': InterEvStdDevList,
- 'IntraEvStdDev': IntraEvStdDevList}
- return saResult
-
-
-############### Campbell & Bozorgnia (2014)
-class campbell_bozorgnia_2014():
- timeSetImt = 0
- timeCalc = 0
- supportedImt = None
+ saResult = { # noqa: N806
+ 'Mean': meanList,
+ 'TotalStdDev': stdDevList,
+ 'InterEvStdDev': InterEvStdDevList,
+ 'IntraEvStdDev': IntraEvStdDevList,
+ }
+ return saResult # noqa: RET504
+
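In boore_etal_2014 the log-mean assembled in calcMean is the sum of an event term Fe (calcSourceTerm), a path term Fp (calcPathTerm), and a site term Fs. As a worked restatement of the path term alone, here is a sketch with the period-dependent coefficients c1, c2, c3 and h passed in explicitly; M_REF, R_REF and DC3_CA_TW are constants set in __init__ outside this hunk, so they are shown as arguments too:

import numpy as np

def bssa14_path_term(Mw, rJB, c1, c2, c3, h, dc3, m_ref, r_ref):
    # Effective distance folds the pseudo-depth h into the Joyner-Boore distance.
    R = np.sqrt(rJB * rJB + h * h)
    # Magnitude-dependent geometric spreading plus anelastic attenuation.
    return (c1 + c2 * (Mw - m_ref)) * np.log(R / r_ref) + (c3 + dc3) * (R - r_ref)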
+
+# Campbell & Bozorgnia (2014)
+class campbell_bozorgnia_2014: # noqa: D101
+ timeSetImt = 0 # noqa: N815
+ timeCalc = 0 # noqa: N815
+ supportedImt = None # noqa: N815
+
def __init__(self):
- self.coeff = pd.read_csv(os.path.join(os.path.dirname(__file__),'data','CB14.csv'))
- self.coeff.iloc[:-2,0] = self.coeff.iloc[:-2,0].apply(lambda x: float(x))
+ self.coeff = pd.read_csv(
+ os.path.join(os.path.dirname(__file__), 'data', 'CB14.csv') # noqa: PTH118, PTH120
+ )
+ self.coeff.iloc[:-2, 0] = self.coeff.iloc[:-2, 0].apply(lambda x: float(x)) # noqa: PLW0108
self.coeff = self.coeff.set_index('T')
self.supportedImt = list(self.coeff.index)
self.coeff = self.coeff.to_dict()
-
+
# Constants same for all periods
self.H4 = 1.0
self.C = 1.88
self.N = 1.18
self.PHI_LNAF_SQ = 0.09
self.imt = 'PGA'
- self.tau_hi_PGA = self.coeff["tau2"]["PGA"]
- self.tau_lo_PGA = self.coeff["tau1"]["PGA"]
- self.phi_hi_PGA = self.coeff["phi2"]["PGA"]
- self.phi_lo_PGA = self.coeff["phi1"]["PGA"]
-
- def setIMT(self, imt):
+ self.tau_hi_PGA = self.coeff['tau2']['PGA']
+ self.tau_lo_PGA = self.coeff['tau1']['PGA']
+ self.phi_hi_PGA = self.coeff['phi2']['PGA']
+ self.phi_lo_PGA = self.coeff['phi1']['PGA']
+
+ def setIMT(self, imt): # noqa: N802, D102
if imt not in self.supportedImt:
- sys.exit(f"The imt {imt} is not supported by Campbell & Bozorgnia (2014)")
- return None
+ sys.exit(
+ f'The imt {imt} is not supported by Campbell & Bozorgnia (2014)'
+ )
+ return
self.imt = imt
- self.c0 = self.coeff["c0"][imt]
- self.c1 = self.coeff["c1"][imt]
- self.c2 = self.coeff["c2"][imt]
- self.c3 = self.coeff["c3"][imt]
- self.c4 = self.coeff["c4"][imt]
- self.c5 = self.coeff["c5"][imt]
- self.c6 = self.coeff["c6"][imt]
- self.c7 = self.coeff["c7"][imt]
- self.c8 = self.coeff["c8"][imt]
- self.c9 = self.coeff["c9"][imt]
- self.c10 = self.coeff["c10"][imt]
- self.c11 = self.coeff["c11"][imt]
- self.c12 = self.coeff["c12"][imt]
- self.c13 = self.coeff["c13"][imt]
- self.c14 = self.coeff["c14"][imt]
- self.c15 = self.coeff["c15"][imt]
- self.c16 = self.coeff["c16"][imt]
- self.c17 = self.coeff["c17"][imt]
- self.c18 = self.coeff["c18"][imt]
- self.c19 = self.coeff["c19"][imt]
- self.c20 = self.coeff["c20"][imt]
- self.a2 = self.coeff["a2"][imt]
- self.h1 = self.coeff["h1"][imt]
- self.h2 = self.coeff["h2"][imt]
- self.h3 = self.coeff["h3"][imt]
- self.h5 = self.coeff["h5"][imt]
- self.h6 = self.coeff["h6"][imt]
- self.k1 = self.coeff["k1"][imt]
- self.k2 = self.coeff["k2"][imt]
- self.k3 = self.coeff["k3"][imt]
- self.phi1 = self.coeff["phi1"][imt]
- self.phi2 = self.coeff["phi2"][imt]
- self.tau1 = self.coeff["tau1"][imt]
- self.tau2 = self.coeff["tau2"][imt]
- self.rho = self.coeff["rho"][imt]
-
-
- def getFaultFromRake(self,rake):
- if(rake >= 135 or rake <= -135):
- return "STRIKE_SLIP"
- elif rake>=-45 and rake <=45:
- return "STRIKE_SLIP"
- elif rake>=45 and rake <=135:
- return "REVERSE"
+ self.c0 = self.coeff['c0'][imt]
+ self.c1 = self.coeff['c1'][imt]
+ self.c2 = self.coeff['c2'][imt]
+ self.c3 = self.coeff['c3'][imt]
+ self.c4 = self.coeff['c4'][imt]
+ self.c5 = self.coeff['c5'][imt]
+ self.c6 = self.coeff['c6'][imt]
+ self.c7 = self.coeff['c7'][imt]
+ self.c8 = self.coeff['c8'][imt]
+ self.c9 = self.coeff['c9'][imt]
+ self.c10 = self.coeff['c10'][imt]
+ self.c11 = self.coeff['c11'][imt]
+ self.c12 = self.coeff['c12'][imt]
+ self.c13 = self.coeff['c13'][imt]
+ self.c14 = self.coeff['c14'][imt]
+ self.c15 = self.coeff['c15'][imt]
+ self.c16 = self.coeff['c16'][imt]
+ self.c17 = self.coeff['c17'][imt]
+ self.c18 = self.coeff['c18'][imt]
+ self.c19 = self.coeff['c19'][imt]
+ self.c20 = self.coeff['c20'][imt]
+ self.a2 = self.coeff['a2'][imt]
+ self.h1 = self.coeff['h1'][imt]
+ self.h2 = self.coeff['h2'][imt]
+ self.h3 = self.coeff['h3'][imt]
+ self.h5 = self.coeff['h5'][imt]
+ self.h6 = self.coeff['h6'][imt]
+ self.k1 = self.coeff['k1'][imt]
+ self.k2 = self.coeff['k2'][imt]
+ self.k3 = self.coeff['k3'][imt]
+ self.phi1 = self.coeff['phi1'][imt]
+ self.phi2 = self.coeff['phi2'][imt]
+ self.tau1 = self.coeff['tau1'][imt]
+ self.tau2 = self.coeff['tau2'][imt]
+ self.rho = self.coeff['rho'][imt]
+
+ def getFaultFromRake(self, rake): # noqa: D102, N802, PLR6301
+ if rake >= 135 or rake <= -135 or (rake >= -45 and rake <= 45): # noqa: PLR2004
+ return 'STRIKE_SLIP'
+ elif rake >= 45 and rake <= 135: # noqa: RET505, PLR2004
+ return 'REVERSE'
else:
- return "NORMAL"
+ return 'NORMAL'
- def calcZ25ref(self, vs30):
+ def calcZ25ref(self, vs30): # noqa: D102, N802, PLR6301
return np.exp(7.089 - 1.144 * np.log(vs30))
-
- def calcMean(self, Mw, rJB, rRup, rX, dip, width, zTop,\
- zHyp, vs30, z2p5, style, pgaRock):
- Fmag = self.c0 + self.c1 * Mw
- if (Mw > 6.5):
- Fmag += self.c2 * (Mw - 4.5) + self.c3 * (Mw - 5.5) + self.c4 * (Mw - 6.5)
- elif (Mw > 5.5):
- Fmag += self.c2 * (Mw - 4.5) + self.c3 * (Mw - 5.5)
- elif (Mw > 4.5):
- Fmag += self.c2 * (Mw - 4.5)
+
+ def calcMean( # noqa: C901, N802, D102
+ self,
+ Mw, # noqa: N803
+ rJB, # noqa: N803
+ rRup, # noqa: N803
+ rX, # noqa: N803
+ dip,
+ width,
+ zTop, # noqa: N803
+ zHyp, # noqa: N803
+ vs30,
+ z2p5,
+ style,
+ pgaRock, # noqa: N803
+ ):
+ Fmag = self.c0 + self.c1 * Mw # noqa: N806
+ if Mw > 6.5: # noqa: PLR2004
+ Fmag += ( # noqa: N806
+ self.c2 * (Mw - 4.5) + self.c3 * (Mw - 5.5) + self.c4 * (Mw - 6.5)
+ )
+ elif Mw > 5.5: # noqa: PLR2004
+ Fmag += self.c2 * (Mw - 4.5) + self.c3 * (Mw - 5.5) # noqa: N806
+ elif Mw > 4.5: # noqa: PLR2004
+ Fmag += self.c2 * (Mw - 4.5) # noqa: N806
r = np.sqrt(rRup * rRup + self.c7 * self.c7)
- Fr = (self.c5 + self.c6 * Mw) * np.log(r)
- Fflt = 0.0
- if style == "NORMAL" and Mw > 4.5:
- Fflt = self.c9
- if (Mw <= 5.5):
- Fflt *= (Mw - 4.5)
- Fhw = 0.0
- if (rX >= 0.0 and Mw > 5.5 and zTop <= 16.66):
+ Fr = (self.c5 + self.c6 * Mw) * np.log(r) # noqa: N806
+ Fflt = 0.0 # noqa: N806
+ if style == 'NORMAL' and Mw > 4.5: # noqa: PLR2004
+ Fflt = self.c9 # noqa: N806
+ if Mw <= 5.5: # noqa: PLR2004
+ Fflt *= Mw - 4.5 # noqa: N806
+ Fhw = 0.0 # noqa: N806
+ if rX >= 0.0 and Mw > 5.5 and zTop <= 16.66: # noqa: PLR2004
r1 = width * np.cos(np.radians(dip))
r2 = 62.0 * Mw - 350.0
- rXr1 = rX / r1
- rXr2r1 = (rX - r1) / (r2 - r1)
- f1_rX = self.h1 + self.h2 * rXr1 + self.h3 * (rXr1 * rXr1)
- f2_rX = self.H4 + self.h5 * (rXr2r1) + self.h6 * rXr2r1 * rXr2r1
- Fhw_rX = max(f2_rX, 0.0) if (rX >= r1) else f1_rX
- Fhw_rRup = 1.0 if (rRup == 0.0) else (rRup - rJB) / rRup
- Fhw_m = 1.0 + self.a2 * (Mw - 6.5)
- if (Mw <= 6.5):
- Fhw_m *= (Mw - 5.5)
- Fhw_z = 1.0 - 0.06 * zTop
- Fhw_d = (90.0 - dip) / 45.0
- Fhw = self.c10 * Fhw_rX * Fhw_rRup * Fhw_m * Fhw_z * Fhw_d
+ rXr1 = rX / r1 # noqa: N806
+ rXr2r1 = (rX - r1) / (r2 - r1) # noqa: N806
+ f1_rX = self.h1 + self.h2 * rXr1 + self.h3 * (rXr1 * rXr1) # noqa: N806
+ f2_rX = self.H4 + self.h5 * (rXr2r1) + self.h6 * rXr2r1 * rXr2r1 # noqa: N806
+ Fhw_rX = max(f2_rX, 0.0) if (rX >= r1) else f1_rX # noqa: N806
+ Fhw_rRup = 1.0 if (rRup == 0.0) else (rRup - rJB) / rRup # noqa: N806
+ Fhw_m = 1.0 + self.a2 * (Mw - 6.5) # noqa: N806
+ if Mw <= 6.5: # noqa: PLR2004
+ Fhw_m *= Mw - 5.5 # noqa: N806
+ Fhw_z = 1.0 - 0.06 * zTop # noqa: N806
+ Fhw_d = (90.0 - dip) / 45.0 # noqa: N806
+ Fhw = self.c10 * Fhw_rX * Fhw_rRup * Fhw_m * Fhw_z * Fhw_d # noqa: N806
vsk1 = vs30 / self.k1
- if vs30<=self.k1:
- Fsite = self.c11 * np.log(vsk1) + self.k2 * (np.log(pgaRock + \
- self.C * np.power(vsk1, self.N)) - np.log(pgaRock + self.C))
+ if vs30 <= self.k1:
+ Fsite = self.c11 * np.log(vsk1) + self.k2 * ( # noqa: N806
+ np.log(pgaRock + self.C * np.power(vsk1, self.N))
+ - np.log(pgaRock + self.C)
+ )
else:
- Fsite = (self.c11 + self.k2 * self.N) * np.log(vsk1)
- if (np.isnan(z2p5)):
+ Fsite = (self.c11 + self.k2 * self.N) * np.log(vsk1) # noqa: N806
+ if np.isnan(z2p5):
z2p5 = self.calcZ25ref(vs30)
- Fsed = 0.0
- if (z2p5 <= 1.0):
- Fsed = self.c14 * (z2p5 - 1.0)
- elif (z2p5 > 3.0):
- Fsed = self.c16 * self.k3 * np.exp(-0.75) * \
- (1.0 - np.exp(-0.25 * (z2p5 - 3.0)))
- if zHyp <= 7.0:
- Fhyp = 0.0
- elif zHyp <= 20.0:
- Fhyp = zHyp - 7.0
+ Fsed = 0.0 # noqa: N806
+ if z2p5 <= 1.0:
+ Fsed = self.c14 * (z2p5 - 1.0) # noqa: N806
+ elif z2p5 > 3.0: # noqa: PLR2004
+ Fsed = ( # noqa: N806
+ self.c16
+ * self.k3
+ * np.exp(-0.75)
+ * (1.0 - np.exp(-0.25 * (z2p5 - 3.0)))
+ )
+ if zHyp <= 7.0: # noqa: PLR2004
+ Fhyp = 0.0 # noqa: N806
+ elif zHyp <= 20.0: # noqa: PLR2004
+ Fhyp = zHyp - 7.0 # noqa: N806
else:
- Fhyp = 13.0
- if (Mw <= 5.5):
- Fhyp *= self.c17
- elif (Mw <= 6.5):
- Fhyp *= (self.c17 + (self.c18 - self.c17) * (Mw - 5.5))
+ Fhyp = 13.0 # noqa: N806
+ if Mw <= 5.5: # noqa: PLR2004
+ Fhyp *= self.c17 # noqa: N806
+ elif Mw <= 6.5: # noqa: PLR2004
+ Fhyp *= self.c17 + (self.c18 - self.c17) * (Mw - 5.5) # noqa: N806
else:
- Fhyp *= self.c18
- if Mw > 5.5:
- Fdip = 0.0
- elif Mw > 4.5:
- Fdip = self.c19 * (5.5 - Mw) * dip
+ Fhyp *= self.c18 # noqa: N806
+ if Mw > 5.5: # noqa: PLR2004
+ Fdip = 0.0 # noqa: N806
+ elif Mw > 4.5: # noqa: PLR2004
+ Fdip = self.c19 * (5.5 - Mw) * dip # noqa: N806
else:
- Fdip = self.c19 * dip
- if rRup > 80.0:
- Fatn = self.c20 * (rRup - 80.0)
+ Fdip = self.c19 * dip # noqa: N806
+ if rRup > 80.0: # noqa: PLR2004
+ Fatn = self.c20 * (rRup - 80.0) # noqa: N806
else:
- Fatn = 0.0
+ Fatn = 0.0 # noqa: N806
return Fmag + Fr + Fflt + Fhw + Fsite + Fsed + Fhyp + Fdip + Fatn
- def calcAlpha(self, vs30, pgaRock):
+ def calcAlpha(self, vs30, pgaRock): # noqa: N802, N803, D102
vsk1 = vs30 / self.k1
if vs30 < self.k1:
- alpha = self.k2 * pgaRock * (1 / (pgaRock + self.C * pow(vsk1, self.N))\
- - 1 / (pgaRock + self.C))
+ alpha = (
+ self.k2
+ * pgaRock
+ * (
+ 1 / (pgaRock + self.C * pow(vsk1, self.N))
+ - 1 / (pgaRock + self.C)
+ )
+ )
else:
alpha = 0.0
return alpha
- def stdMagDep(self, lo, hi, Mw):
+ def stdMagDep(self, lo, hi, Mw): # noqa: D102, N802, N803, PLR6301
return hi + (lo - hi) * (5.5 - Mw)
-
- def calcPhiSq(self, Mw, alpha):
- if (Mw <= 4.5):
- phi_lnY = self.phi1
- phi_lnPGAB = self.phi_lo_PGA
- elif Mw < 5.5:
- phi_lnY = self.stdMagDep(self.phi1, self.phi2, Mw)
- phi_lnPGAB = self.stdMagDep(self.phi_lo_PGA, self.phi_hi_PGA, Mw)
+
+ def calcPhiSq(self, Mw, alpha): # noqa: N802, N803, D102
+ if Mw <= 4.5: # noqa: PLR2004
+ phi_lnY = self.phi1 # noqa: N806
+ phi_lnPGAB = self.phi_lo_PGA # noqa: N806
+ elif Mw < 5.5: # noqa: PLR2004
+ phi_lnY = self.stdMagDep(self.phi1, self.phi2, Mw) # noqa: N806
+ phi_lnPGAB = self.stdMagDep(self.phi_lo_PGA, self.phi_hi_PGA, Mw) # noqa: N806
else:
- phi_lnY = self.phi2
- phi_lnPGAB = self.phi_hi_PGA
- phi_lnYB = np.sqrt(phi_lnY * phi_lnY - self.PHI_LNAF_SQ)
- phi_lnPGAB = np.sqrt(phi_lnPGAB * phi_lnPGAB - self.PHI_LNAF_SQ)
- aPhi_lnPGAB = alpha * phi_lnPGAB
- phiSq = phi_lnY * phi_lnY + aPhi_lnPGAB * aPhi_lnPGAB + \
- 2.0 * self.rho * phi_lnYB * aPhi_lnPGAB
- return phiSq
-
- def calcTauSq(self, Mw, alpha):
- if (Mw <= 4.5):
- tau_lnYB = self.tau1
- tau_lnPGAB = self.tau_lo_PGA
- elif (Mw < 5.5):
- tau_lnYB = self.stdMagDep(self.tau1, self.tau2, Mw)
- tau_lnPGAB = self.stdMagDep(self.tau_lo_PGA, self.tau_hi_PGA, Mw)
+ phi_lnY = self.phi2 # noqa: N806
+ phi_lnPGAB = self.phi_hi_PGA # noqa: N806
+ phi_lnYB = np.sqrt(phi_lnY * phi_lnY - self.PHI_LNAF_SQ) # noqa: N806
+ phi_lnPGAB = np.sqrt(phi_lnPGAB * phi_lnPGAB - self.PHI_LNAF_SQ) # noqa: N806
+ aPhi_lnPGAB = alpha * phi_lnPGAB # noqa: N806
+ phiSq = ( # noqa: N806
+ phi_lnY * phi_lnY
+ + aPhi_lnPGAB * aPhi_lnPGAB
+ + 2.0 * self.rho * phi_lnYB * aPhi_lnPGAB
+ )
+ return phiSq # noqa: RET504
+
+ def calcTauSq(self, Mw, alpha): # noqa: N802, N803, D102
+ if Mw <= 4.5: # noqa: PLR2004
+ tau_lnYB = self.tau1 # noqa: N806
+ tau_lnPGAB = self.tau_lo_PGA # noqa: N806
+ elif Mw < 5.5: # noqa: PLR2004
+ tau_lnYB = self.stdMagDep(self.tau1, self.tau2, Mw) # noqa: N806
+ tau_lnPGAB = self.stdMagDep(self.tau_lo_PGA, self.tau_hi_PGA, Mw) # noqa: N806
else:
- tau_lnYB = self.tau2
- tau_lnPGAB = self.tau_hi_PGA
- alphaTau = alpha * tau_lnPGAB
- tauSq = tau_lnYB * tau_lnYB + alphaTau * alphaTau + \
- 2.0 * alpha * self.rho * tau_lnYB * tau_lnPGAB
- return tauSq
-
-
- def calc(self, Mw, rJB, rRup, rX, dip, width, zTop, zHyp, vs30, z2p5, style):
+ tau_lnYB = self.tau2 # noqa: N806
+ tau_lnPGAB = self.tau_hi_PGA # noqa: N806
+ alphaTau = alpha * tau_lnPGAB # noqa: N806
+ tauSq = ( # noqa: N806
+ tau_lnYB * tau_lnYB
+ + alphaTau * alphaTau
+ + 2.0 * alpha * self.rho * tau_lnYB * tau_lnPGAB
+ )
+ return tauSq # noqa: RET504
+
+ def calc(self, Mw, rJB, rRup, rX, dip, width, zTop, zHyp, vs30, z2p5, style): # noqa: N803, D102
if vs30 < self.k1:
imt_tmp = self.imt
self.setIMT('PGA')
- pgaRock = np.exp(self.calcMean(Mw, rJB,
- rRup, rX, dip, width, zTop, zHyp, 1100.0, 0.398, style, 0.0))
+ pgaRock = np.exp( # noqa: N806
+ self.calcMean(
+ Mw,
+ rJB,
+ rRup,
+ rX,
+ dip,
+ width,
+ zTop,
+ zHyp,
+ 1100.0,
+ 0.398,
+ style,
+ 0.0,
+ )
+ )
self.setIMT(imt_tmp)
else:
- pgaRock = 0.0
- mean = self.calcMean(Mw, rJB, rRup, rX, dip, width, zTop,
- zHyp, vs30, z2p5, style, pgaRock)
- if (self.imt != 'PGA' and self.imt != 'PGV' and self.imt <= 0.25):
+ pgaRock = 0.0 # noqa: N806
+ mean = self.calcMean(
+ Mw, rJB, rRup, rX, dip, width, zTop, zHyp, vs30, z2p5, style, pgaRock
+ )
+ if self.imt != 'PGA' and self.imt != 'PGV' and self.imt <= 0.25: # noqa: PLR1714, PLR2004
imt_tmp = self.imt
self.setIMT('PGA')
- pgaMean = self.calcMean(Mw, rJB, rRup, rX, dip,width, zTop, zHyp, vs30, z2p5, style, pgaRock)
+ pgaMean = self.calcMean( # noqa: N806
+ Mw, rJB, rRup, rX, dip, width, zTop, zHyp, vs30, z2p5, style, pgaRock
+ )
mean = max(mean, pgaMean)
self.setIMT(imt_tmp)
alpha = self.calcAlpha(vs30, pgaRock)
- phiSq = self.calcPhiSq(Mw, alpha)
- tauSq = self.calcTauSq(Mw, alpha)
- stdDev = np.sqrt(phiSq + tauSq)
+ phiSq = self.calcPhiSq(Mw, alpha) # noqa: N806
+ tauSq = self.calcTauSq(Mw, alpha) # noqa: N806
+ stdDev = np.sqrt(phiSq + tauSq) # noqa: N806
return mean, stdDev, np.sqrt(tauSq), np.sqrt(phiSq)
- def get_IM(self, Mw, site_rup_dict, site_info, im_info):
- vsInf = bool(site_info["vsInferred"])
- style = self.getFaultFromRake(site_rup_dict["aveRake"])
+ def get_IM(self, Mw, site_rup_dict, site_info, im_info): # noqa: N802, N803, D102
+ vsInf = bool(site_info['vsInferred']) # noqa: N806, F841
+ style = self.getFaultFromRake(site_rup_dict['aveRake'])
if 'SA' in im_info['Type']:
- cur_T = im_info.get('Periods', None)
+ cur_T = im_info.get('Periods', None) # noqa: N806
elif im_info['Type'] == 'PGA':
- cur_T = ['PGA']
+ cur_T = ['PGA'] # noqa: N806
elif im_info['Type'] == 'PGV':
- cur_T = ['PGV']
+ cur_T = ['PGV'] # noqa: N806
else:
- print(f'The IM type {im_info["Type"]} is not supported')
- meanList = []
- stdDevList = []
- InterEvStdDevList = []
- IntraEvStdDevList = []
- for Tj in cur_T:
+ print(f'The IM type {im_info["Type"]} is not supported') # noqa: T201
+ meanList = [] # noqa: N806
+ stdDevList = [] # noqa: N806
+ InterEvStdDevList = [] # noqa: N806
+ IntraEvStdDevList = [] # noqa: N806
+ for Tj in cur_T: # noqa: N806
start = time.process_time_ns()
self.setIMT(Tj)
self.timeSetImt += time.process_time_ns() - start
start = time.process_time_ns()
- mean, stdDev, InterEvStdDev, IntraEvStdDev = \
- self.calc(Mw, site_info["rJB"], site_info["rRup"], site_info["rX"],\
- site_rup_dict["dip"], site_rup_dict["width"], site_rup_dict["zTop"],\
- site_rup_dict["zHyp"], site_info["vs30"], site_info["z2pt5"]/1000.0, style)
+ mean, stdDev, InterEvStdDev, IntraEvStdDev = self.calc( # noqa: N806
+ Mw,
+ site_info['rJB'],
+ site_info['rRup'],
+ site_info['rX'],
+ site_rup_dict['dip'],
+ site_rup_dict['width'],
+ site_rup_dict['zTop'],
+ site_rup_dict['zHyp'],
+ site_info['vs30'],
+ site_info['z2pt5'] / 1000.0,
+ style,
+ )
self.timeCalc += time.process_time_ns() - start
meanList.append(mean)
stdDevList.append(stdDev)
InterEvStdDevList.append(InterEvStdDev)
IntraEvStdDevList.append(IntraEvStdDev)
- saResult = {'Mean': meanList,
- 'TotalStdDev': stdDevList,
- 'InterEvStdDev': InterEvStdDevList,
- 'IntraEvStdDev': IntraEvStdDevList}
- return saResult
\ No newline at end of file
+ saResult = { # noqa: N806
+ 'Mean': meanList,
+ 'TotalStdDev': stdDevList,
+ 'InterEvStdDev': InterEvStdDevList,
+ 'IntraEvStdDev': IntraEvStdDevList,
+ }
+ return saResult # noqa: RET504
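All three GMPE classes expose the same get_IM(Mw, site_rup_dict, site_info, im_info) interface and the same class-level timers (timeSetImt, timeCalc), so callers can treat them interchangeably. A hedged sketch with made-up inputs; campbell_bozorgnia_2014 additionally reads 'zHyp' from the rupture dictionary and 'z2pt5' from the site dictionary:

site_rup_dict = {'aveRake': 0.0, 'dip': 90.0, 'width': 12.0, 'zTop': 2.0, 'zHyp': 8.0}
site_info = {'rJB': 10.0, 'rRup': 11.0, 'rX': 10.0, 'vs30': 400.0,
             'vsInferred': 1, 'z1pt0': 300.0, 'z2pt5': 600.0}
im_info = {'Type': 'SA', 'Periods': [0.2, 1.0]}

models = {
    'ASK14': abrahamson_silva_kamai_2014(),
    'BSSA14': boore_etal_2014(),
    'CB14': campbell_bozorgnia_2014(),
}
results = {name: model.get_IM(6.5, site_rup_dict, site_info, im_info)
           for name, model in models.items()}
# Each entry holds 'Mean', 'TotalStdDev', 'InterEvStdDev' and 'IntraEvStdDev'
# lists, one value per requested period.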
diff --git a/modules/performRegionalEventSimulation/regionalGroundMotion/liquefaction.py b/modules/performRegionalEventSimulation/regionalGroundMotion/liquefaction.py
index d508fe850..7f0fe1dfc 100644
--- a/modules/performRegionalEventSimulation/regionalGroundMotion/liquefaction.py
+++ b/modules/performRegionalEventSimulation/regionalGroundMotion/liquefaction.py
@@ -1,18 +1,30 @@
+import os # noqa: CPY001, D100, INP001
+import sys
+import warnings
+from enum import Enum
+from itertools import starmap
+
+import geopandas as gpd
import numpy as np
+import pandas # noqa: ICN001
import rasterio as rio
+import shapely
+from pyproj import CRS, Transformer
from scipy.interpolate import interp2d
-import sys, warnings, shapely, pandas, os
-from pyproj import Transformer
-from pyproj import CRS
-from enum import Enum
-import geopandas as gpd
from scipy.spatial import ConvexHull
-## Helper functions
-def sampleRaster(raster_file_path, raster_crs, x, y, interp_scheme = 'nearest',\
- dtype = None):
- """performs 2D interpolation at (x,y) pairs. Accepted interp_scheme = 'nearest', 'linear', 'cubic', and 'quintic'"""
- print(f"Sampling from the Raster File: {os.path.basename(raster_file_path)}...")
+
+# Helper functions
+def sampleRaster( # noqa: N802
+ raster_file_path,
+ raster_crs,
+ x,
+ y,
+ interp_scheme='nearest',
+ dtype=None,
+):
+ """Performs 2D interpolation at (x,y) pairs. Accepted interp_scheme = 'nearest', 'linear', 'cubic', and 'quintic'""" # noqa: D400, D401
+ print(f'Sampling from the Raster File: {os.path.basename(raster_file_path)}...') # noqa: T201, PTH119
invalid_value = np.nan
xy_crs = CRS.from_user_input(4326)
raster_crs = CRS.from_user_input(raster_crs)
@@ -20,82 +32,114 @@ def sampleRaster(raster_file_path, raster_crs, x, y, interp_scheme = 'nearest',\
try:
raster_data = raster_file.read()
if raster_data.shape[0] > 1:
- warnings.warn(f"More than one band in the file {raster_file_path}, the first band is used.")
- except:
- sys.exit(f"Can not read data from {raster_file_path}")
+ warnings.warn( # noqa: B028
+ f'More than one band in the file {raster_file_path}, the first band is used.'
+ )
+ except: # noqa: E722
+ sys.exit(f'Can not read data from {raster_file_path}')
if xy_crs != raster_crs:
# make transformer for reprojection
- transformer_xy_to_data = Transformer.from_crs(xy_crs, raster_crs,\
- always_xy=True)
+ transformer_xy_to_data = Transformer.from_crs(
+ xy_crs, raster_crs, always_xy=True
+ )
# reproject and store
x_proj, y_proj = transformer_xy_to_data.transform(x, y)
x = x_proj
y = y_proj
n_sample = len(x)
if interp_scheme == 'nearest':
- sample = np.array([val[0] for val in raster_file.sample(list(zip(x,y)))])
+ sample = np.array(
+ [val[0] for val in raster_file.sample(list(zip(x, y)))]
+ )
else:
# create x and y ticks for grid
- x_tick = np.linspace(raster_file.bounds.left, \
- raster_file.bounds.right, raster_file.width, endpoint=False)
- y_tick = np.linspace(raster_file.bounds.bottom,\
- raster_file.bounds.top, raster_file.height, endpoint=False)
+ x_tick = np.linspace(
+ raster_file.bounds.left,
+ raster_file.bounds.right,
+ raster_file.width,
+ endpoint=False,
+ )
+ y_tick = np.linspace(
+ raster_file.bounds.bottom,
+ raster_file.bounds.top,
+ raster_file.height,
+ endpoint=False,
+ )
# create interp2d function
interp_function = interp2d(
- x_tick, y_tick, np.flipud(raster_file.read(1)),
- kind=interp_scheme, fill_value=invalid_value)
+ x_tick,
+ y_tick,
+ np.flipud(raster_file.read(1)),
+ kind=interp_scheme,
+ fill_value=invalid_value,
+ )
# get samples
sample = np.transpose(
- [interp_function(x[i],y[i]) for i in range(n_sample)]
+ [interp_function(x[i], y[i]) for i in range(n_sample)]
)[0]
# convert to target datatype
if dtype is not None:
sample = sample.astype(dtype)
# clean up invalid values (returned as 1e38 by NumPy)
- sample[abs(sample)>1e10] = invalid_value
+ sample[abs(sample) > 1e10] = invalid_value # noqa: PLR2004
return sample
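A minimal call sketch for sampleRaster; the GeoTIFF path and its CRS below are hypothetical, and the x/y inputs are always interpreted as EPSG:4326 longitude/latitude and reprojected internally when the raster CRS differs:

import numpy as np

lon = np.array([-122.25, -122.30])
lat = np.array([37.87, 37.88])
gw_depth = sampleRaster(
    'data/gw_depth.tif',     # hypothetical groundwater-depth raster (m)
    raster_crs=4326,         # CRS the raster is stored in
    x=lon,
    y=lat,
    interp_scheme='linear',  # 'nearest' (default), 'linear', 'cubic' or 'quintic'
    dtype=float,
)
# One sampled value per (lon, lat) pair; nodata-like values (|value| > 1e10)
# come back as np.nan.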
-## Helper functions
-def sampleVector(vector_file_path, vector_crs, x, y, dtype = None):
- """performs spatial join of vector_file with xy'"""
- print(f"Sampling from the Vector File: {os.path.basename(vector_file_path)}...")
- invalid_value = np.nan
+
+# Helper functions
+def sampleVector(vector_file_path, vector_crs, x, y, dtype=None): # noqa: ARG001, N802
+ """Performs spatial join of vector_file with xy'""" # noqa: D400, D401
+ print(f'Sampling from the Vector File: {os.path.basename(vector_file_path)}...') # noqa: T201, PTH119
+ invalid_value = np.nan # noqa: F841
xy_crs = CRS.from_user_input(4326)
vector_gdf = gpd.read_file(vector_file_path)
- if vector_gdf.crs != vector_crs:
- sys.exit(f"The CRS of vector file {vector_file_path} is {vector_gdf.crs}, and doesn't match the input CRS ({xy_crs}) defined for liquefaction triggering models")
+
+    try:
+        user_crs_input = CRS.from_user_input(vector_crs).to_epsg()
+        if vector_gdf.crs.to_epsg() != user_crs_input:
+            sys.exit(
+                f"The CRS of vector file {vector_file_path} is {vector_gdf.crs}, "
+                f"which doesn't match the input CRS ({vector_crs}) defined for liquefaction triggering models."
+            )
+    except:  # noqa: E722
+        print(  # noqa: T201
+            f'The input CRS ({vector_crs}) is invalid; the CRS of the vector file is used instead.'
+        )
+
if xy_crs != vector_crs:
# make transformer for reprojection
- transformer_xy_to_data = Transformer.from_crs(xy_crs, vector_crs,\
- always_xy=True)
+ transformer_xy_to_data = Transformer.from_crs(
+ xy_crs, vector_crs, always_xy=True
+ )
# reproject and store
x_proj, y_proj = transformer_xy_to_data.transform(x, y)
x = x_proj
y = y_proj
- # Create a convex hull containing all sites
+ # Create a convex hull containing all sites
sites = np.array([x, y]).transpose()
try:
hull = ConvexHull(sites)
vertices = hull.vertices
vertices = sites[np.append(vertices, vertices[0])]
centroid = np.mean(vertices, axis=0)
- vertices = vertices + 0.05 * (vertices - centroid)
- RoI = shapely.geometry.Polygon(vertices)
- except:
+ vertices = vertices + 0.05 * (vertices - centroid) # noqa: PLR6104
+ RoI = shapely.geometry.Polygon(vertices) # noqa: N806
+ except: # noqa: E722
centroid = shapely.geometry.Point(np.mean(x), np.mean(y))
points = [shapely.geometry.Point(x[i], y[i]) for i in range(len(x))]
if len(points) == 1:
- distances = [0.1] # Degree
+ distances = [0.1] # Degree
else:
distances = [point.distance(centroid) for point in points]
- max_distance = max(distances)*1.2
+ max_distance = max(distances) * 1.2
angles = np.linspace(0, 2 * np.pi, 36)
- circle_points = [(centroid.x + max_distance * np.cos(angle), \
- centroid.y + max_distance * np.sin(angle)) for angle in angles]
- RoI = shapely.geometry.Polygon(circle_points)
- data = dict()
+ circle_points = [
+ (
+ centroid.x + max_distance * np.cos(angle),
+ centroid.y + max_distance * np.sin(angle),
+ )
+ for angle in angles
+ ]
+ RoI = shapely.geometry.Polygon(circle_points) # noqa: N806
+ data = dict() # noqa: C408
for col in vector_gdf.columns:
- data.update({col:[]})
+ data.update({col: []})
for row_index in vector_gdf.index:
new_geom = RoI.intersection(vector_gdf.loc[row_index, 'geometry'])
if new_geom.is_empty:
@@ -106,32 +150,39 @@ def sampleVector(vector_file_path, vector_crs, x, y, dtype = None):
data[col].append(vector_gdf.loc[row_index, col])
data['geometry'].append(new_geom)
del vector_gdf
- gdf_roi = gpd.GeoDataFrame(data, geometry="geometry", crs=4326)
- geometry = [shapely.geometry.Point(lon, lat) for lon, lat in zip(x, y)]
+ gdf_roi = gpd.GeoDataFrame(data, geometry='geometry', crs=4326)
+ geometry = list(starmap(shapely.geometry.Point, zip(x, y)))
gdf_sites = gpd.GeoDataFrame(geometry=geometry, crs=4326).reset_index()
- merged = gpd.GeoDataFrame.sjoin(gdf_roi, gdf_sites, how = 'inner', predicate = 'contains')
+ merged = gpd.GeoDataFrame.sjoin(
+ gdf_roi, gdf_sites, how='inner', predicate='contains'
+ )
merged = merged.set_index('index_right').sort_index().drop(columns=['geometry'])
- gdf_sites = pandas.merge(gdf_sites, merged, on = 'index', how = 'left')
- gdf_sites.drop(columns=['geometry', 'index'], inplace=True)
+ gdf_sites = pandas.merge(gdf_sites, merged, on='index', how='left')
+ gdf_sites.drop(columns=['geometry', 'index'], inplace=True) # noqa: PD002
return gdf_sites
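sampleVector first clips the vector layer to a region of interest around the sites (a slightly inflated convex hull, or a circle when the hull is degenerate) and then spatially joins the clipped polygons to the site points. A hedged call sketch with a hypothetical shapefile; as above, x and y are EPSG:4326 longitude/latitude:

lon = [-122.25, -122.30, -122.40]
lat = [37.87, 37.88, 37.75]
geo_units = sampleVector('data/geologic_units.shp', vector_crs=4326, x=lon, y=lat)
# geo_units has one row per input site, in the same order as lon/lat (assuming
# non-overlapping polygons); columns are the attribute columns of the vector
# file, with NaN where a site falls outside every polygon. The dtype argument
# is currently accepted but unused.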
-def find_additional_output_req(liq_info, current_step):
+
+def find_additional_output_req(liq_info, current_step): # noqa: D103
additional_output_keys = []
if current_step == 'Triggering':
- trigging_parameters = liq_info['Triggering']\
- ['Parameters'].keys()
- triger_dist_water = liq_info['Triggering']['Parameters'].get('DistWater', None)
+ trigging_parameters = liq_info['Triggering']['Parameters'].keys() # noqa: F841
+ triger_dist_water = liq_info['Triggering']['Parameters'].get(
+ 'DistWater', None
+ )
if triger_dist_water is None:
return additional_output_keys
- if 'LateralSpreading' in liq_info.keys():
- lat_dist_water = liq_info['LateralSpreading']['Parameters'].get('DistWater', None)
- if (liq_info['LateralSpreading']['Model'] == 'Hazus2020')\
- and (lat_dist_water==triger_dist_water):
+ if 'LateralSpreading' in liq_info.keys(): # noqa: SIM118
+ lat_dist_water = liq_info['LateralSpreading']['Parameters'].get(
+ 'DistWater', None
+ )
+ if (liq_info['LateralSpreading']['Model'] == 'Hazus2020') and (
+ lat_dist_water == triger_dist_water
+ ):
additional_output_keys.append('dist_to_water')
return additional_output_keys
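find_additional_output_req only ever returns ['dist_to_water'], and only when the Hazus2020 lateral-spreading model reuses the same DistWater source as the triggering model, so the sampled distances can be passed along instead of being re-sampled. A small illustration with a hypothetical configuration dictionary:

liq_info = {
    'Triggering': {'Model': 'ZhuEtal2017',
                   'Parameters': {'DistWater': 'data/dist_water.tif'}},
    'LateralSpreading': {'Model': 'Hazus2020',
                         'Parameters': {'DistWater': 'data/dist_water.tif'}},
}
# Both steps point at the same DistWater raster, so the triggering step is asked
# to also export the sampled distance-to-water values for reuse downstream.
assert find_additional_output_req(liq_info, 'Triggering') == ['dist_to_water']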
-class liq_susc_enum(Enum):
+class liq_susc_enum(Enum): # noqa: D101
very_high = 5
high = 4
moderate = 3
@@ -139,19 +190,20 @@ class liq_susc_enum(Enum):
very_low = 1
none = 0
-## Triggering:
-class Liquefaction:
+
+# Triggering:
+class Liquefaction: # noqa: D101
def __init__(self) -> None:
pass
+
# -----------------------------------------------------------
class ZhuEtal2017(Liquefaction):
- """
- A map-based procedure to quantify liquefaction at a given location using logistic models by Zhu et al. (2017). Two models are provided:
+ """A map-based procedure to quantify liquefaction at a given location using logistic models by Zhu et al. (2017). Two models are provided:
     1. For distance to water < cutoff (20 km), **prob_liq** = f(**pgv**, **vs30**, **precip**, **dist_coast**, **dist_river**)
     2. For distance to water >= cutoff (20 km), **prob_liq** = f(**pgv**, **vs30**, **precip**, **dist_water**, **gw_depth**)
-
+
Parameters
----------
From upstream PBEE:
@@ -162,9 +214,9 @@ class ZhuEtal2017(Liquefaction):
pga: float, np.ndarray or list
[g] peak ground acceleration, only to check threshold where prob_liq(pga<0.1g)=0
stations: list
- a list of dict containing the site infomation. Keys in the dict are 'ID',
+ a list of dict containing the site information. Keys in the dict are 'ID',
'lon', 'lat', 'vs30', 'z1pt0', 'z2pt5', 'vsInferred', 'rRup', 'rJB', 'rX'
-
+
Geotechnical/geologic:
vs30: float, np.ndarray or list
[m/s] time-averaged shear wave velocity in the upper 30-meters
@@ -178,7 +230,7 @@ class ZhuEtal2017(Liquefaction):
[km] distance to nearest river, lake, or coast
gw_depth: float, np.ndarray or list
[m] groundwater table depth
-
+
Fixed:
# dist_water_cutoff: float, optional
# [km] distance to water cutoff for switching between global and coastal model, default = 20 km
@@ -189,179 +241,224 @@ class ZhuEtal2017(Liquefaction):
         probability for liquefaction
liq_susc_val : str, np.ndarray
liquefaction susceptibility category value
-
+
References
----------
.. [1] Zhu, J., Baise, L.G., and Thompson, E.M., 2017, An Updated Geospatial Liquefaction Model for Global Application, Bulletin of the Seismological Society of America, vol. 107, no. 3, pp. 1365-1385.
-
- """
+
+ """ # noqa: D400
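    # A condensed restatement of the two logistic models implemented in
    # self.model() below (coefficients copied from that method; pgv_mag and
    # pga_mag are the magnitude-corrected intensities computed there):
    #
    #   coastal sites (distance to water <= 20 km):
    #       x = 12.435 - 2.615*ln(vs30) + 5.556e-4*precip
    #           - 0.0287*sqrt(dist_coast) + 0.0666*dist_river
    #           - 0.0369*dist_river*sqrt(dist_coast) + 0.301*ln(pgv_mag)
    #   all other sites (global model):
    #       x = 8.801 - 1.918*ln(vs30) + 5.408e-4*precip
    #           - 0.2054*dist_water - 0.0333*gw_depth + 0.334*ln(pgv_mag)
    #
    #   prob_liq = 1 / (1 + exp(-x)), floored at 1e-5 and forced to that floor
    #   when pgv_mag < 3 cm/s, pga_mag < 0.1 g, or vs30 > 620 m/s.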
+
def __init__(self, parameters, stations) -> None:
self.stations = stations
self.parameters = parameters
- self.dist_to_water = None #(km)
- self.dist_to_river = None #(km)
- self.dist_to_coast = None #(km)
- self.gw_depth = None #(m)
- self.precip = None # (mm)
- self.vs30 = None #(m/s)
+ self.dist_to_water = None # (km)
+ self.dist_to_river = None # (km)
+ self.dist_to_coast = None # (km)
+ self.gw_depth = None # (m)
+ self.precip = None # (mm)
+ self.vs30 = None # (m/s)
self.interpolate_spatial_parameters(parameters)
- def interpolate_spatial_parameters(self, parameters):
+ def interpolate_spatial_parameters(self, parameters): # noqa: D102
# site coordinate in CRS 4326
lat_station = [site['lat'] for site in self.stations]
lon_station = [site['lon'] for site in self.stations]
- # dist_to_water
- if parameters["DistWater"] == "Defined (\"distWater\") in Site File (.csv)":
- self.dist_to_water = np.array([site['distWater'] for site in self.stations])
+ # dist_to_water
+ if parameters['DistWater'] == 'Defined ("distWater") in Site File (.csv)':
+ self.dist_to_water = np.array(
+ [site['distWater'] for site in self.stations]
+ )
else:
- self.dist_to_water = sampleRaster(parameters["DistWater"], parameters["inputCRS"],\
- lon_station, lat_station)
+ self.dist_to_water = sampleRaster(
+ parameters['DistWater'],
+ parameters['inputCRS'],
+ lon_station,
+ lat_station,
+ )
# dist_to_river
- if parameters["DistRiver"] == "Defined (\"distRiver\") in Site File (.csv)":
- self.dist_to_river = np.array([site['distRiver'] for site in self.stations])
+ if parameters['DistRiver'] == 'Defined ("distRiver") in Site File (.csv)':
+ self.dist_to_river = np.array(
+ [site['distRiver'] for site in self.stations]
+ )
else:
- self.dist_to_river = sampleRaster(parameters["DistRiver"], parameters["inputCRS"],\
- lon_station, lat_station)
+ self.dist_to_river = sampleRaster(
+ parameters['DistRiver'],
+ parameters['inputCRS'],
+ lon_station,
+ lat_station,
+ )
# dist_to_coast
- if parameters["DistCoast"] == "Defined (\"distCoast\") in Site File (.csv)":
- self.dist_to_coast = np.array([site['distCoast'] for site in self.stations])
+ if parameters['DistCoast'] == 'Defined ("distCoast") in Site File (.csv)':
+ self.dist_to_coast = np.array(
+ [site['distCoast'] for site in self.stations]
+ )
else:
- self.dist_to_coast = sampleRaster(parameters["DistCoast"], parameters["inputCRS"],\
- lon_station, lat_station)
+ self.dist_to_coast = sampleRaster(
+ parameters['DistCoast'],
+ parameters['inputCRS'],
+ lon_station,
+ lat_station,
+ )
# gw_water
- if parameters["GwDepth"] == "Defined (\"gwDepth\") in Site File (.csv)":
+ if parameters['GwDepth'] == 'Defined ("gwDepth") in Site File (.csv)':
self.gw_depth = np.array([site['gwDepth'] for site in self.stations])
else:
- self.gw_depth = sampleRaster(parameters["GwDepth"], parameters["inputCRS"],\
- lon_station, lat_station)
- # precipitation
- if parameters["Precipitation"] == "Defined (\"precipitation\") in Site File (.csv)":
+ self.gw_depth = sampleRaster(
+ parameters['GwDepth'],
+ parameters['inputCRS'],
+ lon_station,
+ lat_station,
+ )
+ # precipitation
+ if (
+ parameters['Precipitation']
+ == 'Defined ("precipitation") in Site File (.csv)'
+ ):
self.precip = np.array([site['precipitation'] for site in self.stations])
else:
- self.precip = sampleRaster(parameters["Precipitation"], parameters["inputCRS"],\
- lon_station, lat_station)
+ self.precip = sampleRaster(
+ parameters['Precipitation'],
+ parameters['inputCRS'],
+ lon_station,
+ lat_station,
+ )
self.vs30 = np.array([site['vs30'] for site in self.stations])
- print("Initiation finished")
-
- def run(self, ln_im_data, eq_data, im_list, output_keys, additional_output_keys):
+ print('Sampling finished') # noqa: T201
+
+ def run(self, ln_im_data, eq_data, im_list, output_keys, additional_output_keys): # noqa: D102
if ('PGA' in im_list) and ('PGV' in im_list):
num_stations = len(self.stations)
num_scenarios = len(eq_data)
- PGV_col_id = [i for i, x in enumerate(im_list) if x == 'PGV'][0]
- PGA_col_id = [i for i, x in enumerate(im_list) if x == 'PGA'][0]
+ PGV_col_id = [i for i, x in enumerate(im_list) if x == 'PGV'][0] # noqa: N806, RUF015
+ PGA_col_id = [i for i, x in enumerate(im_list) if x == 'PGA'][0] # noqa: N806, RUF015
for scenario_id in range(num_scenarios):
num_rlzs = ln_im_data[scenario_id].shape[2]
- im_data_scen = np.zeros([num_stations,\
- len(im_list)+len(output_keys), num_rlzs])
- im_data_scen[:,0:len(im_list),:] = ln_im_data[scenario_id]
+ im_data_scen = np.zeros(
+ [num_stations, len(im_list) + len(output_keys), num_rlzs]
+ )
+ im_data_scen[:, 0 : len(im_list), :] = ln_im_data[scenario_id]
for rlz_id in range(num_rlzs):
- pgv = np.exp(ln_im_data[scenario_id][:,PGV_col_id,rlz_id])
- pga = np.exp(ln_im_data[scenario_id][:,PGA_col_id,rlz_id])
+ pgv = np.exp(ln_im_data[scenario_id][:, PGV_col_id, rlz_id])
+ pga = np.exp(ln_im_data[scenario_id][:, PGA_col_id, rlz_id])
mag = float(eq_data[scenario_id][0])
model_output = self.model(pgv, pga, mag)
for i, key in enumerate(output_keys):
- im_data_scen[:,len(im_list)+i,rlz_id] = model_output[key]
+ im_data_scen[:, len(im_list) + i, rlz_id] = model_output[key]
ln_im_data[scenario_id] = im_data_scen
- im_list = im_list + output_keys
- additional_output = dict()
+ im_list = im_list + output_keys # noqa: PLR6104
+ additional_output = dict() # noqa: C408
for key in additional_output_keys:
item = getattr(self, key, None)
if item is None:
- warnings.warn(f"Additional output {key} is not avaliable in the liquefaction trigging model 'ZhuEtal2017'.")
+ warnings.warn( # noqa: B028
+                    f"Additional output {key} is not available in the liquefaction triggering model 'ZhuEtal2017'."
+ )
else:
- additional_output.update({key:item})
+ additional_output.update({key: item})
else:
- sys.exit(f"At least one of 'PGA' and 'PGV' is missing in the selected intensity measures and the liquefaction trigging model 'ZhuEtal2017' can not be computed.")
+ sys.exit(
+                "At least one of 'PGA' and 'PGV' is missing in the selected intensity measures and the liquefaction triggering model 'ZhuEtal2017' cannot be computed."
+ )
# print(f"At least one of 'PGA' and 'PGV' is missing in the selected intensity measures and the liquefaction trigging model 'ZhuEtal2017' can not be computed."\
# , file=sys.stderr)
# sys.stderr.write("test")
# sys.exit(-1)
return ln_im_data, eq_data, im_list, additional_output
-
+
def model(self, pgv, pga, mag):
- """Model"""
+ """Model""" # noqa: D400
# zero prob_liq
- zero_prob_liq = 1e-5 # decimal
-
+ zero_prob_liq = 1e-5 # decimal
+
# distance cutoff for model
- model_transition = 20 # km
+ model_transition = 20 # km
# initialize arrays
x_logistic = np.empty(pgv.shape)
prob_liq = np.empty(pgv.shape)
- liq_susc_val = np.ones(pgv.shape)*-99
+ liq_susc_val = np.ones(pgv.shape) * -99
liq_susc = np.empty(pgv.shape, dtype=int)
-
+
# magnitude correction, from Baise & Rashidian (2020) and Allstadt et al. (2022)
- pgv_mag = pgv/(1+np.exp(-2*(mag-6)))
- pga_mag = pga/(10**2.24/mag**2.56)
+ pgv_mag = pgv / (1 + np.exp(-2 * (mag - 6)))
+ pga_mag = pga / (10**2.24 / mag**2.56)
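+        # Illustrative check (assumed values, for orientation only): at the
+        # reference magnitude M = 7.5 both corrections are close to 1,
+        #   pgv_mag = pgv / (1 + exp(-2 * (7.5 - 6))) ~= 0.95 * pgv
+        #   pga_mag = pga / (10**2.24 / 7.5**2.56)    ~= 1.00 * pga
+        # so the factors only rescale the shaking away from M = 7.5.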
# find where dist_water <= cutoff for model of 20 km
# coastal model
- ind_coastal = self.dist_to_water<=model_transition
+ ind_coastal = self.dist_to_water <= model_transition
# global model
# ind_global = list(set(list(range(pgv.shape[0]))).difference(set(ind_coastal)))
- ind_global = ~(self.dist_to_water<=model_transition)
+ ind_global = ~(self.dist_to_water <= model_transition)
# set cap of precip to 1700 mm
- self.precip[self.precip>1700] = 1700
+ self.precip[self.precip > 1700] = 1700 # noqa: PLR2004
# x = b0 + b1*var1 + ...
# if len(ind_global) > 0:
# liquefaction susceptbility value, disregard pgv term
- liq_susc_val[ind_global] = \
- 8.801 + \
- -1.918 * np.log(self.vs30[ind_global]) + \
- 5.408e-4 * self.precip[ind_global] + \
- -0.2054 * self.dist_to_water[ind_global] + \
- -0.0333 * self.gw_depth[ind_global]
+ liq_susc_val[ind_global] = (
+ 8.801
+ + -1.918 * np.log(self.vs30[ind_global])
+ + 5.408e-4 * self.precip[ind_global]
+ + -0.2054 * self.dist_to_water[ind_global]
+ + -0.0333 * self.gw_depth[ind_global]
+ )
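+        # Worked example for the global regression (assumed site values):
+        # vs30 = 300 m/s, precip = 1000 mm, dist_to_water = 5 km, gw_depth = 3 m
+        #   8.801 - 1.918*ln(300) + 5.408e-4*1000 - 0.2054*5 - 0.0333*3 ~= -2.73
+        # which maps to the 'moderate' susceptibility category further below.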
# liquefaction susceptbility value, disregard pgv term
- liq_susc_val[ind_coastal] = \
- 12.435 + \
- -2.615 * np.log(self.vs30[ind_coastal]) + \
- 5.556e-4 * self.precip[ind_coastal] + \
- -0.0287 * np.sqrt(self.dist_to_coast[ind_coastal]) + \
- 0.0666 * self.dist_to_river[ind_coastal] + \
- -0.0369 * self.dist_to_river[ind_coastal]*np.sqrt(self.dist_to_coast[ind_coastal])
+ liq_susc_val[ind_coastal] = (
+ 12.435
+ + -2.615 * np.log(self.vs30[ind_coastal])
+ + 5.556e-4 * self.precip[ind_coastal]
+ + -0.0287 * np.sqrt(self.dist_to_coast[ind_coastal])
+ + 0.0666 * self.dist_to_river[ind_coastal]
+ + -0.0369
+ * self.dist_to_river[ind_coastal]
+ * np.sqrt(self.dist_to_coast[ind_coastal])
+ )
# catch nan values
- liq_susc_val[np.isnan(liq_susc_val)] = -99.
+ liq_susc_val[np.isnan(liq_susc_val)] = -99.0
# x-term for logistic model = liq susc val + pgv term
- x_logistic[ind_global] = liq_susc_val[ind_global] + 0.334*np.log(pgv_mag[ind_global])
+ x_logistic[ind_global] = liq_susc_val[ind_global] + 0.334 * np.log(
+ pgv_mag[ind_global]
+ )
# x-term for logistic model = liq susc val + pgv term
- x_logistic[ind_coastal] = liq_susc_val[ind_coastal] + 0.301*np.log(pgv_mag[ind_coastal])
+ x_logistic[ind_coastal] = liq_susc_val[ind_coastal] + 0.301 * np.log(
+ pgv_mag[ind_coastal]
+ )
# probability of liquefaction
- prob_liq = 1/(1+np.exp(-x_logistic)) # decimal
- prob_liq = np.maximum(prob_liq,zero_prob_liq) # set prob to > "0" to avoid 0% in log
+ prob_liq = 1 / (1 + np.exp(-x_logistic)) # decimal
+ prob_liq = np.maximum(
+ prob_liq, zero_prob_liq
+ ) # set prob to > "0" to avoid 0% in log
# for pgv_mag < 3 cm/s, set prob to "0"
- prob_liq[pgv_mag<3] = zero_prob_liq
+ prob_liq[pgv_mag < 3] = zero_prob_liq # noqa: PLR2004
# for pga_mag < 0.1 g, set prob to "0"
- prob_liq[pga_mag<0.1] = zero_prob_liq
+ prob_liq[pga_mag < 0.1] = zero_prob_liq # noqa: PLR2004
# for vs30 > 620 m/s, set prob to "0"
- prob_liq[self.vs30>620] = zero_prob_liq
+ prob_liq[self.vs30 > 620] = zero_prob_liq # noqa: PLR2004
# calculate sigma_mu
- sigma_mu = (np.exp(0.25)-1) * prob_liq
+ sigma_mu = (np.exp(0.25) - 1) * prob_liq # noqa: F841
# determine liquefaction susceptibility category
- liq_susc[liq_susc_val>-1.15] = liq_susc_enum['very_high'].value
- liq_susc[liq_susc_val<=-1.15] = liq_susc_enum['high'].value
- liq_susc[liq_susc_val<=-1.95] = liq_susc_enum['moderate'].value
- liq_susc[liq_susc_val<=-3.15] = liq_susc_enum['low'].value
- liq_susc[liq_susc_val<=-3.20] = liq_susc_enum['very_low'].value
- liq_susc[liq_susc_val<=-38.1] = liq_susc_enum['none'].value
+ liq_susc[liq_susc_val > -1.15] = liq_susc_enum['very_high'].value # noqa: PLR2004
+ liq_susc[liq_susc_val <= -1.15] = liq_susc_enum['high'].value # noqa: PLR2004
+ liq_susc[liq_susc_val <= -1.95] = liq_susc_enum['moderate'].value # noqa: PLR2004
+ liq_susc[liq_susc_val <= -3.15] = liq_susc_enum['low'].value # noqa: PLR2004
+ liq_susc[liq_susc_val <= -3.20] = liq_susc_enum['very_low'].value # noqa: PLR2004
+ liq_susc[liq_susc_val <= -38.1] = liq_susc_enum['none'].value # noqa: PLR2004
# liq_susc[prob_liq==zero_prob_liq] = 'none'
-
- return {"liq_prob":prob_liq, "liq_susc":liq_susc}
-
+
+ return {'liq_prob': prob_liq, 'liq_susc': liq_susc}
+
+
# -----------------------------------------------------------
class Hazus2020(Liquefaction):
- """
- Compute probability of liquefaction at a given location using a simplified method after Liao et al. (1988).
+ """Compute probability of liquefaction at a given location using a simplified method after Liao et al. (1988).
Also called Youd and Perkins (1978) with Hazus (2020)
-
+
Parameters
----------
From upstream PBEE:
@@ -369,11 +466,11 @@ class Hazus2020(Liquefaction):
[g] peak ground acceleration
mag: float, np.ndarray or list
moment magnitude
-
+
Geotechnical/geologic:
gw_depth: float, np.ndarray or list
[m] groundwater table depth
-
+
Fixed:
liq_susc: str, np.ndarray or list
susceptibility category to liquefaction (none, very low, low, moderate, high, very high)
@@ -382,147 +479,188 @@ class Hazus2020(Liquefaction):
-------
prob_liq : float, np.ndarray
probability for liquefaciton
-
+
References
----------
.. [1] Federal Emergency Management Agency (FEMA), 2020, Hazus Earthquake Model - Technical Manual, Hazus 4.2 SP3, 436 pp. https://www.fema.gov/flood-maps/tools-resources/flood-map-products/hazus/user-technical-manuals.
.. [2] Liao, S.S., Veneziano, D., and Whitman, R.V., 1988, Regression Models for Evaluating Liquefaction Probability, Journal of Geotechnical Engineering, vol. 114, no. 4, pp. 389-411.
-
- """
+
+ """ # noqa: D205, D400
+
def __init__(self, parameters, stations) -> None:
self.stations = stations
self.parameters = parameters
- self.gw_depth = None #(m)
+ self.gw_depth = None # (m)
self.interpolate_spatial_parameters(parameters)
-
- def interpolate_spatial_parameters(self, parameters):
+
+ def interpolate_spatial_parameters(self, parameters): # noqa: D102
# site coordinate in CRS 4326
lat_station = [site['lat'] for site in self.stations]
lon_station = [site['lon'] for site in self.stations]
# gw_water
- if parameters["GwDepth"] == "Defined (\"gwDepth\") in Site File (.csv)":
+ if parameters['GwDepth'] == 'Defined ("gwDepth") in Site File (.csv)':
self.gw_depth = np.array([site['gwDepth'] for site in self.stations])
else:
- self.gw_depth = sampleRaster(parameters["GwDepth"], parameters["inputCRS"],\
- lon_station, lat_station)
+ self.gw_depth = sampleRaster(
+ parameters['GwDepth'],
+ parameters['inputCRS'],
+ lon_station,
+ lat_station,
+ )
# liq_susc
- if parameters["LiqSusc"] == "Defined (\"liqSusc\") in Site File (.csv)":
- liq_susc_samples = pandas.DataFrame(np.array([site['liqSusc'] \
- for site in self.stations]), columns = ['liqSusc'])
- SusceptibilityKey = 'liqSusc'
+ if parameters['LiqSusc'] == 'Defined ("liqSusc") in Site File (.csv)':
+ liq_susc_samples = pandas.DataFrame(
+ np.array([site['liqSusc'] for site in self.stations]),
+ columns=['liqSusc'],
+ )
+ SusceptibilityKey = 'liqSusc' # noqa: N806
else:
- SusceptibilityFile = parameters["SusceptibilityFile"]
- liq_susc_samples = sampleVector(SusceptibilityFile,
- parameters["inputCRS"],\
- lon_station, lat_station)
- SusceptibilityKey = parameters["SusceptibilityKey"]
+ SusceptibilityFile = parameters['SusceptibilityFile'] # noqa: N806
+ liq_susc_samples = sampleVector(
+ SusceptibilityFile, parameters['inputCRS'], lon_station, lat_station
+ )
+ SusceptibilityKey = parameters['SusceptibilityKey'] # noqa: N806
self.liq_susc = []
for susc in liq_susc_samples[SusceptibilityKey].unique():
- if not susc in list(liq_susc_enum.__members__.keys()):
- warnings.warn(f"Unkown susceptibility \"{susc}\" defined, and is treated as \"none\".")
+ if susc not in list(liq_susc_enum.__members__.keys()):
+ warnings.warn( # noqa: B028
+                    f'Unknown susceptibility "{susc}" defined; it will be treated as "none".'
+ )
for row_index in liq_susc_samples.index:
if pandas.isna(liq_susc_samples.loc[row_index, SusceptibilityKey]):
self.liq_susc.append(0)
- elif hasattr(liq_susc_enum, liq_susc_samples.loc[row_index, SusceptibilityKey]):
- self.liq_susc.append(liq_susc_enum[liq_susc_samples.loc[row_index, SusceptibilityKey]].value)
+ elif hasattr(
+ liq_susc_enum, liq_susc_samples.loc[row_index, SusceptibilityKey]
+ ):
+ self.liq_susc.append(
+ liq_susc_enum[
+ liq_susc_samples.loc[row_index, SusceptibilityKey]
+ ].value
+ )
else:
self.liq_susc.append(0)
self.liq_susc = np.array(self.liq_susc)
# liq_susc = liq_susc_samples[parameters["SusceptibilityKey"]].fillna("NaN")
# self.liq_susc = liq_susc.to_numpy()
- print("Initiation finished")
+        print('Initialization finished')  # noqa: T201
-
- def run(self, ln_im_data, eq_data, im_list, output_keys, additional_output_keys):
- if ('PGA' in im_list):
+ def run(self, ln_im_data, eq_data, im_list, output_keys, additional_output_keys): # noqa: D102
+ if 'PGA' in im_list:
num_stations = len(self.stations)
num_scenarios = len(eq_data)
- PGA_col_id = [i for i, x in enumerate(im_list) if x == 'PGA'][0]
+ PGA_col_id = [i for i, x in enumerate(im_list) if x == 'PGA'][0] # noqa: N806, RUF015
for scenario_id in range(num_scenarios):
num_rlzs = ln_im_data[scenario_id].shape[2]
- im_data_scen = np.zeros([num_stations,\
- len(im_list)+len(output_keys), num_rlzs])
- im_data_scen[:,0:len(im_list),:] = ln_im_data[scenario_id]
+ im_data_scen = np.zeros(
+ [num_stations, len(im_list) + len(output_keys), num_rlzs]
+ )
+ im_data_scen[:, 0 : len(im_list), :] = ln_im_data[scenario_id]
for rlz_id in range(num_rlzs):
- pga = np.exp(ln_im_data[scenario_id][:,PGA_col_id,rlz_id])
+ pga = np.exp(ln_im_data[scenario_id][:, PGA_col_id, rlz_id])
mag = float(eq_data[scenario_id][0])
model_output = self.model(pga, mag, self.gw_depth, self.liq_susc)
for i, key in enumerate(output_keys):
- im_data_scen[:,len(im_list)+i,rlz_id] = model_output[key]
+ im_data_scen[:, len(im_list) + i, rlz_id] = model_output[key]
ln_im_data[scenario_id] = im_data_scen
- im_list = im_list + output_keys
- additional_output = dict()
+ im_list = im_list + output_keys # noqa: PLR6104
+ additional_output = dict() # noqa: C408
for key in additional_output_keys:
item = getattr(self, key, None)
if item is None:
- warnings.warn(f"Additional output {key} is not avaliable in the liquefaction trigging model 'Hazus2020'.")
+ warnings.warn( # noqa: B028
+ f"Additional output {key} is not available in the liquefaction trigging model 'Hazus2020'."
+ )
else:
- additional_output.update({key:item})
+ additional_output.update({key: item})
else:
- sys.exit(f"'PGA'is missing in the selected intensity measures and the liquefaction trigging model 'Hazus2020' can not be computed.")
+ sys.exit(
+ "'PGA'is missing in the selected intensity measures and the liquefaction trigging model 'Hazus2020' can not be computed."
+ )
return ln_im_data, eq_data, im_list, additional_output
+
@staticmethod
# @njit
def model(
- pga, mag, # upstream PBEE RV
- gw_depth, # geotechnical/geologic
- liq_susc, # fixed/toggles
- return_inter_params=False # to get intermediate params
+ pga,
+ mag, # upstream PBEE RV
+ gw_depth, # geotechnical/geologic
+ liq_susc, # fixed/toggles
+ return_inter_params=False, # to get intermediate params # noqa: ARG004, FBT002
):
- """Model"""
+ """Model""" # noqa: D400
# zero prob_liq
- zero_prob_liq = 1e-5 # decimal
-
+ zero_prob_liq = 1e-5 # decimal
+
# initialize arrays
prob_liq_pga = np.zeros(pga.shape)
p_ml = np.zeros(pga.shape)
-
+
# if gw_depth is nan
- gw_depth[np.isnan(gw_depth)] = 999
-
+ gw_depth[np.isnan(gw_depth)] = 999
+
# correction factor for moment magnitudes other than M=7.5, eq. 4-21
- k_mag = 0.0027*mag**3 - 0.0267*mag**2 - 0.2055*mag + 2.9188
+ k_mag = 0.0027 * mag**3 - 0.0267 * mag**2 - 0.2055 * mag + 2.9188
# correction for groudnwater depths other than 5 feet, eq. 4-22
- k_gw_depth = 0.022 * gw_depth*3.28 + 0.93
-
+ k_gw_depth = 0.022 * gw_depth * 3.28 + 0.93
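+        # Sanity check (illustrative values): at the reference conditions M = 7.5
+        # and gw_depth ~= 1.5 m (5 ft), k_mag ~= 1.01 and k_gw_depth ~= 1.04, so
+        # both corrections stay near 1 and only adjust p_liq away from them.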
+
# get uncorrected p_liq given pga
- prob_liq_pga[liq_susc==liq_susc_enum['very_high'].value] = \
- np.maximum(np.minimum(9.09*pga[liq_susc==liq_susc_enum['very_high'].value]-0.82,1),0)
- prob_liq_pga[liq_susc==liq_susc_enum['high'].value] = \
- np.maximum(np.minimum(7.67*pga[liq_susc==liq_susc_enum['high'].value]-0.92,1),0)
- prob_liq_pga[liq_susc==liq_susc_enum['moderate'].value] = \
- np.maximum(np.minimum(6.67*pga[liq_susc==liq_susc_enum['moderate'].value]-1.00,1),0)
- prob_liq_pga[liq_susc==liq_susc_enum['low'].value] = \
- np.maximum(np.minimum(5.57*pga[liq_susc==liq_susc_enum['low'].value]-1.18,1),0)
- prob_liq_pga[liq_susc==liq_susc_enum['very_low'].value] = \
- np.maximum(np.minimum(4.16*pga[liq_susc==liq_susc_enum['very_low'].value]-1.08,1),0)
- prob_liq_pga[liq_susc==liq_susc_enum['none'].value] = 0
+ prob_liq_pga[liq_susc == liq_susc_enum['very_high'].value] = np.maximum(
+ np.minimum(
+ 9.09 * pga[liq_susc == liq_susc_enum['very_high'].value] - 0.82, 1
+ ),
+ 0,
+ )
+ prob_liq_pga[liq_susc == liq_susc_enum['high'].value] = np.maximum(
+ np.minimum(
+ 7.67 * pga[liq_susc == liq_susc_enum['high'].value] - 0.92, 1
+ ),
+ 0,
+ )
+ prob_liq_pga[liq_susc == liq_susc_enum['moderate'].value] = np.maximum(
+ np.minimum(
+ 6.67 * pga[liq_susc == liq_susc_enum['moderate'].value] - 1.00, 1
+ ),
+ 0,
+ )
+ prob_liq_pga[liq_susc == liq_susc_enum['low'].value] = np.maximum(
+ np.minimum(5.57 * pga[liq_susc == liq_susc_enum['low'].value] - 1.18, 1),
+ 0,
+ )
+ prob_liq_pga[liq_susc == liq_susc_enum['very_low'].value] = np.maximum(
+ np.minimum(
+ 4.16 * pga[liq_susc == liq_susc_enum['very_low'].value] - 1.08, 1
+ ),
+ 0,
+ )
+ prob_liq_pga[liq_susc == liq_susc_enum['none'].value] = 0
# get portion of map unit susceptible to liquefaction
- p_ml[liq_susc==liq_susc_enum['very_high'].value] = 0.25
- p_ml[liq_susc==liq_susc_enum['high'].value] = 0.20
- p_ml[liq_susc==liq_susc_enum['moderate'].value] = 0.10
- p_ml[liq_susc==liq_susc_enum['low'].value] = 0.05
- p_ml[liq_susc==liq_susc_enum['very_low'].value] = 0.02
- p_ml[liq_susc==liq_susc_enum['none'].value] = 0.00
-
+ p_ml[liq_susc == liq_susc_enum['very_high'].value] = 0.25
+ p_ml[liq_susc == liq_susc_enum['high'].value] = 0.20
+ p_ml[liq_susc == liq_susc_enum['moderate'].value] = 0.10
+ p_ml[liq_susc == liq_susc_enum['low'].value] = 0.05
+ p_ml[liq_susc == liq_susc_enum['very_low'].value] = 0.02
+ p_ml[liq_susc == liq_susc_enum['none'].value] = 0.00
+
# liquefaction likelihood, p_liq
- prob_liq = prob_liq_pga / k_mag / k_gw_depth * p_ml # eq. 4-20
- prob_liq = np.maximum(prob_liq,zero_prob_liq) # set prob to > "0" to avoid 0% in log
+ prob_liq = prob_liq_pga / k_mag / k_gw_depth * p_ml # eq. 4-20
+ prob_liq = np.maximum(
+ prob_liq, zero_prob_liq
+ ) # set prob to > "0" to avoid 0% in log
# Zhu et al. (2017) boundary constraints
# for pga_mag < 0.1 g, set prob to "0"
# magnitude correction, from Baise & Rashidian (2020) and Allstadt et al. (2022)
- pga_mag = pga/(10**2.24/mag**2.56)
- prob_liq[pga_mag<0.1] = zero_prob_liq
+ pga_mag = pga / (10**2.24 / mag**2.56)
+ prob_liq[pga_mag < 0.1] = zero_prob_liq # noqa: PLR2004
+
+ return {'liq_prob': prob_liq, 'liq_susc': liq_susc}
+
- return {"liq_prob":prob_liq, "liq_susc":liq_susc}
-
# -----------------------------------------------------------
class Hazus2020_with_ZhuEtal2017(ZhuEtal2017):
- """
- Compute probability of liquefaction using Hazus (FEMA, 2020), with liq. susc. category from Zhu et al. (2017).
-
+ """Compute probability of liquefaction using Hazus (FEMA, 2020), with liq. susc. category from Zhu et al. (2017).
+
Parameters
----------
From upstream PBEE:
@@ -530,7 +668,7 @@ class Hazus2020_with_ZhuEtal2017(ZhuEtal2017):
[g] peak ground acceleration
mag: float, np.ndarray or list
moment magnitude
-
+
Geotechnical/geologic:
vs30: float, np.ndarray or list
[m/s] time-averaged shear wave velocity in the upper 30-meters
@@ -544,7 +682,7 @@ class Hazus2020_with_ZhuEtal2017(ZhuEtal2017):
[km] distance to nearest river, lake, or coast
gw_depth: float, np.ndarray or list
[m] groundwater table depth
-
+
Fixed:
# liq_susc: str, np.ndarray or list
# susceptibility category to liquefaction (none, very low, low, moderate, high, very high)
@@ -553,121 +691,145 @@ class Hazus2020_with_ZhuEtal2017(ZhuEtal2017):
-------
prob_liq : float, np.ndarray
probability for liquefaciton
-
+
References
----------
.. [1] Federal Emergency Management Agency (FEMA), 2020, Hazus Earthquake Model - Technical Manual, Hazus 4.2 SP3, 436 pp. https://www.fema.gov/flood-maps/tools-resources/flood-map-products/hazus/user-technical-manuals.
.. [2] Liao, S.S., Veneziano, D., and Whitman, R.V., 1988, Regression Models for Evaluating Liquefaction Probability, Journal of Geotechnical Engineering, vol. 114, no. 4, pp. 389-411.
.. [3] Zhu, J., Baise, L.G., and Thompson, E.M., 2017, An Updated Geospatial Liquefaction Model for Global Application, Bulletin of the Seismological Society of America, vol. 107, no. 3, pp. 1365-1385.
-
+
"""
+
def model(self, pgv, pga, mag):
- """Model"""
+ """Model""" # noqa: D400
# zero prob_liq
- zero_prob_liq = 1e-5 # decimal
-
+ zero_prob_liq = 1e-5 # decimal
+
# distance cutoff for model
- model_transition = 20 # km
+ model_transition = 20 # km
# initialize arrays
prob_liq = np.empty(pgv.shape)
- liq_susc_val = np.ones(pgv.shape)*-99
+ liq_susc_val = np.ones(pgv.shape) * -99
liq_susc = np.empty(pgv.shape, dtype=int)
# find where dist_water <= cutoff for model of 20 km
# coastal model
- ind_coastal = self.dist_to_water<=model_transition
+ ind_coastal = self.dist_to_water <= model_transition
# global model
# ind_global = list(set(list(range(pgv.shape[0]))).difference(set(ind_coastal)))
- ind_global = ~(self.dist_to_water<=model_transition)
+ ind_global = ~(self.dist_to_water <= model_transition)
# set cap of precip to 1700 mm
- self.precip[self.precip>1700] = 1700
+ self.precip[self.precip > 1700] = 1700 # noqa: PLR2004
# x = b0 + b1*var1 + ...
# if len(ind_global) > 0:
# liquefaction susceptbility value, disregard pgv term
- liq_susc_val[ind_global] = \
- 8.801 + \
- -1.918 * np.log(self.vs30[ind_global]) + \
- 5.408e-4 * self.precip[ind_global] + \
- -0.2054 * self.dist_to_water[ind_global] + \
- -0.0333 * self.gw_depth[ind_global]
+ liq_susc_val[ind_global] = (
+ 8.801
+ + -1.918 * np.log(self.vs30[ind_global])
+ + 5.408e-4 * self.precip[ind_global]
+ + -0.2054 * self.dist_to_water[ind_global]
+ + -0.0333 * self.gw_depth[ind_global]
+ )
# liquefaction susceptbility value, disregard pgv term
- liq_susc_val[ind_coastal] = \
- 12.435 + \
- -2.615 * np.log(self.vs30[ind_coastal]) + \
- 5.556e-4 * self.precip[ind_coastal] + \
- -0.0287 * np.sqrt(self.dist_to_coast[ind_coastal]) + \
- 0.0666 * self.dist_to_river[ind_coastal] + \
- -0.0369 * self.dist_to_river[ind_coastal]*np.sqrt(self.dist_to_coast[ind_coastal])
+ liq_susc_val[ind_coastal] = (
+ 12.435
+ + -2.615 * np.log(self.vs30[ind_coastal])
+ + 5.556e-4 * self.precip[ind_coastal]
+ + -0.0287 * np.sqrt(self.dist_to_coast[ind_coastal])
+ + 0.0666 * self.dist_to_river[ind_coastal]
+ + -0.0369
+ * self.dist_to_river[ind_coastal]
+ * np.sqrt(self.dist_to_coast[ind_coastal])
+ )
# catch nan values
- liq_susc_val[np.isnan(liq_susc_val)] = -99.
+ liq_susc_val[np.isnan(liq_susc_val)] = -99.0
# determine liquefaction susceptibility category
- liq_susc[liq_susc_val>-1.15] = liq_susc_enum['very_high'].value
- liq_susc[liq_susc_val<=-1.15] = liq_susc_enum['high'].value
- liq_susc[liq_susc_val<=-1.95] = liq_susc_enum['moderate'].value
- liq_susc[liq_susc_val<=-3.15] = liq_susc_enum['low'].value
- liq_susc[liq_susc_val<=-3.20] = liq_susc_enum['very_low'].value
- liq_susc[liq_susc_val<=-38.1] = liq_susc_enum['none'].value
+ liq_susc[liq_susc_val > -1.15] = liq_susc_enum['very_high'].value # noqa: PLR2004
+ liq_susc[liq_susc_val <= -1.15] = liq_susc_enum['high'].value # noqa: PLR2004
+ liq_susc[liq_susc_val <= -1.95] = liq_susc_enum['moderate'].value # noqa: PLR2004
+ liq_susc[liq_susc_val <= -3.15] = liq_susc_enum['low'].value # noqa: PLR2004
+ liq_susc[liq_susc_val <= -3.20] = liq_susc_enum['very_low'].value # noqa: PLR2004
+ liq_susc[liq_susc_val <= -38.1] = liq_susc_enum['none'].value # noqa: PLR2004
# Below are HAZUS
# magnitude correction, from Baise & Rashidian (2020) and Allstadt et al. (2022)
- pga_mag = pga/(10**2.24/mag**2.56)
+ pga_mag = pga / (10**2.24 / mag**2.56)
# initialize arrays
prob_liq_pga = np.zeros(pga.shape)
p_ml = np.zeros(pga.shape)
# correction factor for moment magnitudes other than M=7.5, eq. 4-21
- k_mag = 0.0027*mag**3 - 0.0267*mag**2 - 0.2055*mag + 2.9188
+ k_mag = 0.0027 * mag**3 - 0.0267 * mag**2 - 0.2055 * mag + 2.9188
# correction for groudnwater depths other than 5 feet, eq. 4-22
- k_gw_depth = 0.022 * self.gw_depth*3.28 + 0.93
+ k_gw_depth = 0.022 * self.gw_depth * 3.28 + 0.93
# get uncorrected p_liq given pga
- prob_liq_pga[liq_susc==liq_susc_enum['very_high'].value] = \
- np.maximum(np.minimum(9.09*pga[liq_susc==liq_susc_enum['very_high'].value]-0.82,1),0)
- prob_liq_pga[liq_susc==liq_susc_enum['high'].value] = \
- np.maximum(np.minimum(7.67*pga[liq_susc==liq_susc_enum['high'].value]-0.92,1),0)
- prob_liq_pga[liq_susc==liq_susc_enum['moderate'].value] = \
- np.maximum(np.minimum(6.67*pga[liq_susc==liq_susc_enum['moderate'].value]-1.00,1),0)
- prob_liq_pga[liq_susc==liq_susc_enum['low'].value] = \
- np.maximum(np.minimum(5.57*pga[liq_susc==liq_susc_enum['low'].value]-1.18,1),0)
- prob_liq_pga[liq_susc==liq_susc_enum['very_low'].value] = \
- np.maximum(np.minimum(4.16*pga[liq_susc==liq_susc_enum['very_low'].value]-1.08,1),0)
- prob_liq_pga[liq_susc==liq_susc_enum['none'].value] = 0
+ prob_liq_pga[liq_susc == liq_susc_enum['very_high'].value] = np.maximum(
+ np.minimum(
+ 9.09 * pga[liq_susc == liq_susc_enum['very_high'].value] - 0.82, 1
+ ),
+ 0,
+ )
+ prob_liq_pga[liq_susc == liq_susc_enum['high'].value] = np.maximum(
+ np.minimum(
+ 7.67 * pga[liq_susc == liq_susc_enum['high'].value] - 0.92, 1
+ ),
+ 0,
+ )
+ prob_liq_pga[liq_susc == liq_susc_enum['moderate'].value] = np.maximum(
+ np.minimum(
+ 6.67 * pga[liq_susc == liq_susc_enum['moderate'].value] - 1.00, 1
+ ),
+ 0,
+ )
+ prob_liq_pga[liq_susc == liq_susc_enum['low'].value] = np.maximum(
+ np.minimum(5.57 * pga[liq_susc == liq_susc_enum['low'].value] - 1.18, 1),
+ 0,
+ )
+ prob_liq_pga[liq_susc == liq_susc_enum['very_low'].value] = np.maximum(
+ np.minimum(
+ 4.16 * pga[liq_susc == liq_susc_enum['very_low'].value] - 1.08, 1
+ ),
+ 0,
+ )
+ prob_liq_pga[liq_susc == liq_susc_enum['none'].value] = 0
# get portion of map unit susceptible to liquefaction
- p_ml[liq_susc==liq_susc_enum['very_high'].value] = 0.25
- p_ml[liq_susc==liq_susc_enum['high'].value] = 0.20
- p_ml[liq_susc==liq_susc_enum['moderate'].value] = 0.10
- p_ml[liq_susc==liq_susc_enum['low'].value] = 0.05
- p_ml[liq_susc==liq_susc_enum['very_low'].value] = 0.02
- p_ml[liq_susc==liq_susc_enum['none'].value] = 0.00
+ p_ml[liq_susc == liq_susc_enum['very_high'].value] = 0.25
+ p_ml[liq_susc == liq_susc_enum['high'].value] = 0.20
+ p_ml[liq_susc == liq_susc_enum['moderate'].value] = 0.10
+ p_ml[liq_susc == liq_susc_enum['low'].value] = 0.05
+ p_ml[liq_susc == liq_susc_enum['very_low'].value] = 0.02
+ p_ml[liq_susc == liq_susc_enum['none'].value] = 0.00
# liquefaction likelihood, p_liq
- prob_liq = prob_liq_pga / k_mag / k_gw_depth * p_ml # decimal, eq. 4-20
- prob_liq = np.maximum(prob_liq,zero_prob_liq) # set prob to > "0" to avoid 0% in log
+ prob_liq = prob_liq_pga / k_mag / k_gw_depth * p_ml # decimal, eq. 4-20
+ prob_liq = np.maximum(
+ prob_liq, zero_prob_liq
+ ) # set prob to > "0" to avoid 0% in log
# Zhu et al. (2017) boundary constraints
# for pga_mag < 0.1 g, set prob to "0"
- prob_liq[pga_mag<0.1] = zero_prob_liq
+ prob_liq[pga_mag < 0.1] = zero_prob_liq # noqa: PLR2004
# for vs30 > 620 m/s, set prob to "0"
- prob_liq[self.vs30>620] = zero_prob_liq
+ prob_liq[self.vs30 > 620] = zero_prob_liq # noqa: PLR2004
# for precip > 1700 mm, set prob to "0"
- prob_liq[self.precip>1700] = zero_prob_liq
-
- return {"liq_prob":prob_liq, "liq_susc":liq_susc}
+ prob_liq[self.precip > 1700] = zero_prob_liq # noqa: PLR2004
+ return {'liq_prob': prob_liq, 'liq_susc': liq_susc}
-## Lateral Spreading:
-class LateralSpread:
+# Lateral Spreading:
+class LateralSpread: # noqa: D101
def __init__(self) -> None:
pass
+
# -----------------------------------------------------------
class Hazus2020Lateral(LateralSpread):
- """
- Compute lateral spreading, same methodology as Grant et al. (2016).
-
+ """Compute lateral spreading, same methodology as Grant et al. (2016).
+
Parameters
----------
From upstream PBEE:
@@ -675,13 +837,13 @@ class Hazus2020Lateral(LateralSpread):
[g] peak ground acceleration
mag: float, np.ndarray or list
moment magnitude
-
+
Geotechnical/geologic:
prob_liq: float, np.ndarray or list
probability of liquefaction
dist_water: float, np.ndarray or list, optional
[km] distance to nearest river, lake, or coast; site is only susceptible to lateral spread if distance is less than 25 meters
-
+
Fixed:
liq_susc: str, np.ndarray or list
susceptibility category to liquefaction (none, very low, low, moderate, high, very high)
@@ -692,138 +854,155 @@ class Hazus2020Lateral(LateralSpread):
[m] permanent ground deformation
sigma_pgdef : float, np.ndarray
aleatory variability for ln(pgdef)
-
+
References
----------
.. [1] Federal Emergency Management Agency (FEMA), 2020, Hazus Earthquake Model - Technical Manual, Hazus 4.2 SP3, 436 pp. https://www.fema.gov/flood-maps/tools-resources/flood-map-products/hazus/user-technical-manuals.
-
+
"""
+
def __init__(self, stations, parameters):
super().__init__()
self.stations = stations
- dist_to_water = parameters.get("DistWater")
- if type(dist_to_water) == np.array:
+ dist_to_water = parameters.get('DistWater')
+        if isinstance(dist_to_water, np.ndarray):
self.dist_to_water = dist_to_water
- elif dist_to_water == "Defined (\"distWater\") in Site File (.csv)":
- self.dist_to_water = np.array([site['distWater'] for site in self.stations])
- elif os.path.exists(os.path.dirname(dist_to_water)):
+ elif dist_to_water == 'Defined ("distWater") in Site File (.csv)':
+ self.dist_to_water = np.array(
+ [site['distWater'] for site in self.stations]
+ )
+ elif os.path.exists(os.path.dirname(dist_to_water)): # noqa: PTH110, PTH120
lat_station = [site['lat'] for site in self.stations]
lon_station = [site['lon'] for site in self.stations]
- self.dist_to_water = sampleRaster(dist_to_water, \
- parameters["inputCRS"],lon_station, lat_station)
+ self.dist_to_water = sampleRaster(
+ dist_to_water, parameters['inputCRS'], lon_station, lat_station
+ )
else:
self.dist_to_water = np.zeros(len(self.stations))
-
- def run(self, ln_im_data, eq_data, im_list):
+ def run(self, ln_im_data, eq_data, im_list): # noqa: D102
output_keys = ['liq_PGD_h']
- if ('PGA' in im_list) and ('liq_prob' in im_list) and \
- ('liq_susc' in im_list):
+ if (
+ ('PGA' in im_list)
+ and ('liq_prob' in im_list)
+ and ('liq_susc' in im_list)
+ ):
num_stations = len(self.stations)
num_scenarios = len(eq_data)
- PGA_col_id = [i for i, x in enumerate(im_list) if x == 'PGA'][0]
- liq_prob_col_id = [i for i, x in enumerate(im_list) if \
- x == 'liq_prob'][0]
- liq_susc_col_id = [i for i, x in enumerate(im_list) if \
- x == 'liq_susc'][0]
+ PGA_col_id = [i for i, x in enumerate(im_list) if x == 'PGA'][0] # noqa: N806, RUF015
+ liq_prob_col_id = [i for i, x in enumerate(im_list) if x == 'liq_prob'][ # noqa: RUF015
+ 0
+ ]
+ liq_susc_col_id = [i for i, x in enumerate(im_list) if x == 'liq_susc'][ # noqa: RUF015
+ 0
+ ]
for scenario_id in range(num_scenarios):
num_rlzs = ln_im_data[scenario_id].shape[2]
- im_data_scen = np.zeros([num_stations,\
- len(im_list)+len(output_keys), num_rlzs])
- im_data_scen[:,0:len(im_list),:] = ln_im_data[scenario_id]
+ im_data_scen = np.zeros(
+ [num_stations, len(im_list) + len(output_keys), num_rlzs]
+ )
+ im_data_scen[:, 0 : len(im_list), :] = ln_im_data[scenario_id]
for rlz_id in range(num_rlzs):
- liq_prob = ln_im_data[scenario_id][:,liq_prob_col_id,rlz_id]
- liq_susc = ln_im_data[scenario_id][:,liq_susc_col_id,rlz_id]
- pga = np.exp(ln_im_data[scenario_id][:,PGA_col_id,rlz_id])
+ liq_prob = ln_im_data[scenario_id][:, liq_prob_col_id, rlz_id]
+ liq_susc = ln_im_data[scenario_id][:, liq_susc_col_id, rlz_id]
+ pga = np.exp(ln_im_data[scenario_id][:, PGA_col_id, rlz_id])
mag = float(eq_data[scenario_id][0])
- model_output = self.model(pga, mag, liq_prob, \
- self.dist_to_water, liq_susc)
+ model_output = self.model(
+ pga, mag, liq_prob, self.dist_to_water, liq_susc
+ )
for i, key in enumerate(output_keys):
- im_data_scen[:,len(im_list)+i,rlz_id] = model_output[key]
+ im_data_scen[:, len(im_list) + i, rlz_id] = model_output[key]
ln_im_data[scenario_id] = im_data_scen
- im_list = im_list + output_keys
+ im_list = im_list + output_keys # noqa: PLR6104
else:
- sys.exit(f"At least one of 'PGA' and 'PGV' is missing in the selected intensity measures and the liquefaction trigging model 'ZhuEtal2017' can not be computed.")
+ sys.exit(
+ "At least one of 'PGA' and 'PGV' is missing in the selected intensity measures and the liquefaction trigging model 'ZhuEtal2017' can not be computed."
+ )
return ln_im_data, eq_data, im_list
@staticmethod
# @njit
def model(
- pga, mag, # upstream PBEE RV
- prob_liq, dist_water, # geotechnical/geologic
- liq_susc, # fixed/toggles
- extrapolate_expected_pgdef=True
+ pga,
+ mag, # upstream PBEE RV
+ prob_liq,
+ dist_water, # geotechnical/geologic
+ liq_susc, # fixed/toggles
+ extrapolate_expected_pgdef=True, # noqa: FBT002
):
- """Model"""
-
+ """Model""" # noqa: D400
# initialize arrays
-
+
# get threshold pga against liquefaction
- pga_t = np.ones(pga.shape)*np.nan
- pga_t[liq_susc==liq_susc_enum['very_high'].value] = 0.09 # g
- pga_t[liq_susc==liq_susc_enum['high'].value] = 0.12 # g
- pga_t[liq_susc==liq_susc_enum['moderate'].value] = 0.15 # g
- pga_t[liq_susc==liq_susc_enum['low'].value] = 0.21 # g
- pga_t[liq_susc==liq_susc_enum['very_low'].value] = 0.26 # g
- pga_t[liq_susc==liq_susc_enum['none'].value] = 1. # g
-
+ pga_t = np.ones(pga.shape) * np.nan
+ pga_t[liq_susc == liq_susc_enum['very_high'].value] = 0.09 # g
+ pga_t[liq_susc == liq_susc_enum['high'].value] = 0.12 # g
+ pga_t[liq_susc == liq_susc_enum['moderate'].value] = 0.15 # g
+ pga_t[liq_susc == liq_susc_enum['low'].value] = 0.21 # g
+ pga_t[liq_susc == liq_susc_enum['very_low'].value] = 0.26 # g
+ pga_t[liq_susc == liq_susc_enum['none'].value] = 1.0 # g
+
# pga factor of safety
- ratio = pga/pga_t\
-
+ ratio = pga / pga_t
# get normalized displacement in inches, a, for M=7
- expected_pgdef = np.ones(pga.shape)*np.nan
- expected_pgdef[ratio<=1] = 1e-3 # above 1e-3 cm, or 1e-5 m
- expected_pgdef[np.logical_and(ratio>1,ratio<=2)] = 12*ratio[np.logical_and(ratio>1,ratio<=2)] - 12
- expected_pgdef[np.logical_and(ratio>2,ratio<=3)] = 18*ratio[np.logical_and(ratio>2,ratio<=3)] - 24
+ expected_pgdef = np.ones(pga.shape) * np.nan
+ expected_pgdef[ratio <= 1] = 1e-3 # above 1e-3 cm, or 1e-5 m
+ expected_pgdef[np.logical_and(ratio > 1, ratio <= 2)] = ( # noqa: PLR2004
+ 12 * ratio[np.logical_and(ratio > 1, ratio <= 2)] - 12 # noqa: PLR2004
+ )
+ expected_pgdef[np.logical_and(ratio > 2, ratio <= 3)] = ( # noqa: PLR2004
+ 18 * ratio[np.logical_and(ratio > 2, ratio <= 3)] - 24 # noqa: PLR2004
+ )
if extrapolate_expected_pgdef is True:
- expected_pgdef[ratio>3] = 70*ratio[ratio>3] - 180
+ expected_pgdef[ratio > 3] = 70 * ratio[ratio > 3] - 180 # noqa: PLR2004
else:
- expected_pgdef[np.logical_and(ratio>3,ratio<=4)] = 70*ratio[np.logical_and(ratio>3,ratio<=4)] - 180
- expected_pgdef[ratio>4] = 100
- expected_pgdef *= 2.54 # convert from inches to cm
-
+ expected_pgdef[np.logical_and(ratio > 3, ratio <= 4)] = ( # noqa: PLR2004
+ 70 * ratio[np.logical_and(ratio > 3, ratio <= 4)] - 180 # noqa: PLR2004
+ )
+ expected_pgdef[ratio > 4] = 100 # noqa: PLR2004
+ expected_pgdef *= 2.54 # convert from inches to cm
+
# magnitude correction
- k_delta = 0.0086*mag**3 - 0.0914*mag**2 + 0.4698*mag - 0.9835
-
+ k_delta = 0.0086 * mag**3 - 0.0914 * mag**2 + 0.4698 * mag - 0.9835
+
# susceptibility to lateral spreading only for deposits found near water body (dw < dw_cutoff)
pgdef = k_delta * expected_pgdef * prob_liq
- pgdef = pgdef/100 # also convert from cm to m
- pgdef[dist_water>25] = 1e-5
-
+ pgdef = pgdef / 100 # also convert from cm to m # noqa: PLR6104
+ pgdef[dist_water > 25] = 1e-5 # noqa: PLR2004
+
# keep pgdef to minimum of 1e-5 m
- pgdef = np.maximum(pgdef,1e-5)
-
+ pgdef = np.maximum(pgdef, 1e-5)
+
# prepare outputs
- output = {'liq_PGD_h':pgdef}
+ output = {'liq_PGD_h': pgdef}
# get intermediate values if requested
# if return_inter_params:
# output['k_delta'] = k_delta
# output['expected_pgdef'] = expected_pgdef
# output['pga_t'] = pga_t
# output['ratio'] = ratio
-
+
# return
- return output
-
+ return output # noqa: RET504
+
-## Settlement:
-class GroundSettlement:
+# Settlement:
+class GroundSettlement: # noqa: D101
def __init__(self) -> None:
pass
class Hazus2020Vertical(GroundSettlement):
- """
- Compute volumetric settlement at a given location using a simplified deterministic approach (after Tokimatsu and Seed, 1987).
-
+ """Compute volumetric settlement at a given location using a simplified deterministic approach (after Tokimatsu and Seed, 1987).
+
Parameters
----------
From upstream PBEE:
-
+
Geotechnical/geologic:
prob_liq: float, np.ndarray or list
probability of liquefaction
-
+
Fixed:
liq_susc: str, np.ndarray or list
susceptibility category to liquefaction (none, very low, low, moderate, high, very high)
@@ -834,73 +1013,78 @@ class Hazus2020Vertical(GroundSettlement):
[m] permanent ground deformation
sigma_pgdef : float, np.ndarray
aleatory variability for ln(pgdef)
-
+
References
----------
.. [1] Federal Emergency Management Agency (FEMA), 2020, Hazus Earthquake Model - Technical Manual, Hazus 4.2 SP3, 436 pp. https://www.fema.gov/flood-maps/tools-resources/flood-map-products/hazus/user-technical-manuals.
.. [2] Tokimatsu, K., and Seed, H.B., 1987, Evaluation of Settlements in Sands Due to Earthquake Shaking. Journal of Geotechnical Engineering, vol. 113, no. 8, pp. 861-878.
-
+
"""
+
@staticmethod
# @njit
def model(
- prob_liq, # geotechnical/geologic
- liq_susc, # fixed/toggles
- return_inter_params=False # to get intermediate params
+ prob_liq, # geotechnical/geologic
+ liq_susc, # fixed/toggles
+ return_inter_params=False, # to get intermediate params # noqa: FBT002
):
- """Model"""
-
+ """Model""" # noqa: D400
# initialize arrays
# get threshold pga against liquefaction, in cm
- pgdef = np.ones(liq_susc.shape)*np.nan
- pgdef[liq_susc==liq_susc_enum['very_high'].value] = 30
- pgdef[liq_susc==liq_susc_enum['high'].value] = 15
- pgdef[liq_susc==liq_susc_enum['moderate'].value] = 5
- pgdef[liq_susc==liq_susc_enum['low'].value] = 2.5
- pgdef[liq_susc==liq_susc_enum['very_low'].value] = 1
- pgdef[liq_susc==liq_susc_enum['none'].value] = 1e-3
-
+ pgdef = np.ones(liq_susc.shape) * np.nan
+ pgdef[liq_susc == liq_susc_enum['very_high'].value] = 30
+ pgdef[liq_susc == liq_susc_enum['high'].value] = 15
+ pgdef[liq_susc == liq_susc_enum['moderate'].value] = 5
+ pgdef[liq_susc == liq_susc_enum['low'].value] = 2.5
+ pgdef[liq_susc == liq_susc_enum['very_low'].value] = 1
+ pgdef[liq_susc == liq_susc_enum['none'].value] = 1e-3
+
# condition with prob_liq
- pgdef = pgdef * prob_liq
-
+ pgdef = pgdef * prob_liq # noqa: PLR6104
+
# convert from cm to m
- pgdef = pgdef/100
-
+ pgdef = pgdef / 100 # noqa: PLR6104
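+        # Worked example (assumed values): 'high' susceptibility gives 15 cm of
+        # potential settlement; with prob_liq = 0.2 the expected settlement is
+        # 15 * 0.2 = 3 cm = 0.03 m.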
+
# limit deformations to 1e-5
- pgdef = np.maximum(pgdef,1e-5)
-
+ pgdef = np.maximum(pgdef, 1e-5)
+
# prepare outputs
- output = {'liq_PGD_v':pgdef}
+ output = {'liq_PGD_v': pgdef}
# get intermediate values if requested
if return_inter_params:
pass
-
+
# return
return output
-
- def run(self, ln_im_data, eq_data, im_list):
+
+ def run(self, ln_im_data, eq_data, im_list): # noqa: D102
output_keys = ['liq_PGD_v']
- if ('liq_susc' in im_list) and ('liq_prob' in im_list):
+ if ('liq_susc' in im_list) and ('liq_prob' in im_list):
num_stations = ln_im_data[0].shape[0]
num_scenarios = len(eq_data)
- liq_prob_col_id = [i for i, x in enumerate(im_list) if \
- x == 'liq_prob'][0]
- liq_susc_col_id = [i for i, x in enumerate(im_list) if \
- x == 'liq_susc'][0]
+ liq_prob_col_id = [i for i, x in enumerate(im_list) if x == 'liq_prob'][ # noqa: RUF015
+ 0
+ ]
+ liq_susc_col_id = [i for i, x in enumerate(im_list) if x == 'liq_susc'][ # noqa: RUF015
+ 0
+ ]
for scenario_id in range(num_scenarios):
num_rlzs = ln_im_data[scenario_id].shape[2]
- im_data_scen = np.zeros([num_stations,\
- len(im_list)+len(output_keys), num_rlzs])
- im_data_scen[:,0:len(im_list),:] = ln_im_data[scenario_id]
+ im_data_scen = np.zeros(
+ [num_stations, len(im_list) + len(output_keys), num_rlzs]
+ )
+ im_data_scen[:, 0 : len(im_list), :] = ln_im_data[scenario_id]
for rlz_id in range(num_rlzs):
- liq_prob = ln_im_data[scenario_id][:,liq_prob_col_id,rlz_id]
- liq_susc = ln_im_data[scenario_id][:,liq_susc_col_id,rlz_id]
+ liq_prob = ln_im_data[scenario_id][:, liq_prob_col_id, rlz_id]
+ liq_susc = ln_im_data[scenario_id][:, liq_susc_col_id, rlz_id]
model_output = self.model(liq_prob, liq_susc)
for i, key in enumerate(output_keys):
- im_data_scen[:,len(im_list)+i,rlz_id] = model_output[key]
+ im_data_scen[:, len(im_list) + i, rlz_id] = model_output[key]
ln_im_data[scenario_id] = im_data_scen
- im_list = im_list + output_keys
+ im_list = im_list + output_keys # noqa: PLR6104
else:
- sys.exit(f"At least one of 'liq_susc' and 'liq_prob' is missing in the selected intensity measures and the liquefaction trigging model 'ZhuEtal2017' can not be computed.")
- return ln_im_data, eq_data, im_list
\ No newline at end of file
+ sys.exit(
+ "At least one of 'liq_susc' and 'liq_prob' is missing in the selected intensity measures and the liquefaction trigging model 'ZhuEtal2017' can not be computed."
+ )
+ return ln_im_data, eq_data, im_list
diff --git a/modules/performRegionalEventSimulation/regionalWindField/ComputeIntensityMeasure.py b/modules/performRegionalEventSimulation/regionalWindField/ComputeIntensityMeasure.py
index d82c10440..f521ec680 100644
--- a/modules/performRegionalEventSimulation/regionalWindField/ComputeIntensityMeasure.py
+++ b/modules/performRegionalEventSimulation/regionalWindField/ComputeIntensityMeasure.py
@@ -1,5 +1,4 @@
-# -*- coding: utf-8 -*-
-#
+# # noqa: INP001, D100
# Copyright (c) 2018 Leland Stanford Junior University
# Copyright (c) 2018 The Regents of the University of California
#
@@ -38,20 +37,20 @@
# Kuanshi Zhong
#
-import os
-import subprocess
-import sys
import json
-import copy
-import shutil
import multiprocessing as mp
+import os
+import shutil
+import subprocess # noqa: S404
+import sys
+
import numpy as np
import pandas as pd
-from WindFieldSimulation import *
+from WindFieldSimulation import * # noqa: F403
-def run_model(scen, p, t, path_perturb, feat_perturb, res_mp):
- model = LinearAnalyticalModel_SnaikiWu_2017(cyclone_param = p, storm_track = t)
+def run_model(scen, p, t, path_perturb, feat_perturb, res_mp): # noqa: D103
+ model = LinearAnalyticalModel_SnaikiWu_2017(cyclone_param=p, storm_track=t) # noqa: F405
if scen['Terrain']:
model.add_reference_terrain(scen['Terrain'])
model.set_cyclone_mesh(scen['StormMesh'])
@@ -63,32 +62,35 @@ def run_model(scen, p, t, path_perturb, feat_perturb, res_mp):
# this just an engineering judgement that the pressure difference, moving speed, and max-wind-speed radius
# should not be less than 0.0 in the value.
delta_feat[delta_feat < 0.0] = 0.0
- print('dLatitude, dLongtitude, dAngle = ', delta_path)
- print('dP, v, Rmax = ', delta_feat)
+    print('dLatitude, dLongitude, dAngle = ', delta_path)  # noqa: T201
+ print('dP, v, Rmax = ', delta_feat) # noqa: T201
model.set_delta_path(delta_path)
model.set_delta_feat(delta_feat)
model.compute_wind_field()
res_mp.append(model.get_station_data())
-def simulate_storm(scenarios, event_info, model_type):
- if (model_type == 'LinearAnalytical'):
+def simulate_storm(scenarios, event_info, model_type): # noqa: D103
+ if model_type == 'LinearAnalytical':
num_per_site = event_info['NumberPerSite']
- if (num_per_site == 1):
+ if num_per_site == 1:
path_perturb = np.zeros(3)
feat_perturb = np.zeros(3)
+ elif len(event_info.get('Perturbation', [])) != 6: # noqa: PLR2004
+ print('ComputeIntensityMeasure: Perturbation should have a size of 6.') # noqa: T201
+ path_perturb = np.array([0.5, 0.5, 90.0])
+ feat_perturb = np.array([10.0, 10.0, 10.0])
+ print( # noqa: T201
+            'ComputeIntensityMeasure: [0.5, 0.5, 90.0, 10.0, 10.0, 10.0] is used for perturbations.'
+ )
else:
- if (len(event_info.get('Perturbation', [])) != 6):
- print('ComputeIntensityMeasure: Perturbation should have a size of 6.')
- path_perturb = np.array([0.5, 0.5, 90.0])
- feat_perturb = np.array([10.0, 10.0, 10.0])
- print('ComputeIntensityMeasure: [1.0, 1.0, 90.0, 10.0, 10.0, 10.0] is used for perturbations.')
- else:
- path_perturb = np.array(event_info['Perturbation'][0:3])
- feat_perturb = np.array(event_info['Perturbation'][3:6])
+ path_perturb = np.array(event_info['Perturbation'][0:3])
+ feat_perturb = np.array(event_info['Perturbation'][3:6])
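+            # The 6-entry 'Perturbation' input splits into track perturbations
+            # [dLatitude, dLongitude, dAngle] (first three entries) and
+            # cyclone-feature perturbations [dP, dV, dR] (last three entries);
+            # see the pert_dict written by simulate_storm_cpp below.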
for i in range(len(scenarios)):
- if (i == 1):
- print('ComputeIntensityMeasure: currently supporting single scenario simulation only.')
+ if i == 1:
+ print( # noqa: T201
+ 'ComputeIntensityMeasure: currently supporting single scenario simulation only.'
+ )
return -1
cur_scen = scenarios[i]
param = cur_scen['CycloneParam']
@@ -98,9 +100,18 @@ def simulate_storm(scenarios, event_info, model_type):
with mp.Manager() as manager:
res_mp = manager.list([])
proc_list = []
- for k in range(num_per_site):
- proc = mp.Process(target = run_model,
- args = (cur_scen, param, track, path_perturb, feat_perturb, res_mp))
+ for k in range(num_per_site): # noqa: B007
+ proc = mp.Process(
+ target=run_model,
+ args=(
+ cur_scen,
+ param,
+ track,
+ path_perturb,
+ feat_perturb,
+ res_mp,
+ ),
+ )
proc_list.append(proc)
for k in range(num_per_site):
proc = proc_list[k]
@@ -109,18 +120,26 @@ def simulate_storm(scenarios, event_info, model_type):
proc = proc_list[k]
proc.join()
# extract data
- res = [x for x in res_mp]
-
+ res = [x for x in res_mp] # noqa: C416
+
else:
- print('ComputeIntensityMeasure: currently only supporting LinearAnalytical model')
+ print( # noqa: T201
+ 'ComputeIntensityMeasure: currently only supporting LinearAnalytical model'
+ )
# return
return res
-def simulate_storm_cpp(site_info, scenario_info, scenario_data, event_info, model_type, dir_info):
-
- if (model_type == 'LinearAnalytical'):
+def simulate_storm_cpp( # noqa: C901, D103
+ site_info,
+ scenario_info,
+ scenario_data,
+ event_info,
+ model_type,
+ dir_info,
+):
+ if model_type == 'LinearAnalytical':
# save configuration file
input_dir = dir_info['Input']
output_dir = dir_info['Output']
@@ -129,90 +148,114 @@ def simulate_storm_cpp(site_info, scenario_info, scenario_data, event_info, mode
scenario_info['Storm']['Track'] = 'Track_populated.csv'
scenario_info['Storm']['TrackSimu'] = 'TrackSimu_populated.csv'
scenario_info['Storm']['Landfall'] = {}
- scenario_info['Storm']['Landfall']['Latitude'] = scenario_data[0]['CycloneParam'][0]
- scenario_info['Storm']['Landfall']['Longitude'] = scenario_data[0]['CycloneParam'][1]
+ scenario_info['Storm']['Landfall']['Latitude'] = scenario_data[0][
+ 'CycloneParam'
+ ][0]
+ scenario_info['Storm']['Landfall']['Longitude'] = scenario_data[0][
+ 'CycloneParam'
+ ][1]
# updating landfall properties
scenario_info['Storm']['LandingAngle'] = scenario_data[0]['CycloneParam'][2]
scenario_info['Storm']['Pressure'] = scenario_data[0]['CycloneParam'][3]
scenario_info['Storm']['Speed'] = scenario_data[0]['CycloneParam'][4]
scenario_info['Storm']['Radius'] = scenario_data[0]['CycloneParam'][5]
- config = {
- "Scenario": scenario_info,
- "Event": event_info
- }
- abs_path_config = os.path.abspath(os.path.join(input_dir, 'SimuConfig.json'))
- with open (abs_path_config, "w") as f:
+ config = {'Scenario': scenario_info, 'Event': event_info}
+ abs_path_config = os.path.abspath(os.path.join(input_dir, 'SimuConfig.json')) # noqa: PTH100, PTH118
+ with open(abs_path_config, 'w') as f: # noqa: PLW1514, PTH123
json.dump(config, f)
# site file
- abs_path_site = os.path.abspath(os.path.join(input_dir, site_info['input_file']))
+ abs_path_site = os.path.abspath( # noqa: PTH100
+ os.path.join(input_dir, site_info['input_file']) # noqa: PTH118
+ )
# track file
- abs_path_track = os.path.abspath(os.path.join(input_dir, scenario_info['Storm']['Track']))
+ abs_path_track = os.path.abspath( # noqa: PTH100
+ os.path.join(input_dir, scenario_info['Storm']['Track']) # noqa: PTH118
+ )
if scenario_info['Generator'] == 'SimulationHist':
- df = pd.DataFrame.from_dict({
- 'Lat': scenario_data[0]['StormTrack']['Latitude'],
- 'Lon': scenario_data[0]['StormTrack']['Longitude']
- })
- df.to_csv(abs_path_track, sep=',', header=False, index=False)
+ df = pd.DataFrame.from_dict( # noqa: PD901
+ {
+ 'Lat': scenario_data[0]['StormTrack']['Latitude'],
+ 'Lon': scenario_data[0]['StormTrack']['Longitude'],
+ }
+ )
+ df.to_csv(abs_path_track, sep=',', header=False, index=False)
# lat_w file
if scenario_info['Storm'].get('TrackSimu', None):
- abs_path_latw = os.path.abspath(os.path.join(input_dir, scenario_info['Storm']['TrackSimu']))
+ abs_path_latw = os.path.abspath( # noqa: PTH100
+ os.path.join(input_dir, scenario_info['Storm']['TrackSimu']) # noqa: PTH118
+ )
else:
- abs_path_latw = os.path.abspath(os.path.join(input_dir, 'TrackSimu_populated.csv'))
- df = pd.DataFrame.from_dict({
- 'Lat': scenario_data[0]['TrackSimu'],
- })
+ abs_path_latw = os.path.abspath( # noqa: PTH100
+ os.path.join(input_dir, 'TrackSimu_populated.csv') # noqa: PTH118
+ )
+ df = pd.DataFrame.from_dict( # noqa: PD901
+ {
+ 'Lat': scenario_data[0]['TrackSimu'],
+ }
+ )
df.to_csv(abs_path_latw, sep=',', header=False, index=False)
if scenario_info['Generator'] == 'SimulationHist':
- df = pd.DataFrame.from_dict({
- 'Lat': scenario_data[0]['TrackSimu'],
- })
- df.to_csv(abs_path_latw, sep=',', header=False, index=False)
+ df = pd.DataFrame.from_dict( # noqa: PD901
+ {
+ 'Lat': scenario_data[0]['TrackSimu'],
+ }
+ )
+ df.to_csv(abs_path_latw, sep=',', header=False, index=False)
# terrain file
- if ('Terrain' in scenario_info.keys()):
- abs_path_terrain = os.path.abspath(os.path.join(input_dir, scenario_info['Terrain']))
+ if 'Terrain' in scenario_info.keys(): # noqa: SIM118
+ abs_path_terrain = os.path.abspath( # noqa: PTH100
+ os.path.join(input_dir, scenario_info['Terrain']) # noqa: PTH118
+ )
else:
# default terrain z0 = 0.01 everywhere for the defined domain
- abs_path_terrain = os.path.abspath(os.path.join(input_dir, 'DefaultTerrain.geojson'))
+ abs_path_terrain = os.path.abspath( # noqa: PTH100
+ os.path.join(input_dir, 'DefaultTerrain.geojson') # noqa: PTH118
+ )
dict_dt = {
- "type": "FeatureCollection",
- "features": [{
- "type": "Feature",
- "geometry": {
- "type": "Polygon",
- "coordinates": [
- [[-180.0, -90.0],
- [-180.0, 90.0],
- [180.0, 90.0],
- [180.0, -90.0],
- [-180.0, -90.0]]]
- },
- "properties": {
- "z0": 0.01
+ 'type': 'FeatureCollection',
+ 'features': [
+ {
+ 'type': 'Feature',
+ 'geometry': {
+ 'type': 'Polygon',
+ 'coordinates': [
+ [
+ [-180.0, -90.0],
+ [-180.0, 90.0],
+ [180.0, 90.0],
+ [180.0, -90.0],
+ [-180.0, -90.0],
+ ]
+ ],
+ },
+ 'properties': {'z0': 0.01},
}
- }
- ]
+ ],
}
- with open(abs_path_terrain, 'w') as f:
+ with open(abs_path_terrain, 'w') as f: # noqa: PLW1514, PTH123
json.dump(dict_dt, f, indent=2)
-
+
# configuring perturbation
num_per_site = event_info['NumberPerSite']
- if (num_per_site == 1):
+ if num_per_site == 1:
path_perturb = np.zeros(3)
feat_perturb = np.zeros(3)
+ elif len(event_info.get('Perturbation', [])) != 6: # noqa: PLR2004
+ print('ComputeIntensityMeasure: Perturbation should have a size of 6.') # noqa: T201
+ path_perturb = np.array([0.5, 0.5, 90.0])
+ feat_perturb = np.array([10.0, 10.0, 10.0])
+ print( # noqa: T201
+                'ComputeIntensityMeasure: [0.5, 0.5, 90.0, 10.0, 10.0, 10.0] is used for perturbations.'
+ )
else:
- if (len(event_info.get('Perturbation', [])) != 6):
- print('ComputeIntensityMeasure: Perturbation should have a size of 6.')
- path_perturb = np.array([0.5, 0.5, 90.0])
- feat_perturb = np.array([10.0, 10.0, 10.0])
- print('ComputeIntensityMeasure: [1.0, 1.0, 90.0, 10.0, 10.0, 10.0] is used for perturbations.')
- else:
- path_perturb = np.array(event_info['Perturbation'][0:3])
- feat_perturb = np.array(event_info['Perturbation'][3:6])
+ path_perturb = np.array(event_info['Perturbation'][0:3])
+ feat_perturb = np.array(event_info['Perturbation'][3:6])
for i in range(int(scenario_info['Number'])):
- if (i == 1):
- print('ComputeIntensityMeasure: currently supporting single scenario simulation only.')
+ if i == 1:
+ print( # noqa: T201
+ 'ComputeIntensityMeasure: currently supporting single scenario simulation only.'
+ )
return -1
np.random.seed(100)
res = []
@@ -221,79 +264,112 @@ def simulate_storm_cpp(site_info, scenario_info, scenario_data, event_info, mode
args_list = []
odir_list = []
if sys.platform.startswith('win'):
- windsimu_bin = os.path.dirname(__file__) + '/WindFieldSimulation.exe'
+ windsimu_bin = os.path.dirname(__file__) + '/WindFieldSimulation.exe' # noqa: PTH120
else:
- windsimu_bin = os.path.dirname(__file__) + '/WindFieldSimulation'
- ## preparing files
+ windsimu_bin = os.path.dirname(__file__) + '/WindFieldSimulation' # noqa: PTH120
+ # preparing files
for j in range(num_per_site):
delta_path = (np.random.rand(3) - 0.5) * path_perturb
delta_feat = (np.random.rand(3) - 0.5) * feat_perturb
pert_dict = {
- "dLatitude": delta_path[0],
- "dLongitude": delta_path[1],
- "dAngle": delta_path[2],
- "dP": delta_feat[0],
- "dV": delta_feat[1],
- "dR": delta_feat[2]
+ 'dLatitude': delta_path[0],
+ 'dLongitude': delta_path[1],
+ 'dAngle': delta_path[2],
+ 'dP': delta_feat[0],
+ 'dV': delta_feat[1],
+ 'dR': delta_feat[2],
}
- abs_path_pert = os.path.abspath(os.path.join(input_dir, 'Perturbation' + str(j) + '.json'))
- with open(abs_path_pert, "w") as f:
+ abs_path_pert = os.path.abspath( # noqa: PTH100
+ os.path.join(input_dir, 'Perturbation' + str(j) + '.json') # noqa: PTH118
+ )
+ with open(abs_path_pert, 'w') as f: # noqa: PLW1514, PTH123
json.dump(pert_dict, f)
- print('dLatitude, dLongtitude, dAngle = ', delta_path)
- print('dP, dv, dR = ', delta_feat)
- output_subdir = os.path.abspath(os.path.join(output_dir, 'simu' + str(j)))
- if os.path.exists(output_subdir):
+            print('dLatitude, dLongitude, dAngle = ', delta_path)  # noqa: T201
+ print('dP, dv, dR = ', delta_feat) # noqa: T201
+ output_subdir = os.path.abspath( # noqa: PTH100
+ os.path.join(output_dir, 'simu' + str(j)) # noqa: PTH118
+ )
+ if os.path.exists(output_subdir): # noqa: PTH110
shutil.rmtree(output_subdir)
- os.makedirs(output_subdir)
- args = [windsimu_bin, "--config", abs_path_config, "--site", abs_path_site,
- "--track", abs_path_track, "--latw", abs_path_latw, "--pert", abs_path_pert,
- "--terrain", abs_path_terrain, "--z0", output_subdir,
- "--output", output_subdir]
+ os.makedirs(output_subdir) # noqa: PTH103
+ args = [
+ windsimu_bin,
+ '--config',
+ abs_path_config,
+ '--site',
+ abs_path_site,
+ '--track',
+ abs_path_track,
+ '--latw',
+ abs_path_latw,
+ '--pert',
+ abs_path_pert,
+ '--terrain',
+ abs_path_terrain,
+ '--z0',
+ output_subdir,
+ '--output',
+ output_subdir,
+ ]
pert_list.append(abs_path_pert)
args_list.append(args)
odir_list.append(output_subdir)
- ## running
- print('ComputeIntensityMeaure: running analysis.')
- procs_list = [subprocess.Popen(cmd, stdout=subprocess.PIPE, stderr=subprocess.PIPE) for cmd in args_list]
+ # running
+        print('ComputeIntensityMeasure: running analysis.')  # noqa: T201
+ procs_list = [
+ subprocess.Popen(cmd, stdout=subprocess.PIPE, stderr=subprocess.PIPE) # noqa: S603
+ for cmd in args_list
+ ]
for proc in procs_list:
proc.communicate()
- ## loading output
- print('ComputeIntensityMeaure: postprocessing simulation data.')
+ # loading output
+        print('ComputeIntensityMeasure: postprocessing simulation data.')  # noqa: T201
for j in range(num_per_site):
- os.remove(pert_list[j])
+ os.remove(pert_list[j]) # noqa: PTH107
station_res = {
'Latitude': [],
'Longitude': [],
'z0': [],
- 'PWS': {
- 'height': [],
- 'duration': 600.0,
- 'windspeed': []
- }
+ 'PWS': {'height': [], 'duration': 600.0, 'windspeed': []},
}
- df = pd.read_csv(os.path.join(os.path.abspath(odir_list[j]), 'StationZ0.csv'), header = None, index_col = None)
- station_res['z0'] = list(np.concatenate(df.values.tolist()).flat)
- df = pd.read_csv(os.path.join(os.path.abspath(odir_list[j]), 'MeasureHeight.csv'), header = None, index_col = None)
- station_res['PWS']['height'] = df.values.tolist()[0]
- df = pd.read_csv(os.path.join(os.path.abspath(odir_list[j]), 'MaxWindSpeed.csv'), header = None, index_col = None)
- station_res['PWS']['windspeed'] = df.values.tolist()
+ df = pd.read_csv( # noqa: PD901
+ os.path.join(os.path.abspath(odir_list[j]), 'StationZ0.csv'), # noqa: PTH100, PTH118
+ header=None,
+ index_col=None,
+ )
+ station_res['z0'] = list(np.concatenate(df.values.tolist()).flat) # noqa: PD011
+ df = pd.read_csv( # noqa: PD901
+ os.path.join(os.path.abspath(odir_list[j]), 'MeasureHeight.csv'), # noqa: PTH100, PTH118
+ header=None,
+ index_col=None,
+ )
+ station_res['PWS']['height'] = df.values.tolist()[0] # noqa: PD011
+ df = pd.read_csv( # noqa: PD901
+ os.path.join(os.path.abspath(odir_list[j]), 'MaxWindSpeed.csv'), # noqa: PTH100, PTH118
+ header=None,
+ index_col=None,
+ )
+ station_res['PWS']['windspeed'] = df.values.tolist() # noqa: PD011
res.append(station_res)
shutil.rmtree(odir_list[j])
# house-keeping
- os.remove(abs_path_config)
+ os.remove(abs_path_config) # noqa: PTH107
else:
- print('ComputeIntensityMeasure: currently only supporting LinearAnalytical model')
+ print( # noqa: T201
+ 'ComputeIntensityMeasure: currently only supporting LinearAnalytical model'
+ )
# return
return res
-def convert_wind_speed(event_info, simu_res):
+def convert_wind_speed(event_info, simu_res): # noqa: D103
+ print( # noqa: T201
+ 'ComputeIntensityMeasure: converting peak wind speed to specified exposure, measuring height, and gust duration.'
+ )
- print('ComputeIntensityMeasure: converting peak wind speed to specificed exposure, measuring height, and gust duration.')
-
- if ('HAZUS' in event_info['IntensityMeasure']['Type']):
+ if 'HAZUS' in event_info['IntensityMeasure']['Type']:
# Exposure type C: z0 = 0.03
exposure = 'C'
# 10-m measuring height
@@ -302,8 +378,8 @@ def convert_wind_speed(event_info, simu_res):
gust_duration = 3.0
else:
exposure = event_info['IntensityMeasure']['Exposure']
- if exposure not in ['A', 'B', 'C', 'D']:
- print('ComputeIntensityMeasure: the Exposure should be A, B, C, or D.')
+ if exposure not in ['A', 'B', 'C', 'D']: # noqa: PLR6201
+ print('ComputeIntensityMeasure: the Exposure should be A, B, C, or D.') # noqa: T201
return -1
gust_duration = event_info['IntensityMeasure']['GustDuration']
reference_height = event_info['IntensityMeasure']['ReferenceHeight']
@@ -321,18 +397,20 @@ def convert_wind_speed(event_info, simu_res):
gust_duration_simu = cur_res['PWS']['duration']
# quick check the size
if pws_raw.shape[1] != len(measure_height):
- print('ComputeIntensityMeasure: please check the output wind speed results.')
+ print( # noqa: T201
+ 'ComputeIntensityMeasure: please check the output wind speed results.'
+ )
return -1
# ASCE 7-16 conversion (Chapter C26)
# station-wise empirical exponent \alpha
alpha = 5.65 * (z0_simu ** (-0.133))
# station-wise gradient height
- zg = 450.0 * (z0_simu ** 0.125)
+ zg = 450.0 * (z0_simu**0.125)
# target exposure alpha and graident height
- if (exposure == 'B'):
+ if exposure == 'B':
alpha_t = 7.0
zg_t = 365.76
- elif (exposure == 'D'):
+ elif exposure == 'D':
alpha_t = 11.5
zg_t = 213.36
else:
@@ -341,56 +419,67 @@ def convert_wind_speed(event_info, simu_res):
zg_t = 274.32
# conversion
pws_raw = interp_wind_by_height(pws_raw, measure_height, reference_height)
- print(np.max(pws_raw))
+ print(np.max(pws_raw)) # noqa: T201
# computing gradient-height wind speed
pws_tmp = pws_raw * (zg / reference_height) ** (1.0 / alpha)
# converting exposure
- pws_tmp = pws_tmp * (reference_height / zg_t) ** (1.0 / alpha_t)
+ pws_tmp = pws_tmp * (reference_height / zg_t) ** (1.0 / alpha_t) # noqa: PLR6104
pws = pws_tmp * gust_factor_ESDU(gust_duration_simu, gust_duration)
- print(np.max(pws))
+ print(np.max(pws)) # noqa: T201
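+        # Summary of the conversion chain implemented above (for reference):
+        #   V_target ~= V_ref * (zg / z_ref)**(1/alpha) * (z_ref / zg_t)**(1/alpha_t)
+        #               * gust_factor_ESDU(simulated duration, target duration)
+        # i.e. scale up to gradient height with the simulated-terrain profile,
+        # back down with the target-exposure profile, then adjust gust duration.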
# appending to pws_mr
pws_mr.append(pws)
- print('ComputeIntensityMeasure: wind speed conversion completed.')
+ print('ComputeIntensityMeasure: wind speed conversion completed.') # noqa: T201
# return
return pws_mr
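
# A minimal numeric sketch of the exposure/height/gust conversion performed in
# convert_wind_speed() above, for a single station. The roughness, reference
# height, and wind speed values are hypothetical placeholders; zg_t = 274.32 m
# matches the default branch above, and alpha_t = 9.5 is the standard
# ASCE 7-16 Exposure C exponent assumed here.
z0_simu = 0.3              # simulated surface roughness (m), hypothetical
reference_height = 10.0    # reference height (m)
pws_ref = 40.0             # simulated peak wind speed at the reference height (m/s), hypothetical
alpha = 5.65 * z0_simu ** (-0.133)   # station-wise power-law exponent
zg = 450.0 * z0_simu**0.125          # station-wise gradient height (m)
alpha_t, zg_t = 9.5, 274.32          # target exposure (C) exponent and gradient height (m)
pws_grad = pws_ref * (zg / reference_height) ** (1.0 / alpha)         # up to gradient height
pws_target = pws_grad * (reference_height / zg_t) ** (1.0 / alpha_t)  # down in the target exposure
pws_3s = pws_target * gust_factor_ESDU(600.0, 3.0)                    # 600 s gust to 3 s gust (function defined below)
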
def interp_wind_by_height(pws_ip, height_simu, height_ref):
- """
- interp_wind_by_height: interpolating the wind simulation results by the reference height
- """
+ """interp_wind_by_height: interpolating the wind simulation results by the reference height""" # noqa: D400
num_stat = pws_ip.shape[0]
pws_op = np.zeros(num_stat)
for i in range(num_stat):
- pws_op[i] = np.interp(height_ref, height_simu, pws_ip[i, :], left = pws_ip[i, 0], right = pws_ip[i, -1])
+ pws_op[i] = np.interp(
+ height_ref,
+ height_simu,
+ pws_ip[i, :],
+ left=pws_ip[i, 0],
+ right=pws_ip[i, -1],
+ )
# return
return pws_op
-def gust_factor_ESDU(gd_c, gd_t):
- """
- gust_factor_ESDU: return a gust facto between gd_c and gd_t
- """
+def gust_factor_ESDU(gd_c, gd_t): # noqa: N802
+    """gust_factor_ESDU: return the gust factor ratio between durations gd_c and gd_t"""  # noqa: D400
# gust duration (sec)
- gd = [1.0, 2.0, 5.0, 10.0, 20.0,
- 50.0, 100.0, 200.0, 500.0, 1000.0,
- 2000.0, 3600.0]
+ gd = [
+ 1.0,
+ 2.0,
+ 5.0,
+ 10.0,
+ 20.0,
+ 50.0,
+ 100.0,
+ 200.0,
+ 500.0,
+ 1000.0,
+ 2000.0,
+ 3600.0,
+ ]
# gust factor w.r.t. 3600 sec
- gf = [1.59, 1.55, 1.47, 1.40, 1.32,
- 1.20, 1.15, 1.10, 1.055, 1.045,
- 1.02, 1.00]
+ gf = [1.59, 1.55, 1.47, 1.40, 1.32, 1.20, 1.15, 1.10, 1.055, 1.045, 1.02, 1.00]
# interpolation
- gf_t = np.interp(gd_t, gd, gf, left = gf[0], right = gf[-1]) \
- / np.interp(gd_c, gd, gf, left = gf[0], right = gf[-1])
+ gf_t = np.interp(gd_t, gd, gf, left=gf[0], right=gf[-1]) / np.interp(
+ gd_c, gd, gf, left=gf[0], right=gf[-1]
+ )
# return
- return gf_t
-
+ return gf_t # noqa: RET504
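
# A quick check of the interpolation above, assuming the 600 s simulation gust
# duration used elsewhere in this module and a 3 s target duration: gf(3 s)
# interpolates to about 1.52 and gf(600 s) to about 1.05, so the returned
# ratio is roughly 1.45.
example_factor = gust_factor_ESDU(gd_c=600.0, gd_t=3.0)
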
-def export_pws(stations, pws, output_dir, filename = 'EventGrid.csv'):
- print('ComputeIntensityMeasure: saving results.')
+def export_pws(stations, pws, output_dir, filename='EventGrid.csv'): # noqa: D103
+ print('ComputeIntensityMeasure: saving results.') # noqa: T201
# collecting site locations
lat = []
@@ -401,25 +490,17 @@ def export_pws(stations, pws, output_dir, filename = 'EventGrid.csv'):
# saving data
station_num = len(lat)
- csv_file = [str(x + 1)+'.csv' for x in range(station_num)]
- d = {
- 'GP_file': csv_file,
- 'Latitude': lat,
- 'Longitude': lon
- }
- df = pd.DataFrame.from_dict(d)
- df.to_csv(os.path.join(output_dir, filename), index = False)
+ csv_file = [str(x + 1) + '.csv' for x in range(station_num)]
+ d = {'GP_file': csv_file, 'Latitude': lat, 'Longitude': lon}
+ df = pd.DataFrame.from_dict(d) # noqa: PD901
+ df.to_csv(os.path.join(output_dir, filename), index=False) # noqa: PTH118
for i in range(station_num):
pws_op = [pws[0][i]]
if len(pws) > 1:
for j in range(len(pws) - 1):
- pws_op.append(pws[j + 1][i])
- d = {
- 'PWS': pws_op
- }
- df = pd.DataFrame.from_dict(d)
- df.to_csv(os.path.join(output_dir, csv_file[i]), index = False)
-
- print('ComputeIntensityMeasure: simulated wind speed field saved.')
-
+ pws_op.append(pws[j + 1][i]) # noqa: PERF401
+ d = {'PWS': pws_op}
+ df = pd.DataFrame.from_dict(d) # noqa: PD901
+ df.to_csv(os.path.join(output_dir, csv_file[i]), index=False) # noqa: PTH118
+ print('ComputeIntensityMeasure: simulated wind speed field saved.') # noqa: T201
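
# A hypothetical round trip of the files written by export_pws() above: the
# grid file lists one GP_file per station next to its coordinates, and each
# per-station CSV holds one 'PWS' value per scenario. The 'Output' directory
# name is an assumption made for illustration only.
import os
import pandas as pd

grid = pd.read_csv(os.path.join('Output', 'EventGrid.csv'))
for _, row in grid.iterrows():
    station_pws = pd.read_csv(os.path.join('Output', row['GP_file']))['PWS']
    print(row['Latitude'], row['Longitude'], station_pws.tolist())
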
diff --git a/modules/performRegionalEventSimulation/regionalWindField/CreateScenario.py b/modules/performRegionalEventSimulation/regionalWindField/CreateScenario.py
index 2f5d6d31e..8abcf54dd 100644
--- a/modules/performRegionalEventSimulation/regionalWindField/CreateScenario.py
+++ b/modules/performRegionalEventSimulation/regionalWindField/CreateScenario.py
@@ -1,5 +1,4 @@
-# -*- coding: utf-8 -*-
-#
+# # noqa: INP001, D100
# Copyright (c) 2018 Leland Stanford Junior University
# Copyright (c) 2018 The Regents of the University of California
#
@@ -38,15 +37,14 @@
# Kuanshi Zhong
#
-import os
-import subprocess
import json
-import random
+import os
+
import numpy as np
import pandas as pd
-def create_wind_scenarios(scenario_info, event_info, stations, data_dir):
+def create_wind_scenarios(scenario_info, event_info, stations, data_dir): # noqa: C901, D103
# Number of scenarios
source_num = scenario_info.get('Number', 1)
# Directly defining earthquake ruptures
@@ -58,70 +56,81 @@ def create_wind_scenarios(scenario_info, event_info, stations, data_dir):
lat.append(s['Latitude'])
lon.append(s['Longitude'])
# Station list
- station_list = {
- 'Latitude': lat,
- 'Longitude': lon
- }
+ station_list = {'Latitude': lat, 'Longitude': lon}
# Track data
try:
track_file = scenario_info['Storm'].get('Track')
- df = pd.read_csv(os.path.join(data_dir, track_file), header = None, index_col = None)
+ df = pd.read_csv( # noqa: PD901
+ os.path.join(data_dir, track_file), # noqa: PTH118
+ header=None,
+ index_col=None,
+ )
track = {
- 'Latitude': df.iloc[:, 0].values.tolist(),
- 'Longitude': df.iloc[:, 1].values.tolist()
+ 'Latitude': df.iloc[:, 0].values.tolist(), # noqa: PD011
+ 'Longitude': df.iloc[:, 1].values.tolist(), # noqa: PD011
}
- except:
- print('CreateScenario: error - no storm track provided or file format not accepted.')
+ except: # noqa: E722
+ print( # noqa: T201
+ 'CreateScenario: error - no storm track provided or file format not accepted.'
+ )
# Save Lat_w.csv
track_simu_file = scenario_info['Storm'].get('TrackSimu', None)
- if track_simu_file:
- df = pd.read_csv(os.path.join(data_dir, track_simu_file), header = None, index_col = None)
- track_simu = df.iloc[:, 0].values.tolist()
+ if track_simu_file:
+ df = pd.read_csv( # noqa: PD901
+ os.path.join(data_dir, track_simu_file), # noqa: PTH118
+ header=None,
+ index_col=None,
+ )
+ track_simu = df.iloc[:, 0].values.tolist() # noqa: PD011
else:
track_simu = track['Latitude']
# Reading Terrain info (if provided)
terrain_file = scenario_info.get('Terrain', None)
if terrain_file:
- with open(os.path.join(data_dir, terrain_file)) as f:
+ with open(os.path.join(data_dir, terrain_file)) as f: # noqa: PLW1514, PTH118, PTH123
terrain_data = json.load(f)
else:
terrain_data = []
# Parsing storm properties
param = []
try:
- param.append(scenario_info['Storm']['Landfall']['Latitude'])
+ param.append(scenario_info['Storm']['Landfall']['Latitude']) # noqa: FURB113
param.append(scenario_info['Storm']['Landfall']['Longitude'])
param.append(scenario_info['Storm']['Landfall']['LandingAngle'])
param.append(scenario_info['Storm']['Landfall']['Pressure'])
param.append(scenario_info['Storm']['Landfall']['Speed'])
param.append(scenario_info['Storm']['Landfall']['Radius'])
- except:
- print('CreateScenario: please provide all needed landfall properties.')
+ except: # noqa: E722
+ print('CreateScenario: please provide all needed landfall properties.') # noqa: T201
# Monte-Carlo
- #del_par = [0, 0, 0] # default
+ # del_par = [0, 0, 0] # default
# Parsing mesh configurations
- mesh_info = [1000., scenario_info['Mesh']['DivRad'], 1000000.]
- mesh_info.extend([0., scenario_info['Mesh']['DivDeg'], 360.])
+ mesh_info = [1000.0, scenario_info['Mesh']['DivRad'], 1000000.0]
+ mesh_info.extend([0.0, scenario_info['Mesh']['DivDeg'], 360.0])
# Wind speed measuring height
measure_height = event_info['IntensityMeasure']['MeasureHeight']
# Saving results
- scenario_data = dict()
+ scenario_data = dict() # noqa: C408
for i in range(source_num):
- scenario_data.update({i: {
- 'Type': 'Wind',
- 'CycloneParam': param,
- 'StormTrack': track,
- 'StormMesh': mesh_info,
- 'Terrain': terrain_data,
- 'TrackSimu': track_simu,
- 'StationList': station_list,
- 'MeasureHeight': measure_height
- }})
+ scenario_data.update(
+ {
+ i: {
+ 'Type': 'Wind',
+ 'CycloneParam': param,
+ 'StormTrack': track,
+ 'StormMesh': mesh_info,
+ 'Terrain': terrain_data,
+ 'TrackSimu': track_simu,
+ 'StationList': station_list,
+ 'MeasureHeight': measure_height,
+ }
+ }
+ )
# return
return scenario_data
# Using the properties of a historical storm to do simulation
- elif scenario_info['Generator'] == 'SimulationHist':
+ elif scenario_info['Generator'] == 'SimulationHist': # noqa: RET505
# Collecting site locations
lat = []
lon = []
@@ -129,59 +138,60 @@ def create_wind_scenarios(scenario_info, event_info, stations, data_dir):
lat.append(s['Latitude'])
lon.append(s['Longitude'])
# Station list
- station_list = {
- 'Latitude': lat,
- 'Longitude': lon
- }
+ station_list = {'Latitude': lat, 'Longitude': lon}
# Loading historical storm database
- df_hs = pd.read_csv(os.path.join(os.path.dirname(__file__),
- 'database/historical_storm/ibtracs.last3years.list.v04r00.csv'),
- header = [0,1], index_col = None)
+ df_hs = pd.read_csv(
+ os.path.join( # noqa: PTH118
+ os.path.dirname(__file__), # noqa: PTH120
+ 'database/historical_storm/ibtracs.last3years.list.v04r00.csv',
+ ),
+ header=[0, 1],
+ index_col=None,
+ )
# Storm name and year
try:
storm_name = scenario_info['Storm'].get('Name')
storm_year = scenario_info['Storm'].get('Year')
- except:
- print('CreateScenario: error - no storm name or year is provided.')
+ except: # noqa: E722
+ print('CreateScenario: error - no storm name or year is provided.') # noqa: T201
# Searching the storm
try:
df_chs = df_hs[df_hs[('NAME', ' ')] == storm_name]
df_chs = df_chs[df_chs[('SEASON', 'Year')] == storm_year]
- except:
- print('CreateScenario: error - the storm is not found.')
+ except: # noqa: E722
+ print('CreateScenario: error - the storm is not found.') # noqa: T201
if len(df_chs.values) == 0:
- print('CreateScenario: error - the storm is not found.')
+ print('CreateScenario: error - the storm is not found.') # noqa: T201
return 1
# Collecting storm properties
track_lat = []
track_lon = []
- for x in df_chs[('USA_LAT', 'degrees_north')].values.tolist():
+ for x in df_chs[('USA_LAT', 'degrees_north')].values.tolist(): # noqa: PD011
if x != ' ':
- track_lat.append(float(x))
- for x in df_chs[('USA_LON', 'degrees_east')].values.tolist():
+ track_lat.append(float(x)) # noqa: PERF401
+ for x in df_chs[('USA_LON', 'degrees_east')].values.tolist(): # noqa: PD011
if x != ' ':
- track_lon.append(float(x))
- # If the default option (USA_LAT and USA_LON) is not available, swithcing to LAT and LON
+ track_lon.append(float(x)) # noqa: PERF401
+ # If the default option (USA_LAT and USA_LON) is not available, switching to LAT and LON
if len(track_lat) == 0:
- print('CreateScenario: warning - the USA_LAT and USA_LON are not available, switching to LAT and LON.')
- for x in df_chs[('LAT', 'degrees_north')].values.tolist():
+ print( # noqa: T201
+ 'CreateScenario: warning - the USA_LAT and USA_LON are not available, switching to LAT and LON.'
+ )
+ for x in df_chs[('LAT', 'degrees_north')].values.tolist(): # noqa: PD011
if x != ' ':
- track_lat.append(float(x))
- for x in df_chs[('LON', 'degrees_east')].values.tolist():
+ track_lat.append(float(x)) # noqa: PERF401
+ for x in df_chs[('LON', 'degrees_east')].values.tolist(): # noqa: PD011
if x != ' ':
- track_lon.append(float(x))
+ track_lon.append(float(x)) # noqa: PERF401
if len(track_lat) == 0:
- print('CreateScenario: error - no track data is found.')
+ print('CreateScenario: error - no track data is found.') # noqa: T201
return 1
# Saving the track
- track = {
- 'Latitude': track_lat,
- 'Longitude': track_lon
- }
+ track = {'Latitude': track_lat, 'Longitude': track_lon}
# Reading Terrain info (if provided)
terrain_file = scenario_info.get('Terrain', None)
if terrain_file:
- with open(os.path.join(data_dir, terrain_file)) as f:
+ with open(os.path.join(data_dir, terrain_file)) as f: # noqa: PLW1514, PTH118, PTH123
terrain_data = json.load(f)
else:
terrain_data = []
@@ -189,57 +199,88 @@ def create_wind_scenarios(scenario_info, event_info, stations, data_dir):
dist2land = []
for x in df_chs[('DIST2LAND', 'km')]:
if x != ' ':
- dist2land.append(x)
+ dist2land.append(x) # noqa: PERF401
if len(track_lat) == 0:
- print('CreateScenario: error - no landing information is found.')
+ print('CreateScenario: error - no landing information is found.') # noqa: T201
return 1
- if (0 not in dist2land):
- print('CreateScenario: warning - no landing fall is found, using the closest location.')
+ if 0 not in dist2land:
+ print( # noqa: T201
+                'CreateScenario: warning - no landfall is found, using the closest location.'
+ )
tmploc = dist2land.index(min(dist2land))
else:
- tmploc = dist2land.index(0) # the first landing point in case the storm sway back and forth
+ tmploc = dist2land.index(
+ 0
+            )  # the first landing point in case the storm sways back and forth
# simulation track
track_simu_file = scenario_info['Storm'].get('TrackSimu', None)
- if track_simu_file:
+ if track_simu_file:
try:
- df = pd.read_csv(os.path.join(data_dir, track_simu_file), header = None, index_col = None)
- track_simu = df.iloc[:, 0].values.tolist()
- except:
- print('CreateScenario: warning - TrackSimu file not found, using the full track.')
+ df = pd.read_csv( # noqa: PD901
+ os.path.join(data_dir, track_simu_file), # noqa: PTH118
+ header=None,
+ index_col=None,
+ )
+ track_simu = df.iloc[:, 0].values.tolist() # noqa: PD011
+ except: # noqa: E722
+ print( # noqa: T201
+ 'CreateScenario: warning - TrackSimu file not found, using the full track.'
+ )
track_simu = track_lat
else:
- print('CreateScenario: warning - no truncation defined, using the full track.')
- #tmp = track_lat
- #track_simu = tmp[max(0, tmploc - 5): len(dist2land) - 1]
- #print(track_simu)
+ print( # noqa: T201
+ 'CreateScenario: warning - no truncation defined, using the full track.'
+ )
+ # tmp = track_lat
+ # track_simu = tmp[max(0, tmploc - 5): len(dist2land) - 1]
+ # print(track_simu)
track_simu = track_lat
# Reading data
try:
landfall_lat = float(df_chs[('USA_LAT', 'degrees_north')].iloc[tmploc])
landfall_lon = float(df_chs[('USA_LON', 'degrees_east')].iloc[tmploc])
- except:
- # If the default option (USA_LAT and USA_LON) is not available, swithcing to LAT and LON
+ except: # noqa: E722
+ # If the default option (USA_LAT and USA_LON) is not available, switching to LAT and LON
landfall_lat = float(df_chs[('LAT', 'degrees_north')].iloc[tmploc])
landfall_lon = float(df_chs[('LON', 'degrees_east')].iloc[tmploc])
try:
landfall_ang = float(df_chs[('STORM_DIR', 'degrees')].iloc[tmploc])
- except:
- print('CreateScenario: error - no landing angle is found.')
- if landfall_ang > 180.0:
- landfall_ang = landfall_ang - 360.0
- landfall_prs = 1013.0 - np.min([float(x) for x in df_chs[('USA_PRES', 'mb')].iloc[tmploc - 5: ].values.tolist() if x != ' '])
- landfall_spd = float(df_chs[('STORM_SPEED', 'kts')].iloc[tmploc]) * 0.51444 # convert knots/s to km/s
+ except: # noqa: E722
+ print('CreateScenario: error - no landing angle is found.') # noqa: T201
+ if landfall_ang > 180.0: # noqa: PLR2004
+ landfall_ang = landfall_ang - 360.0 # noqa: PLR6104
+ landfall_prs = (
+ 1013.0
+ - np.min(
+ [
+ float(x)
+ for x in df_chs[('USA_PRES', 'mb')] # noqa: PD011
+ .iloc[tmploc - 5 :]
+ .values.tolist()
+ if x != ' '
+ ]
+ )
+ )
+ landfall_spd = (
+ float(df_chs[('STORM_SPEED', 'kts')].iloc[tmploc]) * 0.51444
+        )  # convert knots to m/s
try:
- landfall_rad = float(df_chs[('USA_RMW', 'nmile')].iloc[tmploc]) * 1.60934 # convert nmile to km
- except:
+ landfall_rad = (
+ float(df_chs[('USA_RMW', 'nmile')].iloc[tmploc]) * 1.60934
+ ) # convert nmile to km
+ except: # noqa: E722
# No available radius of maximum wind is found
- print('CreateScenario: warning - swithcing to REUNION_RMW.')
+ print('CreateScenario: warning - switching to REUNION_RMW.') # noqa: T201
try:
- # If the default option (USA_RMW) is not available, swithcing to REUNION_RMW
- landfall_rad = float(df_chs[('REUNION_RMW', 'nmile')].iloc[tmploc]) * 1.60934 # convert nmile to km
- except:
+ # If the default option (USA_RMW) is not available, switching to REUNION_RMW
+ landfall_rad = (
+ float(df_chs[('REUNION_RMW', 'nmile')].iloc[tmploc]) * 1.60934
+ ) # convert nmile to km
+ except: # noqa: E722
# No available radius of maximum wind is found
- print('CreateScenario: warning - no available radius of maximum wind is found, using a default 50 km.')
+ print( # noqa: T201
+ 'CreateScenario: warning - no available radius of maximum wind is found, using a default 50 km.'
+ )
landfall_rad = 50
param = []
param.append(landfall_lat)
@@ -249,27 +290,31 @@ def create_wind_scenarios(scenario_info, event_info, stations, data_dir):
param.append(landfall_spd)
param.append(landfall_rad)
# Monte-Carlo
- #del_par = [0, 0, 0] # default
+ # del_par = [0, 0, 0] # default
# Parsing mesh configurations
- mesh_info = [1000., scenario_info['Mesh']['DivRad'], 1000000.]
- mesh_info.extend([0., scenario_info['Mesh']['DivDeg'], 360.])
+ mesh_info = [1000.0, scenario_info['Mesh']['DivRad'], 1000000.0]
+ mesh_info.extend([0.0, scenario_info['Mesh']['DivDeg'], 360.0])
# Wind speed measuring height
measure_height = event_info['IntensityMeasure']['MeasureHeight']
# Saving results
- scenario_data = dict()
+ scenario_data = dict() # noqa: C408
for i in range(source_num):
- scenario_data.update({i: {
- 'Type': 'Wind',
- 'CycloneParam': param,
- 'StormTrack': track,
- 'StormMesh': mesh_info,
- 'Terrain': terrain_data,
- 'TrackSimu': track_simu,
- 'StationList': station_list,
- 'MeasureHeight': measure_height
- }})
+ scenario_data.update(
+ {
+ i: {
+ 'Type': 'Wind',
+ 'CycloneParam': param,
+ 'StormTrack': track,
+ 'StormMesh': mesh_info,
+ 'Terrain': terrain_data,
+ 'TrackSimu': track_simu,
+ 'StationList': station_list,
+ 'MeasureHeight': measure_height,
+ }
+ }
+ )
# return
return scenario_data
-
+
else:
- print('CreateScenario: currently only supporting Simulation generator.')
+ print('CreateScenario: currently only supporting Simulation generator.') # noqa: T201, RET503
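
# A hedged sketch of the scenario_info consumed by create_wind_scenarios()
# above for the 'Simulation' generator. Only keys actually read in the function
# are listed; every value and file name is a hypothetical placeholder, and
# `stations` is assumed to be the dictionary returned by CreateStation.create_stations().
scenario_info = {
    'Generator': 'Simulation',
    'Number': 1,
    'Storm': {
        'Track': 'storm_track.csv',      # two headerless columns: latitude, longitude
        'TrackSimu': 'track_simu.csv',   # optional: one column of simulated track latitudes
        'Landfall': {
            'Latitude': 29.5,
            'Longitude': -90.0,
            'LandingAngle': 20.0,
            'Pressure': 50.0,
            'Speed': 25.0,
            'Radius': 40.0,
        },
    },
    'Mesh': {'DivRad': 1000.0, 'DivDeg': 1.0},
    'Terrain': 'terrain.geojson',        # optional geojson of z0 polygons
}
event_info = {'IntensityMeasure': {'MeasureHeight': [10.0, 10.0, 10.0]}}  # passed through to the wind model
scenario_data = create_wind_scenarios(scenario_info, event_info, stations, 'Input')
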
diff --git a/modules/performRegionalEventSimulation/regionalWindField/CreateStation.py b/modules/performRegionalEventSimulation/regionalWindField/CreateStation.py
index 55d6bea52..a03a64c08 100644
--- a/modules/performRegionalEventSimulation/regionalWindField/CreateStation.py
+++ b/modules/performRegionalEventSimulation/regionalWindField/CreateStation.py
@@ -1,5 +1,4 @@
-# -*- coding: utf-8 -*-
-#
+# # noqa: INP001, D100
# Copyright (c) 2018 Leland Stanford Junior University
# Copyright (c) 2018 The Regents of the University of California
#
@@ -39,67 +38,63 @@
#
import json
+
import numpy as np
import pandas as pd
-def get_label(options, labels, label_name):
+def get_label(options, labels, label_name): # noqa: D103
+ for option in options:
+ if option in labels:
+ labels = labels[labels != option]
+ return option, labels
- for option in options:
- if option in labels:
- labels = labels[labels != option]
- return option, labels
-
- print(f'WARNING: Could not identify the label for the {label_name}')
+ print(f'WARNING: Could not identify the label for the {label_name}') # noqa: T201, RET503
def create_stations(input_file, output_file, min_id, max_id):
- """
- Reading input csv file for stations and saving data to output json file
+ """Reading input csv file for stations and saving data to output json file
Input:
input_file: the filename of the station csv file
output_file: the filename of the output json file
min_id: the min ID to start
max_id: the max ID to end
Output:
- run_tag: 0 - success, 1 - input failure, 2 - outupt failure
- """
- # Reading csv data
- run_tag = 1
- try:
- stn_df = pd.read_csv(input_file, header=0, index_col=0)
- except:
- run_tag = 0
- return run_tag
- # Max and Min IDs
- stn_ids_min = np.min(stn_df.index.values)
- stn_ids_max = np.max(stn_df.index.values)
- if min_id is None:
- min_id = stn_ids_min
- if max_id is None:
- max_id = stn_ids_max
- min_id = np.max([stn_ids_min, min_id])
- max_id = np.min([stn_ids_max, max_id])
- selected_stn = stn_df.loc[min_id:max_id, :]
- # Extracting data
- labels = selected_stn.columns.values
- lon_label, labels = get_label(['Longitude', 'longitude', 'lon', 'Lon'], labels, 'longitude')
- lat_label, labels = get_label(['Latitude', 'latitude', 'lat', 'Lat'], labels, 'latitude')
- stn_file = {
- 'Stations': []
- }
- for stn_id, stn in selected_stn.iterrows():
- # Collecting station data
- tmp = {
- 'ID': stn_id,
- 'Longitude': stn[lon_label],
- 'Latitude': stn[lat_label]
- }
- stn_file['Stations'].append(tmp)
- # Saving data to the output file
- if output_file:
- with open(output_file, 'w') as f:
- json.dump(stn_file, f, indent=2)
- # Returning the final run state
- return stn_file
-
+    run_tag: the station dictionary on success, 0 if the input csv file cannot be read
+ """ # noqa: D205, D400, D401
+ # Reading csv data
+ run_tag = 1
+ try:
+ stn_df = pd.read_csv(input_file, header=0, index_col=0)
+ except: # noqa: E722
+ run_tag = 0
+ return run_tag # noqa: RET504
+ # Max and Min IDs
+ stn_ids_min = np.min(stn_df.index.values)
+ stn_ids_max = np.max(stn_df.index.values)
+ if min_id is None:
+ min_id = stn_ids_min
+ if max_id is None:
+ max_id = stn_ids_max
+ min_id = np.max([stn_ids_min, min_id])
+ max_id = np.min([stn_ids_max, max_id])
+ selected_stn = stn_df.loc[min_id:max_id, :]
+ # Extracting data
+ labels = selected_stn.columns.values # noqa: PD011
+ lon_label, labels = get_label(
+ ['Longitude', 'longitude', 'lon', 'Lon'], labels, 'longitude'
+ )
+ lat_label, labels = get_label(
+ ['Latitude', 'latitude', 'lat', 'Lat'], labels, 'latitude'
+ )
+ stn_file = {'Stations': []}
+ for stn_id, stn in selected_stn.iterrows():
+ # Collecting station data
+ tmp = {'ID': stn_id, 'Longitude': stn[lon_label], 'Latitude': stn[lat_label]}
+ stn_file['Stations'].append(tmp)
+ # Saving data to the output file
+ if output_file:
+ with open(output_file, 'w') as f: # noqa: PLW1514, PTH123
+ json.dump(stn_file, f, indent=2)
+ # Returning the final run state
+ return stn_file
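
# A minimal usage sketch of create_stations() above, assuming a hypothetical
# 'sites.csv' whose first column is the station ID and which carries latitude
# and longitude columns under any of the spellings handled by get_label().
stations = create_stations('sites.csv', 'stations.json', min_id=None, max_id=None)
if stations:
    print(len(stations['Stations']), 'stations loaded')
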
diff --git a/modules/performRegionalEventSimulation/regionalWindField/HurricaneSimulation.py b/modules/performRegionalEventSimulation/regionalWindField/HurricaneSimulation.py
index b71c1143d..8f68aba05 100644
--- a/modules/performRegionalEventSimulation/regionalWindField/HurricaneSimulation.py
+++ b/modules/performRegionalEventSimulation/regionalWindField/HurricaneSimulation.py
@@ -1,5 +1,4 @@
-# -*- coding: utf-8 -*-
-#
+# # noqa: INP001, D100
# Copyright (c) 2021 Leland Stanford Junior University
# Copyright (c) 2021 The Regents of the University of California
#
@@ -38,28 +37,29 @@
# Kuanshi Zhong
#
+import argparse
+import json
+import logging
import os
import sys
-import argparse, posixpath, json
-import numpy as np
-import pandas as pd
-import logging
-from CreateStation import *
-from CreateScenario import *
-from ComputeIntensityMeasure import *
-if __name__ == '__main__':
+from ComputeIntensityMeasure import * # noqa: F403
+from CreateScenario import * # noqa: F403
+from CreateStation import * # noqa: F403
+if __name__ == '__main__':
logger = logging.getLogger()
- handlerStream = logging.StreamHandler(sys.stdout)
- formatter = logging.Formatter('%(asctime)s - %(name)s - %(levelname)s - %(message)s')
+ handlerStream = logging.StreamHandler(sys.stdout) # noqa: N816
+ formatter = logging.Formatter(
+ '%(asctime)s - %(name)s - %(levelname)s - %(message)s'
+ )
handlerStream.setFormatter(formatter)
logger.addHandler(handlerStream)
parser = argparse.ArgumentParser()
parser.add_argument('--hazard_config')
args = parser.parse_args()
- with open(args.hazard_config) as f:
+ with open(args.hazard_config) as f: # noqa: PLW1514, PTH123
hazard_info = json.load(f)
# Directory
@@ -68,58 +68,74 @@
input_dir = dir_info['Input']
output_dir = dir_info['Output']
try:
- os.mkdir(f"{output_dir}")
- except:
- print('HurricaneSimulation: output folder already exists.')
+ os.mkdir(f'{output_dir}') # noqa: PTH102
+ except: # noqa: E722
+ print('HurricaneSimulation: output folder already exists.') # noqa: T201
# Sites and stations
- print('HurricaneSimulation: creating stations.')
+ print('HurricaneSimulation: creating stations.') # noqa: T201
site_info = hazard_info['Site']
if site_info['Type'] == 'From_CSV':
- input_file = os.path.join(input_dir,site_info['input_file'])
- output_file = site_info.get('output_file',False)
+ input_file = os.path.join(input_dir, site_info['input_file']) # noqa: PTH118
+ output_file = site_info.get('output_file', False)
if output_file:
- output_file = os.path.join(output_dir, output_file)
- min_ID = site_info['min_ID']
- max_ID = site_info['max_ID']
+ output_file = os.path.join(output_dir, output_file) # noqa: PTH118
+ min_ID = site_info['min_ID'] # noqa: N816
+ max_ID = site_info['max_ID'] # noqa: N816
# Creating stations from the csv input file
- stations = create_stations(input_file, output_file, min_ID, max_ID)
+ stations = create_stations(input_file, output_file, min_ID, max_ID) # noqa: F405
if stations:
- print('HurricaneSimulation: stations created.')
+ print('HurricaneSimulation: stations created.') # noqa: T201
else:
- print('HurricaneSimulation: please check the "Input" directory in the configuration json file.')
- exit()
+ print( # noqa: T201
+ 'HurricaneSimulation: please check the "Input" directory in the configuration json file.'
+ )
+ exit() # noqa: PLR1722
# Scenarios
- print('HurricaneSimulation: creating scenarios.')
+ print('HurricaneSimulation: creating scenarios.') # noqa: T201
scenario_info = hazard_info['Scenario']
if scenario_info['Type'] == 'Wind':
# Creating wind scenarios
event_info = hazard_info['Event']
- scenarios = create_wind_scenarios(scenario_info, event_info, stations, input_dir)
+ scenarios = create_wind_scenarios( # noqa: F405
+ scenario_info, event_info, stations, input_dir
+ )
else:
- print('HurricaneSimulation: currently only supports wind simulations.')
- print('HurricaneSimulation: scenarios created.')
+ print('HurricaneSimulation: currently only supports wind simulations.') # noqa: T201
+ print('HurricaneSimulation: scenarios created.') # noqa: T201
# Computing intensity measures
- print('HurricaneSimulation: computing intensity measures.')
+ print('HurricaneSimulation: computing intensity measures.') # noqa: T201
if scenario_info['Type'] == 'Wind':
if 'Simulation' in scenario_info['Generator']:
if scenario_info['ModelType'] == 'LinearAnalyticalPy':
# simulating storm
- storm_simu = simulate_storm(scenarios, event_info, 'LinearAnalytical')
+ storm_simu = simulate_storm( # noqa: F405
+ scenarios, event_info, 'LinearAnalytical'
+ )
elif scenario_info['ModelType'] == 'LinearAnalytical':
# simulation storm (c++ binary)
- storm_simu = simulate_storm_cpp(site_info, scenario_info, scenarios, event_info, 'LinearAnalytical', dir_info)
+ storm_simu = simulate_storm_cpp( # noqa: F405
+ site_info,
+ scenario_info,
+ scenarios,
+ event_info,
+ 'LinearAnalytical',
+ dir_info,
+ )
else:
- print('HurricaneSimulation: currently supporting LinearAnalytical model type.')
+ print( # noqa: T201
+                    'HurricaneSimulation: currently only supporting the LinearAnalytical model type.'
+ )
# converting peak wind speed
- pws = convert_wind_speed(event_info, storm_simu)
+ pws = convert_wind_speed(event_info, storm_simu) # noqa: F405
# saving results
- export_pws(stations, pws, output_dir, filename = 'EventGrid.csv')
+ export_pws(stations, pws, output_dir, filename='EventGrid.csv') # noqa: F405
else:
- print('HurricaneSimulation: currently only supporting wind simulations.')
+ print('HurricaneSimulation: currently only supporting wind simulations.') # noqa: T201
else:
- print('HurricaneSimulation currently only supports earthquake and wind simulations.')
- print('HurricaneSimulation: intensity measures computed.')
-
+ print( # noqa: T201
+        'HurricaneSimulation: currently only supports earthquake and wind simulations.'
+ )
+ print('HurricaneSimulation: intensity measures computed.') # noqa: T201
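
# A hedged sketch of the hazard_config JSON consumed above (run as
# `python HurricaneSimulation.py --hazard_config hazard_config.json`). Only the
# keys read in this script are shown; file names and IDs are hypothetical, and
# the directory block feeding dir_info is omitted because its top-level key is
# not visible here.
hazard_config_sketch = {
    'Site': {
        'Type': 'From_CSV',
        'input_file': 'sites.csv',
        'output_file': 'stations.json',
        'min_ID': 1,
        'max_ID': 100,
    },
    'Scenario': {
        'Type': 'Wind',
        'Generator': 'Simulation',        # must contain 'Simulation'
        'ModelType': 'LinearAnalytical',  # or 'LinearAnalyticalPy'
    },
    'Event': {
        'IntensityMeasure': {},           # fields read in ComputeIntensityMeasure.py
    },
}
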
diff --git a/modules/performRegionalEventSimulation/regionalWindField/WindFieldModel.cpp b/modules/performRegionalEventSimulation/regionalWindField/WindFieldModel.cpp
index 4aa483181..533c494b2 100644
--- a/modules/performRegionalEventSimulation/regionalWindField/WindFieldModel.cpp
+++ b/modules/performRegionalEventSimulation/regionalWindField/WindFieldModel.cpp
@@ -205,7 +205,7 @@ int WindFieldModel::ConfigSimu(std::string config_file, std::string stn_file,
{
this->Lat_w(i) = tmp_latw[i];
}
- // Calculate longitude of refence values (Long_w)
+ // Calculate longitude of reference values (Long_w)
// This is done by interpolating
int value;
for (int ii = 0; ii < num_latw; ii++)
@@ -247,7 +247,7 @@ int WindFieldModel::PertubPath(std::string dpath_file)
std::cout << "WinFieldModel: error - can't read the perturbation json file" << std::endl;
}
- // \delta Anlge
+ // \delta Angle
this->del_par(2) = json_number_value(json_object_get(dpath_doc, "dAngle"));
// \delta Latitude
this->del_par(0) = json_number_value(json_object_get(dpath_doc, "dLatitude"));
@@ -275,7 +275,7 @@ int WindFieldModel::DefineTern(std::string refz0_file)
json_t *feat = json_object_get(z0_doc, "features");
this->num_region = int(json_array_size(feat));
- // initialzing z0
+ // initializing z0
this->z0r = Eigen::ArrayXd::Zero(this->num_region);
this->Wr_sizes = Eigen::ArrayXd::Zero(this->num_region);
// getting z0 values and sizes
@@ -720,4 +720,4 @@ int main(int argc, char *argv[])
a.ComputeStationZ0(z0_dir);
a.SimulateWind(pws_dir);
return 0;
-}
\ No newline at end of file
+}
diff --git a/modules/performRegionalEventSimulation/regionalWindField/WindFieldModel.h b/modules/performRegionalEventSimulation/regionalWindField/WindFieldModel.h
index 908d08cd7..8a4cede10 100644
--- a/modules/performRegionalEventSimulation/regionalWindField/WindFieldModel.h
+++ b/modules/performRegionalEventSimulation/regionalWindField/WindFieldModel.h
@@ -28,12 +28,12 @@ class WindFieldModel
// delta_p file
// delta_p(0): inner radius of the meshed storm cycle
// delta_p(1): division size along radius
- // delta_p(2): outter radius of the meshed storm cycle
+ // delta_p(2): outer radius of the meshed storm cycle
// delta_p(3): starting angle of the meshed storm cycle
// delta_p(4): angle interval
// delta_p(5): ending angle of the meshed storm cycle
// delta_p(6): wind speed evaluation height (bottom)
- // delta_p(7): heigth interval
+ // delta_p(7): height interval
// delta_p(8): wind speed evaluation height (top)
Eigen::ArrayXd delta_p;
@@ -89,4 +89,4 @@ class WindFieldModel
};
-#endif
\ No newline at end of file
+#endif
diff --git a/modules/performRegionalEventSimulation/regionalWindField/WindFieldSimulation.py b/modules/performRegionalEventSimulation/regionalWindField/WindFieldSimulation.py
index 156703792..d74a470a2 100644
--- a/modules/performRegionalEventSimulation/regionalWindField/WindFieldSimulation.py
+++ b/modules/performRegionalEventSimulation/regionalWindField/WindFieldSimulation.py
@@ -1,5 +1,4 @@
-# -*- coding: utf-8 -*-
-#
+# # noqa: INP001, D100
# Copyright (c) 2018 Leland Stanford Junior University
# Copyright (c) 2018 The Regents of the University of California
#
@@ -40,24 +39,25 @@
# Frank Mckenna
# Sanjay Govindjee
#
-# Special thanks to the original authors Snaiki and Wu for
+# Special thanks to the original authors Snaiki and Wu for
# sharing the Matlab scripts
# Reference:
# 1. Snaiki, R. and Wu, T. (2017). Modeling tropical cyclone boundary layer: Height-
-# resolving pressure and wind fields. Journal of Wind Engineering and Industrial
+# resolving pressure and wind fields. Journal of Wind Engineering and Industrial
# Aerodynamics, 170, pp. 18-27.
-# 2. Snaiki, R. and Wu, T. (2017). A linear height-resolving wind field model for
-# tropical cyclone boundary layer. Journal of Wind Engineering and Industrial
+# 2. Snaiki, R. and Wu, T. (2017). A linear height-resolving wind field model for
+# tropical cyclone boundary layer. Journal of Wind Engineering and Industrial
# Aerodynamics, 171, pp. 248-260.
+from itertools import starmap
+
import numpy as np
from shapely.geometry import Point, Polygon
-class LinearAnalyticalModel_SnaikiWu_2017:
- def __init__(self, cyclone_param = [], storm_track = []):
- """
- __init__: initializing the tropical cyclone
+class LinearAnalyticalModel_SnaikiWu_2017: # noqa: D101
+ def __init__(self, cyclone_param=[], storm_track=[]): # noqa: B006
+ """__init__: initializing the tropical cyclone
cyclone_param: 6-dimensional array
- cyclone_param[0]: landfall Latitude
- cyclone_param[1]: landfall Longitude
@@ -65,10 +65,10 @@ def __init__(self, cyclone_param = [], storm_track = []):
         - cyclone_param[3]: central pressure difference (hPa)
         - cyclone_param[4]: moving speed (km/h)
         - cyclone_param[5]: cyclone radius of the maximum winds (km)
- storm_track:
+ storm_track:
- storm_track['Latitude']: latitude values of the storm track
         - storm_track['Longitude']: longitude values of the storm track
- """
+ """ # noqa: D205, D400
# constants
self.R = 6371.0 * 1e3
self.EDDY_VISCOCITY = 75.0
@@ -85,20 +85,30 @@ def __init__(self, cyclone_param = [], storm_track = []):
self.cyclone_sped = cyclone_param[4] * 1000.0 / 3600.0
self.cyclone_radi = cyclone_param[5]
self.cyclone_radm = self.cyclone_radi * 1000.0
- self.Holland_B = 1.38 + 0.00184 * self.cyclone_pres / 100.0 - 0.00309 * self.cyclone_radi
- except:
- print('WindFieldSimulaiton: please check the cyclone_param input.')
+ self.Holland_B = (
+ 1.38
+ + 0.00184 * self.cyclone_pres / 100.0
+ - 0.00309 * self.cyclone_radi
+ )
+ except: # noqa: E722
+            print('WindFieldSimulation: please check the cyclone_param input.')  # noqa: T201
# saving storm track data
try:
self.track_lat = storm_track['Latitude']
self.track_lon = storm_track['Longitude']
- if (len(self.track_lat) != len(self.track_lon)):
- print('WindFieldSimulation: warning - storm track Latitude and Longitude sizes are different, data truncated.')
- self.track_lat = self.track_lat[0:int(min(len(self.track_lat), len(self.track_lon)))]
- self.track_lon = self.track_lon[0:int(min(len(self.track_lat), len(self.track_lon)))]
- except:
- print('WindFieldSimulaiton: please check the strom_track input.')
+ if len(self.track_lat) != len(self.track_lon):
+ print( # noqa: T201
+ 'WindFieldSimulation: warning - storm track Latitude and Longitude sizes are different, data truncated.'
+ )
+ self.track_lat = self.track_lat[
+ 0 : int(min(len(self.track_lat), len(self.track_lon)))
+ ]
+ self.track_lon = self.track_lon[
+ 0 : int(min(len(self.track_lat), len(self.track_lon)))
+ ]
+ except: # noqa: E722
+            print('WindFieldSimulation: please check the storm_track input.')  # noqa: T201
# initiation
self.station_num = 0
@@ -106,11 +116,7 @@ def __init__(self, cyclone_param = [], storm_track = []):
'Latitude': [],
'Longitude': [],
'z0': [],
- 'PWS': {
- 'height': [],
- 'duration': 600.0,
- 'windspeed': []
- }
+ 'PWS': {'height': [], 'duration': 600.0, 'windspeed': []},
}
self.terrain_num = 0
self.terrain_poly = []
@@ -121,101 +127,102 @@ def __init__(self, cyclone_param = [], storm_track = []):
self.zp = []
self.mesh_info = []
-
def set_delta_path(self, delta_path):
- """
- set_delta_path: perturbing the path coordiates and heading angle of the storm track
- """
- if (len(delta_path) == 3):
+ """set_delta_path: perturbing the path coordinates and heading angle of the storm track""" # noqa: D400
+ if len(delta_path) == 3: # noqa: PLR2004
self.delta_path = delta_path
else:
- print('WindFieldSimulation: the delta_path should have a size of 3, default delta_path used.')
+ print( # noqa: T201
+ 'WindFieldSimulation: the delta_path should have a size of 3, default delta_path used.'
+ )
-
def set_delta_feat(self, delta_feat):
- """
- set_delta_feat: perturbing the central pressure difference, traslational speed, and max-wind-speed radius
- """
- if (len(delta_feat) == 3):
+        """set_delta_feat: perturbing the central pressure difference, translational speed, and max-wind-speed radius"""  # noqa: D400
+ if len(delta_feat) == 3: # noqa: PLR2004
self.cyclone_pres = delta_feat[0] * 100.0
self.cyclone_sped = delta_feat[1] * 1000.0 / 3600.0
self.cyclone_radi = delta_feat[2]
self.cyclone_radm = self.cyclone_radi * 1000.0
- self.Holland_B = 1.38 + 0.00184 * self.cyclone_pres / 100.0 - 0.00309 * self.cyclone_radi
+ self.Holland_B = (
+ 1.38
+ + 0.00184 * self.cyclone_pres / 100.0
+ - 0.00309 * self.cyclone_radi
+ )
else:
- print('WindFieldSimulation: the delta_feat should have a size of 3, default delta_feat used.')
+ print( # noqa: T201
+ 'WindFieldSimulation: the delta_feat should have a size of 3, default delta_feat used.'
+ )
-
def __interp_z0(self, lat, lon):
- """
- __interp_z0: finding the z0 at (lat, lon) by interpolating reference terrain polygons
- """
+ """__interp_z0: finding the z0 at (lat, lon) by interpolating reference terrain polygons""" # noqa: D400
z0 = []
- if (not self.terrain_z0):
+ if not self.terrain_z0:
# no reference terrain provided, using default reference z0 = 0.03
z0 = 0.03
else:
- #pt = Point(lat, lon)
+ # pt = Point(lat, lon)
pt = Point(lon, lat)
for p, z in zip(self.terrain_poly, self.terrain_z0):
if pt.within(p):
z0 = z
- if (not z0):
+ if not z0:
z0 = 0.01
# return
return z0
-
def add_reference_terrain(self, terrain_info):
- """
- add_reference_terrainL specifying reference z0 values for a set of polygons
+        """add_reference_terrain: specifying reference z0 values for a set of polygons
terrain_info: geojson formatted polygon and z0 data
- """
+ """ # noqa: D205, D400
for p in terrain_info['features']:
- if (p['geometry']['type'] == 'Polygon'):
+ if p['geometry']['type'] == 'Polygon':
# creating a new polygon
new_poly = Polygon(p['geometry']['coordinates'][0])
self.terrain_poly.append(new_poly)
self.terrain_z0.append(p['properties']['z0'])
self.terrain_num += 1
-
def set_cyclone_mesh(self, mesh_info):
- """
- set_cyclone_meesh: meshing the cyclone in radius and cycle
- mesh_info[0]: interal R
+        """set_cyclone_mesh: meshing the cyclone in radius and angle
+        mesh_info[0]: internal R
mesh_info[1]: interval delta_R
mesh_info[2]: external R
mesh_info[3]: starting angle (usually 0)
mesh_info[4]: interval angle
mesh_info[5]: ending angle (usually 360)
- """
+ """ # noqa: D205, D400
try:
self.mesh_info = mesh_info
- self.r = np.arange(mesh_info[0], mesh_info[2] + mesh_info[1], mesh_info[1])
- self.theta = np.arange(mesh_info[3], mesh_info[5] + mesh_info[4], mesh_info[4])
- print('WindFieldSimulation: cyclone meshed.')
- except:
- print('WindFieldSimulation: input format error in set_cyclone_mesh.')
+ self.r = np.arange(
+ mesh_info[0], mesh_info[2] + mesh_info[1], mesh_info[1]
+ )
+ self.theta = np.arange(
+ mesh_info[3], mesh_info[5] + mesh_info[4], mesh_info[4]
+ )
+ print('WindFieldSimulation: cyclone meshed.') # noqa: T201
+ except: # noqa: E722
+ print('WindFieldSimulation: input format error in set_cyclone_mesh.') # noqa: T201
-
def set_track_mesh(self, mesh_lat):
- """
- set_track_meesh: meshing the storm track
+        """set_track_mesh: meshing the storm track
mesh_lat[0]: starting latitude value of the meshed track
mesh_lat[1]: interval latitude value
mesh_lat[2]: ending latitude value of the meshed track
- """
+ """ # noqa: D205, D400
try:
lat0 = mesh_lat[0]
dlat = mesh_lat[1]
lat1 = mesh_lat[2]
- except:
- print('WindFieldSimulation: input format error in set_track_mesh.')
+ except: # noqa: E722
+ print('WindFieldSimulation: input format error in set_track_mesh.') # noqa: T201
# boundary checks
- if (max(lat0, lat1) > max(self.track_lat)) or (min(lat0, lat1) < min(self.track_lat)):
- print('WindFieldSimulation: warning - forcing the track mesh consistent with the original track boundary.')
+ if (max(lat0, lat1) > max(self.track_lat)) or (
+ min(lat0, lat1) < min(self.track_lat)
+ ):
+ print( # noqa: T201
+ 'WindFieldSimulation: warning - forcing the track mesh consistent with the original track boundary.'
+ )
lat0 = min(lat0, max(self.track_lat))
lat1 = min(lat1, max(self.track_lat))
lat0 = max(lat0, min(self.track_lat))
@@ -223,83 +230,87 @@ def set_track_mesh(self, mesh_lat):
# computing meshed track's Latitude and Longitude values
self.track_lat_m = np.arange(lat0, lat1, dlat).tolist()
- self.track_lon_m = np.abs(np.interp(self.track_lat_m, self.track_lat, self.track_lon))
- print('WindFieldSimulation: track meshed.')
+ self.track_lon_m = np.abs(
+ np.interp(self.track_lat_m, self.track_lat, self.track_lon)
+ )
+ print('WindFieldSimulation: track meshed.') # noqa: T201
-
def define_track(self, track_lat):
- """
- set_track_meesh: meshing the storm track
+        """define_track: defining the storm track directly from given latitude values
         track_lat: latitude values used as the simulated track
- """
-
+ """ # noqa: D205, D400
# computing meshed track's Latitude and Longitude values
self.track_lat_m = track_lat
- self.track_lon_m = np.abs(np.interp(self.track_lat_m, self.track_lat, self.track_lon))
- print('WindFieldSimulation: track defined.')
-
+ self.track_lon_m = np.abs(
+ np.interp(self.track_lat_m, self.track_lat, self.track_lon)
+ )
+ print('WindFieldSimulation: track defined.') # noqa: T201
def set_measure_height(self, measure_info):
- """
- set_measure_height: defining the height for calculating wind speed
- """
+ """set_measure_height: defining the height for calculating wind speed""" # noqa: D400
try:
- self.zp = np.arange(measure_info[0], measure_info[2] + measure_info[1], measure_info[1]).tolist()
- print('WindFieldSimulation: measurement height defined.')
- except:
- print('WindFieldSimulation: input format error in set_measure_height.')
-
+ self.zp = np.arange(
+ measure_info[0], measure_info[2] + measure_info[1], measure_info[1]
+ ).tolist()
+ print('WindFieldSimulation: measurement height defined.') # noqa: T201
+ except: # noqa: E722
+ print('WindFieldSimulation: input format error in set_measure_height.') # noqa: T201
def add_stations(self, station_list):
- """
- add_stations: adding stations to the model
+ """add_stations: adding stations to the model
station_list:
- station_list['Latitude']: latitude values of stations
- station_list['Longitude']: longitude values of stations
- station_list['z0']: surface roughness (optional)
- """
+ """ # noqa: D205, D400
# z0 default
- if ('z0' not in station_list.keys()):
+ if 'z0' not in station_list.keys(): # noqa: SIM118
# default value = 0 (no specified z0)
station_list['z0'] = np.zeros(len(station_list['Latitude']))
# adding stations (without duplication)
- for lat, lon, z0 in zip(station_list['Latitude'], station_list['Longitude'], station_list['z0']):
+ for lat, lon, z0 in zip(
+ station_list['Latitude'], station_list['Longitude'], station_list['z0']
+ ):
self.station['Latitude'].append(lat)
self.station['Longitude'].append(lon)
- if (z0 == 0):
+ if z0 == 0:
# interpolating z0 from terrain feature
self.station['z0'].append(self.__interp_z0(lat, lon))
else:
self.station['z0'].append(z0)
# updating station number
self.station_num += 1
-
def __calculate_heading(self):
- """
- __calculate_heading: computing the heading path
- """
+ """__calculate_heading: computing the heading path""" # noqa: D400
self.beta_c = np.zeros(len(self.track_lat_m))
for i in range(len(self.track_lat_m) - 1):
- Delta = self.track_lon_m[i + 1] - self.track_lon_m[i] + self.EPS ** 2
- self.beta_c[i] = -self.delta_path[2] + 90.0 + self.RA * np.arctan2(np.sin(Delta / self.RA) \
- * np.cos(self.track_lat_m[i + 1] / self.RA), np.cos(self.track_lat_m[i] / self.RA) \
- * np.sin(self.track_lat_m[i + 1] / self.RA) - np.sin(self.track_lat_m[i] / self.RA) \
- * np.cos(self.track_lat_m[i + 1] / self.RA) * np.cos(Delta / self.RA))
+ Delta = self.track_lon_m[i + 1] - self.track_lon_m[i] + self.EPS**2 # noqa: N806
+ self.beta_c[i] = (
+ -self.delta_path[2]
+ + 90.0
+ + self.RA
+ * np.arctan2(
+ np.sin(Delta / self.RA)
+ * np.cos(self.track_lat_m[i + 1] / self.RA),
+ np.cos(self.track_lat_m[i] / self.RA)
+ * np.sin(self.track_lat_m[i + 1] / self.RA)
+ - np.sin(self.track_lat_m[i] / self.RA)
+ * np.cos(self.track_lat_m[i + 1] / self.RA)
+ * np.cos(Delta / self.RA),
+ )
+ )
# positive angle values for beta_c
self.beta_c = [x if x >= 0 else x + 360.0 for x in self.beta_c]
# fixing the last value
self.beta_c[-1] = self.beta_c[-2]
-
- def compute_wind_field(self):
- """
- compute_wind_field: computing the peak wind speed (10-min gust duraiton)
- """
- print('WindFieldSimulation: running linear analytical model.')
+ def compute_wind_field(self): # noqa: PLR0914
+        """compute_wind_field: computing the peak wind speed (10-min gust duration)"""  # noqa: D400
+ print('WindFieldSimulation: running linear analytical model.') # noqa: T201
# checking if all parameters are defined
# calculating heading
@@ -317,118 +328,271 @@ def compute_wind_field(self):
for i in range(len(self.track_lat_m)):
# location and heading
lat = self.track_lat_m[i] + self.delta_path[0]
- lon = self.track_lon_m[i] -0.3 * self.delta_path[1]
+ lon = self.track_lon_m[i] - 0.3 * self.delta_path[1]
beta = self.beta_c[i]
# coriolis
omega = 0.7292 * 1e-4
f = 2.0 * omega * np.sin(lat * np.pi / 180.0)
# looping over different polar coordinates theta
for j in range(len(self.theta)):
- Ctheta = -self.cyclone_sped * np.sin((self.theta[j] - beta) / self.RA)
- if (self.theta[j] >= 0) and (self.theta[j] <= 90):
- THETA = 90.0 - self.theta[j]
+ Ctheta = -self.cyclone_sped * np.sin( # noqa: N806
+ (self.theta[j] - beta) / self.RA
+ )
+ if (self.theta[j] >= 0) and (self.theta[j] <= 90): # noqa: PLR2004
+ THETA = 90.0 - self.theta[j] # noqa: N806
else:
- THETA = 450 - self.theta[j]
-
- lat_t = self.RA * np.arcsin(np.sin(lat / self.RA) * np.cos(self.r / self.R) \
- + np.cos(lat / self.RA) * np.sin(self.r / self.R) * np.cos(THETA / self.RA))
- lon_t = lon + self.RA * np.arctan2(np.sin(THETA / self.RA) * np.sin(self.r / self.R) \
- * np.cos(lat / self.RA), np.cos(self.r / self.R) - np.sin(lat / self.RA) * np.sin(lat_t))
+ THETA = 450 - self.theta[j] # noqa: N806
+
+ lat_t = self.RA * np.arcsin(
+ np.sin(lat / self.RA) * np.cos(self.r / self.R)
+ + np.cos(lat / self.RA)
+ * np.sin(self.r / self.R)
+ * np.cos(THETA / self.RA)
+ )
+ lon_t = lon + self.RA * np.arctan2(
+ np.sin(THETA / self.RA)
+ * np.sin(self.r / self.R)
+ * np.cos(lat / self.RA),
+ np.cos(self.r / self.R) - np.sin(lat / self.RA) * np.sin(lat_t),
+ )
# looping over different polar coordinates r
for k in range(len(self.r)):
z0[k] = self.__interp_z0(lat_t[k], lon_t[k])
# configuring coefficients
z10 = 10.0
- A = 11.4
- h = A * z0 ** 0.86
+ A = 11.4 # noqa: N806
+ h = A * z0**0.86
d = 0.75 * h
kappa = 0.40
- Cd = kappa ** 2 / (np.log((z10 + h - d) / z0)) ** 2
- #
- der_p = self.Holland_B * self.cyclone_radm ** self.Holland_B * self.cyclone_pres * (self.r ** (-self.Holland_B - 1)) \
- * np.exp(-(self.cyclone_radm * self.r ** (-1.0)) ** self.Holland_B)
- der_p_2 = (-(self.Holland_B + 1) * (self.r ** (-1.0)) + self.Holland_B * self.cyclone_radm ** self.Holland_B \
- * (self.r ** (-self.Holland_B - 1))) * der_p
- #
- vg1[j, :] = 0.5 * (Ctheta - f * self.r) + ((0.5 * (Ctheta - f * self.r)) ** 2.0 + (self.r / self.AIR_DENSITY) * der_p) ** 0.5
- der_vg1_r = -0.5 * f + 0.5 * ((((Ctheta - f * self.r) / 2.0) ** 2.0 + self.r / self.AIR_DENSITY * der_p) ** (-0.5)) \
- * (-(Ctheta - f * self.r) * f / 2.0 + 1.0 / self.AIR_DENSITY * der_p + 1.0 / self.AIR_DENSITY * self.r * der_p_2)
- der_vg1_theta = -self.cyclone_sped * np.cos((self.theta[j] - beta) / self.RA) / 2.0 \
- + 0.25 * self.cyclone_sped * np.cos((self.theta[j] - beta) / self.RA) * (-Ctheta + f * self.r) \
- * ((0.5 * (Ctheta - f * self.r)) ** 2.0 + (self.r / self.AIR_DENSITY) * der_p) ** (-0.5)
- BB = 1.0 / (2.0 * self.EDDY_VISCOCITY * self.r) * der_vg1_theta
- Eta = ((0.5 * (Ctheta - f * self.r)) ** 2.0 + (self.r / self.AIR_DENSITY) * der_p) ** 0.5
- ALPHA = 1.0 / (2.0 * self.EDDY_VISCOCITY) * (f + 2.0 * vg1[j, :] / self.r)
- BETA = 1.0 / (2.0 * self.EDDY_VISCOCITY) * (f + vg1[j, :] / self.r + der_vg1_r)
- GAMMA = 1.0 / (2.0 * self.EDDY_VISCOCITY) * vg1[j, :] / self.r
- ALPHA = np.array([complex(x, y) for x, y in zip(np.real(ALPHA), np.imag(ALPHA))])
- BETA = np.array([complex(x, y) for x, y in zip(np.real(BETA), np.imag(BETA))])
- #
- XXX = -(ALPHA * BETA) ** 0.25
- YYY = -(ALPHA * BETA) ** 0.25
- PP_zero = np.array([complex(x, y) for x, y in zip(XXX, YYY)])
- PP_one = -complex(1, 1) * ((GAMMA + np.sqrt(ALPHA * BETA) - BB) ** 0.5)
- PP_minus_one = -complex(1, 1) * ((-GAMMA + np.sqrt(ALPHA * BETA) - BB) ** 0.5)
- #
- X1 = PP_zero + f * self.r * Cd / self.EDDY_VISCOCITY - 2.0 * Eta * Cd / self.EDDY_VISCOCITY \
- - self.cyclone_sped ** 2.0 * Cd ** 2.0 / (4.0 * self.EDDY_VISCOCITY ** 2.0 * (PP_one - np.conj(PP_minus_one))) \
- + self.cyclone_sped ** 2.0 * Cd ** 2.0 / (4.0 * self.EDDY_VISCOCITY ** 2.0 * (np.conj(PP_one) - PP_minus_one))
-
- X2 = -np.conj(PP_zero) - f * self.r * Cd / self.EDDY_VISCOCITY + 2.0 * Eta * Cd / self.EDDY_VISCOCITY \
- - self.cyclone_sped ** 2.0 * Cd ** 2.0 / (4.0 * self.EDDY_VISCOCITY ** 2.0 * (PP_one - np.conj(PP_minus_one))) \
- + self.cyclone_sped ** 2.0 * Cd ** 2.0 / (4.0 * self.EDDY_VISCOCITY ** 2.0 * (np.conj(PP_one) - PP_minus_one))
-
- X3 = complex(0, -2) * Cd / self.EDDY_VISCOCITY * (Eta - f * self.r / 2.0) ** 2.0
-
- X4 = -(-PP_zero - f * self.r * Cd / (2.0 * self.EDDY_VISCOCITY) + Eta * Cd / self.EDDY_VISCOCITY) \
- / (-np.conj(PP_zero) - f * self.r * Cd / (2.0 *self.EDDY_VISCOCITY) + Eta * Cd / self.EDDY_VISCOCITY)
-
- A_zero = -X3 / (X1 + X2 * X4)
- A_one = complex(0, 1) * self.cyclone_sped * Cd * np.exp(complex(0, -1) * beta) \
- / (4.0 * self.EDDY_VISCOCITY * (PP_one - np.conj(PP_minus_one))) * (A_zero + np.conj(A_zero))
- A_minus_one = -np.conj(A_one)
+ Cd = kappa**2 / (np.log((z10 + h - d) / z0)) ** 2 # noqa: N806
+ der_p = (
+ self.Holland_B
+ * self.cyclone_radm**self.Holland_B
+ * self.cyclone_pres
+ * (self.r ** (-self.Holland_B - 1))
+ * np.exp(
+ -((self.cyclone_radm * self.r ** (-1.0)) ** self.Holland_B)
+ )
+ )
+ der_p_2 = (
+ -(self.Holland_B + 1) * (self.r ** (-1.0))
+ + self.Holland_B
+ * self.cyclone_radm**self.Holland_B
+ * (self.r ** (-self.Holland_B - 1))
+ ) * der_p
+ vg1[j, :] = (
+ 0.5 * (Ctheta - f * self.r)
+ + (
+ (0.5 * (Ctheta - f * self.r)) ** 2.0
+ + (self.r / self.AIR_DENSITY) * der_p
+ )
+ ** 0.5
+ )
+ der_vg1_r = -0.5 * f + 0.5 * (
+ (
+ ((Ctheta - f * self.r) / 2.0) ** 2.0
+ + self.r / self.AIR_DENSITY * der_p
+ )
+ ** (-0.5)
+ ) * (
+ -(Ctheta - f * self.r) * f / 2.0
+ + 1.0 / self.AIR_DENSITY * der_p
+ + 1.0 / self.AIR_DENSITY * self.r * der_p_2
+ )
+ der_vg1_theta = -self.cyclone_sped * np.cos(
+ (self.theta[j] - beta) / self.RA
+ ) / 2.0 + 0.25 * self.cyclone_sped * np.cos(
+ (self.theta[j] - beta) / self.RA
+ ) * (-Ctheta + f * self.r) * (
+ (0.5 * (Ctheta - f * self.r)) ** 2.0
+ + (self.r / self.AIR_DENSITY) * der_p
+ ) ** (-0.5)
+ BB = 1.0 / (2.0 * self.EDDY_VISCOCITY * self.r) * der_vg1_theta # noqa: N806
+ Eta = ( # noqa: N806
+ (0.5 * (Ctheta - f * self.r)) ** 2.0
+ + (self.r / self.AIR_DENSITY) * der_p
+ ) ** 0.5
+ ALPHA = ( # noqa: N806
+ 1.0
+ / (2.0 * self.EDDY_VISCOCITY)
+ * (f + 2.0 * vg1[j, :] / self.r)
+ )
+ BETA = ( # noqa: N806
+ 1.0
+ / (2.0 * self.EDDY_VISCOCITY)
+ * (f + vg1[j, :] / self.r + der_vg1_r)
+ )
+ GAMMA = 1.0 / (2.0 * self.EDDY_VISCOCITY) * vg1[j, :] / self.r # noqa: N806
+ ALPHA = np.array( # noqa: N806
+ list(starmap(complex, zip(np.real(ALPHA), np.imag(ALPHA))))
+ )
+ BETA = np.array( # noqa: N806
+ list(starmap(complex, zip(np.real(BETA), np.imag(BETA))))
+ )
+ XXX = -((ALPHA * BETA) ** 0.25) # noqa: N806
+ YYY = -((ALPHA * BETA) ** 0.25) # noqa: N806
+ PP_zero = np.array(list(starmap(complex, zip(XXX, YYY)))) # noqa: N806
+ PP_one = -complex(1, 1) * ( # noqa: N806
+ (GAMMA + np.sqrt(ALPHA * BETA) - BB) ** 0.5
+ )
+ PP_minus_one = -complex(1, 1) * ( # noqa: N806
+ (-GAMMA + np.sqrt(ALPHA * BETA) - BB) ** 0.5
+ )
+ X1 = ( # noqa: N806
+ PP_zero
+ + f * self.r * Cd / self.EDDY_VISCOCITY
+ - 2.0 * Eta * Cd / self.EDDY_VISCOCITY
+ - self.cyclone_sped**2.0
+ * Cd**2.0
+ / (
+ 4.0
+ * self.EDDY_VISCOCITY**2.0
+ * (PP_one - np.conj(PP_minus_one))
+ )
+ + self.cyclone_sped**2.0
+ * Cd**2.0
+ / (
+ 4.0
+ * self.EDDY_VISCOCITY**2.0
+ * (np.conj(PP_one) - PP_minus_one)
+ )
+ )
+
+ X2 = ( # noqa: N806
+ -np.conj(PP_zero)
+ - f * self.r * Cd / self.EDDY_VISCOCITY
+ + 2.0 * Eta * Cd / self.EDDY_VISCOCITY
+ - self.cyclone_sped**2.0
+ * Cd**2.0
+ / (
+ 4.0
+ * self.EDDY_VISCOCITY**2.0
+ * (PP_one - np.conj(PP_minus_one))
+ )
+ + self.cyclone_sped**2.0
+ * Cd**2.0
+ / (
+ 4.0
+ * self.EDDY_VISCOCITY**2.0
+ * (np.conj(PP_one) - PP_minus_one)
+ )
+ )
+
+ X3 = ( # noqa: N806
+ complex(0, -2)
+ * Cd
+ / self.EDDY_VISCOCITY
+ * (Eta - f * self.r / 2.0) ** 2.0
+ )
+
+ X4 = -( # noqa: N806
+ -PP_zero
+ - f * self.r * Cd / (2.0 * self.EDDY_VISCOCITY)
+ + Eta * Cd / self.EDDY_VISCOCITY
+ ) / (
+ -np.conj(PP_zero)
+ - f * self.r * Cd / (2.0 * self.EDDY_VISCOCITY)
+ + Eta * Cd / self.EDDY_VISCOCITY
+ )
+
+ A_zero = -X3 / (X1 + X2 * X4) # noqa: N806
+ A_one = ( # noqa: N806
+ complex(0, 1)
+ * self.cyclone_sped
+ * Cd
+ * np.exp(complex(0, -1) * beta)
+ / (4.0 * self.EDDY_VISCOCITY * (PP_one - np.conj(PP_minus_one)))
+ * (A_zero + np.conj(A_zero))
+ )
+ A_minus_one = -np.conj(A_one) # noqa: N806
# looping over different heights zp
for ii in range(len(self.zp)):
- u_zero = np.sqrt(ALPHA / BETA) * np.real(A_zero * np.exp(PP_zero * self.zp[ii]))
+ u_zero = np.sqrt(ALPHA / BETA) * np.real(
+ A_zero * np.exp(PP_zero * self.zp[ii])
+ )
v_zero = np.imag(A_zero * np.exp(PP_zero * self.zp[ii]))
- u_one = np.sqrt(ALPHA / BETA) * np.real(A_one * np.exp(PP_one * self.zp[ii] + complex(0, 1) * self.theta[j] / self.RA))
- u_minus_one = np.sqrt(ALPHA / BETA) * np.real(A_minus_one * np.exp(PP_minus_one * self.zp[ii] - complex(0, 1) * self.theta[j] / self.RA))
- v_one = np.imag(A_one * np.exp(PP_one * self.zp[ii] + complex(0, 1) * self.theta[j] / self.RA))
- v_minus_one = np.imag(A_minus_one * np.exp(PP_minus_one * self.zp[ii] - complex(0, 1) * self.theta[j] / self.RA))
- #
+ u_one = np.sqrt(ALPHA / BETA) * np.real(
+ A_one
+ * np.exp(
+ PP_one * self.zp[ii]
+ + complex(0, 1) * self.theta[j] / self.RA
+ )
+ )
+ u_minus_one = np.sqrt(ALPHA / BETA) * np.real(
+ A_minus_one
+ * np.exp(
+ PP_minus_one * self.zp[ii]
+ - complex(0, 1) * self.theta[j] / self.RA
+ )
+ )
+ v_one = np.imag(
+ A_one
+ * np.exp(
+ PP_one * self.zp[ii]
+ + complex(0, 1) * self.theta[j] / self.RA
+ )
+ )
+ v_minus_one = np.imag(
+ A_minus_one
+ * np.exp(
+ PP_minus_one * self.zp[ii]
+ - complex(0, 1) * self.theta[j] / self.RA
+ )
+ )
for tmptag in range(u.shape[1]):
- u[j, tmptag, ii] = np.real(u_zero)[tmptag] + np.real(u_one)[tmptag] + np.real(u_minus_one)[tmptag]
- v[j, tmptag, ii] = v_zero[tmptag] + v_one[tmptag] + v_minus_one[tmptag]
+ u[j, tmptag, ii] = (
+ np.real(u_zero)[tmptag]
+ + np.real(u_one)[tmptag]
+ + np.real(u_minus_one)[tmptag]
+ )
+ v[j, tmptag, ii] = (
+ v_zero[tmptag] + v_one[tmptag] + v_minus_one[tmptag]
+ )
# wind speed components
v1 = v
for m in range(v.shape[2]):
- v1[:, :, m] = v1[:, :, m] + vg1
- U = (v1 ** 2.0 + u ** 2.0) ** 0.5
+ v1[:, :, m] = v1[:, :, m] + vg1 # noqa: PLR6104
+ U = (v1**2.0 + u**2.0) ** 0.5 # noqa: N806
             # mapping to stations
- dd = np.arccos(np.cos(np.array(station_lat) / self.RA) * np.cos(lat / self.RA) * np.cos((np.abs(np.array(station_lon)) - lon) / self.RA) \
- + np.sin(np.array(station_lat) / self.RA) * np.sin(lat / self.RA)) * 6371.0 * 180.0 / np.pi / self.RA * 1000.0
- Delta = np.abs(np.array(station_lon)) - lon + self.EPS ** 2.0
- bearing = 90.0 + self.RA * np.arctan2(np.sin(Delta / self.RA) * np.cos(np.array(station_lat) / self.RA), \
- np.cos(lat / self.RA) * np.sin(np.array(station_lat) /self.RA) - np.sin(lat / self.RA) * np.cos(np.array(station_lat) / self.RA) * np.cos(Delta / self.RA))
+ dd = (
+ np.arccos(
+ np.cos(np.array(station_lat) / self.RA)
+ * np.cos(lat / self.RA)
+ * np.cos((np.abs(np.array(station_lon)) - lon) / self.RA)
+ + np.sin(np.array(station_lat) / self.RA) * np.sin(lat / self.RA)
+ )
+ * 6371.0
+ * 180.0
+ / np.pi
+ / self.RA
+ * 1000.0
+ )
+ Delta = np.abs(np.array(station_lon)) - lon + self.EPS**2.0 # noqa: N806
+ bearing = 90.0 + self.RA * np.arctan2(
+ np.sin(Delta / self.RA) * np.cos(np.array(station_lat) / self.RA),
+ np.cos(lat / self.RA) * np.sin(np.array(station_lat) / self.RA)
+ - np.sin(lat / self.RA)
+ * np.cos(np.array(station_lat) / self.RA)
+ * np.cos(Delta / self.RA),
+ )
bearing = [x if x >= 0 else x + 360.0 for x in bearing]
jj = [int(x / self.mesh_info[4]) for x in bearing]
kk = [min(int(x / self.mesh_info[1]), len(self.r) - 1) for x in dd]
for ii in range(len(self.zp)):
tmp = U[:, :, ii].tolist()
wind_speed = [tmp[jtag][ktag] for jtag, ktag in zip(jj, kk)]
- station_umax[:, ii] = [max(x, y) for x, y in zip(wind_speed, station_umax[:, ii])]
+ station_umax[:, ii] = list(
+ starmap(max, zip(wind_speed, station_umax[:, ii]))
+ )
# copying results
self.station['PWS']['height'] = self.zp
self.station['PWS']['windspeed'] = station_umax.tolist()
- print('WindFieldSimulation: linear analytical simulation completed.')
+ print('WindFieldSimulation: linear analytical simulation completed.') # noqa: T201
-
def get_station_data(self):
- """
- get_station_data: returning station data
- """
+ """get_station_data: returning station data""" # noqa: D400
# return station dictionary
- return self.station
\ No newline at end of file
+ return self.station
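
# A hedged end-to-end sketch of the class above. All numeric values are
# hypothetical placeholders; cyclone_param follows the ordering documented in
# __init__ (landfall latitude/longitude, a heading-related entry, central
# pressure difference in hPa, translation speed in km/h, radius of maximum
# winds in km), and the mesh/height lists follow the start/step/stop pattern
# consumed by the set_* methods.
cyclone_param = [29.5, -90.0, 20.0, 50.0, 25.0, 40.0]
storm_track = {'Latitude': [25.0, 27.0, 29.5, 32.0], 'Longitude': [-88.0, -89.0, -90.0, -91.0]}

model = LinearAnalyticalModel_SnaikiWu_2017(cyclone_param, storm_track)
model.set_cyclone_mesh([1000.0, 50000.0, 1000000.0, 0.0, 10.0, 360.0])  # [r0, dr, r1, theta0, dtheta, theta1]
model.set_track_mesh([25.0, 0.25, 32.0])                                # [lat0, dlat, lat1]
model.set_measure_height([10.0, 10.0, 10.0])                            # single 10 m evaluation height
model.add_stations({'Latitude': [30.0, 30.5], 'Longitude': [-90.1, -90.4]})
model.compute_wind_field()
print(model.get_station_data()['PWS']['windspeed'])
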
diff --git a/modules/performRegionalEventSimulation/siteResponse/RegionalSiteResponse.py b/modules/performRegionalEventSimulation/siteResponse/RegionalSiteResponse.py
index 32d8ee301..892e6b8d9 100644
--- a/modules/performRegionalEventSimulation/siteResponse/RegionalSiteResponse.py
+++ b/modules/performRegionalEventSimulation/siteResponse/RegionalSiteResponse.py
@@ -1,5 +1,4 @@
-# -*- coding: utf-8 -*-
-#
+# # noqa: INP001, D100
# Copyright (c) 2018 The Regents of the University of California
#
# This file is part of the SimCenter Backend Applications
@@ -37,146 +36,159 @@
# fmk
#
-import os, sys
-import argparse, json
-import subprocess
+import argparse
+import json
+import os
import shutil
-
+import subprocess # noqa: S404
+import sys
from pathlib import Path
#
# some filePath and python exe stuff
#
-thisDir = Path(os.path.dirname(os.path.abspath(__file__))).resolve()
-mainDir = thisDir.parents[1]
-mainDir = thisDir.parents[1]
-currentDir=os.getcwd()
+thisDir = Path(os.path.dirname(os.path.abspath(__file__))).resolve() # noqa: PTH100, PTH120, N816
+mainDir = thisDir.parents[1] # noqa: N816
+mainDir = thisDir.parents[1] # noqa: N816
+currentDir = os.getcwd() # noqa: PTH109, N816
-pythonEXE = sys.executable
+pythonEXE = sys.executable # noqa: N816
-thisDir = str(thisDir)
-mainDir = str(mainDir)
-currentDir = str(currentDir)
+thisDir = str(thisDir) # noqa: N816
+mainDir = str(mainDir) # noqa: N816
+currentDir = str(currentDir) # noqa: N816
-print(f"thisDir: {thisDir}")
-print(f"mainDir: {mainDir}")
-print(f"currentDir: {currentDir}")
+print(f'thisDir: {thisDir}') # noqa: T201
+print(f'mainDir: {mainDir}') # noqa: T201
+print(f'currentDir: {currentDir}') # noqa: T201
-def runHazardSimulation(inputFILE):
+def runHazardSimulation(inputFILE): # noqa: N802, N803, D103
    # log_msg('Starting simulation script...')
- sys.path.insert(0, os.getcwd())
+ sys.path.insert(0, os.getcwd()) # noqa: PTH109
#
# open input & parse json
#
-
- print(f'inputFILE: {inputFILE}')
- with open(inputFILE, 'r') as f:
- inputJSON = json.load(f)
+
+ print(f'inputFILE: {inputFILE}') # noqa: T201
+ with open(inputFILE) as f: # noqa: PLW1514, PTH123
+ inputJSON = json.load(f) # noqa: N806
#
- # read needed input data
+ # read needed input data
#
-
- unitData = inputJSON['units']
- inputApplications = inputJSON['Applications']
- hazardApplication = inputApplications['Hazard']
- regionalMappingApplication = inputApplications['RegionalMapping']
- uqApplication = inputApplications['UQ']
-
- hazardAppData = hazardApplication['ApplicationData']
-
- soilFile = hazardAppData["soilGridParametersFile"]
- soilPath = hazardAppData["soilParametersPath"]
- responseScript = hazardAppData["siteResponseScript"]
- scriptPath = hazardAppData["scriptPath"]
- filters = hazardAppData["filter"]
- eventFile = hazardAppData["inputEventFile"]
- motionDir = hazardAppData["inputMotionDir"]
- outputDir = hazardAppData["outputMotionDir"]
+
+ unitData = inputJSON['units'] # noqa: N806
+ inputApplications = inputJSON['Applications'] # noqa: N806
+ hazardApplication = inputApplications['Hazard'] # noqa: N806
+ regionalMappingApplication = inputApplications['RegionalMapping'] # noqa: N806
+ uqApplication = inputApplications['UQ'] # noqa: N806
+
+ hazardAppData = hazardApplication['ApplicationData'] # noqa: N806
+
+ soilFile = hazardAppData['soilGridParametersFile'] # noqa: N806
+ soilPath = hazardAppData['soilParametersPath'] # noqa: N806
+ responseScript = hazardAppData['siteResponseScript'] # noqa: N806
+ scriptPath = hazardAppData['scriptPath'] # noqa: N806
+ filters = hazardAppData['filter']
+ eventFile = hazardAppData['inputEventFile'] # noqa: N806
+ motionDir = hazardAppData['inputMotionDir'] # noqa: N806
+ outputDir = hazardAppData['outputMotionDir'] # noqa: N806
# now create an input for siteResponseWHALE
-
- srtFILE = "sc_srt.json"
-
- outputs = dict(
- EDP = True,
- DM = False,
- DV = False,
- every_realization = False
- )
- edpApplication = dict(
- Application = 'DummyEDP',
- ApplicationData = dict()
- )
+ srtFILE = 'sc_srt.json' # noqa: N806
- eventApp = dict(
- EventClassification = "Earthquake",
- Application = "RegionalSiteResponse",
- ApplicationData = dict(
- pathEventData = motionDir,
- mainScript = responseScript,
- modelPath = scriptPath,
- ndm = 3
- )
- )
+ outputs = dict(EDP=True, DM=False, DV=False, every_realization=False) # noqa: C408
- regionalMappingAppData = regionalMappingApplication['ApplicationData']
- regionalMappingAppData['filenameEVENTgrid']=eventFile
-
- buildingApplication = dict (
- Application = "CSV_to_BIM",
- ApplicationData = dict (
- buildingSourceFile = f'{soilPath}{soilFile}',
- filter = filters
- )
+ edpApplication = dict(Application='DummyEDP', ApplicationData=dict()) # noqa: C408, N806
+
+ eventApp = dict( # noqa: C408, N806
+ EventClassification='Earthquake',
+ Application='RegionalSiteResponse',
+ ApplicationData=dict( # noqa: C408
+ pathEventData=motionDir,
+ mainScript=responseScript,
+ modelPath=scriptPath,
+ ndm=3,
+ ),
)
- Applications = dict(
- UQ = uqApplication,
- RegionalMapping = regionalMappingApplication,
- Events = [eventApp],
- EDP = edpApplication,
- Building = buildingApplication
+ regionalMappingAppData = regionalMappingApplication['ApplicationData'] # noqa: N806
+ regionalMappingAppData['filenameEVENTgrid'] = eventFile
+
+ buildingApplication = dict( # noqa: C408, N806
+ Application='CSV_to_BIM',
+ ApplicationData=dict( # noqa: C408
+ buildingSourceFile=f'{soilPath}{soilFile}', filter=filters
+ ),
)
- srt = dict(
- units = unitData,
- outputs = outputs,
- Applications = Applications
+ Applications = dict( # noqa: C408, N806
+ UQ=uqApplication,
+ RegionalMapping=regionalMappingApplication,
+ Events=[eventApp],
+ EDP=edpApplication,
+ Building=buildingApplication,
)
-
- with open(srtFILE, 'w') as f:
+
+ srt = dict(units=unitData, outputs=outputs, Applications=Applications) # noqa: C408
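+    # sc_srt.json bundles units, outputs, and the application settings into a standalone input for siteResponseWHALE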
+
+ with open(srtFILE, 'w') as f: # noqa: PLW1514, PTH123
json.dump(srt, f, indent=2)
#
# now invoke siteResponseWHALE
#
- inputDir = currentDir + "/input_data"
- tmpDir = currentDir + "/input_data/siteResponseRunningDir"
-
- print(f'RUNNING {pythonEXE} {mainDir}/Workflow/siteResponseWHALE.py ./sc_srt.json --registry {mainDir}/Workflow/WorkflowApplications.json --referenceDir {inputDir} -w {tmpDir}')
+ inputDir = currentDir + '/input_data' # noqa: N806
+ tmpDir = currentDir + '/input_data/siteResponseRunningDir' # noqa: N806
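+    # the site response workflow runs in this scratch directory, which is removed once results are gathered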
+
+ print( # noqa: T201
+ f'RUNNING {pythonEXE} {mainDir}/Workflow/siteResponseWHALE.py ./sc_srt.json --registry {mainDir}/Workflow/WorkflowApplications.json --referenceDir {inputDir} -w {tmpDir}'
+ )
- subprocess.run([pythonEXE, mainDir+"/Workflow/siteResponseWHALE.py", "./sc_srt.json","--registry", mainDir+"/Workflow/WorkflowApplications.json", "--referenceDir", inputDir, "-w", tmpDir])
+ subprocess.run( # noqa: S603
+ [
+ pythonEXE,
+ mainDir + '/Workflow/siteResponseWHALE.py',
+ './sc_srt.json',
+ '--registry',
+ mainDir + '/Workflow/WorkflowApplications.json',
+ '--referenceDir',
+ inputDir,
+ '-w',
+ tmpDir,
+ ],
+ check=False,
+ )
#
# gather results, creating new EventGrid file
# and moving all the motions created
#
- outputMotionDir = currentDir + "/input_data/" + outputDir;
-
- print(f'RUNNING {pythonEXE} {mainDir}/createEVENT/siteResponse/createGM4BIM.py -i {tmpDir} -o {outputMotionDir} --removeInput')
-
- subprocess.run([pythonEXE, mainDir+"/createEVENT/siteResponse/createGM4BIM.py", "-i", tmpDir, "-o", outputMotionDir])
+ outputMotionDir = currentDir + '/input_data/' + outputDir # noqa: N806
+ print( # noqa: T201
+ f'RUNNING {pythonEXE} {mainDir}/createEVENT/siteResponse/createGM4BIM.py -i {tmpDir} -o {outputMotionDir} --removeInput'
+ )
- #subprocess.run([pythonEXE, mainDir+"/createEVENT/siteResponse/createGM4BIM.py", "-i", tmpDir, "-o", outputMotionDir], "--removeInput")
+ subprocess.run( # noqa: S603
+ [
+ pythonEXE,
+ mainDir + '/createEVENT/siteResponse/createGM4BIM.py',
+ '-i',
+ tmpDir,
+ '-o',
+ outputMotionDir,
+ ],
+ check=False,
+ )
+ # subprocess.run([pythonEXE, mainDir+"/createEVENT/siteResponse/createGM4BIM.py", "-i", tmpDir, "-o", outputMotionDir], "--removeInput")
#
# remove tmp dir
@@ -185,30 +197,30 @@ def runHazardSimulation(inputFILE):
try:
shutil.rmtree(tmpDir)
except OSError as e:
- print("Error: %s : %s" % (tmpDir, e.strerror))
-
+ print('Error: %s : %s' % (tmpDir, e.strerror)) # noqa: T201, UP031
+
#
- # modify inputFILE to provide new event file for regional mapping
+ # modify inputFILE to provide new event file for regional mapping
#
- regionalMappingAppData = regionalMappingApplication['ApplicationData']
- regionalMappingAppData['filenameEVENTgrid']=f'{outputDir}/EventGrid.csv'
+ regionalMappingAppData = regionalMappingApplication['ApplicationData'] # noqa: N806
+ regionalMappingAppData['filenameEVENTgrid'] = f'{outputDir}/EventGrid.csv'
- with open(inputFILE, 'w') as f:
+ with open(inputFILE, 'w') as f: # noqa: PLW1514, PTH123
json.dump(inputJSON, f, indent=2)
-
+
#
# we are done
#
-
- #log_msg('Simulation script finished.')
+
+ # log_msg('Simulation script finished.')
return 0
-if __name__ == '__main__':
+if __name__ == '__main__':
parser = argparse.ArgumentParser()
- parser.add_argument('--input',default=None)
+ parser.add_argument('--input', default=None)
args = parser.parse_args()
runHazardSimulation(args.input)
diff --git a/modules/performRegionalMapping/NearestNeighborEvents/NNE.py b/modules/performRegionalMapping/NearestNeighborEvents/NNE.py
index 6aee42b8b..cfa244098 100644
--- a/modules/performRegionalMapping/NearestNeighborEvents/NNE.py
+++ b/modules/performRegionalMapping/NearestNeighborEvents/NNE.py
@@ -1,5 +1,4 @@
-# -*- coding: utf-8 -*-
-#
+# # noqa: INP001, D100
# Copyright (c) 2018 Leland Stanford Junior University
# Copyright (c) 2018 The Regents of the University of California
#
@@ -39,40 +38,45 @@
# Tamika Bassman
#
-import argparse, json
-import numpy as np
-import pandas as pd
+import argparse
import importlib
-
+import json
from pathlib import Path
+import numpy as np
+import pandas as pd
from sklearn.neighbors import NearestNeighbors
-def find_neighbors(
- asset_file, event_grid_file, samples, neighbors, filter_label, seed, doParallel
+def find_neighbors( # noqa: C901, D103
+ asset_file,
+ event_grid_file,
+ samples,
+ neighbors,
+ filter_label,
+ seed,
+ doParallel, # noqa: N803
):
# check if running parallel
- numP = 1
- procID = 0
- runParallel = False
+ numP = 1 # noqa: N806
+ procID = 0 # noqa: N806
+ runParallel = False # noqa: N806
- if doParallel == "True":
- mpi_spec = importlib.util.find_spec("mpi4py")
+ if doParallel == 'True':
+ mpi_spec = importlib.util.find_spec('mpi4py')
found = mpi_spec is not None
if found:
- import mpi4py
- from mpi4py import MPI
+ from mpi4py import MPI # noqa: PLC0415
- runParallel = True
+ runParallel = True # noqa: N806
comm = MPI.COMM_WORLD
- numP = comm.Get_size()
- procID = comm.Get_rank()
- if numP < 2:
- doParallel = "False"
- runParallel = False
- numP = 1
- procID = 0
+ numP = comm.Get_size() # noqa: N806
+ procID = comm.Get_rank() # noqa: N806
+ if numP < 2: # noqa: PLR2004
+ doParallel = 'False' # noqa: N806
+ runParallel = False # noqa: N806
+ numP = 1 # noqa: N806
+ procID = 0 # noqa: N806
# read the event grid data file
event_grid_path = Path(event_grid_file).resolve()
@@ -82,55 +86,57 @@ def find_neighbors(
grid_df = pd.read_csv(event_dir / event_grid_file, header=0)
# store the locations of the grid points in X
- lat_E = grid_df["Latitude"]
- lon_E = grid_df["Longitude"]
- X = np.array([[lo, la] for lo, la in zip(lon_E, lat_E)])
+ lat_E = grid_df['Latitude'] # noqa: N806
+ lon_E = grid_df['Longitude'] # noqa: N806
+ X = np.array([[lo, la] for lo, la in zip(lon_E, lat_E)]) # noqa: N806
- if filter_label == "":
+ if filter_label == '': # noqa: PLC1901
grid_extra_keys = list(
- grid_df.drop(["GP_file", "Longitude", "Latitude"], axis=1).columns
+ grid_df.drop(['GP_file', 'Longitude', 'Latitude'], axis=1).columns
)
# prepare the tree for the nearest neighbor search
- if filter_label != "" or len(grid_extra_keys) > 0:
+ if filter_label != '' or len(grid_extra_keys) > 0: # noqa: PLC1901
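+        # request 10x the neighbors so enough remain after discarding grid points whose labels do not match the asset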
neighbors_to_get = min(neighbors * 10, len(lon_E))
else:
neighbors_to_get = neighbors
- nbrs = NearestNeighbors(n_neighbors=neighbors_to_get, algorithm="ball_tree").fit(X)
+ nbrs = NearestNeighbors(n_neighbors=neighbors_to_get, algorithm='ball_tree').fit(
+ X
+ )
# load the building data file
- with open(asset_file, "r", encoding="utf-8") as f:
+ with open(asset_file, encoding='utf-8') as f: # noqa: PTH123
asset_dict = json.load(f)
# prepare a dataframe that holds asset filenames and locations
- AIM_df = pd.DataFrame(
- columns=["Latitude", "Longitude", "file"], index=np.arange(len(asset_dict))
+ AIM_df = pd.DataFrame( # noqa: N806
+ columns=['Latitude', 'Longitude', 'file'], index=np.arange(len(asset_dict))
)
count = 0
for i, asset in enumerate(asset_dict):
- if runParallel == False or (i % numP) == procID:
- with open(asset["file"], "r", encoding="utf-8") as f:
+ if runParallel == False or (i % numP) == procID: # noqa: E712
+ with open(asset['file'], encoding='utf-8') as f: # noqa: PTH123
asset_data = json.load(f)
- asset_loc = asset_data["GeneralInformation"]["location"]
- AIM_df.iloc[count]["Longitude"] = asset_loc["longitude"]
- AIM_df.iloc[count]["Latitude"] = asset_loc["latitude"]
- AIM_df.iloc[count]["file"] = asset["file"]
- count = count + 1
+ asset_loc = asset_data['GeneralInformation']['location']
+ AIM_df.iloc[count]['Longitude'] = asset_loc['longitude']
+ AIM_df.iloc[count]['Latitude'] = asset_loc['latitude']
+ AIM_df.iloc[count]['file'] = asset['file']
+ count = count + 1 # noqa: PLR6104
# store building locations in Y
- Y = np.array(
+ Y = np.array( # noqa: N806
[
[lo, la]
- for lo, la in zip(AIM_df["Longitude"], AIM_df["Latitude"])
+ for lo, la in zip(AIM_df['Longitude'], AIM_df['Latitude'])
if not np.isnan(lo) and not np.isnan(la)
]
)
# collect the neighbor indices and distances for every building
distances, indices = nbrs.kneighbors(Y)
- distances = distances + 1e-20
+ distances = distances + 1e-20 # noqa: PLR6104
# initialize the random generator
if seed is not None:
@@ -141,25 +147,25 @@ def find_neighbors(
count = 0
# iterate through the buildings and store the selected events in the AIM
- for asset_i, (AIM_id, dist_list, ind_list) in enumerate(
+ for asset_i, (AIM_id, dist_list, ind_list) in enumerate( # noqa: B007, N806
zip(AIM_df.index, distances, indices)
):
# open the AIM file
- asst_file = AIM_df.iloc[AIM_id]["file"]
+ asst_file = AIM_df.iloc[AIM_id]['file']
- with open(asst_file, "r", encoding="utf-8") as f:
+ with open(asst_file, encoding='utf-8') as f: # noqa: PTH123
asset_data = json.load(f)
- if filter_label != "":
+ if filter_label != '': # noqa: PLC1901
# soil type of building
- asset_label = asset_data["GeneralInformation"][filter_label]
+ asset_label = asset_data['GeneralInformation'][filter_label]
# soil types of all initial neighbors
grid_label = grid_df[filter_label][ind_list]
# only keep the distances and indices corresponding to neighbors
# with the same soil type
- dist_list = dist_list[(grid_label == asset_label).values]
- ind_list = ind_list[(grid_label == asset_label).values]
+ dist_list = dist_list[(grid_label == asset_label).values] # noqa: PD011, PLW2901
+ ind_list = ind_list[(grid_label == asset_label).values] # noqa: PD011, PLW2901
        # return dist_list & ind_list with a length equal to neighbors
# assuming that at least neighbors grid points exist with
@@ -167,26 +173,26 @@ def find_neighbors(
# because dist_list, ind_list sorted initially in order of increasing
# distance, just take the first neighbors grid points of each
- dist_list = dist_list[:neighbors]
- ind_list = ind_list[:neighbors]
+ dist_list = dist_list[:neighbors] # noqa: PLW2901
+ ind_list = ind_list[:neighbors] # noqa: PLW2901
if len(grid_extra_keys) > 0:
filter_labels = []
- for key in asset_data["GeneralInformation"].keys():
+ for key in asset_data['GeneralInformation'].keys(): # noqa: SIM118
if key in grid_extra_keys:
- filter_labels.append(key)
+ filter_labels.append(key) # noqa: PERF401
filter_list = [True for i in dist_list]
- for filter_label in filter_labels:
- asset_label = asset_data["GeneralInformation"][filter_label]
+ for filter_label in filter_labels: # noqa: PLR1704
+ asset_label = asset_data['GeneralInformation'][filter_label]
grid_label = grid_df[filter_label][ind_list]
- filter_list_i = (grid_label == asset_label).values
+ filter_list_i = (grid_label == asset_label).values # noqa: PD011
filter_list = filter_list and filter_list_i
# only keep the distances and indices corresponding to neighbors
# with the same soil type
- dist_list = dist_list[filter_list]
- ind_list = ind_list[filter_list]
+ dist_list = dist_list[filter_list] # noqa: PLW2901
+ ind_list = ind_list[filter_list] # noqa: PLW2901
        # return dist_list & ind_list with a length equal to neighbors
# assuming that at least neighbors grid points exist with
@@ -194,18 +200,18 @@ def find_neighbors(
# because dist_list, ind_list sorted initially in order of increasing
# distance, just take the first neighbors grid points of each
- dist_list = dist_list[:neighbors]
- ind_list = ind_list[:neighbors]
+ dist_list = dist_list[:neighbors] # noqa: PLW2901
+ ind_list = ind_list[:neighbors] # noqa: PLW2901
# calculate the weights for each neighbor based on their distance
- dist_list = 1.0 / (dist_list**2.0)
+ dist_list = 1.0 / (dist_list**2.0) # noqa: PLW2901
weights = np.array(dist_list) / np.sum(dist_list)
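+        # inverse-square-distance weights normalized to sum to one, so closer grid points receive more of the samples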
# get the pre-defined number of samples for each neighbor
nbr_samples = np.where(rng.multinomial(1, weights, samples) == 1)[1]
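+        # each multinomial draw is one-hot over the neighbors, so np.where(...)[1] gives the selected neighbor index for every sample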
- # this is the preferred behavior, the else caluse is left for legacy inputs
- if grid_df.iloc[0]["GP_file"][-3:] == "csv":
+ # this is the preferred behavior, the else clause is left for legacy inputs
+ if grid_df.iloc[0]['GP_file'][-3:] == 'csv':
# We assume that every grid point has the same type and number of
# event data. That is, you cannot mix ground motion records and
# intensity measures and you cannot assign 10 records to one point
@@ -214,11 +220,13 @@ def find_neighbors(
# Load the first file and identify if this is a grid of IM or GM
# information. GM grids have GM record filenames defined in the
# grid point files.
- first_file = pd.read_csv(event_dir / grid_df.iloc[0]["GP_file"], header=0)
- if first_file.columns[0] == "TH_file":
- event_type = "timeHistory"
+ first_file = pd.read_csv(
+ event_dir / grid_df.iloc[0]['GP_file'], header=0
+ )
+ if first_file.columns[0] == 'TH_file':
+ event_type = 'timeHistory'
else:
- event_type = "intensityMeasure"
+ event_type = 'intensityMeasure'
event_count = first_file.shape[0]
# collect the list of events and scale factors
@@ -234,10 +242,12 @@ def find_neighbors(
nbr_index = ind_list[nbr]
# if the grid has ground motion records...
- if event_type == "timeHistory":
+ if event_type == 'timeHistory':
# load the file for the selected grid point
- event_collection_file = grid_df.iloc[nbr_index]["GP_file"]
- event_df = pd.read_csv(event_dir / event_collection_file, header=0)
+ event_collection_file = grid_df.iloc[nbr_index]['GP_file']
+ event_df = pd.read_csv(
+ event_dir / event_collection_file, header=0
+ )
# append the GM record name to the event list
event_list.append(event_df.iloc[event_j, 0])
@@ -249,21 +259,21 @@ def find_neighbors(
scale_list.append(1.0)
# if the grid has intensity measures
- elif event_type == "intensityMeasure":
+ elif event_type == 'intensityMeasure':
# save the collection file name and the IM row id
event_list.append(
- grid_df.iloc[nbr_index]["GP_file"] + f"x{event_j}"
+ grid_df.iloc[nbr_index]['GP_file'] + f'x{event_j}'
)
# IM collections are not scaled
scale_list.append(1.0)
- # TODO: update the LLNL input data and remove this clause
+ # TODO: update the LLNL input data and remove this clause # noqa: TD002
else:
event_list = []
for e, i in zip(nbr_samples, ind_list):
event_list += [
- grid_df.iloc[i]["GP_file"],
+ grid_df.iloc[i]['GP_file'],
] * e
scale_list = np.ones(len(event_list))
@@ -277,42 +287,42 @@ def find_neighbors(
# "factor": scale_list[e_i],
# #"type": event_type
# })
- event_list_json.append([f"{event}x{e_i:05d}", scale_list[e_i]])
+ event_list_json.append([f'{event}x{e_i:05d}', scale_list[e_i]])
# save the event dictionary to the AIM
- # TODO: we assume there is only one event
+ # TODO: we assume there is only one event # noqa: TD002
# handling multiple events will require more sophisticated inputs
- if "Events" not in asset_data:
- asset_data["Events"] = [{}]
- elif len(asset_data["Events"]) == 0:
- asset_data["Events"].append({})
+ if 'Events' not in asset_data:
+ asset_data['Events'] = [{}]
+ elif len(asset_data['Events']) == 0:
+ asset_data['Events'].append({})
- asset_data["Events"][0].update(
+ asset_data['Events'][0].update(
{
# "EventClassification": "Earthquake",
- "EventFolderPath": str(event_dir),
- "Events": event_list_json,
- "type": event_type
+ 'EventFolderPath': str(event_dir),
+ 'Events': event_list_json,
+ 'type': event_type,
# "type": "SimCenterEvents"
}
)
- with open(asst_file, "w", encoding="utf-8") as f:
+ with open(asst_file, 'w', encoding='utf-8') as f: # noqa: PTH123
json.dump(asset_data, f, indent=2)
-if __name__ == "__main__":
+if __name__ == '__main__':
parser = argparse.ArgumentParser()
- parser.add_argument("--assetFile")
- parser.add_argument("--filenameEVENTgrid")
- parser.add_argument("--samples", type=int)
- parser.add_argument("--neighbors", type=int)
- parser.add_argument("--filter_label", default="")
- parser.add_argument("--doParallel", default="False")
- parser.add_argument("-n", "--numP", default="8")
- parser.add_argument("-m", "--mpiExec", default="mpiexec")
- parser.add_argument("--seed", type=int, default=None)
+ parser.add_argument('--assetFile')
+ parser.add_argument('--filenameEVENTgrid')
+ parser.add_argument('--samples', type=int)
+ parser.add_argument('--neighbors', type=int)
+ parser.add_argument('--filter_label', default='')
+ parser.add_argument('--doParallel', default='False')
+ parser.add_argument('-n', '--numP', default='8')
+ parser.add_argument('-m', '--mpiExec', default='mpiexec')
+ parser.add_argument('--seed', type=int, default=None)
args = parser.parse_args()
find_neighbors(
diff --git a/modules/performRegionalMapping/SiteSpecifiedEvents/SSE.py b/modules/performRegionalMapping/SiteSpecifiedEvents/SSE.py
index c29d1369c..683702dcb 100644
--- a/modules/performRegionalMapping/SiteSpecifiedEvents/SSE.py
+++ b/modules/performRegionalMapping/SiteSpecifiedEvents/SSE.py
@@ -1,5 +1,4 @@
-# -*- coding: utf-8 -*-
-#
+# # noqa: INP001, D100
# Copyright (c) 2018 Leland Stanford Junior University
# Copyright (c) 2018 The Regents of the University of California
#
@@ -40,107 +39,117 @@
# Tamika Bassman
#
-import argparse, json
+import argparse
+import importlib
+import json
+import os
from pathlib import Path
+
import numpy as np
import pandas as pd
from scipy.cluster.vq import vq
-import importlib
-import os
-
-def create_event(asset_file, event_grid_file, multipleEvents, doParallel):
+def create_event(asset_file, event_grid_file, multipleEvents, doParallel): # noqa: C901, N803, D103
# check if running parallel
- numP = 1
- procID = 0
- runParallel = False
-
- if doParallel == "True":
- mpi_spec = importlib.util.find_spec("mpi4py")
+ numP = 1 # noqa: N806
+ procID = 0 # noqa: N806
+ runParallel = False # noqa: N806
+
+ if doParallel == 'True':
+ mpi_spec = importlib.util.find_spec('mpi4py')
found = mpi_spec is not None
if found:
- import mpi4py
- from mpi4py import MPI
- runParallel = True
+ from mpi4py import MPI # noqa: PLC0415
+
+ runParallel = True # noqa: N806
comm = MPI.COMM_WORLD
- numP = comm.Get_size()
- procID = comm.Get_rank();
- if numP < 2:
- doParallel = "False"
- runParallel = False
- numP = 1
- procID = 0
-
+ numP = comm.Get_size() # noqa: N806
+ procID = comm.Get_rank() # noqa: N806
+ if numP < 2: # noqa: PLR2004
+ doParallel = 'False' # noqa: N806
+ runParallel = False # noqa: N806
+ numP = 1 # noqa: N806
+ procID = 0 # noqa: N806
+
# read the event grid data file
event_grid_path = Path(event_grid_file).resolve()
event_dir = event_grid_path.parent
event_grid_file = event_grid_path.name
grid_df = pd.read_csv(event_dir / event_grid_file, header=0)
-
+
# store the locations of the grid points in X
- lat_E = grid_df['Latitude']
- lon_E = grid_df['Longitude']
- X = np.array([[lo, la] for lo, la in zip(lon_E, lat_E)])
-
+ lat_E = grid_df['Latitude'] # noqa: N806
+ lon_E = grid_df['Longitude'] # noqa: N806
+ X = np.array([[lo, la] for lo, la in zip(lon_E, lat_E)]) # noqa: N806
+
# load the asset data file
- with open(asset_file, 'r', encoding="utf-8") as f:
+ with open(asset_file, encoding='utf-8') as f: # noqa: PTH123
asset_dict = json.load(f)
# prepare a dataframe that holds asset filenames and locations
- AIM_df = pd.DataFrame(columns=['Latitude', 'Longitude', 'file'], index=np.arange(len(asset_dict)))
+ AIM_df = pd.DataFrame( # noqa: N806
+ columns=['Latitude', 'Longitude', 'file'], index=np.arange(len(asset_dict))
+ )
- count = 0
+ count = 0
for i, asset in enumerate(asset_dict):
-
- if runParallel == False or (i % numP) == procID:
-
- with open(asset['file'], 'r', encoding="utf-8") as f:
+ if runParallel == False or (i % numP) == procID: # noqa: E712
+ with open(asset['file'], encoding='utf-8') as f: # noqa: PTH123
asset_data = json.load(f)
asset_loc = asset_data['GeneralInformation']['location']
AIM_df.iloc[count]['Longitude'] = asset_loc['longitude']
AIM_df.iloc[count]['Latitude'] = asset_loc['latitude']
AIM_df.iloc[count]['file'] = asset['file']
- count = count + 1
-
+ count = count + 1 # noqa: PLR6104
+
# store asset locations in Y
- Y = np.array([[lo, la] for lo, la in zip(AIM_df['Longitude'], AIM_df['Latitude']) if not np.isnan(lo) and not np.isnan(la)])
-
- #print(Y)
- #print(sub_grid)
-
+ Y = np.array( # noqa: N806
+ [
+ [lo, la]
+ for lo, la in zip(AIM_df['Longitude'], AIM_df['Latitude'])
+ if not np.isnan(lo) and not np.isnan(la)
+ ]
+ )
+
+ # print(Y)
+ # print(sub_grid)
+
# Find the index of the closest point - each index corresponds to the gridpoint index
- closest, distances = vq(Y, X)
-
-# print("****closest",closest)
-# print("****distances",distances)
-#
-# print("****num found",len(closest))
-# print("****num Y",np.size(Y, 0))
-# print("****num X",np.size(X, 0))
+ closest, distances = vq(Y, X) # noqa: F841
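+    # vq assigns every asset location in Y to the index of its nearest grid point in X; the returned distances are unused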
+ # print("****closest",closest)
+ # print("****distances",distances)
+ #
+ # print("****num found",len(closest))
+ # print("****num Y",np.size(Y, 0))
+ # print("****num X",np.size(X, 0))
# check to ensure we found all of the assets
- if len(closest) != np.size(Y, 0) :
- print("Error, the number of assets needs to be equal to the number of grid points")
- print("The number of assets is "+str(np.size(Y, 0))+" and the number of grid points is " + len(closest))
+ if len(closest) != np.size(Y, 0):
+ print( # noqa: T201
+ 'Error, the number of assets needs to be equal to the number of grid points'
+ )
+ print( # noqa: T201
+ 'The number of assets is '
+ + str(np.size(Y, 0))
+ + ' and the number of grid points is '
+            + str(len(closest))
+ )
return 1
-
-
- # iterate through the assets and store the selected events in the AIM
- for idx, AIM_id in enumerate(AIM_df.index):
+ # iterate through the assets and store the selected events in the AIM
+ for idx, AIM_id in enumerate(AIM_df.index): # noqa: N806, PLR1702, RET503
# open the AIM file
asset_file = AIM_df.iloc[AIM_id]['file']
-
- with open(asset_file, 'r', encoding="utf-8") as f:
+
+ with open(asset_file, encoding='utf-8') as f: # noqa: PTH123
asset_data = json.load(f)
- # this is the preferred behavior, the else caluse is left for legacy inputs
+ # this is the preferred behavior, the else clause is left for legacy inputs
if grid_df.iloc[0]['GP_file'][-3:] == 'csv':
-
# We assume that every grid point has the same type and number of
# event data. That is, you cannot mix ground motion records and
# intensity measures and you cannot assign 10 records to one point
@@ -149,81 +158,83 @@ def create_event(asset_file, event_grid_file, multipleEvents, doParallel):
# Load the first file and identify if this is a grid of IM or GM
# information. GM grids have GM record filenames defined in the
# grid point files.
- first_file = pd.read_csv(event_dir / grid_df.iloc[0]['GP_file'],header=0)
-
- if first_file.columns[0]=='TH_file':
+ first_file = pd.read_csv(
+ event_dir / grid_df.iloc[0]['GP_file'], header=0
+ )
+
+ if first_file.columns[0] == 'TH_file':
event_type = 'timeHistory'
else:
event_type = 'intensityMeasure'
-
- event_count = first_file.shape[0]
+
+ event_count = first_file.shape[0] # noqa: F841
# collect the list of events and scale factors
event_list = []
scale_list = []
-
- closestPnt = grid_df.iloc[closest[idx]]
+
+ closestPnt = grid_df.iloc[closest[idx]] # noqa: N806
# if the grid has ground motion records...
if event_type == 'timeHistory':
-
# load the file for the selected grid point
event_collection_file = closestPnt['GP_file']
-
+
event_df = pd.read_csv(event_dir / event_collection_file, header=0)
# append the GM record name to the event list
- event_list.append(event_df.iloc[0,0])
+ event_list.append(event_df.iloc[0, 0])
# append the scale factor (or 1.0) to the scale list
if len(event_df.columns) > 1:
- scale_list.append(float(event_df.iloc[0,1]))
+ scale_list.append(float(event_df.iloc[0, 1]))
else:
scale_list.append(1.0)
-
- # If GP_file contains multiple events
+
+ # If GP_file contains multiple events
if multipleEvents:
# Read the GP_file
if event_df.shape[0] > 1:
- for row in range(1,event_df.shape[0]):
- event_list.append(event_df.iloc[row,0])
+ for row in range(1, event_df.shape[0]):
+ event_list.append(event_df.iloc[row, 0])
# append the scale factor (or 1.0) to the scale list
if len(event_df.columns) > 1:
- scale_list.append(float(event_df.iloc[row,1]))
+ scale_list.append(float(event_df.iloc[row, 1]))
else:
scale_list.append(1.0)
# if the grid has intensity measures
elif event_type == 'intensityMeasure':
-
# save the collection file name and the IM row id
- event_list.append(closestPnt['GP_file']+f'x{0}')
+ event_list.append(closestPnt['GP_file'] + f'x{0}')
# IM collections are not scaled
scale_list.append(1.0)
- # If GP_file contains multiple events
+ # If GP_file contains multiple events
if multipleEvents:
# Read the GP_file
- GP_file = os.path.join(event_dir, closestPnt['GP_file'])
- GP_file_df = pd.read_csv(GP_file, header=0)
+ GP_file = os.path.join(event_dir, closestPnt['GP_file']) # noqa: PTH118, N806
+ GP_file_df = pd.read_csv(GP_file, header=0) # noqa: N806
if GP_file_df.shape[0] > 1:
- for row in range(1,GP_file_df.shape[0]):
- event_list.append(closestPnt['GP_file']+f'x{row}')
+ for row in range(1, GP_file_df.shape[0]):
+ event_list.append(closestPnt['GP_file'] + f'x{row}')
scale_list.append(1.0)
- # TODO: update the LLNL input data and remove this clause
+ # TODO: update the LLNL input data and remove this clause # noqa: TD002
else:
event_list = []
- for e, i in zip(nbr_samples, ind_list):
- event_list += [closestPnt['GP_file'],]*e
+ for e, i in zip(nbr_samples, ind_list): # noqa: B007, F821
+ event_list += [
+ closestPnt['GP_file'],
+ ] * e
scale_list = np.ones(len(event_list))
# prepare a dictionary of events
event_list_json = []
for e_i, event in enumerate(event_list):
- #event_list_json.append({
+ # event_list_json.append({
# #"EventClassification": "Earthquake",
# "fileName": f'{event}x{e_i:05d}',
# "factor": scale_list[e_i],
@@ -231,31 +242,32 @@ def create_event(asset_file, event_grid_file, multipleEvents, doParallel):
# })
event_list_json.append([f'{event}x{e_i:05d}', scale_list[e_i]])
-
# save the event dictionary to the AIM
- # save the event dictionary to the BIM
- asset_data['Events'] = [{}]
+ # save the event dictionary to the BIM
+ asset_data['Events'] = [{}]
asset_data['Events'][0] = {
- #"EventClassification": "Earthquake",
- "EventFolderPath": str(event_dir),
- "Events": event_list_json,
- "type": event_type
- #"type": "SimCenterEvents"
+ # "EventClassification": "Earthquake",
+ 'EventFolderPath': str(event_dir),
+ 'Events': event_list_json,
+ 'type': event_type,
+ # "type": "SimCenterEvents"
}
-
- with open(asset_file, 'w', encoding="utf-8") as f:
+
+ with open(asset_file, 'w', encoding='utf-8') as f: # noqa: PTH123
json.dump(asset_data, f, indent=2)
-if __name__ == '__main__':
+if __name__ == '__main__':
parser = argparse.ArgumentParser()
parser.add_argument('--assetFile')
parser.add_argument('--filenameEVENTgrid')
- parser.add_argument('--multipleEvents', default="True")
- parser.add_argument('--doParallel', default="False")
- parser.add_argument("-n", "--numP", default='8')
- parser.add_argument("-m", "--mpiExec", default='mpixece')
+ parser.add_argument('--multipleEvents', default='True')
+ parser.add_argument('--doParallel', default='False')
+ parser.add_argument('-n', '--numP', default='8')
+    parser.add_argument('-m', '--mpiExec', default='mpiexec')
args = parser.parse_args()
- create_event(args.assetFile, args.filenameEVENTgrid, args.multipleEvents, args.doParallel)
+ create_event(
+ args.assetFile, args.filenameEVENTgrid, args.multipleEvents, args.doParallel
+ )
diff --git a/modules/performSIMULATION/IMasEDP/IMasEDP.py b/modules/performSIMULATION/IMasEDP/IMasEDP.py
index 6b9673a69..bee9bd787 100644
--- a/modules/performSIMULATION/IMasEDP/IMasEDP.py
+++ b/modules/performSIMULATION/IMasEDP/IMasEDP.py
@@ -1,5 +1,4 @@
-# -*- coding: utf-8 -*-
-#
+# # noqa: INP001, D100
# Copyright (c) 2018 Leland Stanford Junior University
# Copyright (c) 2018 The Regents of the University of California
#
@@ -38,23 +37,26 @@
# Adam Zsarnóczay
#
-import os, sys
-import argparse, json
-import string
-import numpy as np
+import argparse
+import json
+import sys
from pathlib import Path, PurePath
-def write_RV(EVENT_input_path):
+import numpy as np
+
+def write_RV(EVENT_input_path): # noqa: C901, N802, N803, D103
# open the event file and get the list of events
- with open(EVENT_input_path, 'r', encoding="utf-8") as f:
- EVENT_in = json.load(f)
+ with open(EVENT_input_path, encoding='utf-8') as f: # noqa: PTH123
+ EVENT_in = json.load(f) # noqa: N806
# if there is a list of possible events, load all of them
- if len(EVENT_in['randomVariables'])>0:
+ if len(EVENT_in['randomVariables']) > 0:
event_list = EVENT_in['randomVariables'][0]['elements']
else:
- event_list = [EVENT_in['Events'][0]['event_id'],]
+ event_list = [
+ EVENT_in['Events'][0]['event_id'],
+ ]
evt = EVENT_in['Events'][0]
data_dir = Path(evt['data_dir'])
@@ -65,24 +67,27 @@ def write_RV(EVENT_input_path):
for e_i, event in enumerate(event_list):
filename, sample_id, __ = event.split('x')
- if filename not in file_sample_dict.keys():
+ if filename not in file_sample_dict:
file_sample_dict.update({filename: [[], []]})
file_sample_dict[filename][0].append(e_i)
file_sample_dict[filename][1].append(int(sample_id))
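+    # file_sample_dict maps each data file to ([positions in event_list], [sample row ids]) so every file is read only once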
- EDP_output = None
-
- for filename in file_sample_dict.keys():
+ EDP_output = None # noqa: N806
+ for filename in file_sample_dict:
# get the header
- header_data = np.genfromtxt(data_dir / filename, delimiter=',',
- names=None, max_rows=1, dtype=str,
- ndmin=1)
- header = header_data #.dtype.
-
- data = np.genfromtxt(data_dir / filename, delimiter=',',
- skip_header=1)
+ header_data = np.genfromtxt(
+ data_dir / filename,
+ delimiter=',',
+ names=None,
+ max_rows=1,
+ dtype=str,
+ ndmin=1,
+ )
+ header = header_data # .dtype.
+
+ data = np.genfromtxt(data_dir / filename, delimiter=',', skip_header=1)
# get the number of columns and reshape the data
col_count = len(header)
@@ -96,81 +101,86 @@ def write_RV(EVENT_input_path):
if EDP_output is None:
if len(samples.shape) > 1:
- EDP_output = np.zeros((len(event_list), samples.shape[1]))
+ EDP_output = np.zeros((len(event_list), samples.shape[1])) # noqa: N806
else:
- EDP_output = np.zeros(len(event_list))
+ EDP_output = np.zeros(len(event_list)) # noqa: N806
EDP_output[file_sample_dict[filename][0]] = samples
if len(EDP_output.shape) == 1:
- EDP_output = np.reshape(EDP_output, (EDP_output.shape[0], 1))
+ EDP_output = np.reshape(EDP_output, (EDP_output.shape[0], 1)) # noqa: N806
- EDP_output = EDP_output.T
+ EDP_output = EDP_output.T # noqa: N806
for c_i, col in enumerate(header):
f_i = f_scale.get(col.strip(), f_scale.get('ALL', None))
if f_i is None:
- raise ValueError(f"No units defined for {col}")
+ raise ValueError(f'No units defined for {col}') # noqa: EM102, TRY003
EDP_output[c_i] *= f_i
- EDP_output = EDP_output.T
+ EDP_output = EDP_output.T # noqa: N806
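+    # the transpose pair lets the loop above scale one EDP column at a time by its unit conversion factor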
- index = np.reshape(np.arange(EDP_output.shape[0]), (EDP_output.shape[0],1))
+ index = np.reshape(np.arange(EDP_output.shape[0]), (EDP_output.shape[0], 1))
- EDP_output = np.concatenate([index, EDP_output], axis=1)
+ EDP_output = np.concatenate([index, EDP_output], axis=1) # noqa: N806
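+    # prepend a running row index so the first column of response.csv numbers the samples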
working_dir = Path(PurePath(EVENT_input_path).parent)
- #working_dir = posixpath.dirname(EVENT_input_path)
+ # working_dir = posixpath.dirname(EVENT_input_path)
# prepare the header
header_out = []
for h_label in header:
# remove leading and trailing whitespace
- h_label = h_label.strip()
+ h_label = h_label.strip() # noqa: PLW2901
# convert suffixes to the loc-dir format used by the SimCenter
- if h_label.endswith('_h'): # horizontal
+ if h_label.endswith('_h'): # horizontal
header_out.append(f'1-{h_label[:-2]}-1-1')
- elif h_label.endswith('_v'): # vertical
+ elif h_label.endswith('_v'): # vertical
header_out.append(f'1-{h_label[:-2]}-1-3')
- elif h_label.endswith('_x'): # x direction
+ elif h_label.endswith('_x'): # x direction
header_out.append(f'1-{h_label[:-2]}-1-1')
- elif h_label.endswith('_y'): # y direction
+ elif h_label.endswith('_y'): # y direction
header_out.append(f'1-{h_label[:-2]}-1-2')
- else: # if none of the above is given, default to 1-1
+ else: # if none of the above is given, default to 1-1
header_out.append(f'1-{h_label.strip()}-1-1')
- np.savetxt(working_dir / 'response.csv', EDP_output, delimiter=',',
- header=','+', '.join(header_out), comments='')
+ np.savetxt(
+ working_dir / 'response.csv',
+ EDP_output,
+ delimiter=',',
+ header=',' + ', '.join(header_out),
+ comments='',
+ )
-# TODO: consider removing this function
-# It is not used currently
-def create_EDP(EVENT_input_path, EDP_input_path):
+# TODO: consider removing this function # noqa: TD002
+# It is not used currently
+def create_EDP(EVENT_input_path, EDP_input_path): # noqa: N802, N803, D103
# load the EDP file
- with open(EDP_input_path, 'r', encoding="utf-8") as f:
- EDP_in = json.load(f)
+ with open(EDP_input_path, encoding='utf-8') as f: # noqa: PTH123
+ EDP_in = json.load(f) # noqa: N806
# load the EVENT file
- with open(EVENT_input_path, 'r', encoding="utf-8") as f:
- EVENT_in = json.load(f)
+ with open(EVENT_input_path, encoding='utf-8') as f: # noqa: PTH123
+ EVENT_in = json.load(f) # noqa: N806
# store the IM(s) in the EDP file
- for edp in EDP_in["EngineeringDemandParameters"][0]["responses"]:
- for im in EVENT_in["Events"]:
- if edp["type"] in im.keys():
- edp["scalar_data"] = [im[edp["type"]]]
+ for edp in EDP_in['EngineeringDemandParameters'][0]['responses']:
+ for im in EVENT_in['Events']:
+ if edp['type'] in im.keys(): # noqa: SIM118
+ edp['scalar_data'] = [im[edp['type']]]
- with open(EDP_input_path, 'w', encoding="utf-8") as f:
+ with open(EDP_input_path, 'w', encoding='utf-8') as f: # noqa: PTH123
json.dump(EDP_in, f, indent=2)
-if __name__ == '__main__':
+if __name__ == '__main__':
parser = argparse.ArgumentParser()
parser.add_argument('--filenameAIM', default=None)
parser.add_argument('--filenameSAM', default=None)
@@ -183,4 +193,4 @@ def create_EDP(EVENT_input_path, EDP_input_path):
if args.getRV:
sys.exit(write_RV(args.filenameEVENT))
else:
- sys.exit(create_EDP(args.filenameEVENT, args.filenameEDP))
\ No newline at end of file
+ sys.exit(create_EDP(args.filenameEVENT, args.filenameEDP))
diff --git a/modules/performSIMULATION/customPy/customPySimulation.py b/modules/performSIMULATION/customPy/customPySimulation.py
index d7de2d4ab..ebc8f1edd 100644
--- a/modules/performSIMULATION/customPy/customPySimulation.py
+++ b/modules/performSIMULATION/customPy/customPySimulation.py
@@ -1,5 +1,4 @@
-# -*- coding: utf-8 -*-
-#
+# # noqa: INP001, D100
# Copyright (c) 2022 Leland Stanford Junior University
# Copyright (c) 2022 The Regents of the University of California
#
@@ -38,116 +37,126 @@
# Adam Zsarnóczay
#
-import os, sys
-import argparse, json
-import importlib, shutil
-
+import argparse
+import importlib
+import json
+import os
+import shutil
+import sys
from pathlib import Path
# import the common constants and methods
-this_dir = Path(os.path.dirname(os.path.abspath(__file__))).resolve()
+this_dir = Path(os.path.dirname(os.path.abspath(__file__))).resolve() # noqa: PTH100, PTH120
main_dir = this_dir.parents[1]
sys.path.insert(0, str(main_dir / 'common'))
-from simcenter_common import *
+from simcenter_common import * # noqa: E402, F403
-convert_EDP = {
- 'max_abs_acceleration' : 'PFA',
- 'max_rel_disp' : 'PFD',
- 'max_drift' : 'PID',
+convert_EDP = { # noqa: N816
+ 'max_abs_acceleration': 'PFA',
+ 'max_rel_disp': 'PFD',
+ 'max_drift': 'PID',
'max_roof_drift': 'PRD',
'residual_drift': 'RID',
- 'residual_disp': 'RFD'
+ 'residual_disp': 'RFD',
}
-def write_RV():
+def write_RV(): # noqa: N802, D103
# create an empty SIM file
- SIM = {}
+ SIM = {} # noqa: N806
- with open('SIM.json', 'w', encoding="utf-8") as f:
+ with open('SIM.json', 'w', encoding='utf-8') as f: # noqa: PTH123
json.dump(SIM, f, indent=2)
- # TODO: check simulation data exists and contains all important fields
- # TODO: get simulation data & write to SIM file
+ # TODO: check simulation data exists and contains all important fields # noqa: TD002
+ # TODO: get simulation data & write to SIM file # noqa: TD002
-def run_simulation(EVENT_input_path, SAM_input_path, AIM_input_path,
- EDP_input_path):
+def run_simulation(EVENT_input_path, SAM_input_path, AIM_input_path, EDP_input_path): # noqa: C901, N803, D103
# these imports are here to save time when the app is called without
# the -getRV flag
- import sys
+ import sys # noqa: PLC0415
- log_msg('Startring simulation script...')
+    log_msg('Starting simulation script...')  # noqa: F405
- working_dir = os.getcwd()
+ working_dir = os.getcwd() # noqa: PTH109
- sys.path.insert(0, os.getcwd())
+ sys.path.insert(0, os.getcwd()) # noqa: PTH109
# load the AIM file
- with open(AIM_input_path, 'r', encoding="utf-8") as f:
- AIM_in = json.load(f)
+ with open(AIM_input_path, encoding='utf-8') as f: # noqa: PTH123
+ AIM_in = json.load(f) # noqa: N806
# load the SAM file
- with open(SAM_input_path, 'r', encoding="utf-8") as f:
- SAM_in = json.load(f)
+ with open(SAM_input_path, encoding='utf-8') as f: # noqa: PTH123
+ SAM_in = json.load(f) # noqa: N806
# load the event file
- with open(EVENT_input_path, 'r', encoding="utf-8") as f:
- EVENT_in = json.load(f)['Events'][0]
+ with open(EVENT_input_path, encoding='utf-8') as f: # noqa: PTH123
+ EVENT_in = json.load(f)['Events'][0] # noqa: N806
# load the EDP file
- with open(EDP_input_path, 'r', encoding="utf-8") as f:
- EDP_in = json.load(f)
+ with open(EDP_input_path, encoding='utf-8') as f: # noqa: PTH123
+ EDP_in = json.load(f) # noqa: N806
# KZ: commented out --> we're running at the current workdir
- #sys.path.insert(0, SAM_in['modelPath'])
- #os.chdir(SAM_in['modelPath'])
- #print(os.listdir(os.getcwd()))
- #print(os.getcwd())
+ # sys.path.insert(0, SAM_in['modelPath'])
+ # os.chdir(SAM_in['modelPath'])
+ # print(os.listdir(os.getcwd()))
+ # print(os.getcwd())
custom_script_path = SAM_in['mainScript']
# copy the custom scripts to the current directory if not yet
- if os.path.exists(custom_script_path):
+ if os.path.exists(custom_script_path): # noqa: PTH110
pass
else:
- custom_script_dir = SAM_in.get('modelPath',None)
+ custom_script_dir = SAM_in.get('modelPath', None)
if custom_script_dir is None:
- log_msg('No modelPath found in the SAM file.')
+ log_msg('No modelPath found in the SAM file.') # noqa: F405
else:
- shutil.copytree(custom_script_dir,os.getcwd(),dirs_exist_ok=True)
- log_msg('Custom scripts copied from {} to {}'.format(custom_script_dir,os.getcwd()))
+ shutil.copytree(custom_script_dir, os.getcwd(), dirs_exist_ok=True) # noqa: PTH109
+ log_msg( # noqa: F405
+ f'Custom scripts copied from {custom_script_dir} to {os.getcwd()}' # noqa: PTH109
+ )
custom_script = importlib.__import__(
- custom_script_path[:-3], globals(), locals(), ['custom_analysis',], 0)
+ custom_script_path[:-3],
+ globals(),
+ locals(),
+ [
+ 'custom_analysis',
+ ],
+ 0,
+ )
custom_analysis = custom_script.custom_analysis
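+    # the user-supplied module is expected to expose custom_analysis(AIM=..., EVENT=..., SAM=..., EDP=...) returning a dict of EDP values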
# run the analysis
- EDP_res = custom_analysis(AIM=AIM_in, EVENT=EVENT_in, SAM=SAM_in, EDP=EDP_in)
+ EDP_res = custom_analysis(AIM=AIM_in, EVENT=EVENT_in, SAM=SAM_in, EDP=EDP_in) # noqa: N806
os.chdir(working_dir)
- results_txt = ""
+ results_txt = ''
- EDP_list = EDP_in['EngineeringDemandParameters'][0]['responses']
+ EDP_list = EDP_in['EngineeringDemandParameters'][0]['responses'] # noqa: N806
# KZ: rewriting the parsing step of EDP_res to EDP_list
- for response in EDP_list:
- print('response = ', response)
+ for response in EDP_list: # noqa: PLR1702
+ print('response = ', response) # noqa: T201
response['scalar_data'] = []
try:
val = EDP_res.get(response['type'], None)
- print('val = ', val)
+ print('val = ', val) # noqa: T201
if val is None:
# try conversion
edp_name = convert_EDP.get(response['type'], None)
- print('edp_name = ', edp_name)
+ print('edp_name = ', edp_name) # noqa: T201
if edp_name is not None:
if 'PID' in edp_name:
cur_floor = response['floor2']
- dofs = response.get('dofs',[])
+ dofs = response.get('dofs', [])
elif 'PRD' in edp_name:
cur_floor = response['floor2']
dofs = response['dofs']
@@ -155,12 +164,14 @@ def run_simulation(EVENT_input_path, SAM_input_path, AIM_input_path,
cur_floor = response['floor']
dofs = response['dofs']
if len(dofs) == 0:
- dofs = [1, 2] #default is bidirection
+                            dofs = [1, 2]  # default is bidirectional
response['dofs'] = dofs
- print('dofs = ', dofs)
+ print('dofs = ', dofs) # noqa: T201
for cur_dof in dofs:
- key_name = '1-'+edp_name+'-{}-{}'.format(int(cur_floor), int(cur_dof))
- print('key_name = ', key_name)
+ key_name = (
+ '1-' + edp_name + f'-{int(cur_floor)}-{int(cur_dof)}'
+ )
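+                            # results from custom_analysis are looked up by keys of the form '1-<EDP>-<floor>-<dof>'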
+ print('key_name = ', key_name) # noqa: T201
res = EDP_res.get(key_name, None)
if res is None:
response['scalar_data'].append('NaN')
@@ -168,27 +179,27 @@ def run_simulation(EVENT_input_path, SAM_input_path, AIM_input_path,
else:
response['scalar_data'].append(float(EDP_res[key_name]))
results_txt += str(float(EDP_res[key_name])) + ' '
- print('response = ', response)
+ print('response = ', response) # noqa: T201
else:
response['scalar_data'] = ['NaN']
results_txt += 'NaN '
else:
response['scalar_data'] = [float(val)]
results_txt += str(float(EDP_res[response['type']])) + ' '
- except:
+ except: # noqa: E722
response['scalar_data'] = ['NaN']
results_txt += 'NaN '
- #edp = EDP_res[response['type']][response['id']]
- #print(edp)
+ # edp = EDP_res[response['type']][response['id']]
+ # print(edp)
- #response['scalar_data'] = edp # [val for dof, val in edp.items()]
- #print(response)
+ # response['scalar_data'] = edp # [val for dof, val in edp.items()]
+ # print(response)
results_txt = results_txt[:-1]
- with open(EDP_input_path, 'w', encoding="utf-8") as f:
+ with open(EDP_input_path, 'w', encoding='utf-8') as f: # noqa: PTH123
json.dump(EDP_in, f, indent=2)
- with open('results.out', 'w', encoding="utf-8") as f:
+ with open('results.out', 'w', encoding='utf-8') as f: # noqa: FURB103, PTH123
f.write(results_txt)
"""
@@ -293,29 +304,29 @@ def run_simulation(EVENT_input_path, SAM_input_path, AIM_input_path,
json.dump(EDP_in, f, indent=2)
"""
- log_msg('Simulation script finished.')
+ log_msg('Simulation script finished.') # noqa: F405
-if __name__ == '__main__':
+if __name__ == '__main__':
parser = argparse.ArgumentParser()
- parser.add_argument('--filenameAIM',
- default=None)
+ parser.add_argument('--filenameAIM', default=None)
parser.add_argument('--filenameSAM')
parser.add_argument('--filenameEVENT')
- parser.add_argument('--filenameEDP',
- default=None)
- parser.add_argument('--filenameSIM',
- default=None)
- parser.add_argument('--getRV',
- default=False,
- nargs='?', const=True)
+ parser.add_argument('--filenameEDP', default=None)
+ parser.add_argument('--filenameSIM', default=None)
+ parser.add_argument('--getRV', default=False, nargs='?', const=True)
args = parser.parse_args()
if args.getRV:
sys.exit(write_RV())
else:
- sys.exit(run_simulation(
- args.filenameEVENT, args.filenameSAM, args.filenameAIM,
- args.filenameEDP))
+ sys.exit(
+ run_simulation(
+ args.filenameEVENT,
+ args.filenameSAM,
+ args.filenameAIM,
+ args.filenameEDP,
+ )
+ )
diff --git a/modules/performSIMULATION/openSees/OpenSeesPreprocessor.cpp b/modules/performSIMULATION/openSees/OpenSeesPreprocessor.cpp
index 8f720a3a4..8d0e2a088 100644
--- a/modules/performSIMULATION/openSees/OpenSeesPreprocessor.cpp
+++ b/modules/performSIMULATION/openSees/OpenSeesPreprocessor.cpp
@@ -46,7 +46,7 @@ OpenSeesPreprocessor::writeRV(const char *AIM,
//
// TO DO .. check simulation data exists and contains all fields
- // .. would stop dakota from runnning
+ // .. would stop dakota from running
//
//
@@ -981,7 +981,7 @@ OpenSeesPreprocessor::processEvents(ofstream &s){
}
-// seperate for multi events
+// separate for multi events
int
OpenSeesPreprocessor::processEvent(ofstream &s,
json_t *event,
diff --git a/modules/performSIMULATION/openSees/OpenSeesSimulation.py b/modules/performSIMULATION/openSees/OpenSeesSimulation.py
index 33a86a404..c89a0bf1c 100644
--- a/modules/performSIMULATION/openSees/OpenSeesSimulation.py
+++ b/modules/performSIMULATION/openSees/OpenSeesSimulation.py
@@ -1,50 +1,54 @@
-#!/usr/bin/env python3
+#!/usr/bin/env python3 # noqa: CPY001, D100, EXE001
-import sys
import os
-import subprocess
-#from pathlib import Path
+import subprocess # noqa: S404
+import sys
-def main(args):
+# from pathlib import Path
+
+def main(args): # noqa: D103
# set filenames
- aimName = args[1]
- samName = args[3]
- evtName = args[5]
- edpName = args[7]
- simName = args[9]
+ aimName = args[1] # noqa: N806
+ samName = args[3] # noqa: N806
+ evtName = args[5] # noqa: N806
+ edpName = args[7] # noqa: N806
+ simName = args[9] # noqa: N806
# remove path to AIM file, so recorders are not messed up
    # .. AIM file to be read is in current dir (copy elsewhere)
- aimName = os.path.basename(aimName)
- scriptDir = os.path.dirname(os.path.realpath(__file__))
+ aimName = os.path.basename(aimName) # noqa: PTH119, N806
+ scriptDir = os.path.dirname(os.path.realpath(__file__)) # noqa: PTH120, N806
# aimName = Path(args[1]).name
# scriptDir = Path(__file__).resolve().parent
- #If requesting random variables run getUncertainty
- #Otherwise, Run Opensees
- if "--getRV" in args:
- getUncertaintyCommand = '"{}/OpenSeesPreprocessor" {} {} {} {}'.format(scriptDir, aimName, samName, evtName, simName)
- exit_code = subprocess.Popen(getUncertaintyCommand, shell=True).wait()
- #exit_code = subprocess.run(getUncertaintyCommand, shell=True).returncode
- #if not exit_code==0:
+    # If requesting random variables, run getUncertainty
+    # Otherwise, run OpenSees
+ if '--getRV' in args:
+ getUncertaintyCommand = f'"{scriptDir}/OpenSeesPreprocessor" {aimName} {samName} {evtName} {simName}' # noqa: N806
+ exit_code = subprocess.Popen(getUncertaintyCommand, shell=True).wait() # noqa: S602
+ # exit_code = subprocess.run(getUncertaintyCommand, shell=True).returncode
+ # if not exit_code==0:
# exit(exit_code)
else:
- #Run preprocessor
- preprocessorCommand = '"{}/OpenSeesPreprocessor" {} {} {} {} {} example.tcl'.format(scriptDir, aimName, samName, evtName, edpName, simName)
- exit_code = subprocess.Popen(preprocessorCommand, shell=True).wait()
+ # Run preprocessor
+ preprocessorCommand = f'"{scriptDir}/OpenSeesPreprocessor" {aimName} {samName} {evtName} {edpName} {simName} example.tcl' # noqa: N806
+ exit_code = subprocess.Popen(preprocessorCommand, shell=True).wait() # noqa: S602
# exit_code = subprocess.run(preprocessorCommand, shell=True).returncode # Maybe better for compatibility - jb
- #if not exit_code==0:
+ # if not exit_code==0:
# exit(exit_code)
- #Run OpenSees
- exit_code = subprocess.Popen("OpenSees example.tcl >> workflow.err 2>&1", shell=True).wait()
+ # Run OpenSees
+ exit_code = subprocess.Popen( # noqa: S602
+ 'OpenSees example.tcl >> workflow.err 2>&1', # noqa: S607
+ shell=True,
+ ).wait()
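+        # run the generated example.tcl with OpenSees, appending all output to workflow.err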
# Maybe better for compatibility, need to doublecheck - jb
- #exit_code = subprocess.run("OpenSees example.tcl >> workflow.err 2>&1", shell=True, stdout=subprocess.PIPE, stderr=subprocess.PIPE).returncode
-
- #if os.path.isfile("./workflow.err"):
- # with open("./workflow.err", 'r') as file:
+ # exit_code = subprocess.run("OpenSees example.tcl >> workflow.err 2>&1", shell=True, stdout=subprocess.PIPE, stderr=subprocess.PIPE).returncode
+
+ # if os.path.isfile("./workflow.err"):
+ # with open("./workflow.err", 'r') as file:
# lines = file.readlines()
# # Iterate through each line
# for line in lines:
@@ -53,14 +57,13 @@ def main(args):
# exit_code = -1
# exit(exit_code)
- #Run postprocessor
- postprocessorCommand = '"{}/OpenSeesPostprocessor" {} {} {} {}'.format(scriptDir, aimName, samName, evtName, edpName)
- exit_code = subprocess.Popen(postprocessorCommand, shell=True).wait()
+ # Run postprocessor
+ postprocessorCommand = f'"{scriptDir}/OpenSeesPostprocessor" {aimName} {samName} {evtName} {edpName}' # noqa: N806
+ exit_code = subprocess.Popen(postprocessorCommand, shell=True).wait() # noqa: S602, F841
# exit_code = subprocess.run(postprocessorCommand, shell=True).returncode # Maybe better for compatibility - jb
# if not exit_code==0:
# exit(exit_code)
if __name__ == '__main__':
-
main(sys.argv[1:])
diff --git a/modules/performSIMULATION/openSeesPy/OpenSeesPySimulation.py b/modules/performSIMULATION/openSeesPy/OpenSeesPySimulation.py
index 2884963b7..0995cc299 100644
--- a/modules/performSIMULATION/openSeesPy/OpenSeesPySimulation.py
+++ b/modules/performSIMULATION/openSeesPy/OpenSeesPySimulation.py
@@ -1,5 +1,4 @@
-# -*- coding: utf-8 -*-
-#
+# # noqa: INP001, D100
# Copyright (c) 2018 Leland Stanford Junior University
# Copyright (c) 2018 The Regents of the University of California
#
@@ -39,57 +38,58 @@
# Joanna J. Zou
#
-import os, sys
-import argparse, json
+import argparse
import importlib
-
+import json
+import os
+import sys
from pathlib import Path
# import the common constants and methods
-this_dir = Path(os.path.dirname(os.path.abspath(__file__))).resolve()
+this_dir = Path(os.path.dirname(os.path.abspath(__file__))).resolve() # noqa: PTH100, PTH120
main_dir = this_dir.parents[1]
sys.path.insert(0, str(main_dir / 'common'))
-from simcenter_common import *
+from simcenter_common import * # noqa: E402, F403
-convert_EDP = {
- 'max_abs_acceleration' : 'PFA',
- 'max_rel_disp' : 'PFD',
- 'max_drift' : 'PID',
+convert_EDP = { # noqa: N816
+ 'max_abs_acceleration': 'PFA',
+ 'max_rel_disp': 'PFD',
+ 'max_drift': 'PID',
'max_roof_drift': 'PRD',
'residual_drift': 'RID',
- 'residual_disp': 'RFD'
+ 'residual_disp': 'RFD',
}
-def write_RV():
+def write_RV(): # noqa: N802, D103
pass
- # TODO: check simulation data exists and contains all important fields
- # TODO: get simulation data & write to SIM file
+ # TODO: check simulation data exists and contains all important fields # noqa: TD002
+ # TODO: get simulation data & write to SIM file # noqa: TD002
-def run_openseesPy(EVENT_input_path, SAM_input_path, BIM_input_path,
- EDP_input_path):
+def run_openseesPy(EVENT_input_path, SAM_input_path, BIM_input_path, EDP_input_path): # noqa: C901, N802, N803, D103
# these imports are here to save time when the app is called without
# the -getRV flag
- import sys
- import numpy as np
- import openseespy.opensees as ops
+ import sys # noqa: PLC0415
+
+ import numpy as np # noqa: PLC0415
+ import openseespy.opensees as ops # noqa: PLC0415
- log_msg('Startring simulation script...')
+    log_msg('Starting simulation script...')  # noqa: F405
- sys.path.insert(0, os.getcwd())
+ sys.path.insert(0, os.getcwd()) # noqa: PTH109
# load the model builder script
- with open(BIM_input_path, 'r', encoding="utf-8") as f:
- BIM_in = json.load(f)
+ with open(BIM_input_path, encoding='utf-8') as f: # noqa: PTH123
+ BIM_in = json.load(f) # noqa: N806
model_params = BIM_in['GeneralInformation']
- with open(SAM_input_path, 'r', encoding="utf-8") as f:
- SAM_in = json.load(f)
+ with open(SAM_input_path, encoding='utf-8') as f: # noqa: PTH123
+ SAM_in = json.load(f) # noqa: N806
sys.path.insert(0, SAM_in['modelPath'])
@@ -97,11 +97,22 @@ def run_openseesPy(EVENT_input_path, SAM_input_path, BIM_input_path,
dof_map = [int(dof) for dof in SAM_in['dofMap'].split(',')]
- node_map = dict([(int(entry['floor']), int(entry['node']))
- for entry in SAM_in['NodeMapping']])
+ node_map = dict( # noqa: C404
+ [
+ (int(entry['floor']), int(entry['node']))
+ for entry in SAM_in['NodeMapping']
+ ]
+ )
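+    # node_map: floor number -> OpenSees node tag used when collecting floor responses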
model_script = importlib.__import__(
- model_script_path[:-3], globals(), locals(), ['build_model',], 0)
+ model_script_path[:-3],
+ globals(),
+ locals(),
+ [
+ 'build_model',
+ ],
+ 0,
+ )
build_model = model_script.build_model
ops.wipe()
@@ -110,31 +121,61 @@ def run_openseesPy(EVENT_input_path, SAM_input_path, BIM_input_path,
build_model(model_params=model_params)
# load the event file
- with open(EVENT_input_path, 'r', encoding="utf-8") as f:
- EVENT_in = json.load(f)['Events'][0]
+ with open(EVENT_input_path, encoding='utf-8') as f: # noqa: PTH123
+ EVENT_in = json.load(f)['Events'][0] # noqa: N806
event_list = EVENT_in['timeSeries']
pattern_list = EVENT_in['pattern']
- # TODO: use dictionary
+ # TODO: use dictionary # noqa: TD002
pattern_ts_link = [p['timeSeries'] for p in pattern_list]
- TS_list = []
+ TS_list = [] # noqa: N806
# define the time series
for evt_i, event in enumerate(event_list):
-
- ops.timeSeries('Path', evt_i+2, '-dt', event['dT'], '-factor', 1.0,
- '-values', *event['data'], '-prependZero')
+ ops.timeSeries(
+ 'Path',
+ evt_i + 2,
+ '-dt',
+ event['dT'],
+ '-factor',
+ 1.0,
+ '-values',
+ *event['data'],
+ '-prependZero',
+ )
pat = pattern_list[pattern_ts_link.index(event['name'])]
- ops.pattern('UniformExcitation', evt_i+2, dof_map[pat['dof']-1], '-accel', evt_i+2)
-
- TS_list.append(list(np.array([0.,] + event['data'])))
+ ops.pattern(
+ 'UniformExcitation',
+ evt_i + 2,
+ dof_map[pat['dof'] - 1],
+ '-accel',
+ evt_i + 2,
+ )
+
+ TS_list.append(
+ list(
+ np.array(
+ [
+ 0.0,
+ ]
+ + event['data']
+ )
+ )
+ )
# load the analysis script
analysis_script = importlib.__import__(
- model_script_path[:-3], globals(), locals(), ['run_analysis',], 0)
+ model_script_path[:-3],
+ globals(),
+ locals(),
+ [
+ 'run_analysis',
+ ],
+ 0,
+ )
run_analysis = analysis_script.run_analysis
recorder_nodes = SAM_in['recorderNodes']
@@ -142,33 +183,39 @@ def run_openseesPy(EVENT_input_path, SAM_input_path, BIM_input_path,
# create the EDP specification
# load the EDP file
- with open(EDP_input_path, 'r', encoding="utf-8") as f:
- EDP_in = json.load(f)
+ with open(EDP_input_path, encoding='utf-8') as f: # noqa: PTH123
+ EDP_in = json.load(f) # noqa: N806
- EDP_list = EDP_in['EngineeringDemandParameters'][0]['responses']
+ EDP_list = EDP_in['EngineeringDemandParameters'][0]['responses'] # noqa: N806
edp_specs = {}
for response in EDP_list:
-
if response['type'] in list(convert_EDP.keys()):
response['type'] = convert_EDP[response['type']]
- if response['type'] not in edp_specs.keys():
+ if response['type'] not in edp_specs:
edp_specs.update({response['type']: {}})
if 'node' in list(response.keys()):
-
if response.get('id', None) is None:
response.update({'id': 0})
- edp_specs[response['type']].update({
- response['id']: dict([(dof, list(np.atleast_1d(response['node'])))
- for dof in response['dofs']])})
+ edp_specs[response['type']].update(
+ {
+ response['id']: dict( # noqa: C404
+ [
+ (dof, list(np.atleast_1d(response['node'])))
+ for dof in response['dofs']
+ ]
+ )
+ }
+ )
else:
-
if response.get('floor', None) is not None:
floor = int(response['floor'])
- node_list = [node_map[floor],]
+ node_list = [
+ node_map[floor],
+ ]
else:
floor = int(response['floor2'])
floor1 = int(response['floor1'])
@@ -177,57 +224,64 @@ def run_openseesPy(EVENT_input_path, SAM_input_path, BIM_input_path,
if response.get('id', None) is None:
response.update({'id': floor})
if floor is not None:
- edp_specs[response['type']].update({
- response['id']: dict([(dof, node_list)
- for dof in response['dofs']])})
-
- #for edp_name, edp_data in edp_specs.items():
+ edp_specs[response['type']].update(
+ {
+ response['id']: dict( # noqa: C404
+ [(dof, node_list) for dof in response['dofs']]
+ )
+ }
+ )
+
+ # for edp_name, edp_data in edp_specs.items():
# print(edp_name, edp_data)
# run the analysis
- # TODO: default analysis script
- EDP_res = run_analysis(GM_dt = EVENT_in['dT'],
+ # TODO: default analysis script # noqa: TD002
+ EDP_res = run_analysis( # noqa: N806
+ GM_dt=EVENT_in['dT'],
GM_npts=EVENT_in['numSteps'],
- TS_List = TS_list, EDP_specs = edp_specs,
- model_params = model_params)
+ TS_List=TS_list,
+ EDP_specs=edp_specs,
+ model_params=model_params,
+ )
# save the EDP results
- #print(EDP_res)
+ # print(EDP_res)
for response in EDP_list:
edp = EDP_res[response['type']][response['id']]
- #print(edp)
+ # print(edp)
- response['scalar_data'] = edp # [val for dof, val in edp.items()]
- #print(response)
+ response['scalar_data'] = edp # [val for dof, val in edp.items()]
+ # print(response)
- with open(EDP_input_path, 'w', encoding="utf-8") as f:
+ with open(EDP_input_path, 'w', encoding='utf-8') as f: # noqa: PTH123
json.dump(EDP_in, f, indent=2)
- log_msg('Simulation script finished.')
+ log_msg('Simulation script finished.') # noqa: F405
-if __name__ == '__main__':
+if __name__ == '__main__':
parser = argparse.ArgumentParser()
- parser.add_argument('--filenameAIM',
- default=None)
+ parser.add_argument('--filenameAIM', default=None)
parser.add_argument('--filenameSAM')
parser.add_argument('--filenameEVENT')
- parser.add_argument('--filenameEDP',
- default=None)
- parser.add_argument('--filenameSIM',
- default=None)
- parser.add_argument('--getRV',
- default=False,
- nargs='?', const=True)
+ parser.add_argument('--filenameEDP', default=None)
+ parser.add_argument('--filenameSIM', default=None)
+ parser.add_argument('--getRV', default=False, nargs='?', const=True)
args = parser.parse_args()
if args.getRV:
sys.exit(write_RV())
else:
- sys.exit(run_openseesPy(
- args.filenameEVENT, args.filenameSAM, args.filenameAIM,
- args.filenameEDP))
+ sys.exit(
+ run_openseesPy(
+ args.filenameEVENT,
+ args.filenameSAM,
+ args.filenameAIM,
+ args.filenameEDP,
+ )
+ )
diff --git a/modules/performSIMULATION/openSees_R/OpenSeesConcreteShearWalls.cpp b/modules/performSIMULATION/openSees_R/OpenSeesConcreteShearWalls.cpp
index f0bde7483..aa7134eb7 100644
--- a/modules/performSIMULATION/openSees_R/OpenSeesConcreteShearWalls.cpp
+++ b/modules/performSIMULATION/openSees_R/OpenSeesConcreteShearWalls.cpp
@@ -1030,7 +1030,7 @@ int OpenSeesConcreteShearWalls::processEvents(ofstream &s)
return 0;
}
-// seperate for multi events
+// separate for multi events
int OpenSeesConcreteShearWalls::processEvent(ofstream &s,
json_t *event,
int &numPattern,
@@ -1040,7 +1040,7 @@ int OpenSeesConcreteShearWalls::processEvent(ofstream &s,
json_t *pattern;
const char *eventType = json_string_value(json_object_get(event, "type"));
- if (strcmp(eventType, "Seismic") == 0 || strcmp(eventType, "Cyclic") == 0) // TODO: seperate Seismic with static cyclic
+ if (strcmp(eventType, "Seismic") == 0 || strcmp(eventType, "Cyclic") == 0) // TODO: separate Seismic with static cyclic
{
analysisType = 1;
numSteps = json_integer_value(json_object_get(event, "numSteps"));
diff --git a/modules/performSIMULATION/openSees_R/OpenSeesPreprocessor.cpp b/modules/performSIMULATION/openSees_R/OpenSeesPreprocessor.cpp
index ee13ae9d5..50bef72a8 100644
--- a/modules/performSIMULATION/openSees_R/OpenSeesPreprocessor.cpp
+++ b/modules/performSIMULATION/openSees_R/OpenSeesPreprocessor.cpp
@@ -579,7 +579,7 @@ OpenSeesPreprocessor::processEvents(ofstream &s){
}
-// seperate for multi events
+// separate for multi events
int
OpenSeesPreprocessor::processEvent(ofstream &s,
json_t *event,
diff --git a/modules/performSIMULATION/openSees_R/OpenSeesSimulation.py b/modules/performSIMULATION/openSees_R/OpenSeesSimulation.py
index 9e0ab1131..5b132f84f 100644
--- a/modules/performSIMULATION/openSees_R/OpenSeesSimulation.py
+++ b/modules/performSIMULATION/openSees_R/OpenSeesSimulation.py
@@ -1,31 +1,35 @@
+import os # noqa: CPY001, D100, INP001
+import subprocess # noqa: S404
import sys
-import os
-import subprocess
-inputArgs = sys.argv
+inputArgs = sys.argv # noqa: N816
# set filenames
-bimName = sys.argv[2]
-samName = sys.argv[4]
-evtName = sys.argv[6]
-edpName = sys.argv[8]
-simName = sys.argv[10]
+bimName = sys.argv[2] # noqa: N816
+samName = sys.argv[4] # noqa: N816
+evtName = sys.argv[6] # noqa: N816
+edpName = sys.argv[8] # noqa: N816
+simName = sys.argv[10] # noqa: N816
-scriptDir = os.path.dirname(os.path.realpath(__file__))
+scriptDir = os.path.dirname(os.path.realpath(__file__)) # noqa: PTH120, N816
-#If requesting random variables run getUncertainty
-#Otherwise, Run Opensees
-if (("-getRV" in inputArgs) or ("--getRV" in inputArgs)):
- getUncertaintyCommand = '"{}/getUncertainty" {} {} {} {}'.format(scriptDir, bimName, samName, evtName, simName)
+# If requesting random variables run getUncertainty
+# Otherwise, Run Opensees
+if ('-getRV' in inputArgs) or ('--getRV' in inputArgs):
+ getUncertaintyCommand = ( # noqa: N816
+ f'"{scriptDir}/getUncertainty" {bimName} {samName} {evtName} {simName}'
+ )
subprocess.Popen(args=getUncertaintyCommand, shell=True).wait()
else:
- #Run preprocessor
- preprocessorCommand = u'"{}/mainPreprocessor" {} {} {} {} example.tcl'.format(scriptDir, bimName, samName, evtName, edpName)
- subprocess.Popen(preprocessorCommand, shell=True).wait()
+ # Run preprocessor
+ preprocessorCommand = f'"{scriptDir}/mainPreprocessor" {bimName} {samName} {evtName} {edpName} example.tcl' # noqa: N816
+ subprocess.Popen(preprocessorCommand, shell=True).wait() # noqa: S602
- #Run OpenSees
- subprocess.Popen("OpenSees example.tcl", shell=True).wait()
+ # Run OpenSees
+ subprocess.Popen('OpenSees example.tcl', shell=True).wait() # noqa: S602, S607
- #Run postprocessor
- postprocessorCommand = u'"{}/mainPostprocessor" {} {} {} {}'.format(scriptDir, bimName, samName, evtName, edpName)
- subprocess.Popen(postprocessorCommand, shell=True).wait()
\ No newline at end of file
+ # Run postprocessor
+ postprocessorCommand = ( # noqa: N816
+ f'"{scriptDir}/mainPostprocessor" {bimName} {samName} {evtName} {edpName}'
+ )
+ subprocess.Popen(postprocessorCommand, shell=True).wait() # noqa: S602
diff --git a/modules/performSIMULATION/surrogateRegionalPy/SurrogateRegionalPy.py b/modules/performSIMULATION/surrogateRegionalPy/SurrogateRegionalPy.py
index 6f982510b..d40023a6a 100644
--- a/modules/performSIMULATION/surrogateRegionalPy/SurrogateRegionalPy.py
+++ b/modules/performSIMULATION/surrogateRegionalPy/SurrogateRegionalPy.py
@@ -1,5 +1,4 @@
-# -*- coding: utf-8 -*-
-#
+# # noqa: INP001, D100
# Copyright (c) 2018 Leland Stanford Junior University
# Copyright (c) 2018 The Regents of the University of California
#
@@ -34,7 +33,7 @@
# You should have received a copy of the BSD 3-Clause License along with
# this file. If not, see .
#
-# Contributors:
+# Contributors:
# Sang-ri
#
@@ -44,245 +43,330 @@
#
-import sys
-import os
-import subprocess
-import importlib
import argparse
+import importlib
import json
-from pathlib import Path
+import os
+import sys
-def main(aimName,samName, evtName,
- edpName, simName, getRV):
-
+
+def main(aimName, samName, evtName, edpName, simName, getRV): # noqa: N803, D103
#
# Find the GI and SAM files
#
- with open(aimName, 'r', encoding='utf-8') as f:
- root_AIM = json.load(f)
- GI = root_AIM['GeneralInformation']
+ with open(aimName, encoding='utf-8') as f: # noqa: PTH123
+ root_AIM = json.load(f) # noqa: N806
+ GI = root_AIM['GeneralInformation'] # noqa: N806
- with open(samName, 'r', encoding='utf-8') as f:
- SAM = json.load(f)
+ with open(samName, encoding='utf-8') as f: # noqa: PTH123
+ SAM = json.load(f) # noqa: N806
#
# Get user-uploaded filter script
#
# sy - so far works only for single model
- filterFileName = root_AIM['Simulation']['filterFileName']
- filateFilePath = root_AIM['Simulation']['filterFilePath']
+ filterFileName = root_AIM['Simulation']['filterFileName'] # noqa: N806
+ filateFilePath = root_AIM['Simulation']['filterFilePath'] # noqa: N806
sys.path.insert(0, filateFilePath)
- analysis_script = importlib.__import__(filterFileName[:-3], globals(), locals(), ['model_distributor',], 0)
+ analysis_script = importlib.__import__(
+ filterFileName[:-3],
+ globals(),
+ locals(),
+ [
+ 'model_distributor',
+ ],
+ 0,
+ )
model_distributor = analysis_script.model_distributor
- modelName = model_distributor(GI,SAM)
+ modelName = model_distributor(GI, SAM) # noqa: N806
if getRV:
- runDefault(root_AIM, aimName,samName, evtName, edpName, simName, getRV)
+ runDefault(root_AIM, aimName, samName, evtName, edpName, simName, getRV)
return
#
# Parse filter file
#
- if modelName.lower() == "none":
- pass
- elif modelName.lower() =="error":
+ if modelName.lower() == 'none' or modelName.lower() == 'error':
pass
- elif modelName.lower() =="default":
- runDefault(root_AIM, aimName,samName, evtName, edpName, simName)
+ elif modelName.lower() == 'default':
+ runDefault(root_AIM, aimName, samName, evtName, edpName, simName)
else:
runSurrogate(modelName, GI, SAM, root_AIM, aimName, edpName)
-def runDefault(root_AIM, aimName,samName, evtName, edpName, simName, getRV=False):
-
- #
- # Find app name
- #
- mySimAppName = root_AIM['Simulation']['DefaultAnalysis']['Buildings']['Application']
-
- #
- # overwrite with default AIM.json file
- #
- root_AIM['Simulation'] = root_AIM['Simulation']['DefaultAnalysis']['Buildings']
-
- currentDir = os.getcwd()
- newAimName = os.path.join(currentDir,os.path.basename(aimName))
-
- with open(newAimName, 'w', encoding='utf-8') as f:
- json_object = json.dumps(root_AIM)
- f.write(json_object)
- #
- # overwrite with default AIM.json file
- #
- s=[os.path.dirname( __file__ ),'..','..','Workflow','WorkflowApplications.json']
- workflowAppJsonPath = os.path.join(*s)
- with open(workflowAppJsonPath, 'r', encoding='utf-8') as f:
- workflowAppDict = json.load(f)
- appList = workflowAppDict["SimulationApplications"]["Applications"]
- myApp = next(item for item in appList if item["Name"] == mySimAppName)
- s = [os.path.dirname( __file__ ),'..','..','..', os.path.dirname(myApp["ExecutablePath"])]
- mySimAppPath = os.path.join(*s)
- mySimAppName = os.path.basename(myApp["ExecutablePath"])
-
- #
- # run correct backend app
- #
- # print(newAimName)
- sys.path.insert(0, mySimAppPath)
- sim_module = importlib.__import__(mySimAppName[:-3], globals(), locals(), ['main'], 0)
-
- if getRV:
- sim_module.main(["--filenameAIM", newAimName, "--filenameSAM", samName, "--filenameEVENT", evtName, "--filenameEDP", edpName, "--filenameSIM", simName, "--getRV"])
-
- else:
- sim_module.main(["--filenameAIM", newAimName, "--filenameSAM", samName, "--filenameEVENT", evtName, "--filenameEDP", edpName, "--filenameSIM", simName])
-
- return
-def runSurrogate(modelName, GI, SAM, root_AIM, aimName, edpName):
+def runDefault(root_AIM, aimName, samName, evtName, edpName, simName, getRV=False): # noqa: FBT002, N802, N803, D103
+ #
+ # Find app name
+ #
+ mySimAppName = root_AIM['Simulation']['DefaultAnalysis']['Buildings'][ # noqa: N806
+ 'Application'
+ ]
- #
- # Augment to params.in file
- #
+ #
+ # overwrite with default AIM.json file
+ #
+ root_AIM['Simulation'] = root_AIM['Simulation']['DefaultAnalysis']['Buildings']
- GIkeys = ["Latitude","Longitude","NumberOfStories","YearBuilt","OccupancyClass","StructureType","PlanArea","ReplacementCost"]
- SAMkeys_properties = ["dampingRatio","K0","Sy","eta","C","gamma","alpha","beta","omega","eta_soft","a_k"]
- SAMkeys_nodes = ["mass"]
+ currentDir = os.getcwd() # noqa: PTH109, N806
+ newAimName = os.path.join(currentDir, os.path.basename(aimName)) # noqa: PTH118, PTH119, N806
- with open('params.in', 'r') as f:
- paramsStr = f.read()
- nAddParams =0
+ with open(newAimName, 'w', encoding='utf-8') as f: # noqa: FURB103, PTH123
+ json_object = json.dumps(root_AIM)
+ f.write(json_object)
+ #
+ # overwrite with default AIM.json file
+ #
+ s = [
+ os.path.dirname(__file__), # noqa: PTH120
+ '..',
+ '..',
+ 'Workflow',
+ 'WorkflowApplications.json',
+ ]
+ workflowAppJsonPath = os.path.join(*s) # noqa: PTH118, N806
+ with open(workflowAppJsonPath, encoding='utf-8') as f: # noqa: PTH123
+ workflowAppDict = json.load(f) # noqa: N806
+ appList = workflowAppDict['SimulationApplications']['Applications'] # noqa: N806
+ myApp = next(item for item in appList if item['Name'] == mySimAppName) # noqa: N806
+ s = [
+ os.path.dirname(__file__), # noqa: PTH120
+ '..',
+ '..',
+ '..',
+ os.path.dirname(myApp['ExecutablePath']), # noqa: PTH120
+ ]
+ mySimAppPath = os.path.join(*s) # noqa: PTH118, N806
+ mySimAppName = os.path.basename(myApp['ExecutablePath']) # noqa: PTH119, N806
- for key in GI:
- if key in GIkeys:
- val = GI[key]
- if not isinstance(val, str):
- paramsStr += "{} {}\n".format(key, val)
- else:
- paramsStr += "{} \"{}\"\n".format(key, val)
- nAddParams +=1
+ #
+ # run correct backend app
+ #
+ # print(newAimName)
+ sys.path.insert(0, mySimAppPath)
+ sim_module = importlib.__import__(
+ mySimAppName[:-3], globals(), locals(), ['main'], 0
+ )
- # For damping
- for key in SAM["Properties"]:
+ if getRV:
+ sim_module.main(
+ [
+ '--filenameAIM',
+ newAimName,
+ '--filenameSAM',
+ samName,
+ '--filenameEVENT',
+ evtName,
+ '--filenameEDP',
+ edpName,
+ '--filenameSIM',
+ simName,
+ '--getRV',
+ ]
+ )
+
+ else:
+ sim_module.main(
+ [
+ '--filenameAIM',
+ newAimName,
+ '--filenameSAM',
+ samName,
+ '--filenameEVENT',
+ evtName,
+ '--filenameEDP',
+ edpName,
+ '--filenameSIM',
+ simName,
+ ]
+ )
+
+
+def runSurrogate(modelName, GI, SAM, root_AIM, aimName, edpName): # noqa: C901, N802, N803, D103
+ #
+ # Augment to params.in file
+ #
+
+ GIkeys = [ # noqa: N806
+ 'Latitude',
+ 'Longitude',
+ 'NumberOfStories',
+ 'YearBuilt',
+ 'OccupancyClass',
+ 'StructureType',
+ 'PlanArea',
+ 'ReplacementCost',
+ ]
+ SAMkeys_properties = [ # noqa: N806
+ 'dampingRatio',
+ 'K0',
+ 'Sy',
+ 'eta',
+ 'C',
+ 'gamma',
+ 'alpha',
+ 'beta',
+ 'omega',
+ 'eta_soft',
+ 'a_k',
+ ]
+ SAMkeys_nodes = ['mass'] # noqa: N806
+
+ with open('params.in') as f: # noqa: FURB101, PLW1514, PTH123
+ paramsStr = f.read() # noqa: N806
+ nAddParams = 0 # noqa: N806
+
+ for key in GI:
+ if key in GIkeys:
+ val = GI[key]
+ if not isinstance(val, str):
+ paramsStr += f'{key} {val}\n' # noqa: N806
+ else:
+ paramsStr += f'{key} "{val}"\n' # noqa: N806
+ nAddParams += 1 # noqa: N806
+
+ # For damping
+ for key in SAM['Properties']:
+ if key in SAMkeys_properties:
+ val = SAM['Properties'][key]
+ if not isinstance(val, str):
+ paramsStr += f'{key} {val}\n' # noqa: N806
+ else:
+ paramsStr += f'{key} "{val}"\n' # noqa: N806
+ nAddParams += 1 # noqa: N806
+
+ # For material properties
+ for SAM_elem in SAM['Properties']['uniaxialMaterials']: # noqa: N806
+ for key in SAM_elem:
if key in SAMkeys_properties:
- val = SAM["Properties"][key]
+ val = SAM_elem[key]
+ if not isinstance(val, str):
+ paramsStr += '{}-{} {}\n'.format(key, SAM_elem['name'], val) # noqa: N806
+ else:
+ paramsStr += '{}-{} "{}"\n'.format(key, SAM_elem['name'], val) # noqa: N806
+ nAddParams += 1 # noqa: N806
+
+ # For mass
+ for SAM_node in SAM['Geometry']['nodes']: # noqa: N806
+ for key in SAM_node:
+ if key in SAMkeys_nodes:
+ val = SAM_node[key]
if not isinstance(val, str):
- paramsStr += "{} {}\n".format(key, val)
+ paramsStr += '{}-{} {}\n'.format(key, SAM_node['name'], val) # noqa: N806
else:
- paramsStr += "{} \"{}\"\n".format(key, val)
- nAddParams +=1
-
- # For material properties
- for SAM_elem in SAM["Properties"]["uniaxialMaterials"]:
- for key in SAM_elem:
- if key in SAMkeys_properties:
- val = SAM_elem[key]
- if not isinstance(val, str):
- paramsStr += "{}-{} {}\n".format(key, SAM_elem["name"],val)
- else:
- paramsStr += "{}-{} \"{}\"\n".format(key, SAM_elem["name"],val)
- nAddParams +=1
-
- # For mass
- for SAM_node in SAM["Geometry"]["nodes"]:
- for key in SAM_node:
- if key in SAMkeys_nodes:
- val = SAM_node[key]
- if not isinstance(val, str):
- paramsStr += "{}-{} {}\n".format(key, SAM_node["name"], val)
- else:
- paramsStr += "{}-{} \"{}\"\n".format(key, SAM_node["name"], val)
- nAddParams +=1
-
-
- stringList = paramsStr.split("\n")
- stringList.remove(stringList[0]) # remove # params (will be added later)
- stringList = set(stringList) # remove duplicates
- stringList = [i for i in stringList if i] # remove empty
- stringList = [str(len(stringList))]+stringList
- with open('params.in', 'w') as f:
- f.write("\n".join(stringList))
-
- f.close()
-
- #
- # get sur model info
- #
-
- surFileName = None
- for model in root_AIM['Simulation']['Models']:
- if model["modelName"] == modelName:
- surFileName = model["fileName"]
-
- if surFileName is None:
- print("surrogate model {} is not found".format(modelName))
- exit(-1)
-
- #
- # find surrogate model prediction app
- #
-
- s=[os.path.dirname( __file__ ),'..','..','Workflow','WorkflowApplications.json']
- workflowAppJsonPath = os.path.join(*s)
- with open(workflowAppJsonPath, 'r', encoding='utf-8') as f:
- workflowAppDict = json.load(f)
- appList = workflowAppDict["SimulationApplications"]["Applications"]
- simAppName = "SurrogateSimulation"
- myApp = next(item for item in appList if item["Name"] == simAppName)
- s = [os.path.dirname( __file__ ),'..','..','..', os.path.dirname(myApp["ExecutablePath"])]
- mySurrogatePath = os.path.join(*s)
- mySurrogateName = os.path.basename(myApp["ExecutablePath"])
-
- #
- # import surrogate functions
- #
-
- root_AIM['Applications']['Modeling']['ApplicationData']['MS_Path'] = ""
- root_AIM['Applications']['Modeling']['ApplicationData']['postprocessScript'] = ""
- root_AIM['Applications']['Modeling']['ApplicationData']['mainScript'] = r"..\\..\\..\\..\\input_data\\"+surFileName
-
- currentDir = os.getcwd()
- newAimName = os.path.join(currentDir,os.path.basename(aimName))
- with open(newAimName, 'w', encoding='utf-8') as f:
- json_object = json.dumps(root_AIM)
- f.write(json_object)
-
- sys.path.insert(0, mySurrogatePath)
- sur_module = importlib.__import__(mySurrogateName[:-3], globals(), locals(), ['run_surrogateGP','write_EDP'], 0)
-
-
- #
- # run prediction
- #
-
- sur_module.run_surrogateGP(newAimName, edpName)
-
- #
- # write EDP file
- #
-
- sur_module.write_EDP(newAimName, edpName)
+ paramsStr += '{}-{} "{}"\n'.format(key, SAM_node['name'], val) # noqa: N806
+ nAddParams += 1 # noqa: N806
- return
+ stringList = paramsStr.split('\n') # noqa: N806
+ stringList.remove(stringList[0]) # remove # params (will be added later)
+ stringList = set(stringList) # remove duplicates # noqa: N806
+ stringList = [i for i in stringList if i] # remove empty # noqa: N806
+ stringList = [str(len(stringList))] + stringList # noqa: N806, RUF005
+ with open('params.in', 'w') as f: # noqa: FURB103, PLW1514, PTH123
+ f.write('\n'.join(stringList))
+ f.close()
-if __name__ == '__main__':
+ #
+ # get sur model info
+ #
+
+ surFileName = None # noqa: N806
+ for model in root_AIM['Simulation']['Models']:
+ if model['modelName'] == modelName:
+ surFileName = model['fileName'] # noqa: N806
+
+ if surFileName is None:
+ print(f'surrogate model {modelName} is not found') # noqa: T201
+ exit(-1) # noqa: PLR1722
+
+ #
+ # find surrogate model prediction app
+ #
+
+ s = [
+ os.path.dirname(__file__), # noqa: PTH120
+ '..',
+ '..',
+ 'Workflow',
+ 'WorkflowApplications.json',
+ ]
+ workflowAppJsonPath = os.path.join(*s) # noqa: PTH118, N806
+ with open(workflowAppJsonPath, encoding='utf-8') as f: # noqa: PTH123
+ workflowAppDict = json.load(f) # noqa: N806
+ appList = workflowAppDict['SimulationApplications']['Applications'] # noqa: N806
+ simAppName = 'SurrogateSimulation' # noqa: N806
+ myApp = next(item for item in appList if item['Name'] == simAppName) # noqa: N806
+ s = [
+ os.path.dirname(__file__), # noqa: PTH120
+ '..',
+ '..',
+ '..',
+ os.path.dirname(myApp['ExecutablePath']), # noqa: PTH120
+ ]
+ mySurrogatePath = os.path.join(*s) # noqa: PTH118, N806
+ mySurrogateName = os.path.basename(myApp['ExecutablePath']) # noqa: PTH119, N806
+
+ #
+ # import surrogate functions
+ #
+ root_AIM['Applications']['Modeling']['ApplicationData']['MS_Path'] = ''
+ root_AIM['Applications']['Modeling']['ApplicationData']['postprocessScript'] = ''
+ root_AIM['Applications']['Modeling']['ApplicationData']['mainScript'] = (
+ r'..\\..\\..\\..\\input_data\\' + surFileName
+ )
+
+ currentDir = os.getcwd() # noqa: PTH109, N806
+ newAimName = os.path.join(currentDir, os.path.basename(aimName)) # noqa: PTH118, PTH119, N806
+ with open(newAimName, 'w', encoding='utf-8') as f: # noqa: FURB103, PTH123
+ json_object = json.dumps(root_AIM)
+ f.write(json_object)
+
+ sys.path.insert(0, mySurrogatePath)
+ sur_module = importlib.__import__(
+ mySurrogateName[:-3],
+ globals(),
+ locals(),
+ ['run_surrogateGP', 'write_EDP'],
+ 0,
+ )
+
+ #
+ # run prediction
+ #
+
+ sur_module.run_surrogateGP(newAimName, edpName)
+
+ #
+ # write EDP file
+ #
+
+ sur_module.write_EDP(newAimName, edpName)
+
+
+if __name__ == '__main__':
parser = argparse.ArgumentParser()
parser.add_argument('--filenameAIM')
parser.add_argument('--filenameEVENT')
parser.add_argument('--filenameSAM')
parser.add_argument('--filenameEDP')
parser.add_argument('--filenameSIM')
- #parser.add_argument('--defaultModule', default=None)
- #parser.add_argument('--fileName', default=None)
- #parser.add_argument('--filePath', default=None)
+ # parser.add_argument('--defaultModule', default=None)
+ # parser.add_argument('--fileName', default=None)
+ # parser.add_argument('--filePath', default=None)
parser.add_argument('--getRV', nargs='?', const=True, default=False)
args = parser.parse_args()
- sys.exit(main(
- args.filenameAIM, args.filenameSAM, args.filenameEVENT,
- args.filenameEDP, args.filenameSIM, args.getRV))
-
-
+ sys.exit(
+ main(
+ args.filenameAIM,
+ args.filenameSAM,
+ args.filenameEVENT,
+ args.filenameEDP,
+ args.filenameSIM,
+ args.getRV,
+ )
+ )
diff --git a/modules/performSIMULATION/surrogateSimulation/SurrogateSimulation.py b/modules/performSIMULATION/surrogateSimulation/SurrogateSimulation.py
index 987438353..1f670def2 100644
--- a/modules/performSIMULATION/surrogateSimulation/SurrogateSimulation.py
+++ b/modules/performSIMULATION/surrogateSimulation/SurrogateSimulation.py
@@ -1,5 +1,4 @@
-# -*- coding: utf-8 -*-
-#
+# # noqa: INP001, D100
# Copyright (c) 2018 Leland Stanford Junior University
# Copyright (c) 2018 The Regents of the University of California
#
@@ -39,194 +38,200 @@
# Joanna J. Zou
#
-import os, sys
-import argparse, json
-import importlib
+import argparse
+import json
+import os
import sys
-import numpy as np
-from pathlib import Path
+import numpy as np
-#from simcenter_common import *
+# from simcenter_common import *
-convert_EDP = {
- 'max_abs_acceleration' : 'PFA',
- 'max_rel_disp' : 'PFD',
- 'max_drift' : 'PID',
+convert_EDP = { # noqa: N816
+ 'max_abs_acceleration': 'PFA',
+ 'max_rel_disp': 'PFD',
+ 'max_drift': 'PID',
'max_roof_drift': 'PRD',
'residual_drift': 'RID',
- 'residual_disp': 'RFD'
+ 'residual_disp': 'RFD',
}
-def run_surrogateGP(AIM_input_path, EDP_input_path):
+def run_surrogateGP(AIM_input_path, EDP_input_path): # noqa: ARG001, N802, N803, D103
# these imports are here to save time when the app is called without
# the -getRV flag
- #import openseespy.opensees as ops
+ # import openseespy.opensees as ops
- with open(AIM_input_path, 'r', encoding='utf-8') as f:
- root_AIM = json.load(f)
- #root_GI = root_AIM['GeneralInformation']
+ with open(AIM_input_path, encoding='utf-8') as f: # noqa: PTH123
+ root_AIM = json.load(f) # noqa: N806
+ # root_GI = root_AIM['GeneralInformation']
- root_SAM = root_AIM['Applications']['Modeling']
+ root_SAM = root_AIM['Applications']['Modeling'] # noqa: N806
- surrogate_path = os.path.join(root_SAM['ApplicationData']['MS_Path'],root_SAM['ApplicationData']['mainScript'])
+ surrogate_path = os.path.join( # noqa: PTH118, F841
+ root_SAM['ApplicationData']['MS_Path'],
+ root_SAM['ApplicationData']['mainScript'],
+ )
# with open(surrogate_path, 'r') as f:
# surrogate_model = json.load(f)
-
#
- # Let's call GPdriver creater
+ # Let's call the GPdriver creator
#
- pythonEXE = sys.executable
-
- surrogatePredictionPath = os.path.join(os.path.dirname(os.path.dirname(os.path.dirname(os.path.abspath(__file__)))),
- 'performFEM', 'surrogateGP', 'gpPredict.py')
-
-
- curpath = os.getcwd()
- params_name = os.path.join(curpath,"params.in")
- surrogate_name = os.path.join(curpath,root_SAM['ApplicationData']['postprocessScript']) # pickl
- surrogate_meta_name = os.path.join(curpath,root_SAM['ApplicationData']['mainScript']) # json
+ pythonEXE = sys.executable # noqa: N806
+
+ surrogatePredictionPath = os.path.join( # noqa: PTH118, N806
+ os.path.dirname(os.path.dirname(os.path.dirname(os.path.abspath(__file__)))), # noqa: PTH100, PTH120
+ 'performFEM',
+ 'surrogateGP',
+ 'gpPredict.py',
+ )
+
+ curpath = os.getcwd() # noqa: PTH109
+ params_name = os.path.join(curpath, 'params.in') # noqa: PTH118
+ surrogate_name = os.path.join( # noqa: PTH118
+ curpath, root_SAM['ApplicationData']['postprocessScript']
+ ) # pickle
+ surrogate_meta_name = os.path.join( # noqa: PTH118
+ curpath, root_SAM['ApplicationData']['mainScript']
+ ) # json
# compute IMs
# print(f"{pythonEXE} {surrogatePredictionPath} {params_name} {surrogate_meta_name} {surrogate_name}")
- os.system(f"{pythonEXE} {surrogatePredictionPath} {params_name} {surrogate_meta_name} {surrogate_name}")
+ os.system( # noqa: S605
+ f'{pythonEXE} {surrogatePredictionPath} {params_name} {surrogate_meta_name} {surrogate_name}'
+ )
#
# check if the correct workflow applications are selected
#
- if (root_AIM["Applications"]["Modeling"]["Application"] != "SurrogateGPBuildingModel") and (root_AIM["Applications"]["Simulation"]["Application"] != "SurrogateRegionalPy"):
- with open("../workflow.err","w") as f:
- f.write("Do not select [None] in the FEM tab. [None] is used only when using pre-trained surrogate, i.e. when [Surrogate] is selected in the SIM Tab.")
- exit(-1)
-
-
-def write_EDP(AIM_input_path,EDP_input_path, newEDP_input_path=None):
-
- with open(AIM_input_path, 'r', encoding='utf-8') as f:
- root_AIM = json.load(f)
-
- if newEDP_input_path ==None:
- newEDP_input_path = EDP_input_path
-
- root_SAM = root_AIM['Applications']['Modeling']
- curpath = os.getcwd()
- #surrogate_path = os.path.join(root_SAM['ApplicationData']['MS_Path'],root_SAM['ApplicationData']['mainScript'])
- surrogate_path = os.path.join(curpath,root_SAM['ApplicationData']['mainScript'])
-
- with open(surrogate_path, 'r', encoding='utf-8') as f:
+ if (
+ root_AIM['Applications']['Modeling']['Application']
+ != 'SurrogateGPBuildingModel'
+ ) and (
+ root_AIM['Applications']['Simulation']['Application']
+ != 'SurrogateRegionalPy'
+ ):
+ with open('../workflow.err', 'w') as f: # noqa: FURB103, PLW1514, PTH123
+ f.write(
+ 'Do not select [None] in the FEM tab. [None] is used only when using a pre-trained surrogate, i.e. when [Surrogate] is selected in the SIM tab.'
+ )
+ exit(-1) # noqa: PLR1722
+
+
+def write_EDP(AIM_input_path, EDP_input_path, newEDP_input_path=None): # noqa: C901, N802, N803, D103
+ with open(AIM_input_path, encoding='utf-8') as f: # noqa: PTH123
+ root_AIM = json.load(f) # noqa: N806
+
+ if newEDP_input_path == None: # noqa: E711
+ newEDP_input_path = EDP_input_path # noqa: N806
+
+ root_SAM = root_AIM['Applications']['Modeling'] # noqa: N806
+ curpath = os.getcwd() # noqa: PTH109
+ # surrogate_path = os.path.join(root_SAM['ApplicationData']['MS_Path'],root_SAM['ApplicationData']['mainScript'])
+ surrogate_path = os.path.join(curpath, root_SAM['ApplicationData']['mainScript']) # noqa: PTH118
+
+ with open(surrogate_path, encoding='utf-8') as f: # noqa: PTH123
surrogate_model = json.load(f)
#
# EDP names and values to be mapped
#
- edp_names = surrogate_model["ylabels"]
-
- if not os.path.isfile('results.out'):
+ edp_names = surrogate_model['ylabels']
+
+ if not os.path.isfile('results.out'): # noqa: PTH113
# not found
- print("Skiping surrogateEDP - results.out does not exist in " + os.getcwd())
- exit(-1)
- elif os.stat('results.out').st_size == 0:
+ print('Skipping surrogateEDP - results.out does not exist in ' + os.getcwd()) # noqa: T201, PTH109
+ exit(-1) # noqa: PLR1722
+ elif os.stat('results.out').st_size == 0: # noqa: PTH116
# found but empty
- print("Skiping surrogateEDP - results.out is empty in " + os.getcwd())
- exit(-1)
-
+ print('Skipping surrogateEDP - results.out is empty in ' + os.getcwd()) # noqa: T201, PTH109
+ exit(-1) # noqa: PLR1722
edp_vals = np.loadtxt('results.out').tolist()
-
#
- # Read EDP file, mapping between EDPnames and EDP.json and write scalar_data
+ # Read the EDP file, map EDP names to entries in EDP.json, and write scalar_data
#
- with open(EDP_input_path, 'r', encoding='utf-8') as f:
- rootEDP = json.load(f)
-
-
- numEvents = len(rootEDP['EngineeringDemandParameters'])
- numResponses = rootEDP["total_number_edp"];
-
-
- i = 0 # current event id
- event=rootEDP['EngineeringDemandParameters'][i]
- eventEDPs = event['responses'];
+ with open(EDP_input_path, encoding='utf-8') as f: # noqa: PTH123
+ rootEDP = json.load(f) # noqa: N806
+ numEvents = len(rootEDP['EngineeringDemandParameters']) # noqa: N806, F841
+ numResponses = rootEDP['total_number_edp'] # noqa: N806, F841
+ i = 0 # current event id
+ event = rootEDP['EngineeringDemandParameters'][i]
+ eventEDPs = event['responses'] # noqa: N806
for j in range(len(eventEDPs)):
- eventEDP = eventEDPs[j]
- eventType = eventEDP["type"];
-
- known = False;
- if (eventType == "max_abs_acceleration"):
- edpAcronym = "PFA";
- floor = eventEDP["floor"];
- known = True;
- elif (eventType == "max_drift"):
- edpAcronym = "PID";
- floor = eventEDP["floor2"];
- known = True;
- elif (eventType == "max_roof_drift"):
- edpAcronym = "PRD";
- floor = "1";
- known = True;
- elif (eventType == "residual_disp"):
- edpAcronym = "RD";
- floor = eventEDP["floor"];
- known = True;
- elif (eventType == "max_pressure"):
- edpAcronym = "PSP";
- floor = eventEDP["floor2"];
- known = True;
- elif (eventType == "max_rel_disp"):
- edpAcronym = "PFD";
- floor = eventEDP["floor"];
- known = True;
- elif (eventType == "peak_wind_gust_speed"):
- edpAcronym = "PWS";
- floor = eventEDP["floor"];
- known = True;
- else :
- edpList = [eventType];
+ eventEDP = eventEDPs[j] # noqa: N806
+ eventType = eventEDP['type'] # noqa: N806
+ known = False
+ if eventType == 'max_abs_acceleration':
+ edpAcronym = 'PFA' # noqa: N806
+ floor = eventEDP['floor']
+ known = True
+ elif eventType == 'max_drift':
+ edpAcronym = 'PID' # noqa: N806
+ floor = eventEDP['floor2']
+ known = True
+ elif eventType == 'max_roof_drift':
+ edpAcronym = 'PRD' # noqa: N806
+ floor = '1'
+ known = True
+ elif eventType == 'residual_disp':
+ edpAcronym = 'RD' # noqa: N806
+ floor = eventEDP['floor']
+ known = True
+ elif eventType == 'max_pressure':
+ edpAcronym = 'PSP' # noqa: N806
+ floor = eventEDP['floor2']
+ known = True
+ elif eventType == 'max_rel_disp':
+ edpAcronym = 'PFD' # noqa: N806
+ floor = eventEDP['floor']
+ known = True
+ elif eventType == 'peak_wind_gust_speed':
+ edpAcronym = 'PWS' # noqa: N806
+ floor = eventEDP['floor']
+ known = True
+ else:
+ edpList = [eventType] # noqa: N806
if known:
- dofs = eventEDP["dofs"];
-
+ dofs = eventEDP['dofs']
scalar_data = []
for dof in dofs:
- my_edp_name = '1-' + edpAcronym + '-' + floor + '-' + str(dof);
-
+ my_edp_name = '1-' + edpAcronym + '-' + floor + '-' + str(dof)
idscalar = edp_names.index(my_edp_name)
scalar_data += [edp_vals[idscalar]]
- edpList = [my_edp_name];
+ edpList = [my_edp_name] # noqa: N806, F841
- eventEDPs[j]["scalar_data"] = scalar_data
+ eventEDPs[j]['scalar_data'] = scalar_data
- rootEDP['EngineeringDemandParameters'][0].pop('name','') # Remove EQ name if exists because it is confusing
- rootEDP['EngineeringDemandParameters'][0]["responses"] = eventEDPs
+ rootEDP['EngineeringDemandParameters'][0].pop(
+ 'name', ''
+ ) # Remove EQ name if it exists because it is confusing
+ rootEDP['EngineeringDemandParameters'][0]['responses'] = eventEDPs
-
- with open(newEDP_input_path, 'w', encoding='utf-8') as f:
+ with open(newEDP_input_path, 'w', encoding='utf-8') as f: # noqa: PTH123
json.dump(rootEDP, f, indent=2)
-
if __name__ == '__main__':
-
parser = argparse.ArgumentParser()
- parser.add_argument('--filenameAIM',
- default=None)
+ parser.add_argument('--filenameAIM', default=None)
parser.add_argument('--filenameSAM')
- parser.add_argument('--filenameEVENT') # not used
- parser.add_argument('--filenameEDP',default=None)
- parser.add_argument('--filenameSIM',default=None) # not used
- parser.add_argument('--getRV',default=False,nargs='?', const=True)
+ parser.add_argument('--filenameEVENT') # not used
+ parser.add_argument('--filenameEDP', default=None)
+ parser.add_argument('--filenameSIM', default=None) # not used
+ parser.add_argument('--getRV', default=False, nargs='?', const=True)
args = parser.parse_args()
if not args.getRV:
- run_surrogateGP(args.filenameAIM,args.filenameEDP)
+ run_surrogateGP(args.filenameAIM, args.filenameEDP)
write_EDP(args.filenameAIM, args.filenameEDP)
diff --git a/modules/performUQ/SimCenterUQ/PLoM/PLoM.py b/modules/performUQ/SimCenterUQ/PLoM/PLoM.py
index cfed90d09..ed6a60e11 100644
--- a/modules/performUQ/SimCenterUQ/PLoM/PLoM.py
+++ b/modules/performUQ/SimCenterUQ/PLoM/PLoM.py
@@ -1,33 +1,50 @@
-# -*- coding: utf-8 -*-
-#JGA
+# JGA # noqa: CPY001, D100, N999
+import importlib
import os
+import sys
+
+# import matplotlib.pyplot as plt
+# export DISPLAY=localhost:0.0
+from ctypes import * # noqa: F403
+from pathlib import Path
+
import numpy as np
import pandas as pd
-import random
-from math import pi, sqrt
import PLoM_library as plom
-#import matplotlib.pyplot as plt
-import warnings
-#export DISPLAY=localhost:0.0
-from ctypes import *
-import importlib
-from pathlib import Path
-import sys
-from general import *
-
-class PLoM:
- def __init__(self, model_name='plom', data='', separator=',', col_header=False, constraints = None, run_tag = False, plot_tag = False, num_rlz = 5, tol_pca = 1e-6, epsilon_kde = 25, tol_PCA2 = 1e-5, tol = 1e-6, max_iter = 50, runDiffMaps = True, db_path=None):
+from general import * # noqa: F403
+
+
+class PLoM: # noqa: D101
+ def __init__(
+ self,
+ model_name='plom',
+ data='',
+ separator=',',
+ col_header=False, # noqa: FBT002
+ constraints=None,
+ run_tag=False, # noqa: FBT002
+ plot_tag=False, # noqa: FBT002
+ num_rlz=5,
+ tol_pca=1e-6,
+ epsilon_kde=25,
+ tol_PCA2=1e-5, # noqa: N803
+ tol=1e-6,
+ max_iter=50,
+ runDiffMaps=True, # noqa: FBT002, N803
+ db_path=None,
+ ):
# basic setups
self._basic_config(model_name=model_name, db_path=db_path)
self.plot_tag = plot_tag
# initialize constraints
self.constraints = {}
self.num_constraints = 0
- #
self.runDiffMaps = runDiffMaps
# initialize input data
if self.initialize_data(data, separator, col_header):
- self.logfile.write_msg(msg='PLoM: data loading failed.',msg_type='ERROR',msg_level=0)
+ self.logfile.write_msg(
+ msg='PLoM: data loading failed.', msg_type='ERROR', msg_level=0
+ )
else:
"""
# plot data matrix
@@ -39,57 +56,109 @@ def __init__(self, model_name='plom', data='', separator=',', col_header=False,
plt.savefig(os.path.join(self.vl_path,'ScatterMatrix_X0.png'),dpi=480)
self.logfile.write_msg(msg='PLoM: {} saved in {}.'.format('ScatterMatrix_X0.png',self.vl_path),msg_type='RUNNING',msg_level=0)
"""
- if not self.constraints:
+ if not self.constraints:
if self.add_constraints(constraints_file=constraints):
- self.logfile.write_msg(msg='PLoM: constraints input failed.',msg_type='ERROR',msg_level=0)
+ self.logfile.write_msg(
+ msg='PLoM: constraints input failed.',
+ msg_type='ERROR',
+ msg_level=0,
+ )
# run
if run_tag:
- self.logfile.write_msg(msg='PLoM: Running all steps to generate new samples.',msg_type='RUNNING',msg_level=0)
+ self.logfile.write_msg(
+ msg='PLoM: Running all steps to generate new samples.',
+ msg_type='RUNNING',
+ msg_level=0,
+ )
self.ConfigTasks()
- self.RunAlgorithm(n_mc = num_rlz, epsilon_pca = tol_pca, epsilon_kde = epsilon_kde, tol_PCA2 = tol_PCA2, tol = tol, max_iter = max_iter, plot_tag = plot_tag, runDiffMaps = self.runDiffMaps)
+ self.RunAlgorithm(
+ n_mc=num_rlz,
+ epsilon_pca=tol_pca,
+ epsilon_kde=epsilon_kde,
+ tol_PCA2=tol_PCA2,
+ tol=tol,
+ max_iter=max_iter,
+ plot_tag=plot_tag,
+ runDiffMaps=self.runDiffMaps,
+ )
else:
- self.logfile.write_msg(msg='PLoM: using ConfigTasks(task_list = FULL_TASK_LIST) to schedule a run.',msg_type='RUNNING',msg_level=0)
- self.logfile.write_msg(msg='PLoM: using RunAlgorithm(n_mc=n_mc,epsilon_pca=epsilon_pca,epsilon_kde) to run simulations.',msg_type='RUNNING',msg_level=0)
+ self.logfile.write_msg(
+ msg='PLoM: using ConfigTasks(task_list = FULL_TASK_LIST) to schedule a run.',
+ msg_type='RUNNING',
+ msg_level=0,
+ )
+ self.logfile.write_msg(
+ msg='PLoM: using RunAlgorithm(n_mc=n_mc,epsilon_pca=epsilon_pca,epsilon_kde) to run simulations.',
+ msg_type='RUNNING',
+ msg_level=0,
+ )
-
def _basic_config(self, model_name=None, db_path=None):
- """
- Basic setups
+ """Basic setups
- model_name: job name (used for database name)
- """
+ """ # noqa: D205, D400, D401
if not db_path:
- self.dir_log = os.path.join(os.path.dirname(os.path.abspath(__file__)),'RunDir')
- self.dir_run = os.path.join(os.path.dirname(os.path.abspath(__file__)),'RunDir',model_name)
+ self.dir_log = os.path.join( # noqa: PTH118
+ os.path.dirname(os.path.abspath(__file__)), # noqa: PTH100, PTH120
+ 'RunDir',
+ )
+ self.dir_run = os.path.join( # noqa: PTH118
+ os.path.dirname(os.path.abspath(__file__)), # noqa: PTH100, PTH120
+ 'RunDir',
+ model_name,
+ )
else:
self.dir_log = db_path
- self.dir_run = os.path.join(db_path, model_name)
+ self.dir_run = os.path.join(db_path, model_name) # noqa: PTH118
# initialize logfile
try:
- os.makedirs(self.dir_run, exist_ok=True)
- self.logfile = Logfile(logfile_dir = self.dir_log)
- self.logfile.write_msg(msg='PLoM: Running directory {} initialized.'.format(self.dir_run),msg_type='RUNNING',msg_level=0)
- except:
- self.logfile.write_msg(msg='PLoM: Running directory {} cannot be initialized.'.format(self.dir_run),msg_type='ERROR',msg_level=0)
+ os.makedirs(self.dir_run, exist_ok=True) # noqa: PTH103
+ self.logfile = Logfile(logfile_dir=self.dir_log) # noqa: F405
+ self.logfile.write_msg(
+ msg=f'PLoM: Running directory {self.dir_run} initialized.',
+ msg_type='RUNNING',
+ msg_level=0,
+ )
+ except: # noqa: E722
+ self.logfile.write_msg(
+ msg=f'PLoM: Running directory {self.dir_run} cannot be initialized.',
+ msg_type='ERROR',
+ msg_level=0,
+ )
# initialize database server
self.dbserver = None
- self.dbserver = DBServer(db_dir = self.dir_run, db_name=model_name+'.h5')
+ self.dbserver = DBServer(db_dir=self.dir_run, db_name=model_name + '.h5') # noqa: F405
try:
- self.dbserver = DBServer(db_dir = self.dir_run, db_name=model_name+'.h5')
- except:
- self.logfile.write_msg(msg='PLoM: database server initialization failed.',msg_type='ERROR',msg_level=0)
+ self.dbserver = DBServer(db_dir=self.dir_run, db_name=model_name + '.h5') # noqa: F405
+ except: # noqa: E722
+ self.logfile.write_msg(
+ msg='PLoM: database server initialization failed.',
+ msg_type='ERROR',
+ msg_level=0,
+ )
if self.dbserver:
- self.logfile.write_msg(msg='PLoM: database server initialized.',msg_type='RUNNING',msg_level=0)
+ self.logfile.write_msg(
+ msg='PLoM: database server initialized.',
+ msg_type='RUNNING',
+ msg_level=0,
+ )
# initialize visualization output path
- self.vl_path = os.path.join(self.dir_run,'FigOut')
+ self.vl_path = os.path.join(self.dir_run, 'FigOut') # noqa: PTH118
try:
- os.makedirs(self.vl_path, exist_ok=True)
- self.logfile.write_msg(msg='PLoM: visualization folder {} initialized.'.format(self.vl_path),msg_type='RUNNING',msg_level=0)
- except:
- self.logfile.write_msg(msg='PLoM: visualization folder {} not initialized.'.format(self.vl_path),msg_type='WARNING',msg_level=0)
-
-
- def add_constraints(self, constraints_file = None):
-
+ os.makedirs(self.vl_path, exist_ok=True) # noqa: PTH103
+ self.logfile.write_msg(
+ msg=f'PLoM: visualization folder {self.vl_path} initialized.',
+ msg_type='RUNNING',
+ msg_level=0,
+ )
+ except: # noqa: E722
+ self.logfile.write_msg(
+ msg=f'PLoM: visualization folder {self.vl_path} not initialized.',
+ msg_type='WARNING',
+ msg_level=0,
+ )
+
+ def add_constraints(self, constraints_file=None): # noqa: D102
if not constraints_file:
self.g_c = None
self.D_x_g_c = None
@@ -97,85 +166,126 @@ def add_constraints(self, constraints_file = None):
self.beta_c_aux = None
self.lambda_i = 0
self.psi = 0
- self.logfile.write_msg(msg='PLoM.add_constraints: no user-defined constraint - please use add_constraints(constraints_file=X) to add new constraints if any.',msg_type='WARNING',msg_level=0)
+ self.logfile.write_msg(
+ msg='PLoM.add_constraints: no user-defined constraint - please use add_constraints(constraints_file=X) to add new constraints if any.',
+ msg_type='WARNING',
+ msg_level=0,
+ )
return 0
try:
# path
path_constraints = Path(constraints_file).resolve()
- sys.path.insert(0, str(path_constraints.parent)+'/')
+ sys.path.insert(0, str(path_constraints.parent) + '/')
# load the function
- new_constraints = importlib.__import__(path_constraints.name[:-3], globals(), locals(), [], 0)
- except:
- self.logfile.write_msg(msg='PLoM.add_constraints: could not add constraints {}'.format(constraints_file),msg_type='ERROR',msg_level=0)
+ new_constraints = importlib.__import__(
+ path_constraints.name[:-3], globals(), locals(), [], 0
+ )
+ except: # noqa: E722
+ self.logfile.write_msg(
+ msg=f'PLoM.add_constraints: could not add constraints {constraints_file}',
+ msg_type='ERROR',
+ msg_level=0,
+ )
return 1
- self.num_constraints = self.num_constraints+1
+ self.num_constraints = self.num_constraints + 1 # noqa: PLR6104
try:
- self.constraints.update({
- 'Constraint'+str(self.num_constraints): {
- 'filename': constraints_file,
- 'g_c': new_constraints.g_c,
- 'D_x_g_c': new_constraints.D_x_g_c,
- 'beta_c': new_constraints.beta_c(),
- 'beta_c_aux': new_constraints.beta_c_aux
+ self.constraints.update(
+ {
+ 'Constraint' + str(self.num_constraints): {
+ 'filename': constraints_file,
+ 'g_c': new_constraints.g_c,
+ 'D_x_g_c': new_constraints.D_x_g_c,
+ 'beta_c': new_constraints.beta_c(),
+ 'beta_c_aux': new_constraints.beta_c_aux,
+ }
}
- })
+ )
self.g_c = new_constraints.g_c
self.D_x_g_c = new_constraints.D_x_g_c
self.beta_c = new_constraints.beta_c()
self.beta_c_aux = new_constraints.beta_c_aux
- self.logfile.write_msg(msg='PLoM.add_constraints: constraints added.',msg_type='RUNNING',msg_level=0)
- self.dbserver.add_item(item=[constraints_file],data_type='ConstraintsFile')
- except:
- self.logfile.write_msg(msg='PLoM.add_constraints: at least one attribute (i.e., g_c, D_x_gc, beta_c, or beta_c_aux) missing in {}'.format(constraints_file),msg_type='ERROR',msg_level=0)
+ self.logfile.write_msg(
+ msg='PLoM.add_constraints: constraints added.',
+ msg_type='RUNNING',
+ msg_level=0,
+ )
+ self.dbserver.add_item(
+ item=[constraints_file], data_type='ConstraintsFile'
+ )
+ except: # noqa: E722
+ self.logfile.write_msg(
+ msg=f'PLoM.add_constraints: at least one attribute (i.e., g_c, D_x_gc, beta_c, or beta_c_aux) missing in {constraints_file}',
+ msg_type='ERROR',
+ msg_level=0,
+ )
return 1
return 0
-
- def switch_constraints(self, constraint_tag = 1):
- """
- Selecting different constraints
+ def switch_constraints(self, constraint_tag=1):
+ """Selecting different constraints
- constraint_tag: the tag of selected constraint
- """
-
+ """ # noqa: D205, D400, D401
if constraint_tag > self.num_constraints:
- self.logfile.write_msg(msg='PLoM.switch_constraints: sorry the maximum constraint tag is {}'.format(self.num_constraints),msg_type='ERROR',msg_level=0)
+ self.logfile.write_msg(
+ msg=f'PLoM.switch_constraints: sorry the maximum constraint tag is {self.num_constraints}',
+ msg_type='ERROR',
+ msg_level=0,
+ )
try:
- self.g_c = self.constraints.get('Constraint'+str(constraint_tag)).get('g_c')
- self.D_x_g_c = self.constraints.get('Constraint'+str(constraint_tag)).get('D_x_g_c')
- self.beta_c = self.constraints.get('Constraint'+str(constraint_tag)).get('beta_c')
- self.beta_c_aux = self.constraints.get('Constraint'+str(constraint_tag)).get('beta_c_aux')
- self.dbserver.add_item(item=[self.constraints.get('Constraint'+str(constraint_tag)).get('filename')],data_type='ConstraintsFile')
- except:
- self.logfile.write_msg(msg='PLoM.get_constraints: cannot get constraints',msg_type='ERROR',msg_level=0)
-
+ self.g_c = self.constraints.get('Constraint' + str(constraint_tag)).get(
+ 'g_c'
+ )
+ self.D_x_g_c = self.constraints.get(
+ 'Constraint' + str(constraint_tag)
+ ).get('D_x_g_c')
+ self.beta_c = self.constraints.get(
+ 'Constraint' + str(constraint_tag)
+ ).get('beta_c')
+ self.beta_c_aux = self.constraints.get(
+ 'Constraint' + str(constraint_tag)
+ ).get('beta_c_aux')
+ self.dbserver.add_item(
+ item=[
+ self.constraints.get('Constraint' + str(constraint_tag)).get(
+ 'filename'
+ )
+ ],
+ data_type='ConstraintsFile',
+ )
+ except: # noqa: E722
+ self.logfile.write_msg(
+ msg='PLoM.get_constraints: cannot get constraints',
+ msg_type='ERROR',
+ msg_level=0,
+ )
def delete_constraints(self):
- """
- Removing all current constraints
- """
-
+ """Removing all current constraints""" # noqa: D400, D401
self.g_c = None
self.D_x_g_c = None
self.beta_c = []
- self.dbserver.add_item(item=[''],data_type='ConstraintsFile')
-
-
- def load_data(self, filename, separator=',', col_header=False):
+ self.dbserver.add_item(item=[''], data_type='ConstraintsFile')
+ def load_data(self, filename, separator=',', col_header=False): # noqa: FBT002, C901, D102
# initialize the matrix and data size
- X = []
- N = 0
+ X = [] # noqa: N806
+ N = 0 # noqa: N806
n = 0
# check if the file exist
- import os
- if not os.path.exists(filename):
- self.logfile.write_msg(msg='load_data: the input file {} is not found'.format(filename),msg_type='ERROR',msg_level=0)
+ import os # noqa: PLC0415
+
+ if not os.path.exists(filename): # noqa: PTH110
+ self.logfile.write_msg(
+ msg=f'load_data: the input file {filename} is not found',
+ msg_type='ERROR',
+ msg_level=0,
+ )
return X, N, n
# read data
- if os.path.splitext(filename)[-1] in ['.csv','.dat','.txt']:
+ if os.path.splitext(filename)[-1] in ['.csv', '.dat', '.txt']: # noqa: PLR6201, PTH122
# txt data
col = None
if col_header:
@@ -183,276 +293,407 @@ def load_data(self, filename, separator=',', col_header=False):
self.X0 = pd.read_table(filename, delimiter=separator, header=col)
# remove all-nan column if any
for cur_col in self.X0.columns:
- if all(np.isnan(self.X0.loc[:,cur_col])):
+ if all(np.isnan(self.X0.loc[:, cur_col])):
self.X0.drop(columns=cur_col)
- X = self.X0.to_numpy()
+ X = self.X0.to_numpy() # noqa: N806
- elif os.path.splitext(filename)[-1] in ['.mat', '.json']:
+ elif os.path.splitext(filename)[-1] in ['.mat', '.json']: # noqa: PLR6201, PTH122
# json or mat
- if os.path.splitext(filename)[-1] == '.mat':
- import scipy.io as scio
+ if os.path.splitext(filename)[-1] == '.mat': # noqa: PTH122
+ import scipy.io as scio # noqa: PLC0415
+
matdata = scio.loadmat(filename)
- var_names = [x for x in list(matdata.keys()) if not x.startswith('__')]
+ var_names = [
+ x for x in list(matdata.keys()) if not x.startswith('__')
+ ]
if len(var_names) == 1:
# single matrix
- X = matdata[var_names[0]]
- self.X0 = pd.DataFrame(X, columns=['Var'+str(x) for x in X.shape[1]])
+ X = matdata[var_names[0]] # noqa: N806
+ self.X0 = pd.DataFrame(
+ X, columns=['Var' + str(x) for x in range(X.shape[1])]
+ )
else:
n = len(var_names)
# multiple columns
for cur_var in var_names:
X.append(matdata[cur_var].tolist())
- X = np.array(X).T
- X = X[0,:,:]
+ X = np.array(X).T # noqa: N806
+ X = X[0, :, :] # noqa: N806
self.X0 = pd.DataFrame(X, columns=var_names)
else:
- import json
- with open(filename, 'r', encoding='utf-8') as f:
+ import json # noqa: PLC0415
+
+ with open(filename, encoding='utf-8') as f: # noqa: PTH123
jsondata = json.load(f)
var_names = list(jsondata.keys())
# multiple columns
for cur_var in var_names:
X.append(jsondata[cur_var])
- X = np.array(X).T
+ X = np.array(X).T # noqa: N806
self.X0 = pd.DataFrame(X, columns=var_names)
-
- elif os.path.splitext(filename)[-1] in ['.h5']:
+
+ elif os.path.splitext(filename)[-1] == '.h5': # noqa: PTH122
# this h5 can be either formatted by PLoM or not
# a separate method to deal with this file
- X = self.load_h5(filename)
+ X = self.load_h5(filename) # noqa: N806
else:
- self.logfile.write_msg(msg='PLoM.load_data: the file format is not supported yet.',msg_type='ERROR',msg_level=0)
- self.logfile.write_msg(msg='PLoM.load_data: accepted data formats - csv, dat, txt, mat, json.',msg_type='WARNING',msg_level=0)
+ self.logfile.write_msg(
+ msg='PLoM.load_data: the file format is not supported yet.',
+ msg_type='ERROR',
+ msg_level=0,
+ )
+ self.logfile.write_msg(
+ msg='PLoM.load_data: accepted data formats - csv, dat, txt, mat, json.',
+ msg_type='WARNING',
+ msg_level=0,
+ )
# Update data sizes
- N, n = X.shape
- self.logfile.write_msg(msg='PLoM.load_data: loaded data size = ({}, {}).'.format(N,n),msg_type='RUNNING',msg_level=0)
+ N, n = X.shape # noqa: N806
+ self.logfile.write_msg(
+ msg=f'PLoM.load_data: loaded data size = ({N}, {n}).',
+ msg_type='RUNNING',
+ msg_level=0,
+ )
# Return data and data sizes
return X.T, N, n
- #def check_var_name():
-
-
- def get_data(self):
+ # def check_var_name():
+ def get_data(self): # noqa: D102
# return data and data sizes
return self.X, self.N, self.n
-
def _load_h5_plom(self, filename):
- """
- Loading PLoM-formatted h5 database
- """
+ """Loading PLoM-formatted h5 database""" # noqa: D400, D401
try:
store = pd.HDFStore(filename, 'r')
- for cur_var in store.keys():
- if cur_var in self.dbserver.get_item_adds() and ATTR_MAP[cur_var]:
+ for cur_var in store.keys(): # noqa: SIM118
+ if cur_var in self.dbserver.get_item_adds() and ATTR_MAP[cur_var]: # noqa: F405
# read in
cur_data = store[cur_var]
- cur_dshape = tuple([x[0] for x in store['/DS_'+cur_var[1:]].values.tolist()])
- if cur_dshape==(1,):
- item_value = np.array(sum(cur_data.values.tolist(),[]))
- col_headers = list(cur_data.columns)[0]
+ cur_dshape = tuple(
+ [x[0] for x in store['/DS_' + cur_var[1:]].values.tolist()] # noqa: PD011
+ )
+ if cur_dshape == (1,):
+ item_value = np.array(sum(cur_data.values.tolist(), [])) # noqa: PD011, RUF017
+ col_headers = list(cur_data.columns)[0] # noqa: RUF015
else:
- item_value = cur_data.values
+ item_value = cur_data.values # noqa: PD011
col_headers = list(cur_data.columns)
- self.dbserver.add_item(item_name=cur_var.replace('/',''),col_names=col_headers,item=item_value,data_shape=cur_dshape)
+ self.dbserver.add_item(
+ item_name=cur_var.replace('/', ''),
+ col_names=col_headers,
+ item=item_value,
+ data_shape=cur_dshape,
+ )
# constraints
if cur_var == '/constraints_file':
cur_data = store[cur_var]
- self.dbserver.add_item(item=cur_data.values.tolist()[0],data_type='ConstraintsFile')
+ self.dbserver.add_item(
+ item=cur_data.values.tolist()[0], # noqa: PD011
+ data_type='ConstraintsFile',
+ )
store.close()
- except:
- self.logfile.write_msg(msg='PLoM._load_h5_plom: data in {} not compatible.'.format(filename),msg_type='ERROR',msg_level=0)
-
-
+ except: # noqa: E722
+ self.logfile.write_msg(
+ msg=f'PLoM._load_h5_plom: data in {filename} not compatible.',
+ msg_type='ERROR',
+ msg_level=0,
+ )
- def _load_h5_data_X(self, filename):
- """
- Loading a h5 data which is expected to contain X data
- """
+ def _load_h5_data_X(self, filename): # noqa: N802
+ """Loading a h5 data which is expected to contain X data""" # noqa: D400, D401
try:
store = pd.HDFStore(filename, 'r')
# Note a table is expected for the variable
self.X0 = store.get(store.keys()[0])
store.close()
- self.dbserver.add_item(item_name='X0',col_name=list(self.X0.columns),item=self.X0)
+ self.dbserver.add_item(
+ item_name='X0', col_name=list(self.X0.columns), item=self.X0
+ )
return self.X0.to_numpy()
- except:
+ except: # noqa: E722
return None
-
def _sync_data(self):
- """
- Sync database data to current attributes
- """
+ """Sync database data to current attributes""" # noqa: D400
avail_name_list = self.dbserver.get_name_list()
if not avail_name_list:
# empty database
- self.logfile.write_msg(msg='PLoM._sync_data: database is empty - no data to sync.',msg_type='WARNING',msg_level=0)
+ self.logfile.write_msg(
+ msg='PLoM._sync_data: database is empty - no data to sync.',
+ msg_type='WARNING',
+ msg_level=0,
+ )
else:
for cur_item in avail_name_list:
if cur_item.startswith('/DS_'):
# skipping the data-shape attributes
continue
- if type(ATTR_MAP[cur_item]) is str:
- self.__setattr__(ATTR_MAP[cur_item],self.dbserver.get_item(cur_item[1:]))
- self.logfile.write_msg(msg='PLoM._sync_data: self.{} synced.'.format(ATTR_MAP[cur_item]),msg_type='RUNNING',msg_level=0)
+ if type(ATTR_MAP[cur_item]) is str: # noqa: F405
+ self.__setattr__( # noqa: PLC2801
+ ATTR_MAP[cur_item], # noqa: F405
+ self.dbserver.get_item(cur_item[1:]),
+ )
+ self.logfile.write_msg(
+ msg=f'PLoM._sync_data: self.{ATTR_MAP[cur_item]} synced.', # noqa: F405
+ msg_type='RUNNING',
+ msg_level=0,
+ )
else:
# None type (this is the 'basic' - skipped)
- self.logfile.write_msg(msg='PLoM._sync_data: data {} skipped.'.format(cur_item),msg_type='RUNNING',msg_level=0)
+ self.logfile.write_msg(
+ msg=f'PLoM._sync_data: data {cur_item} skipped.',
+ msg_type='RUNNING',
+ msg_level=0,
+ )
-
def _sync_constraints(self):
- """
- Sync constraints from dbserver to the attributes
- """
+ """Sync constraints from dbserver to the attributes""" # noqa: D400
avail_name_list = self.dbserver.get_name_list()
if '/constraints_file' not in avail_name_list:
# empty constraints
- self.logfile.write_msg(msg='PLoM._sync_data: no available constraint to sync.',msg_type='WARNING',msg_level=0)
+ self.logfile.write_msg(
+ msg='PLoM._sync_data: no available constraint to sync.',
+ msg_type='WARNING',
+ msg_level=0,
+ )
else:
# get constraints file path
cfile = self.dbserver.get_item(data_type='ConstraintsFile')
# add the constraints
- self.add_constraints(constraints_file = cfile)
-
+ self.add_constraints(constraints_file=cfile)
def load_h5(self, filename):
- """
- Loading h5 database
- """
+ """Loading h5 database""" # noqa: D400, D401
try:
self._load_h5_plom(filename)
- self.logfile.write_msg(msg='PLoM.load_h5: h5 file loaded.',msg_type='RUNNING',msg_level=0)
+ self.logfile.write_msg(
+ msg='PLoM.load_h5: h5 file loaded.', msg_type='RUNNING', msg_level=0
+ )
# sync data
self._sync_data()
- self.logfile.write_msg(msg='PLoM.load_h5: data in {} synced.'.format(filename),msg_type='RUNNING',msg_level=0)
+ self.logfile.write_msg(
+ msg=f'PLoM.load_h5: data in {filename} synced.',
+ msg_type='RUNNING',
+ msg_level=0,
+ )
self._sync_constraints()
- self.logfile.write_msg(msg='PLoM.load_h5: constraints in {} synced.'.format(filename),msg_type='RUNNING',msg_level=0)
+ self.logfile.write_msg(
+ msg=f'PLoM.load_h5: constraints in {filename} synced.',
+ msg_type='RUNNING',
+ msg_level=0,
+ )
if '/X0' in self.dbserver.get_name_list():
- self.X0 = self.dbserver.get_item('X0',table_like=True)
+ self.X0 = self.dbserver.get_item('X0', table_like=True)
return self.X0.to_numpy()
- else:
- self.logfile.write_msg(msg='PLoM.load_h5: the original X0 data not found in the loaded data.',msg_type='ERROR',msg_level=0)
+ else: # noqa: RET505
+ self.logfile.write_msg(
+                    msg='PLoM.load_h5: the original X0 data was not found in the loaded data.',
+ msg_type='ERROR',
+ msg_level=0,
+ )
return None
- except:
- X = self._load_h5_data_X(filename)
+ except: # noqa: E722
+ X = self._load_h5_data_X(filename) # noqa: N806
if X is None:
- self.logfile.write_msg(msg='PLoM.load_h5: cannot load {}.'.format(filename),msg_type='ERROR',msg_level=0)
+ self.logfile.write_msg(
+ msg=f'PLoM.load_h5: cannot load {filename}.',
+ msg_type='ERROR',
+ msg_level=0,
+ )
return None
- else:
+ else: # noqa: RET505
return X
-
- def add_data(self, filename, separator=',', col_header=False):
-
+ def add_data(self, filename, separator=',', col_header=False): # noqa: FBT002, D102
# load new data
- new_X, new_N, new_n = self.load_data(filename, separator, col_header)
+ new_X, new_N, new_n = self.load_data(filename, separator, col_header) # noqa: N806
# check data sizes
if new_n != self.n:
- self.logfile.write_msg(msg='PLoM.add_data: incompatible column size when loading {}'.format(filename),msg_type='ERROR',msg_level=0)
+ self.logfile.write_msg(
+ msg=f'PLoM.add_data: incompatible column size when loading {filename}',
+ msg_type='ERROR',
+ msg_level=0,
+ )
else:
# update the X and N
self.X = np.concatenate((self.X, new_X))
- self.N = self.N + new_N
+ self.N = self.N + new_N # noqa: PLR6104
self.X0.append(pd.DataFrame(new_X.T, columns=list(self.X0.columns)))
- self.logfile.write_msg(msg='PLoM.add_data: current X0 size = ({}, {}).'.format(self.N,self.n),msg_type='RUNNING',msg_level=0)
-
-
- def initialize_data(self, filename, separator=',', col_header=False, constraints = ''):
-
+ self.logfile.write_msg(
+ msg=f'PLoM.add_data: current X0 size = ({self.N}, {self.n}).',
+ msg_type='RUNNING',
+ msg_level=0,
+ )
+
+ def initialize_data( # noqa: D102
+ self,
+ filename,
+ separator=',',
+ col_header=False, # noqa: FBT002
+ constraints='', # noqa: ARG002
+ ):
# initialize the data and data sizes
try:
self.X, self.N, self.n = self.load_data(filename, separator, col_header)
- except:
- self.logfile.write_msg(msg='PLoM.initialize_data: cannot initialize data with {}'.format(filename),msg_type='ERROR',msg_level=0)
+ except: # noqa: E722
+ self.logfile.write_msg(
+ msg=f'PLoM.initialize_data: cannot initialize data with {filename}',
+ msg_type='ERROR',
+ msg_level=0,
+ )
return 1
# Save to database
- self.dbserver.add_item(item_name = 'X0', col_names = list(self.X0.columns), item = self.X.T, data_shape=self.X.shape)
- self.dbserver.add_item(item_name = 'N', item = np.array([self.N]))
- self.dbserver.add_item(item_name = 'n', item = np.array([self.n]))
- self.logfile.write_msg(msg='PLoM.initialize_data: current X0 size = ({}, {}).'.format(self.N,self.n),msg_type='RUNNING',msg_level=0)
- self.logfile.write_msg(msg='PLoM.initialize_data: X0 and X0_size saved to database.',msg_type='RUNNING',msg_level=0)
+ self.dbserver.add_item(
+ item_name='X0',
+ col_names=list(self.X0.columns),
+ item=self.X.T,
+ data_shape=self.X.shape,
+ )
+ self.dbserver.add_item(item_name='N', item=np.array([self.N]))
+ self.dbserver.add_item(item_name='n', item=np.array([self.n]))
+ self.logfile.write_msg(
+ msg=f'PLoM.initialize_data: current X0 size = ({self.N}, {self.n}).',
+ msg_type='RUNNING',
+ msg_level=0,
+ )
+ self.logfile.write_msg(
+ msg='PLoM.initialize_data: X0 and X0_size saved to database.',
+ msg_type='RUNNING',
+ msg_level=0,
+ )
return 0
-
def _init_indv_tasks(self):
- """
- Initializing tasks
- """
- for cur_task in FULL_TASK_LIST:
- self.__setattr__('task_'+cur_task, Task(task_name=cur_task))
-
+ """Initializing tasks""" # noqa: D400, D401
+ for cur_task in FULL_TASK_LIST: # noqa: F405
+ self.__setattr__('task_' + cur_task, Task(task_name=cur_task)) # noqa: F405, PLC2801
- def ConfigTasks(self, task_list = FULL_TASK_LIST):
- """
- Creating a task list object
+ def ConfigTasks(self, task_list=FULL_TASK_LIST): # noqa: C901, N802, F405
+ """Creating a task list object
- task_list: a string list of tasks to run
- """
+ """ # noqa: D205, D400, D401
config_flag = True
self.cur_task_list = task_list
# check task orders
- if not all([x in FULL_TASK_LIST for x in self.cur_task_list]):
- self.logfile.write_msg(msg='PLoM.config_tasks: task name not recognized.',msg_type='ERROR',msg_level=0)
- self.logfile.write_msg(msg='PLoM.config_tasks: acceptable task names: {}.'.format(','.join(FULL_TASK_LIST)),msg_type='WARNING',msg_level=0)
+ if not all([x in FULL_TASK_LIST for x in self.cur_task_list]): # noqa: C419, F405
+ self.logfile.write_msg(
+ msg='PLoM.config_tasks: task name not recognized.',
+ msg_type='ERROR',
+ msg_level=0,
+ )
+ self.logfile.write_msg(
+ msg='PLoM.config_tasks: acceptable task names: {}.'.format(
+ ','.join(FULL_TASK_LIST) # noqa: F405
+ ),
+ msg_type='WARNING',
+ msg_level=0,
+ )
return False
- map_order = [FULL_TASK_LIST.index(x) for x in self.cur_task_list]
+ map_order = [FULL_TASK_LIST.index(x) for x in self.cur_task_list] # noqa: F405
if map_order != sorted(map_order):
- self.logfile.write_msg(msg='PLoM.config_tasks: task order error.',msg_type='ERROR',msg_level=0)
- self.logfile.write_msg(msg='PLoM.config_tasks: please follow this order: {}.'.format('->'.join(FULL_TASK_LIST)),msg_type='WARNING',msg_level=0)
+ self.logfile.write_msg(
+ msg='PLoM.config_tasks: task order error.',
+ msg_type='ERROR',
+ msg_level=0,
+ )
+ self.logfile.write_msg(
+ msg='PLoM.config_tasks: please follow this order: {}.'.format(
+ '->'.join(FULL_TASK_LIST) # noqa: F405
+ ),
+ msg_type='WARNING',
+ msg_level=0,
+ )
return False
- if (max(map_order)-min(map_order)+1) != len(map_order):
+ if (max(map_order) - min(map_order) + 1) != len(map_order):
             # intermediate task(s) missing -> since the tasks run as a chain, the default is to automatically fill in any missing tasks in the middle
- self.cur_task_list = FULL_TASK_LIST[min(map_order):max(map_order)+1]
- self.logfile.write_msg(msg='PLoM.config_tasks: intermediate task(s) missing and being filled in automatically.',msg_type='WARNING',msg_level=0)
- self.logfile.write_msg(msg='PLoM.config_tasks: the filled task list is: {}.'.format('->'.join(self.cur_task_list)),msg_type='RUNNING',msg_level=0)
- # intializing the task list
- self.task_list = TaskList()
- # intializing individual tasks and refreshing status
+ self.cur_task_list = FULL_TASK_LIST[min(map_order) : max(map_order) + 1] # noqa: F405
+ self.logfile.write_msg(
+ msg='PLoM.config_tasks: intermediate task(s) missing and being filled in automatically.',
+ msg_type='WARNING',
+ msg_level=0,
+ )
+ self.logfile.write_msg(
+ msg='PLoM.config_tasks: the filled task list is: {}.'.format(
+ '->'.join(self.cur_task_list)
+ ),
+ msg_type='RUNNING',
+ msg_level=0,
+ )
+ # initializing the task list
+ self.task_list = TaskList() # noqa: F405
+ # initializing individual tasks and refreshing status
self._init_indv_tasks()
- for cur_task in FULL_TASK_LIST:
- self.__getattribute__('task_'+cur_task).full_var_list = TASK_ITEM_MAP[cur_task]
- for cur_item in TASK_ITEM_MAP[cur_task]:
- if '/'+cur_item in self.dbserver.get_name_list():
- self.__getattribute__('task_'+cur_task).avail_var_list.append(cur_item)
- self.__getattribute__('task_'+cur_task).refresh_status()
+ for cur_task in FULL_TASK_LIST: # noqa: F405
+ self.__getattribute__('task_' + cur_task).full_var_list = TASK_ITEM_MAP[ # noqa: F405, PLC2801
+ cur_task
+ ]
+ for cur_item in TASK_ITEM_MAP[cur_task]: # noqa: F405
+ if '/' + cur_item in self.dbserver.get_name_list():
+ self.__getattribute__('task_' + cur_task).avail_var_list.append( # noqa: PLC2801
+ cur_item
+ )
+ self.__getattribute__('task_' + cur_task).refresh_status() # noqa: PLC2801
# create the task list
for cur_task in self.cur_task_list:
- self.task_list.add_task(new_task=self.__getattribute__('task_'+cur_task))
-
+ self.task_list.add_task(
+ new_task=self.__getattribute__('task_' + cur_task) # noqa: PLC2801
+ )
+
self.task_list.refresh_status()
# need to check the task chain if all dependent tasks completed to go
# otherwise, the current run could not be completed
- pre_task_list = FULL_TASK_LIST[:FULL_TASK_LIST.index(self.cur_task_list[0])]
+ pre_task_list = FULL_TASK_LIST[: FULL_TASK_LIST.index(self.cur_task_list[0])] # noqa: F405
if len(pre_task_list):
for cur_task in pre_task_list:
- if not self.__getattribute__('task_'+cur_task).refresh_status():
+ if not self.__getattribute__('task_' + cur_task).refresh_status(): # noqa: PLC2801
config_flag = False
- self.logfile.write_msg(msg='PLoM.config_tasks: configuration failed with dependent task {} not completed.'.format(cur_task),msg_type='ERROR',msg_level=0)
-
- if config_flag:
- self.logfile.write_msg(msg='PLoM.config_tasks: the following tasks is configured to run: {}.'.format('->'.join(self.cur_task_list)),msg_type='RUNNING',msg_level=0)
-
-
- def RunAlgorithm(self, n_mc = 5, epsilon_pca = 1e-6, epsilon_kde = 25, tol_PCA2 = 1e-5, tol = 1e-6, max_iter = 50, plot_tag = False, runDiffMaps = None, seed_num=None, tolKDE=0.1):
- """
- Running the PLoM algorithm to train the model and generate new realizations
+ self.logfile.write_msg(
+ msg=f'PLoM.config_tasks: configuration failed with dependent task {cur_task} not completed.',
+ msg_type='ERROR',
+ msg_level=0,
+ )
+
+ if config_flag: # noqa: RET503
+ self.logfile.write_msg( # noqa: RET503
+                msg='PLoM.config_tasks: the following task(s) are configured to run: {}.'.format(
+ '->'.join(self.cur_task_list)
+ ),
+ msg_type='RUNNING',
+ msg_level=0,
+ )
+
+ def RunAlgorithm( # noqa: C901, N802
+ self,
+ n_mc=5,
+ epsilon_pca=1e-6,
+ epsilon_kde=25,
+ tol_PCA2=1e-5, # noqa: N803
+ tol=1e-6,
+ max_iter=50,
+ plot_tag=False, # noqa: FBT002
+ runDiffMaps=None, # noqa: N803
+ seed_num=None,
+ tolKDE=0.1, # noqa: N803
+ ):
+ """Running the PLoM algorithm to train the model and generate new realizations
- n_mc: realization/sample size ratio
- - epsilon_pca: tolerance for selecting the number of considered componenets in PCA
+ - epsilon_pca: tolerance for selecting the number of considered components in PCA
- epsilon_kde: smoothing parameter in the kernel density estimation
- tol: tolerance in the PLoM iterations
- max_iter: maximum number of iterations of the PLoM algorithm
- """
- if runDiffMaps == None:
- runDiffMaps = self.runDiffMaps
+ """ # noqa: D205, D400, D401
+ if runDiffMaps == None: # noqa: E711
+ runDiffMaps = self.runDiffMaps # noqa: N806
else:
self.runDiffMaps = runDiffMaps
@@ -461,105 +702,276 @@ def RunAlgorithm(self, n_mc = 5, epsilon_pca = 1e-6, epsilon_kde = 25, tol_PCA2
cur_task = self.task_list.head_task
while cur_task:
if cur_task.task_name == 'DataNormalization':
- self.__getattribute__('task_'+cur_task.task_name).avail_var_list = []
- #data normalization
- self.X_scaled, self.alpha, self.x_min, self.x_mean = self.DataNormalization(self.X)
- self.logfile.write_msg(msg='PLoM.RunAlgorithm: data normalization completed.',msg_type='RUNNING',msg_level=0)
- self.dbserver.add_item(item_name = 'X_range', item = self.alpha, data_shape=self.alpha.shape)
- self.dbserver.add_item(item_name = 'X_min', col_names = list(self.X0.columns), item = self.x_min.T, data_shape=self.x_min.shape)
- self.dbserver.add_item(item_name = 'X_scaled', col_names = list(self.X0.columns), item = self.X_scaled.T, data_shape=self.X_scaled.shape)
- self.dbserver.add_item(item_name = 'X_scaled_mean', col_names = list(self.X0.columns), item = self.x_mean.T, data_shape=self.x_mean.shape)
- self.logfile.write_msg(msg='PLoM.RunAlgorithm: X_range, X_min, X_scaled and X_scaled_mean saved.',msg_type='RUNNING',msg_level=0)
+ self.__getattribute__( # noqa: PLC2801
+ 'task_' + cur_task.task_name
+ ).avail_var_list = []
+ # data normalization
+ self.X_scaled, self.alpha, self.x_min, self.x_mean = (
+ self.DataNormalization(self.X)
+ )
+ self.logfile.write_msg(
+ msg='PLoM.RunAlgorithm: data normalization completed.',
+ msg_type='RUNNING',
+ msg_level=0,
+ )
+ self.dbserver.add_item(
+ item_name='X_range', item=self.alpha, data_shape=self.alpha.shape
+ )
+ self.dbserver.add_item(
+ item_name='X_min',
+ col_names=list(self.X0.columns),
+ item=self.x_min.T,
+ data_shape=self.x_min.shape,
+ )
+ self.dbserver.add_item(
+ item_name='X_scaled',
+ col_names=list(self.X0.columns),
+ item=self.X_scaled.T,
+ data_shape=self.X_scaled.shape,
+ )
+ self.dbserver.add_item(
+ item_name='X_scaled_mean',
+ col_names=list(self.X0.columns),
+ item=self.x_mean.T,
+ data_shape=self.x_mean.shape,
+ )
+ self.logfile.write_msg(
+ msg='PLoM.RunAlgorithm: X_range, X_min, X_scaled and X_scaled_mean saved.',
+ msg_type='RUNNING',
+ msg_level=0,
+ )
elif cur_task.task_name == 'RunPCA':
- self.__getattribute__('task_'+cur_task.task_name).avail_var_list = []
- #PCA
- self.H, self.mu, self.phi, self.nu, self.errPCA = self.RunPCA(self.X_scaled, epsilon_pca)
- self.logfile.write_msg(msg='PLoM.RunAlgorithm: PCA completed.',msg_type='RUNNING',msg_level=0)
- self.dbserver.add_item(item_name = 'X_PCA', col_names = ['Component'+str(i+1) for i in range(self.H.shape[0])], item = self.H.T, data_shape=self.H.shape)
- self.dbserver.add_item(item_name = 'EigenValue_PCA', item = self.mu, data_shape=self.mu.shape)
- self.dbserver.add_item(item_name = 'EigenVector_PCA', col_names = ['V'+str(i+1) for i in range(self.phi.shape[1])], item = self.phi, data_shape=self.phi.shape)
- self.dbserver.add_item(item_name = 'NumComp_PCA', item = np.array([self.nu]))
- self.dbserver.add_item(item_name = 'Error_PCA', item = np.array(self.errPCA))
- self.logfile.write_msg(msg='PLoM.RunAlgorithm: X_PCA, EigenValue_PCA and EigenVector_PCA saved.',msg_type='RUNNING',msg_level=0)
+ self.__getattribute__( # noqa: PLC2801
+ 'task_' + cur_task.task_name
+ ).avail_var_list = []
+ # PCA
+ self.H, self.mu, self.phi, self.nu, self.errPCA = self.RunPCA(
+ self.X_scaled, epsilon_pca
+ )
+ self.logfile.write_msg(
+ msg='PLoM.RunAlgorithm: PCA completed.',
+ msg_type='RUNNING',
+ msg_level=0,
+ )
+ self.dbserver.add_item(
+ item_name='X_PCA',
+ col_names=[
+ 'Component' + str(i + 1) for i in range(self.H.shape[0])
+ ],
+ item=self.H.T,
+ data_shape=self.H.shape,
+ )
+ self.dbserver.add_item(
+ item_name='EigenValue_PCA',
+ item=self.mu,
+ data_shape=self.mu.shape,
+ )
+ self.dbserver.add_item(
+ item_name='EigenVector_PCA',
+ col_names=['V' + str(i + 1) for i in range(self.phi.shape[1])],
+ item=self.phi,
+ data_shape=self.phi.shape,
+ )
+ self.dbserver.add_item(
+ item_name='NumComp_PCA', item=np.array([self.nu])
+ )
+ self.dbserver.add_item(
+ item_name='Error_PCA', item=np.array(self.errPCA)
+ )
+ self.logfile.write_msg(
+ msg='PLoM.RunAlgorithm: X_PCA, EigenValue_PCA and EigenVector_PCA saved.',
+ msg_type='RUNNING',
+ msg_level=0,
+ )
elif cur_task.task_name == 'RunKDE':
- self.__getattribute__('task_'+cur_task.task_name).avail_var_list = []
- #parameters KDE
- self.s_v, self.c_v, self.hat_s_v, self.K, self.b = self.RunKDE(self.H, epsilon_kde)
- self.logfile.write_msg(msg='PLoM.RunAlgorithm: kernel density estimation completed.',msg_type='RUNNING',msg_level=0)
- self.dbserver.add_item(item_name = 's_v', item = np.array([self.s_v]))
- self.dbserver.add_item(item_name = 'c_v', item = np.array([self.c_v]))
- self.dbserver.add_item(item_name = 'hat_s_v', item = np.array([self.hat_s_v]))
- self.dbserver.add_item(item_name = 'X_KDE', item = self.K, data_shape=self.K.shape)
- self.dbserver.add_item(item_name = 'EigenValues_KDE', item = self.b, data_shape=self.b.shape)
- self.logfile.write_msg(msg='PLoM.RunAlgorithm: KDE, X_KDE and EigenValues_KDE saved.',msg_type='RUNNING',msg_level=0)
- #diff maps
+ self.__getattribute__( # noqa: PLC2801
+ 'task_' + cur_task.task_name
+ ).avail_var_list = []
+ # parameters KDE
+ self.s_v, self.c_v, self.hat_s_v, self.K, self.b = self.RunKDE(
+ self.H, epsilon_kde
+ )
+ self.logfile.write_msg(
+ msg='PLoM.RunAlgorithm: kernel density estimation completed.',
+ msg_type='RUNNING',
+ msg_level=0,
+ )
+ self.dbserver.add_item(item_name='s_v', item=np.array([self.s_v]))
+ self.dbserver.add_item(item_name='c_v', item=np.array([self.c_v]))
+ self.dbserver.add_item(
+ item_name='hat_s_v', item=np.array([self.hat_s_v])
+ )
+ self.dbserver.add_item(
+ item_name='X_KDE', item=self.K, data_shape=self.K.shape
+ )
+ self.dbserver.add_item(
+ item_name='EigenValues_KDE', item=self.b, data_shape=self.b.shape
+ )
+ self.logfile.write_msg(
+ msg='PLoM.RunAlgorithm: KDE, X_KDE and EigenValues_KDE saved.',
+ msg_type='RUNNING',
+ msg_level=0,
+ )
+ # diff maps
if runDiffMaps:
- self.__getattribute__('task_'+cur_task.task_name).avail_var_list = []
- #diff maps
- self.g, self.m, self.a, self.Z, self.eigenKDE = self.DiffMaps(self.H, self.K, self.b, tol=tolKDE)
- self.logfile.write_msg(msg='PLoM.RunAlgorithm: diffusion maps completed.',msg_type='RUNNING',msg_level=0)
- self.dbserver.add_item(item_name = 'KDE_g', item = self.g, data_shape=self.g.shape)
- self.dbserver.add_item(item_name = 'KDE_m', item = np.array([self.m]))
- self.dbserver.add_item(item_name = 'KDE_a', item = self.a, data_shape=self.a.shape)
- self.dbserver.add_item(item_name = 'KDE_Z', item = self.Z, data_shape=self.Z.shape)
- self.dbserver.add_item(item_name = 'KDE_Eigen', item = self.eigenKDE, data_shape=self.eigenKDE.shape)
- self.logfile.write_msg(msg='PLoM.RunAlgorithm: KDE_g, KDE_m, KDE_a, KDE_Z, and KDE_Eigen saved.',msg_type='RUNNING',msg_level=0)
+ self.__getattribute__( # noqa: PLC2801
+ 'task_' + cur_task.task_name
+ ).avail_var_list = []
+ # diff maps
+ self.g, self.m, self.a, self.Z, self.eigenKDE = self.DiffMaps(
+ self.H, self.K, self.b, tol=tolKDE
+ )
+ self.logfile.write_msg(
+ msg='PLoM.RunAlgorithm: diffusion maps completed.',
+ msg_type='RUNNING',
+ msg_level=0,
+ )
+ self.dbserver.add_item(
+ item_name='KDE_g', item=self.g, data_shape=self.g.shape
+ )
+ self.dbserver.add_item(
+ item_name='KDE_m', item=np.array([self.m])
+ )
+ self.dbserver.add_item(
+ item_name='KDE_a', item=self.a, data_shape=self.a.shape
+ )
+ self.dbserver.add_item(
+ item_name='KDE_Z', item=self.Z, data_shape=self.Z.shape
+ )
+ self.dbserver.add_item(
+ item_name='KDE_Eigen',
+ item=self.eigenKDE,
+ data_shape=self.eigenKDE.shape,
+ )
+ self.logfile.write_msg(
+ msg='PLoM.RunAlgorithm: KDE_g, KDE_m, KDE_a, KDE_Z, and KDE_Eigen saved.',
+ msg_type='RUNNING',
+ msg_level=0,
+ )
else:
self.g = np.identity(self.N)
self.m = self.N
- self.a = self.g[:,:self.m].dot(np.linalg.inv(np.transpose(self.g[:,:self.m]).dot(self.g[:,:self.m])))
+ self.a = self.g[:, : self.m].dot(
+ np.linalg.inv(
+ np.transpose(self.g[:, : self.m]).dot(
+ self.g[:, : self.m]
+ )
+ )
+ )
self.Z = self.H.dot(self.a)
self.eigenKDE = np.array([])
- self.logfile.write_msg(msg='PLoM.RunAlgorithm: diffusion map is inactivated.',msg_type='RUNNING',msg_level=0)
- self.dbserver.add_item(item_name = 'KDE_g', item = self.g, data_shape=self.g.shape)
- self.dbserver.add_item(item_name = 'KDE_m', item = np.array([self.m]))
- self.dbserver.add_item(item_name = 'KDE_a', item = self.a, data_shape=self.a.shape)
- self.dbserver.add_item(item_name = 'KDE_Z', item = self.Z, data_shape=self.Z.shape)
- self.dbserver.add_item(item_name = 'KDE_Eigen', item = self.eigenKDE, data_shape=self.eigenKDE.shape)
- self.logfile.write_msg(msg='PLoM.RunAlgorithm: KDE_g, KDE_m, KDE_a, KDE_Z, and KDE_Eigen saved.',msg_type='RUNNING',msg_level=0)
+ self.logfile.write_msg(
+                        msg='PLoM.RunAlgorithm: diffusion map is deactivated.',
+ msg_type='RUNNING',
+ msg_level=0,
+ )
+ self.dbserver.add_item(
+ item_name='KDE_g', item=self.g, data_shape=self.g.shape
+ )
+ self.dbserver.add_item(
+ item_name='KDE_m', item=np.array([self.m])
+ )
+ self.dbserver.add_item(
+ item_name='KDE_a', item=self.a, data_shape=self.a.shape
+ )
+ self.dbserver.add_item(
+ item_name='KDE_Z', item=self.Z, data_shape=self.Z.shape
+ )
+ self.dbserver.add_item(
+ item_name='KDE_Eigen',
+ item=self.eigenKDE,
+ data_shape=self.eigenKDE.shape,
+ )
+ self.logfile.write_msg(
+ msg='PLoM.RunAlgorithm: KDE_g, KDE_m, KDE_a, KDE_Z, and KDE_Eigen saved.',
+ msg_type='RUNNING',
+ msg_level=0,
+ )
elif cur_task.task_name == 'ISDEGeneration':
- self.__getattribute__('task_'+cur_task.task_name).avail_var_list = []
- #ISDE generation
- self.ISDEGeneration(n_mc = n_mc, tol_PCA2 = tol_PCA2, tol = tol, max_iter = max_iter, seed_num=seed_num)
- self.logfile.write_msg(msg='PLoM.RunAlgorithm: Realizations generated.',msg_type='RUNNING',msg_level=0)
- self.dbserver.add_item(item_name = 'X_new', col_names = list(self.X0.columns), item = self.Xnew.T, data_shape=self.Xnew.shape)
- self.logfile.write_msg(msg='PLoM.RunAlgorithm: X_new saved.',msg_type='RUNNING',msg_level=0)
+ self.__getattribute__( # noqa: PLC2801
+ 'task_' + cur_task.task_name
+ ).avail_var_list = []
+ # ISDE generation
+ self.ISDEGeneration(
+ n_mc=n_mc,
+ tol_PCA2=tol_PCA2,
+ tol=tol,
+ max_iter=max_iter,
+ seed_num=seed_num,
+ )
+ self.logfile.write_msg(
+ msg='PLoM.RunAlgorithm: Realizations generated.',
+ msg_type='RUNNING',
+ msg_level=0,
+ )
+ self.dbserver.add_item(
+ item_name='X_new',
+ col_names=list(self.X0.columns),
+ item=self.Xnew.T,
+ data_shape=self.Xnew.shape,
+ )
+ self.logfile.write_msg(
+ msg='PLoM.RunAlgorithm: X_new saved.',
+ msg_type='RUNNING',
+ msg_level=0,
+ )
else:
- self.logfile.write_msg(msg='PLoM.RunAlgorithm: task {} not found.'.format(cur_task.task_name),msg_type='ERROR',msg_level=0)
+ self.logfile.write_msg(
+ msg=f'PLoM.RunAlgorithm: task {cur_task.task_name} not found.',
+ msg_type='ERROR',
+ msg_level=0,
+ )
break
# refresh status
- for cur_item in TASK_ITEM_MAP[cur_task.task_name]:
- if '/'+cur_item in self.dbserver.get_name_list():
- self.__getattribute__('task_'+cur_task.task_name).avail_var_list.append(cur_item)
+ for cur_item in TASK_ITEM_MAP[cur_task.task_name]: # noqa: F405
+ if '/' + cur_item in self.dbserver.get_name_list():
+ self.__getattribute__( # noqa: PLC2801
+ 'task_' + cur_task.task_name
+ ).avail_var_list.append(cur_item)
if not cur_task.refresh_status():
- self.logfile.write_msg(msg='PLoM.RunAlgorithm: simulation stopped with task {} not fully completed.'.format(cur_task.task_name),msg_type='ERROR',msg_level=0)
+ self.logfile.write_msg(
+ msg=f'PLoM.RunAlgorithm: simulation stopped with task {cur_task.task_name} not fully completed.',
+ msg_type='ERROR',
+ msg_level=0,
+ )
break
# move to the next task
cur_task = cur_task.next_task
if self.task_list.refresh_status():
- self.logfile.write_msg(msg='PLoM.RunAlgorithm: simulation completed with task(s) {} done.'.format('->'.join(self.cur_task_list)),msg_type='RUNNING',msg_level=0)
+ self.logfile.write_msg(
+ msg='PLoM.RunAlgorithm: simulation completed with task(s) {} done.'.format(
+ '->'.join(self.cur_task_list)
+ ),
+ msg_type='RUNNING',
+ msg_level=0,
+ )
else:
- self.logfile.write_msg(msg='PLoM.RunAlgorithm: simulation not fully completed.',msg_type='ERROR',msg_level=0)
-
-
- def DataNormalization(self, X):
- """
- Normalizing the X
+ self.logfile.write_msg(
+ msg='PLoM.RunAlgorithm: simulation not fully completed.',
+ msg_type='ERROR',
+ msg_level=0,
+ )
+
+ def DataNormalization(self, X): # noqa: N802, N803, PLR6301
+ """Normalizing the X
- X: the data matrix to be normalized
- """
- #scaling
- X_scaled, alpha, x_min = plom.scaling(X)
+ """ # noqa: D205, D400, D401
+ # scaling
+ X_scaled, alpha, x_min = plom.scaling(X) # noqa: N806
x_mean = plom.mean(X_scaled)
-
- return X_scaled, alpha, x_min, x_mean
+ return X_scaled, alpha, x_min, x_mean
- def RunPCA(self, X_origin, epsilon_pca):
- #...PCA...
- (H, mu, phi, errors) = plom.PCA(X_origin, epsilon_pca)
+ def RunPCA(self, X_origin, epsilon_pca): # noqa: N802, N803, D102
+ # ...PCA...
+ (H, mu, phi, errors) = plom.PCA(X_origin, epsilon_pca) # noqa: N806
nu = len(H)
- self.logfile.write_msg(msg='PLoM.RunPCA: considered number of PCA components = {}'.format(nu),msg_type='RUNNING',msg_level=0)
+ self.logfile.write_msg(
+ msg=f'PLoM.RunPCA: considered number of PCA components = {nu}',
+ msg_type='RUNNING',
+ msg_level=0,
+ )
"""
if self.plot_tag:
fig, ax = plt.subplots(figsize=(8,6))
@@ -575,28 +987,25 @@ def RunPCA(self, X_origin, epsilon_pca):
"""
return H, mu, phi, nu, errors
-
- def RunKDE(self, X, epsilon_kde):
- """
- Running Kernel Density Estimation
+ def RunKDE(self, X, epsilon_kde): # noqa: N802, N803, PLR6301
+ """Running Kernel Density Estimation
- X: the data matrix to be reduced
- epsilon_kde: smoothing parameter in the kernel density estimation
- """
+ """ # noqa: D205, D400, D401
(s_v, c_v, hat_s_v) = plom.parameters_kde(X)
- K, b = plom.K(X, epsilon_kde)
-
- return s_v, c_v, hat_s_v, K, b
+ K, b = plom.K(X, epsilon_kde) # noqa: N806
+ return s_v, c_v, hat_s_v, K, b
- def DiffMaps(self, H, K, b, tol=0.1):
- #..diff maps basis...
- #self.Z = PCA(self.H)
+ def DiffMaps(self, H, K, b, tol=0.1): # noqa: N802, N803, D102
+ # ..diff maps basis...
+ # self.Z = PCA(self.H)
try:
- g, eigenvalues = plom.g(K, b) #diffusion maps
+ g, eigenvalues = plom.g(K, b) # diffusion maps
g = g.real
m = plom.m(eigenvalues, tol=tol)
- a = g[:,0:m].dot(np.linalg.inv(np.transpose(g[:,0:m]).dot(g[:,0:m])))
- Z = H.dot(a)
+ a = g[:, 0:m].dot(np.linalg.inv(np.transpose(g[:, 0:m]).dot(g[:, 0:m])))
+ Z = H.dot(a) # noqa: N806
"""
if self.plot_tag:
fig, ax = plt.subplots(figsize=(6,4))
@@ -607,133 +1016,229 @@ def DiffMaps(self, H, K, b, tol=0.1):
plt.savefig(os.path.join(self.vl_path,'KDE_EigenValue.png'),dpi=480)
self.logfile.write_msg(msg='PLoM: {} saved in {}.'.format('KDE_EigenValue.png',self.vl_path),msg_type='RUNNING',msg_level=0)
"""
- except:
+ except: # noqa: E722
g = None
m = 0
a = None
- Z = None
+ Z = None # noqa: N806
eigenvalues = []
- self.logfile.write_msg(msg='PLoM.DiffMaps: diffusion maps failed.',msg_type='ERROR',msg_level=0)
+ self.logfile.write_msg(
+ msg='PLoM.DiffMaps: diffusion maps failed.',
+ msg_type='ERROR',
+ msg_level=0,
+ )
return g, m, a, Z, eigenvalues
-
- def ISDEGeneration(self, n_mc = 5, tol_PCA2 = 1e-5, tol = 0.02, max_iter = 50, seed_num=None):
- """
- The construction of a nonlinear Ito Stochastic Differential Equation (ISDE) to generate realizations of random variable H
- """
+ def ISDEGeneration( # noqa: N802
+ self,
+ n_mc=5,
+ tol_PCA2=1e-5, # noqa: N803
+ tol=0.02,
+ max_iter=50,
+ seed_num=None,
+ ):
+ """The construction of a nonlinear Ito Stochastic Differential Equation (ISDE) to generate realizations of random variable H""" # noqa: D400, D401
if seed_num:
np.random.seed(seed_num)
- #constraints
+ # constraints
if self.g_c:
+ self.C_h_hat_eta = plom.covariance(
+ self.g_c(self.x_mean + (self.phi).dot(np.diag(self.mu)).dot(self.H))
+ )
- self.C_h_hat_eta = plom.covariance(self.g_c(self.x_mean+(self.phi).dot(np.diag(self.mu)).dot(self.H)))
-
- #scaling beta
- #self.beta_c_normalized = self.beta_c_aux(self.beta_c, self.x_min, self.alpha)
+ # scaling beta
+ # self.beta_c_normalized = self.beta_c_aux(self.beta_c, self.x_min, self.alpha)
# KZ, 07/24
self.beta_c_normalized = self.beta_c_aux(self.beta_c, self.X)
- self.b_c, self.psi = plom.PCA2(self.C_h_hat_eta, self.beta_c_normalized, tol_PCA2)
+ self.b_c, self.psi = plom.PCA2(
+ self.C_h_hat_eta, self.beta_c_normalized, tol_PCA2
+ )
self.nu_c = len(self.b_c)
-
- self.hessian = plom.hessian_gamma(self.H, self.psi, self.g_c, self.phi, self.mu, self.x_mean)
+ self.hessian = plom.hessian_gamma(
+ self.H, self.psi, self.g_c, self.phi, self.mu, self.x_mean
+ )
self.inverse = plom.solve_inverse(self.hessian)
-
- self.gradient = plom.gradient_gamma(self.b_c, self.H, self.g_c, self.phi, self.mu, self.psi, self.x_mean)
+
+ self.gradient = plom.gradient_gamma(
+ self.b_c, self.H, self.g_c, self.phi, self.mu, self.psi, self.x_mean
+ )
self.lambda_i = -(self.inverse).dot(self.gradient)
self.errors = [plom.err(self.gradient, self.b_c)]
iteration = 0
- nu_init = np.random.normal(size=(int(self.nu),int(self.N)))
+ nu_init = np.random.normal(size=(int(self.nu), int(self.N)))
self.Y = nu_init.dot(self.a)
error_ratio = 0
increasing_iterations = 0
-
- while (iteration < max_iter and self.errors[iteration] > tol*self.errors[0] and (increasing_iterations < 3)):
- self.logfile.write_msg(msg='PLoM.ISDEGeneration: running iteration {}.'.format(iteration+1),msg_type='RUNNING',msg_level=0)
- Hnewvalues, nu_lambda, x_, x_2 = plom.generator(self.Z, self.Y, self.a,\
- n_mc, self.x_mean, self.H, self.s_v,\
- self.hat_s_v, self.mu, self.phi,\
- self.g[:,0:int(self.m)], psi=self.psi,\
- lambda_i=self.lambda_i, g_c=self.g_c, D_x_g_c = self.D_x_g_c) #solve the ISDE in n_mc iterations
-
- self.gradient = plom.gradient_gamma(self.b_c, Hnewvalues, self.g_c, self.phi, self.mu, self.psi, self.x_mean)
- self.hessian = plom.hessian_gamma(Hnewvalues, self.psi, self.g_c, self.phi, self.mu, self.x_mean)
+
+ while (
+ iteration < max_iter
+ and self.errors[iteration] > tol * self.errors[0]
+ and (increasing_iterations < 3) # noqa: PLR2004
+ ):
+ self.logfile.write_msg(
+ msg=f'PLoM.ISDEGeneration: running iteration {iteration + 1}.',
+ msg_type='RUNNING',
+ msg_level=0,
+ )
+ Hnewvalues, nu_lambda, x_, x_2 = plom.generator( # noqa: N806
+ self.Z,
+ self.Y,
+ self.a,
+ n_mc,
+ self.x_mean,
+ self.H,
+ self.s_v,
+ self.hat_s_v,
+ self.mu,
+ self.phi,
+ self.g[:, 0 : int(self.m)],
+ psi=self.psi,
+ lambda_i=self.lambda_i,
+ g_c=self.g_c,
+ D_x_g_c=self.D_x_g_c,
+ ) # solve the ISDE in n_mc iterations
+
+ self.gradient = plom.gradient_gamma(
+ self.b_c,
+ Hnewvalues,
+ self.g_c,
+ self.phi,
+ self.mu,
+ self.psi,
+ self.x_mean,
+ )
+ self.hessian = plom.hessian_gamma(
+ Hnewvalues, self.psi, self.g_c, self.phi, self.mu, self.x_mean
+ )
self.inverse = plom.solve_inverse(self.hessian)
-
- self.lambda_i = self.lambda_i - 0.3*(self.inverse).dot(self.gradient)
- self.Z = Hnewvalues[:,-self.N:].dot(self.a)
- self.Y = nu_lambda[:,-self.N:].dot(self.a)
+ self.lambda_i = self.lambda_i - 0.3 * (self.inverse).dot( # noqa: PLR6104
+ self.gradient
+ )
+
+ self.Z = Hnewvalues[:, -self.N :].dot(self.a)
+ self.Y = nu_lambda[:, -self.N :].dot(self.a)
iteration += 1
(self.errors).append(plom.err(self.gradient, self.b_c))
-
- if (error_ratio > 1.00):
- increasing_iterations +=1
+
+ if error_ratio > 1.00:
+ increasing_iterations += 1
else:
- increasing_iterations = 0
-
- #saving data
- self.dbserver.add_item(item_name = 'Errors', item = np.array(self.errors), data_shape=np.array(self.errors).shape)
+ increasing_iterations = 0
+
+ # saving data
+ self.dbserver.add_item(
+ item_name='Errors',
+ item=np.array(self.errors),
+ data_shape=np.array(self.errors).shape,
+ )
if iteration == max_iter:
- self.logfile.write_msg(msg='PLoM.ISDEGeneration: max. iteration reached and convergence not achieved.',msg_type='WARNING',msg_level=0)
+ self.logfile.write_msg(
+ msg='PLoM.ISDEGeneration: max. iteration reached and convergence not achieved.',
+ msg_type='WARNING',
+ msg_level=0,
+ )
- #no constraints
+ # no constraints
else:
- nu_init = np.random.normal(size=(int(self.nu),int(self.N)))
+ nu_init = np.random.normal(size=(int(self.nu), int(self.N)))
self.Y = nu_init.dot(self.a)
- Hnewvalues, nu_lambda, x_, x_2 = plom.generator(self.Z, self.Y, self.a,\
- n_mc, self.x_mean, self.H, self.s_v,\
- self.hat_s_v, self.mu, self.phi,\
- self.g[:,0:int(self.m)],seed_num=seed_num) #solve the ISDE in n_mc iterations
- self.logfile.write_msg(msg='PLoM.ISDEGeneration: new generations are simulated.',msg_type='RUNNING',msg_level=0)
- self.dbserver.add_item(item_name = 'Errors', item = np.array([0]))
-
- #saving data
+ Hnewvalues, nu_lambda, x_, x_2 = plom.generator( # noqa: F841, N806
+ self.Z,
+ self.Y,
+ self.a,
+ n_mc,
+ self.x_mean,
+ self.H,
+ self.s_v,
+ self.hat_s_v,
+ self.mu,
+ self.phi,
+ self.g[:, 0 : int(self.m)],
+ seed_num=seed_num,
+ ) # solve the ISDE in n_mc iterations
+ self.logfile.write_msg(
+ msg='PLoM.ISDEGeneration: new generations are simulated.',
+ msg_type='RUNNING',
+ msg_level=0,
+ )
+ self.dbserver.add_item(item_name='Errors', item=np.array([0]))
+
+ # saving data
self.errors = []
- self.dbserver.add_item(item_name = 'Errors', item = np.array(self.errors), data_shape=np.array(self.errors).shape)
+ self.dbserver.add_item(
+ item_name='Errors',
+ item=np.array(self.errors),
+ data_shape=np.array(self.errors).shape,
+ )
self.Xnew = self.x_mean + self.phi.dot(np.diag(self.mu)).dot(Hnewvalues)
-
- #unscale
- self.Xnew = np.diag(self.alpha).dot(self.Xnew)+self.x_min
-
- def export_results(self, data_list = [], file_format_list = ['csv']):
- """
- Exporting results by the data names
+ # unscale
+ self.Xnew = np.diag(self.alpha).dot(self.Xnew) + self.x_min
+
+ def export_results(self, data_list=[], file_format_list=['csv']): # noqa: B006
+ """Exporting results by the data names
- data_list: list of data names
- file_format_list: list of output formats
- """
+ """ # noqa: D205, D400, D401
avail_name_list = self.dbserver.get_name_list()
if not data_list:
# print available data names
avail_name_str = ','.join(avail_name_list)
- self.logfile.write_msg(msg='PLoM.export_results: available data {}.'.format(avail_name_str),msg_type='RUNNING',msg_level=0)
+ self.logfile.write_msg(
+ msg=f'PLoM.export_results: available data {avail_name_str}.',
+ msg_type='RUNNING',
+ msg_level=0,
+ )
if not avail_name_list:
# empty database
- self.logfile.write_msg(msg='PLoM.export_results: database is empty - no data exported.',msg_type='ERROR',msg_level=0)
+ self.logfile.write_msg(
+ msg='PLoM.export_results: database is empty - no data exported.',
+ msg_type='ERROR',
+ msg_level=0,
+ )
else:
for tag, data_i in enumerate(data_list):
if data_i not in avail_name_list:
- self.logfile.write_msg(msg='PLoM.export_results: {} is not found and skipped.'.format(data_i),msg_type='WARNING',msg_level=0)
+ self.logfile.write_msg(
+ msg=f'PLoM.export_results: {data_i} is not found and skipped.',
+ msg_type='WARNING',
+ msg_level=0,
+ )
else:
try:
ff_i = file_format_list[tag]
- except:
+ except: # noqa: E722
ff_i = file_format_list[-1]
- ex_flag = self.dbserver.export(data_name = data_i, file_format = ff_i)
- if type(ex_flag) == int and ex_flat == 1:
- self.logfile.write_msg(msg='PLoM.export_results: {} is not found and skipped.'.format(data_i),msg_type='WARNING',msg_level=0)
- elif type(ex_flag) == int and ex_flag == 2:
- self.logfile.write_msg(msg='PLoM.export_results: {} is not supported yest.'.format(ff_i),msg_type='ERROR',msg_level=0)
+ ex_flag = self.dbserver.export(
+ data_name=data_i, file_format=ff_i
+ )
+                        if type(ex_flag) == int and ex_flag == 1: # noqa: E721
+ self.logfile.write_msg(
+ msg=f'PLoM.export_results: {data_i} is not found and skipped.',
+ msg_type='WARNING',
+ msg_level=0,
+ )
+ elif type(ex_flag) == int and ex_flag == 2: # noqa: E721, PLR2004
+ self.logfile.write_msg(
+                                msg=f'PLoM.export_results: {ff_i} is not supported yet.',
+ msg_type='ERROR',
+ msg_level=0,
+ )
else:
- self.logfile.write_msg(msg='PLoM.export_results: {} is exported in {}.'.format(data_i,ex_flag),msg_type='RUNNING',msg_level=0)
+ self.logfile.write_msg(
+ msg=f'PLoM.export_results: {data_i} is exported in {ex_flag}.',
+ msg_type='RUNNING',
+ msg_level=0,
+ )
"""
def PostProcess():
@@ -797,7 +1302,4 @@ def PostProcess():
blue_patch = mpatches.Patch(color='blue', label='X')
plt.legend(handles=[red_patch, blue_patch])
plt.show()
- """
-
-
-
+ """ # noqa: E101
diff --git a/modules/performUQ/SimCenterUQ/PLoM/PLoM_library.py b/modules/performUQ/SimCenterUQ/PLoM/PLoM_library.py
index f65dfba55..ac55ef66f 100644
--- a/modules/performUQ/SimCenterUQ/PLoM/PLoM_library.py
+++ b/modules/performUQ/SimCenterUQ/PLoM/PLoM_library.py
@@ -1,381 +1,584 @@
-# -*- coding: utf-8 -*-
-#JGA
-#from matplotlib import pyplot as plt
+# JGA # noqa: CPY001, D100, N999
+# from matplotlib import pyplot as plt
+import os
+import platform
+from ctypes import * # noqa: F403
+from math import exp, log, pi, sqrt
+from sys import platform as pltm
+
import numpy as np
+from general import Logfile
from scipy import integrate
-from math import sqrt, exp, pi, log
-import time
-from ctypes import *
-import os
-from general import Logfile, DBServer
-from sys import platform as pltm
-import platform
-if pltm == "linux" or pltm == "linux2":
- c_lib = CDLL(os.path.join(os.path.dirname(os.path.abspath(__file__)),"lib/linux/PLoM_C_library.so"))
-elif pltm == "darwin":
+if pltm == 'linux' or pltm == 'linux2':
+ c_lib = CDLL( # noqa: F405
+ os.path.join( # noqa: PTH118
+ os.path.dirname(os.path.abspath(__file__)), # noqa: PTH100, PTH120
+ 'lib/linux/PLoM_C_library.so',
+ )
+ )
+elif pltm == 'darwin':
if platform.processor() == 'arm':
- c_lib = CDLL(os.path.join(os.path.dirname(os.path.abspath(__file__)),"lib/macOS_m1/PLoM_C_library.so"))
+ c_lib = CDLL( # noqa: F405
+ os.path.join( # noqa: PTH118
+ os.path.dirname(os.path.abspath(__file__)), # noqa: PTH100, PTH120
+ 'lib/macOS_m1/PLoM_C_library.so',
+ )
+ )
else:
- c_lib = CDLL(os.path.join(os.path.dirname(os.path.abspath(__file__)),"lib/macOS/PLoM_C_library.so"))
-elif pltm == "win32":
- c_lib = CDLL(os.path.join(os.path.dirname(os.path.abspath(__file__)),"lib/win/PLoM_C_library.so"))
-
-c_lib.rho.restype = c_double
-c_lib.rho.argtypes = [np.ctypeslib.ndpointer(dtype=np.float64),
- np.ctypeslib.ndpointer(dtype=np.float64),c_int,c_int,c_double,c_double]
+ c_lib = CDLL( # noqa: F405
+ os.path.join( # noqa: PTH118
+ os.path.dirname(os.path.abspath(__file__)), # noqa: PTH100, PTH120
+ 'lib/macOS/PLoM_C_library.so',
+ )
+ )
+elif pltm == 'win32':
+ c_lib = CDLL( # noqa: F405
+ os.path.join( # noqa: PTH118
+ os.path.dirname(os.path.abspath(__file__)), # noqa: PTH100, PTH120
+ 'lib/win/PLoM_C_library.so',
+ )
+ )
+
+c_lib.rho.restype = c_double # noqa: F405
+c_lib.rho.argtypes = [
+ np.ctypeslib.ndpointer(dtype=np.float64),
+ np.ctypeslib.ndpointer(dtype=np.float64),
+ c_int, # noqa: F405
+ c_int, # noqa: F405
+ c_double, # noqa: F405
+ c_double, # noqa: F405
+]
c_lib.gradient_rho.restype = np.ctypeslib.ndpointer(dtype=np.float64)
-c_lib.gradient_rho.argtypes = [np.ctypeslib.ndpointer(dtype=np.float64),
- np.ctypeslib.ndpointer(dtype=np.float64),
- np.ctypeslib.ndpointer(dtype=np.float64),
- c_int,c_int,c_double,c_double]
+c_lib.gradient_rho.argtypes = [
+ np.ctypeslib.ndpointer(dtype=np.float64),
+ np.ctypeslib.ndpointer(dtype=np.float64),
+ np.ctypeslib.ndpointer(dtype=np.float64),
+ c_int, # noqa: F405
+ c_int, # noqa: F405
+ c_double, # noqa: F405
+ c_double, # noqa: F405
+]
-def rhoctypes(y, eta, nu, N, s_v, hat_s_v):
- return c_lib.rho(np.array(y,np.float64),np.array(eta,np.float64),nu,N,s_v,hat_s_v)
-def scaling(x):
+def rhoctypes(y, eta, nu, N, s_v, hat_s_v): # noqa: N803, D103
+ return c_lib.rho(
+ np.array(y, np.float64), np.array(eta, np.float64), nu, N, s_v, hat_s_v
+ )
+
+
+def scaling(x): # noqa: D103
n = x.shape[0]
alpha = np.zeros(n)
- x_min = np.zeros((n,1))
- for i in range(0,n):
- x_max_k = max(x[i,:])
- x_min_k= min(x[i,:])
+ x_min = np.zeros((n, 1))
+ for i in range(n):
+ x_max_k = max(x[i, :])
+ x_min_k = min(x[i, :])
x_min[i] = x_min_k
if x_max_k - x_min_k != 0:
alpha[i] = x_max_k - x_min_k
else:
alpha[i] = 1
- x_scaled = np.diag(1/alpha).dot(x-x_min)
+ x_scaled = np.diag(1 / alpha).dot(x - x_min)
return x_scaled, alpha, x_min
-def gradient_rhoctypes(gradient, y, eta, nu, N, s_v, hat_s_v):
- return c_lib.gradient_rho(np.array(gradient,np.float64),\
- np.array(y,np.float64),\
- np.array(eta,np.float64),\
- nu, N, s_v, hat_s_v)
+
+def gradient_rhoctypes(gradient, y, eta, nu, N, s_v, hat_s_v): # noqa: N803, D103
+ return c_lib.gradient_rho(
+ np.array(gradient, np.float64),
+ np.array(y, np.float64),
+ np.array(eta, np.float64),
+ nu,
+ N,
+ s_v,
+ hat_s_v,
+ )
+
def kernel(x, y, epsilon):
- """
- >>> kernel(np.array([1,0]), np.array([1,0]), 0.5)
+ """>>> kernel(np.array([1,0]), np.array([1,0]), 0.5)
1.0
- """
- dist = np.linalg.norm(x-y)**2
- k = np.exp(-dist/(4*epsilon))
- return k
-
-def K(eta, epsilon):
- """
- >>> K((np.array([[1,1],[1,1]])), 3)
+ """ # noqa: D205, D400
+ dist = np.linalg.norm(x - y) ** 2
+ k = np.exp(-dist / (4 * epsilon))
+ return k # noqa: RET504
+
+
+def K(eta, epsilon): # noqa: N802
+ """>>> K((np.array([[1,1],[1,1]])), 3)
(array([[1., 1.],
[1., 1.]]), array([[2., 0.],
[0., 2.]]))
- """
- N = eta.shape[1]
- K = np.zeros((N,N))
- b = np.zeros((N,N))
- for i in range(0,N):
+ """ # noqa: D205, D400
+ N = eta.shape[1] # noqa: N806
+ K = np.zeros((N, N)) # noqa: N806
+ b = np.zeros((N, N))
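+    # K is the Gaussian kernel (affinity) matrix with unit diagonal; b holds each row sum on its diagonal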
+ for i in range(N):
row_sum = 0
- for j in range(0,N):
+ for j in range(N):
if j != i:
- K[i,j] = kernel((eta[:,i]),((eta[:,j])), epsilon)
- row_sum = row_sum + K[i,j]
+ K[i, j] = kernel((eta[:, i]), (eta[:, j]), epsilon)
+ row_sum = row_sum + K[i, j] # noqa: PLR6104
else:
- K[i,j] = 1
- row_sum = row_sum + 1
- b[i,i] = row_sum
+ K[i, j] = 1
+ row_sum = row_sum + 1 # noqa: PLR6104
+ b[i, i] = row_sum
return K, b
-def g(K, b):
- """
- >>> g((np.array([[1,0.5],[0.5,1]])), np.array([[1.5, 0.], [0., 1.5]]))
+
+def g(K, b): # noqa: N803
+ """>>> g((np.array([[1,0.5],[0.5,1]])), np.array([[1.5, 0.], [0., 1.5]]))
(array([[ 0.57735027, -0.57735027],
[ 0.57735027, 0.57735027]]), array([1. , 0.33333333]))
- """
- invb = np.diag(1/np.diag(b))
+ """ # noqa: D205, D400
+ invb = np.diag(1 / np.diag(b))
inv_sqrt_b = np.sqrt(invb)
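+    # eigendecompose the symmetrically normalized kernel b^(-1/2) K b^(-1/2) and sort the eigenpairs in decreasing order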
xi = np.linalg.eigh(inv_sqrt_b.dot(K).dot(inv_sqrt_b))
- xi[1][:,:] = np.transpose(xi[1][:,:])
- xi[1][:,:] = xi[1][[np.argsort(xi[0], kind = 'mergesort', axis = 0)[::-1]], :]
- eigenvalues = np.sort(xi[0], kind = 'mergesort', axis = 0)[::-1]
- g = inv_sqrt_b.dot(np.transpose(xi[1][:,:]))
+ xi[1][:, :] = np.transpose(xi[1][:, :])
+ xi[1][:, :] = xi[1][[np.argsort(xi[0], kind='mergesort', axis=0)[::-1]], :]
+ eigenvalues = np.sort(xi[0], kind='mergesort', axis=0)[::-1]
+ g = inv_sqrt_b.dot(np.transpose(xi[1][:, :]))
norm = np.diagonal(np.transpose(g).dot(b).dot(g))
- sqrt_norm = np.sqrt(1/norm)
+ sqrt_norm = np.sqrt(1 / norm)
g = np.multiply(g, sqrt_norm)
return g, eigenvalues
+
def m(eigenvalues, tol=0.1):
- """
- >>> m(np.array([1, 0.9, 0.8, 0.7, 0.6, 0.5, 0.4, 0.3, 0.2, 0.1, 0.05, 0.025]))
+ """>>> m(np.array([1, 0.9, 0.8, 0.7, 0.6, 0.5, 0.4, 0.3, 0.2, 0.1, 0.05, 0.025]))
11
- """
+ """ # noqa: D205, D400
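+    # number of retained diffusion-map coordinates: first index where the eigenvalues drop below tol times the second eigenvalue, with a fallback of max(len/10, 3)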
i = 2
m = 0
while i < len(eigenvalues) and m == 0:
- if eigenvalues[i] <= eigenvalues[1]*tol:
- return i+1
- i = i+1
+ if eigenvalues[i] <= eigenvalues[1] * tol:
+ return i + 1
+ i = i + 1 # noqa: PLR6104
if m == 0:
- return max(round(len(eigenvalues)/10), 3)
+ return max(round(len(eigenvalues) / 10), 3)
return m
+
def mean(x):
- """
- >>> mean(np.array([[1,1],[0,1],[2,4]]))
+ """>>> mean(np.array([[1,1],[0,1],[2,4]]))
array([[1. ],
[0.5],
[3. ]])
- """
+ """ # noqa: D205, D400
dim = x.shape[0]
- x_mean = np.zeros((dim,1))
- for i in range(0,dim):
- x_mean[i] = np.mean(x[i,:])
+ x_mean = np.zeros((dim, 1))
+ for i in range(dim):
+ x_mean[i] = np.mean(x[i, :])
return x_mean
+
def covariance(x):
- """
- >>> covariance(np.array([[1,1],[0,1],[2,4]]))
+ """>>> covariance(np.array([[1,1],[0,1],[2,4]]))
array([[0. , 0. , 0. ],
[0. , 0.5, 1. ],
[0. , 1. , 2. ]])
- """
+ """ # noqa: D205, D400
dim = x.shape[0]
- N = x.shape[1]
- C = np.zeros((dim,dim))
+ N = x.shape[1] # noqa: N806
+ C = np.zeros((dim, dim)) # noqa: N806
x_mean = mean(x)
- for i in range(0,N):
- C = C + (np.resize(x[:,i], x_mean.shape) - x_mean).dot(np.transpose((np.resize(x[:,i], x_mean.shape) - x_mean)))
- return C/(N-1)
+ for i in range(N):
+ C = C + (np.resize(x[:, i], x_mean.shape) - x_mean).dot( # noqa: N806, PLR6104
+ np.transpose(np.resize(x[:, i], x_mean.shape) - x_mean)
+ )
+ return C / (N - 1)
+
-def PCA(x, tol):
- """
- >>> PCA(np.array([[1,1],[0,1],[2,4]]), 0.1)
+def PCA(x, tol): # noqa: N802
+ """>>> PCA(np.array([[1,1],[0,1],[2,4]]), 0.1)
(array([[-0.70710678, 0.70710678]]), array([1.58113883]), array([[-1.13483031e-17],
[ 4.47213595e-01],
[ 8.94427191e-01]]))
- """
+ """ # noqa: D205, D400
x_mean = mean(x)
- (phi,mu,v) = np.linalg.svd(x-x_mean)
- mu = mu/sqrt(len(x[0])-1)
- #plt.figure()
- #plt.plot(np.arange(len(mu)), mu)
- #plt.xlabel('# eigenvalue of X covariance')
- #plt.show()
+ (phi, mu, v) = np.linalg.svd(x - x_mean) # noqa: F841
+ mu = mu / sqrt(len(x[0]) - 1) # noqa: PLR6104
+ # plt.figure()
+ # plt.plot(np.arange(len(mu)), mu)
+ # plt.xlabel('# eigenvalue of X covariance')
+ # plt.show()
error = 1
i = 0
errors = [1]
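+    # accumulate leading singular values until the residual (unexplained) variance fraction drops below tol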
while error > tol and i < len(mu):
- error = error - (mu[i]**2)/sum((mu**2))
- i = i+1
+ error = error - (mu[i] ** 2) / sum(mu**2) # noqa: PLR6104
+ i = i + 1 # noqa: PLR6104
nu = i
errors.append(error)
while i < len(mu):
- error = error - (mu[i]**2)/sum((mu**2))
- i = i+1
+ error = error - (mu[i] ** 2) / sum(mu**2) # noqa: PLR6104
+ i = i + 1 # noqa: PLR6104
errors.append(error)
- #plt.figure()
- #plt.semilogy(np.arange(len(mu)+1), errors)
- #plt.xlabel('# eigenvalue of Covariance matrix of X')
- #plt.ylabel('Error of the PCA associated with the eigenvalue')
- #plt.show()
+ # plt.figure()
+ # plt.semilogy(np.arange(len(mu)+1), errors)
+ # plt.xlabel('# eigenvalue of Covariance matrix of X')
+ # plt.ylabel('Error of the PCA associated with the eigenvalue')
+ # plt.show()
mu = mu[0:nu]
- phi = phi[:,0:nu]
- mu_sqrt_inv = (np.diag(1/(mu))) #no need to do the sqrt because we use the singularvalues
- eta = mu_sqrt_inv.dot(np.transpose(phi)).dot((x-x_mean))
- return eta, mu, phi, errors #mu is the diagonal matrix with the singularvalues up to a tolerance
+ phi = phi[:, 0:nu]
+ mu_sqrt_inv = np.diag(
+ 1 / (mu)
+    ) # no need to take the sqrt because we use the singular values
+ eta = mu_sqrt_inv.dot(np.transpose(phi)).dot(x - x_mean)
+ return (
+ eta,
+ mu,
+ phi,
+ errors,
+    ) # mu is the diagonal matrix with the singular values up to a tolerance
+
def parameters_kde(eta):
- """
- >>> parameters_kde(np.array([[1,1],[0,1],[2,4]]))
+ """>>> parameters_kde(np.array([[1,1],[0,1],[2,4]]))
(0.8773066621237415, 0.13452737030512696, 0.7785858648409519)
- """
+ """ # noqa: D205, D400
nu = eta.shape[0]
- N = eta.shape[1]
- s_v = (4/(N*(2+nu)))**(1/(nu+4))#(4/(N*(2+nu)))**(1/(nu+4))
- hat_s_v = s_v/sqrt(s_v**2+((N-1)/N))
- c_v = 1/(sqrt(2*pi)*hat_s_v)**nu
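+    # Silverman-type bandwidth s_v, the modified bandwidth hat_s_v, and the Gaussian normalization constant c_v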
+ N = eta.shape[1] # noqa: N806
+ s_v = (4 / (N * (2 + nu))) ** (1 / (nu + 4)) # (4/(N*(2+nu)))**(1/(nu+4))
+ hat_s_v = s_v / sqrt(s_v**2 + ((N - 1) / N))
+ c_v = 1 / (sqrt(2 * pi) * hat_s_v) ** nu
return s_v, c_v, hat_s_v
-def kde(y, eta, s_v = None, c_v = None, hat_s_v = None):
- """
- >>> kde(np.array([[1, 2, 3]]), np.array([[1,1],[0,1],[2,4]]))
+
+def kde(y, eta, s_v=None, c_v=None, hat_s_v=None):
+ """>>> kde(np.array([[1, 2, 3]]), np.array([[1,1],[0,1],[2,4]]))
0.01940049487135241
- """
+ """ # noqa: D205, D400
nu = eta.shape[0]
- N = eta.shape[1]
- if s_v == None or c_v == None or hat_s_v == None:
+ N = eta.shape[1] # noqa: N806
+ if s_v == None or c_v == None or hat_s_v == None: # noqa: E711
s_v, c_v, hat_s_v = parameters_kde(eta)
- return c_v*rhoctypes(np.resize(y,(y.shape[0]*y.shape[1],1)), np.resize(np.transpose(eta),(nu*N,1)),\
- nu, N, s_v, hat_s_v)
-
-def PCA2(C_h_hat_eta, beta, tol): #taking only independent constraints
- """
- >>> PCA2(np.array([[1. , 1. , 1. ], [1. , 4.5, 1.5 ], [1. , 1.5 , 2. ]]), np.array([10, 1, 2]), 0.1)
+ return c_v * rhoctypes(
+ np.resize(y, (y.shape[0] * y.shape[1], 1)),
+ np.resize(np.transpose(eta), (nu * N, 1)),
+ nu,
+ N,
+ s_v,
+ hat_s_v,
+ )
+
+
+# taking only independent constraints
+def PCA2(C_h_hat_eta, beta, tol): # noqa: N802, N803
+ """>>> PCA2(np.array([[1. , 1. , 1. ], [1. , 4.5, 1.5 ], [1. , 1.5 , 2. ]]), np.array([10, 1, 2]), 0.1)
(array([-4.53648062, 5.2236145 ]), array([[-0.28104828, 0.42570005],
[-0.85525695, -0.51768266],
[-0.43537043, 0.74214832]]))
- """
- (lambda_c, psi) = np.linalg.eig(C_h_hat_eta) #eigenvalue decomposition as the dimensions are not so big
+ """ # noqa: D205, D400
+ (lambda_c, psi) = np.linalg.eig(
+ C_h_hat_eta
+ ) # eigenvalue decomposition as the dimensions are not so big
psi = np.transpose(psi)
- psi = psi[np.argsort(lambda_c, kind = 'mergesort', axis = 0)[::-1], :]
+ psi = psi[np.argsort(lambda_c, kind='mergesort', axis=0)[::-1], :]
psi = np.transpose(psi)
- lambda_c = np.sort(lambda_c, kind = 'mergesort', axis = 0)[::-1]
+ lambda_c = np.sort(lambda_c, kind='mergesort', axis=0)[::-1]
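+    # keep only the leading eigen-directions (independent constraints) and project beta onto the retained basis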
i = 1
nu_c = 1
- while i < len(lambda_c) and not(lambda_c[i-1] > tol*lambda_c[0] and lambda_c[i] <= tol*lambda_c[0]):
- i = i+1
+ while i < len(lambda_c) and not (
+ lambda_c[i - 1] > tol * lambda_c[0] and lambda_c[i] <= tol * lambda_c[0]
+ ):
+ i = i + 1 # noqa: PLR6104
nu_c = i
lambda_c = lambda_c[0:nu_c]
psi = psi[:, 0:nu_c]
b_c = np.transpose(psi).dot(beta)
return b_c, psi
-def h_c(eta, g_c, phi, mu, psi, x_mean):
- return np.transpose(psi).dot(g_c(x_mean +phi.dot(np.diag(mu)).dot(eta) ))
-def gradient_gamma(b_c, eta_lambda, g_c, phi, mu, psi, x_mean):
- return (b_c) - mean(h_c(eta_lambda, g_c, phi, mu, psi, x_mean)) #the mean is the empirical expectation
+def h_c(eta, g_c, phi, mu, psi, x_mean): # noqa: D103
+ return np.transpose(psi).dot(g_c(x_mean + phi.dot(np.diag(mu)).dot(eta)))
+
-def hessian_gamma(eta_lambda, psi, g_c, phi, mu, x_mean):
+def gradient_gamma(b_c, eta_lambda, g_c, phi, mu, psi, x_mean): # noqa: D103
+ return (b_c) - mean(
+ h_c(eta_lambda, g_c, phi, mu, psi, x_mean)
+ ) # the mean is the empirical expectation
+
+
+def hessian_gamma(eta_lambda, psi, g_c, phi, mu, x_mean): # noqa: D103
return covariance(h_c(eta_lambda, g_c, phi, mu, psi, x_mean))
-def solve_inverse(matrix):
+
+def solve_inverse(matrix): # noqa: D103
if matrix.shape[0] != matrix.shape[1]:
- return Logfile().write_msg(msg='PLoM: solve_inverse non-square matrix.',msg_type='ERROR',msg_level=0)
- else:
+ return Logfile().write_msg(
+ msg='PLoM: solve_inverse non-square matrix.',
+ msg_type='ERROR',
+ msg_level=0,
+ )
+ else: # noqa: RET505
inverse = np.zeros(matrix.shape)
- for j in range(0,matrix.shape[1]):
+ for j in range(matrix.shape[1]):
unit = np.zeros(matrix.shape[1])
unit[j] = 1
solve = np.linalg.solve(matrix, unit)
- inverse[:,j] = solve
+ inverse[:, j] = solve
return inverse
-def generator(z_init, y_init, a, n_mc, x_mean, eta, s_v, hat_s_v, mu, phi, g, psi = 0, lambda_i = 0, g_c = 0, D_x_g_c = 0, seed_num=None):
+def generator( # noqa: D103, PLR0913, PLR0917
+ z_init,
+ y_init,
+ a,
+ n_mc,
+ x_mean,
+ eta,
+ s_v,
+ hat_s_v,
+ mu,
+ phi,
+ g,
+ psi=0,
+ lambda_i=0,
+ g_c=0,
+ D_x_g_c=0, # noqa: N803
+ seed_num=None,
+):
if seed_num:
np.random.seed(seed_num)
- delta_t = 2*pi*hat_s_v/20
- print('delta t: ', delta_t)
+ delta_t = 2 * pi * hat_s_v / 20
+ print('delta t: ', delta_t) # noqa: T201
f_0 = 1.5
- l_0 = 10#200
- M_0 = 10#20
- beta = f_0*delta_t/4
+ l_0 = 10 # 200
+ M_0 = 10 # 20 # noqa: N806
+ beta = f_0 * delta_t / 4
nu = z_init.shape[0]
- N = a.shape[0]
- eta_lambda = np.zeros((nu,(n_mc+1)*N))
- nu_lambda = np.zeros((nu,(n_mc+1)*N))
+ N = a.shape[0] # noqa: N806
+ eta_lambda = np.zeros((nu, (n_mc + 1) * N))
+ nu_lambda = np.zeros((nu, (n_mc + 1) * N))
n = x_mean.shape[0]
- x_ = np.zeros((n,n_mc))
- x_2 = np.zeros((n,n_mc))
+ x_ = np.zeros((n, n_mc))
+ x_2 = np.zeros((n, n_mc))
z_l = z_init
y_l = y_init
- eta_lambda[:,0:N] = z_init.dot(np.transpose(g))
- nu_lambda[:,0:N] = y_init.dot(np.transpose(g))
- for i in range (0,l_0):
- z_l_half = z_l + delta_t*0.5*y_l
- w_l_1 = np.random.normal(scale = sqrt(delta_t), size = (nu,N)).dot(a) #wiener process
- L_l_half = L(z_l_half.dot(np.transpose(g)), g_c, x_mean, eta, s_v, hat_s_v, mu, phi, psi, lambda_i, D_x_g_c).dot(a)
- y_l_1 = (1-beta)*y_l/(1+beta) + delta_t*(L_l_half)/(1+beta) + sqrt(f_0)*w_l_1/(1+beta)
- z_l = z_l_half + delta_t*0.5*y_l_1
+ eta_lambda[:, 0:N] = z_init.dot(np.transpose(g))
+ nu_lambda[:, 0:N] = y_init.dot(np.transpose(g))
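+    # burn-in: advance the damped ISDE for l_0 half-step (Stormer-Verlet style) iterations before collecting samples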
+ for i in range(l_0): # noqa: B007
+ z_l_half = z_l + delta_t * 0.5 * y_l
+ w_l_1 = np.random.normal(scale=sqrt(delta_t), size=(nu, N)).dot(
+ a
+ ) # wiener process
+ L_l_half = L( # noqa: N806
+ z_l_half.dot(np.transpose(g)),
+ g_c,
+ x_mean,
+ eta,
+ s_v,
+ hat_s_v,
+ mu,
+ phi,
+ psi,
+ lambda_i,
+ D_x_g_c,
+ ).dot(a)
+ y_l_1 = (
+ (1 - beta) * y_l / (1 + beta)
+ + delta_t * (L_l_half) / (1 + beta)
+ + sqrt(f_0) * w_l_1 / (1 + beta)
+ )
+ z_l = z_l_half + delta_t * 0.5 * y_l_1
y_l = y_l_1
- for l in range(M_0, M_0*(n_mc+1)):
- z_l_half = z_l + delta_t*0.5*y_l
- w_l_1 = np.random.normal(scale = sqrt(delta_t), size = (nu,N)).dot(a) #wiener process
- L_l_half = L(z_l_half.dot(np.transpose(g)), g_c, x_mean, eta, s_v, hat_s_v, mu, phi, psi, lambda_i, D_x_g_c).dot(a)
- y_l_1 = (1-beta)*y_l/(1+beta) + delta_t*(L_l_half)/(1+beta) + sqrt(f_0)*w_l_1/(1+beta)
- z_l = z_l_half + delta_t*0.5*y_l_1
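+    # sampling phase: keep integrating and record one block of N realizations every M_0 steps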
+ for l in range(M_0, M_0 * (n_mc + 1)): # noqa: E741
+ z_l_half = z_l + delta_t * 0.5 * y_l
+ w_l_1 = np.random.normal(scale=sqrt(delta_t), size=(nu, N)).dot(
+ a
+ ) # wiener process
+ L_l_half = L( # noqa: N806
+ z_l_half.dot(np.transpose(g)),
+ g_c,
+ x_mean,
+ eta,
+ s_v,
+ hat_s_v,
+ mu,
+ phi,
+ psi,
+ lambda_i,
+ D_x_g_c,
+ ).dot(a)
+ y_l_1 = (
+ (1 - beta) * y_l / (1 + beta)
+ + delta_t * (L_l_half) / (1 + beta)
+ + sqrt(f_0) * w_l_1 / (1 + beta)
+ )
+ z_l = z_l_half + delta_t * 0.5 * y_l_1
y_l = y_l_1
- if l%M_0 == M_0-1:
- eta_lambda[:,int(l/M_0)*N:(int(l/M_0)+1)*N] = z_l.dot(np.transpose(g))
- nu_lambda[:,int(l/M_0)*N:(int(l/M_0)+1)*N] = y_l.dot(np.transpose(g))
- x_[:,int(l/M_0)-1:int(l/M_0)] = mean(x_mean + phi.dot(np.diag(mu)).dot(eta_lambda[:,:(int(l/M_0)+1)*N]))
- x_2[:,int(l/M_0)-1:int(l/M_0)] = mean((x_mean + phi.dot(np.diag(mu)).dot(eta_lambda[:,:(int(l/M_0)+1)*N]))**2)
- return eta_lambda[:,N:], nu_lambda[:,N:], x_, x_2
-
-def ac(sig):
- sig = sig - np.mean(sig)
- sft = np.fft.rfft( np.concatenate((sig,0*sig)) )
- return np.fft.irfft(np.conj(sft)*sft)
-
-def L(y, g_c, x_mean, eta, s_v, hat_s_v, mu, phi, psi, lambda_i, D_x_g_c): #gradient of the potential
+ if l % M_0 == M_0 - 1:
+ eta_lambda[:, int(l / M_0) * N : (int(l / M_0) + 1) * N] = z_l.dot(
+ np.transpose(g)
+ )
+ nu_lambda[:, int(l / M_0) * N : (int(l / M_0) + 1) * N] = y_l.dot(
+ np.transpose(g)
+ )
+ x_[:, int(l / M_0) - 1 : int(l / M_0)] = mean(
+ x_mean
+ + phi.dot(np.diag(mu)).dot(eta_lambda[:, : (int(l / M_0) + 1) * N])
+ )
+ x_2[:, int(l / M_0) - 1 : int(l / M_0)] = mean(
+ (
+ x_mean
+ + phi.dot(np.diag(mu)).dot(
+ eta_lambda[:, : (int(l / M_0) + 1) * N]
+ )
+ )
+ ** 2
+ )
+ return eta_lambda[:, N:], nu_lambda[:, N:], x_, x_2
+
+
+def ac(sig): # noqa: D103
+ sig = sig - np.mean(sig) # noqa: PLR6104
+ sft = np.fft.rfft(np.concatenate((sig, 0 * sig)))
+ return np.fft.irfft(np.conj(sft) * sft)
+
+
+def L( # noqa: N802, D103
+ y,
+ g_c, # noqa: ARG001
+ x_mean,
+ eta,
+ s_v,
+ hat_s_v,
+ mu,
+ phi,
+ psi,
+ lambda_i,
+ D_x_g_c, # noqa: N803
+): # gradient of the potential
nu = eta.shape[0]
- N = eta.shape[1]
- L = np.zeros((nu,N))
- for l in range(0,N):
- yl = np.resize(y[:,l],(len(y[:,l]),1))
- rho_ = rhoctypes(yl, np.resize(np.transpose(eta),(nu*N,1)),\
- nu, N, s_v, hat_s_v)
- rho_ = 1e250*rho_
+ N = eta.shape[1] # noqa: N806
+ L = np.zeros((nu, N)) # noqa: N806
+ for l in range(N): # noqa: E741
+ yl = np.resize(y[:, l], (len(y[:, l]), 1))
+ rho_ = rhoctypes(
+ yl, np.resize(np.transpose(eta), (nu * N, 1)), nu, N, s_v, hat_s_v
+ )
+ rho_ = 1e250 * rho_ # noqa: PLR6104
# compute the D_x_g_c if D_x_g_c is not 0 (KZ)
if D_x_g_c:
- grad_g_c = D_x_g_c(x_mean+np.resize(phi.dot(np.diag(mu)).dot(yl), (x_mean.shape)))
+ grad_g_c = D_x_g_c(
+ x_mean + np.resize(phi.dot(np.diag(mu)).dot(yl), (x_mean.shape))
+ )
else:
# not constraints and no D_x_g_c
- grad_g_c = np.zeros((x_mean.shape[0],1))
- if rho_ < 1e-250:
+ grad_g_c = np.zeros((x_mean.shape[0], 1))
+ if rho_ < 1e-250: # noqa: PLR2004
closest = 1e30
- for i in range(0,N):
- if closest > np.linalg.norm((hat_s_v/s_v)*np.resize(eta[:,i],yl.shape)-yl):
- closest = np.linalg.norm((hat_s_v/s_v)*np.resize(eta[:,i],yl.shape)-yl)
- vector = (hat_s_v/s_v)*np.resize(eta[:,i],yl.shape)-yl
- #KZ L[:,l] = ( np.resize(vector/(hat_s_v**2),(nu))\
+ for i in range(N):
+ if closest > np.linalg.norm(
+ (hat_s_v / s_v) * np.resize(eta[:, i], yl.shape) - yl
+ ):
+ closest = np.linalg.norm(
+ (hat_s_v / s_v) * np.resize(eta[:, i], yl.shape) - yl
+ )
+ vector = (hat_s_v / s_v) * np.resize(eta[:, i], yl.shape) - yl
+ # KZ L[:,l] = ( np.resize(vector/(hat_s_v**2),(nu))\
# -np.resize(np.diag(mu).dot(np.transpose(phi)).\
# dot(D_x_g_c(x_mean+np.resize(phi.dot(np.diag(mu)).dot(yl), (x_mean.shape)))).\
# dot(psi).dot(lambda_i), (nu)))
- L[:,l] = ( np.resize(vector/(hat_s_v**2),(nu))\
- -np.resize(np.diag(mu).dot(np.transpose(phi)).\
- dot(grad_g_c).dot(psi).dot(lambda_i), (nu)))
+ L[:, l] = np.resize(vector / (hat_s_v**2), (nu)) - np.resize(
+ np.diag(mu)
+ .dot(np.transpose(phi))
+ .dot(grad_g_c)
+ .dot(psi)
+ .dot(lambda_i),
+ (nu),
+ )
else:
- array_pointer = cast(gradient_rhoctypes(np.zeros((nu,1)),yl,\
- np.resize(np.transpose(eta),(nu*N,1)), nu, N, s_v, hat_s_v), POINTER(c_double*nu))
+ array_pointer = cast( # noqa: F405
+ gradient_rhoctypes(
+ np.zeros((nu, 1)),
+ yl,
+ np.resize(np.transpose(eta), (nu * N, 1)),
+ nu,
+ N,
+ s_v,
+ hat_s_v,
+ ),
+ POINTER(c_double * nu), # noqa: F405
+ )
gradient_rho = np.frombuffer(array_pointer.contents)
- #KZ L[:,l] = np.resize(1e250*gradient_rho/rho_,(nu))\
+ # KZ L[:,l] = np.resize(1e250*gradient_rho/rho_,(nu))\
# -np.resize(np.diag(mu).dot(np.transpose(phi)).\
# dot(D_x_g_c(x_mean+np.resize(phi.dot(np.diag(mu)).dot(yl), (x_mean.shape)))).\
# dot(psi).dot(lambda_i), (nu))
- L[:,l] = np.resize(1e250*gradient_rho/rho_,(nu))\
- -np.resize(np.diag(mu).dot(np.transpose(phi)).\
- dot(grad_g_c).dot(psi).dot(lambda_i), (nu))
+ L[:, l] = np.resize(1e250 * gradient_rho / rho_, (nu)) - np.resize(
+ np.diag(mu)
+ .dot(np.transpose(phi))
+ .dot(grad_g_c)
+ .dot(psi)
+ .dot(lambda_i),
+ (nu),
+ )
return L
-def err(gradient, b_c):
- return np.linalg.norm(gradient)/np.linalg.norm(b_c)
+def err(gradient, b_c): # noqa: D103
+ return np.linalg.norm(gradient) / np.linalg.norm(b_c)
-def gamma(lambda_i, eta, s_v, hat_s_v, g_c, phi, mu, psi, x_mean, b_c):
- return np.transpose(lambda_i).dot(b_c)\
- + log(inv_c_0(lambda_i, eta, s_v, hat_s_v, g_c, phi, mu, psi, x_mean))
-def func(x,y, eta, s_v, hat_s_v, g_c, phi, mu, psi, x_mean, lambda_i):
+def gamma(lambda_i, eta, s_v, hat_s_v, g_c, phi, mu, psi, x_mean, b_c): # noqa: D103
+ return np.transpose(lambda_i).dot(b_c) + log(
+ inv_c_0(lambda_i, eta, s_v, hat_s_v, g_c, phi, mu, psi, x_mean)
+ )
+
+
+def func(x, y, eta, s_v, hat_s_v, g_c, phi, mu, psi, x_mean, lambda_i): # noqa: D103
nu = eta.shape[0]
- N = eta.shape[1]
- return rhoctypes(np.array([x,y]), np.resize(np.transpose(eta),(nu*N,1)),\
- nu, N, s_v, hat_s_v)*\
- exp(-np.transpose(lambda_i).dot(h_c(np.array([[x],[y]]), g_c, phi, mu, psi, x_mean)))
-
-def gaussian_bell(x,y):
- return exp(-(x**2+y**2)/2)/(2*pi)
-
-def inv_c_0(lambda_i, eta, s_v, hat_s_v, g_c, phi, mu, psi, x_mean):
- c,error = integrate.dblquad(func,\
- -3, 3, -3, 3, args=(eta, s_v, hat_s_v, g_c, phi, mu, psi, x_mean, lambda_i))
- return c #integral mathematica
-
-def expo(y):
- meann = np.array([[0],[0]])
- sigma = np.array([[1, 0],[0, 1]])
- f = exp(-0.5*np.transpose(y-meann).dot(y-meann))
- return f
-
-def gradient_expo(y):
- meann = np.array([[0],[0]])
- sigma = np.array([[1, 0],[0, 1]])
- f = np.zeros((2,1))
- f = -(y-meann)*exp(-0.5*np.transpose(y-meann).dot(y-meann))
- return f
-
-if __name__ == "__main__":
+ N = eta.shape[1] # noqa: N806
+ return rhoctypes(
+ np.array([x, y]),
+ np.resize(np.transpose(eta), (nu * N, 1)),
+ nu,
+ N,
+ s_v,
+ hat_s_v,
+ ) * exp(
+ -np.transpose(lambda_i).dot(
+ h_c(np.array([[x], [y]]), g_c, phi, mu, psi, x_mean)
+ )
+ )
+
+
+def gaussian_bell(x, y): # noqa: D103
+ return exp(-(x**2 + y**2) / 2) / (2 * pi)
+
+
+def inv_c_0(lambda_i, eta, s_v, hat_s_v, g_c, phi, mu, psi, x_mean): # noqa: D103
+ c, error = integrate.dblquad( # noqa: F841
+ func,
+ -3,
+ 3,
+ -3,
+ 3,
+ args=(eta, s_v, hat_s_v, g_c, phi, mu, psi, x_mean, lambda_i),
+ )
+ return c # integral mathematica
+
+
+def expo(y): # noqa: D103
+ meann = np.array([[0], [0]])
+ sigma = np.array([[1, 0], [0, 1]]) # noqa: F841
+ f = exp(-0.5 * np.transpose(y - meann).dot(y - meann))
+ return f # noqa: RET504
+
+
+def gradient_expo(y): # noqa: D103
+ meann = np.array([[0], [0]])
+ sigma = np.array([[1, 0], [0, 1]]) # noqa: F841
+ f = np.zeros((2, 1))
+ f = -(y - meann) * exp(-0.5 * np.transpose(y - meann).dot(y - meann))
+ return f # noqa: RET504
+
+
+if __name__ == '__main__':
import doctest
+
doctest.testmod()
diff --git a/modules/performUQ/SimCenterUQ/PLoM/__init__.py b/modules/performUQ/SimCenterUQ/PLoM/__init__.py
index 2dd0a8bd3..37bd1190e 100644
--- a/modules/performUQ/SimCenterUQ/PLoM/__init__.py
+++ b/modules/performUQ/SimCenterUQ/PLoM/__init__.py
@@ -1,2 +1,4 @@
-import os, sys
-sys.path.insert(0, os.path.dirname(os.path.abspath(__file__)))
\ No newline at end of file
+import os # noqa: CPY001, D104, N999
+import sys
+
+sys.path.insert(0, os.path.dirname(os.path.abspath(__file__))) # noqa: PTH100, PTH120
diff --git a/modules/performUQ/SimCenterUQ/PLoM/general.py b/modules/performUQ/SimCenterUQ/PLoM/general.py
index 88cee4615..e59f44039 100644
--- a/modules/performUQ/SimCenterUQ/PLoM/general.py
+++ b/modules/performUQ/SimCenterUQ/PLoM/general.py
@@ -1,178 +1,215 @@
-# Constants, variables, and methods that are commonly used
+# Constants, variables, and methods that are commonly used # noqa: CPY001, D100
import os
-from datetime import datetime
-import numpy as np
-import pandas as pd
from collections import Counter
+from datetime import datetime
+import pandas as pd
-ITEM_LIST_DATANORM = ['X_range','X_min','X_scaled','X_scaled_mean']
-ITEM_LIST_RUNPCA = ['X_PCA','EigenValue_PCA','EigenVector_PCA','NumComp_PCA','Error_PCA']
-ITEM_LIST_RUNKDE = ['s_v','c_v','hat_s_v','X_KDE','EigenValues_KDE','KDE_g','KDE_m','KDE_a','KDE_Z','KDE_Eigen']
-ITEM_LIST_ISDEGENE = ['Errors','X_new']
-ITEM_LIST = ['basic']+['constraints_file']+['X0','N','n']+ITEM_LIST_DATANORM+ITEM_LIST_RUNPCA \
- +ITEM_LIST_RUNKDE+ITEM_LIST_ISDEGENE # all variables in the database
-ITEM_ADDS = ['/'+x for x in ITEM_LIST] # HDFStore ABSOLUTE path-names
-ATTR_LIST = [None,None,'X','N','n',
- 'alpha','x_min','X_scaled','x_mean',
- 'H','mu','phi','nu','errPCA',
- 's_v','c_v','hat_s_v','K','b','g','m','a','Z','eigenKDE',
- 'errors','Xnew']
+ITEM_LIST_DATANORM = ['X_range', 'X_min', 'X_scaled', 'X_scaled_mean']
+ITEM_LIST_RUNPCA = [
+ 'X_PCA',
+ 'EigenValue_PCA',
+ 'EigenVector_PCA',
+ 'NumComp_PCA',
+ 'Error_PCA',
+]
+ITEM_LIST_RUNKDE = [
+ 's_v',
+ 'c_v',
+ 'hat_s_v',
+ 'X_KDE',
+ 'EigenValues_KDE',
+ 'KDE_g',
+ 'KDE_m',
+ 'KDE_a',
+ 'KDE_Z',
+ 'KDE_Eigen',
+]
+ITEM_LIST_ISDEGENE = ['Errors', 'X_new']
+ITEM_LIST = (
+ ['basic'] # noqa: RUF005
+ + ['constraints_file']
+ + ['X0', 'N', 'n']
+ + ITEM_LIST_DATANORM
+ + ITEM_LIST_RUNPCA
+ + ITEM_LIST_RUNKDE
+ + ITEM_LIST_ISDEGENE
+) # all variables in the database
+ITEM_ADDS = ['/' + x for x in ITEM_LIST] # HDFStore ABSOLUTE path-names
+ATTR_LIST = [
+ None,
+ None,
+ 'X',
+ 'N',
+ 'n',
+ 'alpha',
+ 'x_min',
+ 'X_scaled',
+ 'x_mean',
+ 'H',
+ 'mu',
+ 'phi',
+ 'nu',
+ 'errPCA',
+ 's_v',
+ 'c_v',
+ 'hat_s_v',
+ 'K',
+ 'b',
+ 'g',
+ 'm',
+ 'a',
+ 'Z',
+ 'eigenKDE',
+ 'errors',
+ 'Xnew',
+]
ATTR_MAP = dict(zip(ITEM_ADDS, ATTR_LIST))
-FULL_TASK_LIST = ['DataNormalization','RunPCA','RunKDE','ISDEGeneration']
-TASK_ITEM_MAP = {'DataNormalization': ITEM_LIST_DATANORM,
- 'RunPCA': ITEM_LIST_RUNPCA,
- 'RunKDE': ITEM_LIST_RUNKDE,
- 'ISDEGeneration': ITEM_LIST_ISDEGENE}
-
-
-class Logfile:
- def __init__(self, logfile_dir = './', logfile_name = 'plom.log', screen_msg = True):
- """
- Initializing the logfile
+FULL_TASK_LIST = ['DataNormalization', 'RunPCA', 'RunKDE', 'ISDEGeneration']
+TASK_ITEM_MAP = {
+ 'DataNormalization': ITEM_LIST_DATANORM,
+ 'RunPCA': ITEM_LIST_RUNPCA,
+ 'RunKDE': ITEM_LIST_RUNKDE,
+ 'ISDEGeneration': ITEM_LIST_ISDEGENE,
+}
+
+
+class Logfile: # noqa: D101
+ def __init__(self, logfile_dir='./', logfile_name='plom.log', screen_msg=True): # noqa: FBT002
+ """Initializing the logfile
- logfile_dir: default is the same path of the PLoM package
- logfile_name: default is the "plom.log"
- screen_msg: default is to show message on screen
- """
+ """ # noqa: D205, D400, D401
self.logfile_dir = logfile_dir
self.logfile_name = logfile_name
- self.logfile_path = os.path.join(self.logfile_dir, self.logfile_name)
+ self.logfile_path = os.path.join(self.logfile_dir, self.logfile_name) # noqa: PTH118
self.screen_msg = screen_msg
# start the log
- self.write_msg(msg = '--NEW LOG STARTING FROM THIS LINE--', mode='w')
-
-
- def write_msg(self, msg = '', msg_type = 'RUNNING', msg_level = 0, mode='a'):
- """
- Writing running messages
+ self.write_msg(msg='--NEW LOG STARTING FROM THIS LINE--', mode='w')
+
+ def write_msg(self, msg='', msg_type='RUNNING', msg_level=0, mode='a'):
+ """Writing running messages
- msg: the message
- msg_type: the type of message 'RUNNING', 'WARNING', 'ERROR'
- msg_level: how many indent tags
- """
- indent_tabs = ''.join(['\t']*msg_level)
- decorated_msg = '{} {} {}-MSG {} '.format(datetime.utcnow(), indent_tabs, msg_type, msg)
+ """ # noqa: D205, D400, D401
+ indent_tabs = ''.join(['\t'] * msg_level)
+ decorated_msg = f'{datetime.utcnow()} {indent_tabs} {msg_type}-MSG {msg} ' # noqa: DTZ003
if self.screen_msg:
- print(decorated_msg)
- with open(self.logfile_path, mode) as f:
- f.write('\n'+decorated_msg)
+ print(decorated_msg) # noqa: T201
+ with open(self.logfile_path, mode) as f: # noqa: PTH123
+ f.write('\n' + decorated_msg)
-
def delete_logfile(self):
- """
- Deleting the log file
- """
- if os.path.exists(self.logfile_path):
- os.remove(self.logfile_path)
+ """Deleting the log file""" # noqa: D400, D401
+ if os.path.exists(self.logfile_path): # noqa: PTH110
+ os.remove(self.logfile_path) # noqa: PTH107
else:
- print('The logfile {} does not exist.'.format(self.logfile_path))
+ print(f'The logfile {self.logfile_path} does not exist.') # noqa: T201
-class DBServer:
- def __init__(self, db_dir = './', db_name = 'plom.h5'):
- """
- Initializing the database
+class DBServer: # noqa: D101
+ def __init__(self, db_dir='./', db_name='plom.h5'):
+ """Initializing the database
- db_dir: default is the same path of the PLoM package
- db_name: default is "plom.h5"
- """
+ """ # noqa: D205, D400, D401
self.db_dir = db_dir
self.db_name = db_name
- self.db_path = os.path.join(self.db_dir, self.db_name)
- if os.path.exists(self.db_path):
+ self.db_path = os.path.join(self.db_dir, self.db_name) # noqa: PTH118
+ if os.path.exists(self.db_path): # noqa: PTH110
# deleting the old database
- os.remove(self.db_path)
- self.init_time = datetime.utcnow()
+ os.remove(self.db_path) # noqa: PTH107
+ self.init_time = datetime.utcnow() # noqa: DTZ003
self.item_name_list = []
self.basic()
self.dir_export = self._create_export_dir()
self._item_list = ITEM_LIST
self._item_adds = ITEM_ADDS
-
-
+
def basic(self):
- """
- Writing basic info
- """
- df = pd.DataFrame.from_dict({
- 'InitializedTime': [self.init_time],
- 'LastEditedTime': [datetime.utcnow()],
- 'DBName': [self.db_name],
- }, dtype=str)
+ """Writing basic info""" # noqa: D400, D401
+ df = pd.DataFrame.from_dict( # noqa: PD901
+ {
+ 'InitializedTime': [self.init_time],
+ 'LastEditedTime': [datetime.utcnow()], # noqa: DTZ003
+ 'DBName': [self.db_name],
+ },
+ dtype=str,
+ )
store = pd.HDFStore(self.db_path, 'a')
df.to_hdf(store, 'basic', mode='a')
store.close()
- self.add_item(item=[''],data_type='ConstraintsFile')
-
+ self.add_item(item=[''], data_type='ConstraintsFile')
def _create_export_dir(self):
- """
- Creating a export folder
- """
- dir_export = os.path.join(self.db_dir,'DataOut')
+        """Creating an export folder"""  # noqa: D400, D401
+ dir_export = os.path.join(self.db_dir, 'DataOut') # noqa: PTH118
try:
- os.makedirs(dir_export, exist_ok=True)
- return dir_export
- except:
+ os.makedirs(dir_export, exist_ok=True) # noqa: PTH103
+ return dir_export # noqa: TRY300
+ except: # noqa: E722
return None
-
def get_item_adds(self):
- """
- Returning the full list of data items
- """
+ """Returning the full list of data items""" # noqa: D400, D401
return self._item_adds
-
- def add_item(self, item_name = None, col_names = None, item = [], data_shape = None, data_type='Data'):
- """
- Adding a new data item into database
- """
+ def add_item(
+ self,
+ item_name=None,
+ col_names=None,
+ item=[], # noqa: B006
+ data_shape=None,
+ data_type='Data',
+ ):
+        """Adding a new data item into the database"""  # noqa: D400
if data_type == 'Data':
if item.size > 1:
- df = pd.DataFrame(item, columns = col_names)
- dshape = pd.DataFrame(data_shape, columns=['DS_'+item_name])
+ df = pd.DataFrame(item, columns=col_names) # noqa: PD901
+ dshape = pd.DataFrame(data_shape, columns=['DS_' + item_name])
else:
if col_names is None:
col_names = item_name
- df = pd.DataFrame.from_dict({
- col_names: item.tolist()
- })
- dshape = pd.DataFrame.from_dict({
- 'DS_'+col_names: (1,)
- })
- if item_name is not None:
+ df = pd.DataFrame.from_dict({col_names: item.tolist()}) # noqa: PD901
+ dshape = pd.DataFrame.from_dict({'DS_' + col_names: (1,)})
+ if item_name is not None: # noqa: RET503
store = pd.HDFStore(self.db_path, 'a')
# data item
df.to_hdf(store, item_name, mode='a')
# data shape
- dshape.to_hdf(store, 'DS_'+item_name, mode='a')
- store.close()
+ dshape.to_hdf(store, 'DS_' + item_name, mode='a')
+ store.close() # noqa: RET503
elif data_type == 'ConstraintsFile':
# constraints filename
- cf = pd.DataFrame.from_dict({
- 'ConstraintsFile': item
- }, dtype=str)
+ cf = pd.DataFrame.from_dict({'ConstraintsFile': item}, dtype=str)
store = pd.HDFStore(self.db_path, 'a')
cf.to_hdf(store, 'constraints_file', mode='a')
- store.close()
+ store.close() # noqa: RET503
else:
# Not supported data_type
return False
-
- def get_item(self, item_name = None, table_like=False, data_type='Data'):
- """
- Getting a specific data item
- """
- if data_type == 'Data':
- if item_name is not None:
+ def get_item(self, item_name=None, table_like=False, data_type='Data'): # noqa: FBT002
+ """Getting a specific data item""" # noqa: D400, D401
+ if data_type == 'Data': # noqa: RET503
+ if item_name is not None: # noqa: RET503
store = pd.HDFStore(self.db_path, 'r')
try:
item = store.get(item_name)
- item_shape = tuple([x[0] for x in self.get_item_shape(item_name=item_name).values.tolist()])
+ item_shape = tuple(
+ [
+ x[0]
+ for x in self.get_item_shape( # noqa: PD011
+ item_name=item_name
+ ).values.tolist()
+ ]
+ )
if not table_like:
- item = item.to_numpy().reshape(item_shape)
- except:
+ item = item.to_numpy().reshape(item_shape)
+ except: # noqa: E722
item = None
finally:
store.close()
@@ -182,103 +219,92 @@ def get_item(self, item_name = None, table_like=False, data_type='Data'):
store = pd.HDFStore(self.db_path, 'r')
try:
item = store.get('/constraints_file')
- except:
+ except: # noqa: E722
item = None
finally:
store.close()
- return item.values.tolist()[0][0]
-
+ return item.values.tolist()[0][0] # noqa: PD011
- def remove_item(self, item_name = None):
- """
- Removing an item
- """
+ def remove_item(self, item_name=None):
+ """Removing an item""" # noqa: D400, D401
if item_name is not None:
store = pd.HDFStore(self.db_path, 'r')
try:
store.remove(item_name)
- except:
- item = None
+ except: # noqa: E722
+ item = None # noqa: F841
finally:
store.close()
-
- def get_item_shape(self, item_name = None):
- """
- Getting the shape of a specific data item
- """
-
- if item_name is not None:
+ def get_item_shape(self, item_name=None):
+ """Getting the shape of a specific data item""" # noqa: D400, D401
+ if item_name is not None: # noqa: RET503
store = pd.HDFStore(self.db_path, 'r')
try:
- item_shape = store.get('DS_'+item_name)
- except:
+ item_shape = store.get('DS_' + item_name)
+ except: # noqa: E722
item_shape = None
store.close()
return item_shape
-
def get_name_list(self):
- """
- Returning the keys of the database
- """
+ """Returning the keys of the database""" # noqa: D400, D401
store = pd.HDFStore(self.db_path, 'r')
try:
name_list = store.keys()
- except:
+ except: # noqa: E722
name_list = []
store.close()
return name_list
-
- def export(self, data_name = None, filename = None, file_format = 'csv'):
- """
- Exporting the specific data item
+ def export(self, data_name=None, filename=None, file_format='csv'):
+ """Exporting the specific data item
- data_name: data tag
- format: data format
- """
- d = self.get_item(item_name = data_name[1:], table_like=True)
+ """ # noqa: D205, D400, D401
+ d = self.get_item(item_name=data_name[1:], table_like=True)
if d is None:
return 1
if filename is None:
- filename = os.path.join(self.dir_export,str(data_name).replace('/','')+'.'+file_format)
+ filename = os.path.join( # noqa: PTH118
+ self.dir_export, str(data_name).replace('/', '') + '.' + file_format
+ )
else:
- filename = os.path.join(self.dir_export,filename.split('.')[0]+'.'+file_format)
- if file_format == 'csv' or 'txt':
+ filename = os.path.join( # noqa: PTH118
+ self.dir_export, filename.split('.')[0] + '.' + file_format
+ )
+ if file_format == 'csv' or 'txt': # noqa: SIM222
d.to_csv(filename, header=True, index=True)
elif file_format == 'json':
- with open(filename, 'w', encoding='utf-8') as f:
- json.dump(d, f)
+ with open(filename, 'w', encoding='utf-8') as f: # noqa: PTH123
+ json.dump(d, f) # noqa: F821
else:
return 2
return filename
class Task:
- """
- This is a class for managering an individual task in
+    """This is a class for managing an individual task in
the PLoM running process
- """
- def __init__(self, task_name = None):
- """
- Initialization
+ """ # noqa: D205, D400, D404
+
+ def __init__(self, task_name=None):
+ """Initialization
- task_name: name of the task
- """
- self.task_name = task_name # task name
- self.pre_task = None # previous task
- self.next_task = None # next task
- self.full_var_list = [] # key variable list
- self.avail_var_list = [] # current available variables
- self.status = False # task status
-
-
+ """ # noqa: D205, D400, D401
+ self.task_name = task_name # task name
+ self.pre_task = None # previous task
+ self.next_task = None # next task
+ self.full_var_list = [] # key variable list
+ self.avail_var_list = [] # current available variables
+ self.status = False # task status
+
def refresh_status(self):
- """
- Refreshing the current status of the task
+ """Refreshing the current status of the task
If any of the previous tasks is not completed, the current task is also not reliable
- """
+ """ # noqa: D205, D400, D401
# check the previous task if any
if self.pre_task:
if not self.pre_task.refresh_status():
@@ -286,47 +312,45 @@ def refresh_status(self):
self.status = False
return self.status
-
+
# self-check
- if Counter(self.avail_var_list)==Counter(self.full_var_list) and len(self.avail_var_list):
- self.status = True # not finished
+ if Counter(self.avail_var_list) == Counter(self.full_var_list) and len(
+ self.avail_var_list
+ ):
+            self.status = True  # finished
else:
- self.status = False # finished
+            self.status = False  # not finished
return self.status
class TaskList:
- """
- This is a class for managering a set of tasks
+    """This is a class for managing a set of tasks
in a specific order
- """
+ """ # noqa: D205, D400, D404
+
def __init__(self):
- self.head_task = None # first task
- self.tail_task = None # last task
- self.status = False # status
+ self.head_task = None # first task
+ self.tail_task = None # last task
+ self.status = False # status
- def add_task(self, new_task = None):
+ def add_task(self, new_task=None): # noqa: D102
if new_task is None:
self.head_task = None
return
+ elif self.head_task is None: # noqa: RET505
+ # first task
+ self.head_task = new_task
+ self.tail_task = new_task
else:
- if self.head_task is None:
- # first task
- self.head_task = new_task
- self.tail_task = new_task
- else:
- # adding a new to the current list
- new_task.pre_task = self.tail_task
- self.tail_task.next_task = new_task
- self.tail_task = new_task
+            # adding a new task to the current list
+ new_task.pre_task = self.tail_task
+ self.tail_task.next_task = new_task
+ self.tail_task = new_task
-
def refresh_status(self):
- """
- Refreshing the tasks' status
- """
- if self.head_task:
+ """Refreshing the tasks' status""" # noqa: D400, D401
+ if self.head_task: # noqa: RET503
cur_task = self.head_task
if not cur_task.status:
self.status = False
@@ -338,8 +362,7 @@ def refresh_status(self):
self.status = False
return self.status
-
+
self.status = True
-
- return self.status
-
+
+ return self.status
diff --git a/modules/performUQ/SimCenterUQ/SimCenterUQ.py b/modules/performUQ/SimCenterUQ/SimCenterUQ.py
index 5ee798872..1979412aa 100644
--- a/modules/performUQ/SimCenterUQ/SimCenterUQ.py
+++ b/modules/performUQ/SimCenterUQ/SimCenterUQ.py
@@ -1,114 +1,119 @@
-# written: UQ team @ SimCenter
+# written: UQ team @ SimCenter # noqa: CPY001, D100, INP001
# import functions for Python 2.X support
-from __future__ import division, print_function
import sys
-if sys.version.startswith('2'):
- range=xrange
- string_types = basestring
+
+if sys.version.startswith('2'):
+ range = xrange # noqa: A001, F821
+ string_types = basestring # noqa: F821
else:
string_types = str
-import shutil
+import argparse
import json
import os
import stat
+import subprocess # noqa: S404
import sys
-import platform
-from subprocess import Popen, PIPE
from pathlib import Path
-import subprocess
-import argparse
-def main(args):
+def main(args): # noqa: D103
parser = argparse.ArgumentParser()
parser.add_argument('--workflowInput')
- parser.add_argument('--workflowOutput')
+ parser.add_argument('--workflowOutput')
parser.add_argument('--driverFile')
parser.add_argument('--runType')
- args,unknowns = parser.parse_known_args()
+ args, unknowns = parser.parse_known_args() # noqa: F841
+
+ inputFile = args.workflowInput # noqa: N806
+ runType = args.runType # noqa: N806
+ workflowDriver = args.driverFile # noqa: N806
+ outputFile = args.workflowOutput # noqa: N806, F841
- inputFile = args.workflowInput
- runType = args.runType
- workflowDriver = args.driverFile
- outputFile = args.workflowOutput
-
- with open(inputFile, 'r', encoding='utf-8') as f:
+ with open(inputFile, encoding='utf-8') as f: # noqa: PTH123
data = json.load(f)
-
- if runType in ['runningLocal',]:
- if (sys.platform == 'darwin' or sys.platform == "linux" or sys.platform == "linux2"):
+ if runType == 'runningLocal':
+ if (
+ sys.platform == 'darwin'
+ or sys.platform == 'linux'
+ or sys.platform == 'linux2'
+ ):
# MAC
- surrogate = 'surrogateBuild.py'
- plom = 'runPLoM.py' # KZ: main script of PLoM
- #natafExe = os.path.join('nataf_gsa','nataf_gsa')
- natafExe = 'nataf_gsa'
- osType = 'Linux'
- workflowDriver1 = 'workflowDriver1'
+ surrogate = 'surrogateBuild.py'
+ plom = 'runPLoM.py' # KZ: main script of PLoM
+ # natafExe = os.path.join('nataf_gsa','nataf_gsa')
+ natafExe = 'nataf_gsa' # noqa: N806
+ osType = 'Linux' # noqa: N806
+ workflowDriver1 = 'workflowDriver1' # noqa: N806
python = 'python3'
else:
-
- surrogate = 'surrogateBuild.py'
- plom = 'runPLoM.py' # KZ: main script of PLoM
- #natafExe = os.path.join('nataf_gsa','nataf_gsa.exe')
- natafExe = 'nataf_gsa.exe'
- workflowDriver = workflowDriver + ".bat"
- workflowDriver1 = 'workflowDriver1.bat'
- osType = 'Windows'
- python = 'python'
-
-
- cwd = os.getcwd()
- workdir_main = str(Path(cwd).parents[0])
- print('CWD: ' + cwd)
- print('work_dir: ' + workdir_main)
-
+ surrogate = 'surrogateBuild.py'
+ plom = 'runPLoM.py' # KZ: main script of PLoM
+ # natafExe = os.path.join('nataf_gsa','nataf_gsa.exe')
+ natafExe = 'nataf_gsa.exe' # noqa: N806
+ workflowDriver = workflowDriver + '.bat' # noqa: N806, PLR6104
+ workflowDriver1 = 'workflowDriver1.bat' # noqa: N806, F841
+ osType = 'Windows' # noqa: N806
+ python = 'python'
+
+ cwd = os.getcwd() # noqa: PTH109
+ workdir_main = str(Path(cwd).parents[0])
+ print('CWD: ' + cwd) # noqa: T201
+ print('work_dir: ' + workdir_main) # noqa: T201
+
# open the input json file
- with open(inputFile, 'r', encoding='utf-8') as data_file:
+ with open(inputFile, encoding='utf-8') as data_file: # noqa: PTH123
data = json.load(data_file)
- uq_data = data["UQ"]
+ uq_data = data['UQ']
- myScriptDir = os.path.dirname(os.path.realpath(__file__))
+ myScriptDir = os.path.dirname(os.path.realpath(__file__)) # noqa: PTH120, N806
- if os.path.exists(workflowDriver):
- os.chmod(workflowDriver, stat.S_IXUSR | stat.S_IRUSR | stat.S_IXOTH)
-
- st = os.stat(workflowDriver)
- os.chmod(workflowDriver, st.st_mode | stat.S_IEXEC)
- else:
- print(workflowDriver + " not found.")
+ if os.path.exists(workflowDriver): # noqa: PTH110
+ os.chmod(workflowDriver, stat.S_IXUSR | stat.S_IRUSR | stat.S_IXOTH) # noqa: PTH101
+ st = os.stat(workflowDriver) # noqa: PTH116
+ os.chmod(workflowDriver, st.st_mode | stat.S_IEXEC) # noqa: PTH101
+ else:
+ print(workflowDriver + ' not found.') # noqa: T201
# change dir to the main working dir for the structure
- os.chdir("../")
-
+ os.chdir('../')
+
# p = Popen(command, stdout=PIPE, stderr=PIPE, shell=True)
# for line in p.stdout:
# print(str(line))
-
+
# dakotaCommand = "dakota -input dakota.in -output dakota.out -error dakota.err"
-
- '''
+
+ """
LATER, CHANGE THE LOCATION
- '''
- #
+ """
if uq_data['uqType'] == 'Train GP Surrogate Model':
- simCenterUQCommand = '"{}" "{}/{}" "{}" {} {} {} {} 1> logFileSimUQ.txt 2>&1'.format(python,myScriptDir,surrogate,workdir_main,inputFile, workflowDriver, osType, runType)
- elif uq_data['uqType'] == 'Sensitivity Analysis':
- simCenterUQCommand = '"{}/{}" "{}" {} {} {} {} 1> logFileSimUQ.txt 2>&1'.format(myScriptDir,natafExe,workdir_main,inputFile, workflowDriver, osType, runType)
- elif uq_data['uqType'] == 'Forward Propagation':
- simCenterUQCommand = '"{}/{}" "{}" {} {} {} {} 1> logFileSimUQ.txt 2>&1'.format(myScriptDir,natafExe,workdir_main,inputFile, workflowDriver, osType, runType)
+ simCenterUQCommand = f'"{python}" "{myScriptDir}/{surrogate}" "{workdir_main}" {inputFile} {workflowDriver} {osType} {runType} 1> logFileSimUQ.txt 2>&1' # noqa: N806
+ elif (
+ uq_data['uqType'] == 'Sensitivity Analysis'
+ or uq_data['uqType'] == 'Forward Propagation'
+ ):
+ simCenterUQCommand = f'"{myScriptDir}/{natafExe}" "{workdir_main}" {inputFile} {workflowDriver} {osType} {runType} 1> logFileSimUQ.txt 2>&1' # noqa: N806
# KZ: training calling runPLoM.py to launch the model training
elif uq_data['uqType'] == 'PLoM Model':
- simCenterUQCommand = '"{}" "{}" "{}" {} {} {} {}'.format(python, os.path.join(myScriptDir,plom).replace('\\','/'),workdir_main.replace('\\','/'),inputFile,workflowDriver,osType,runType)
-
+ simCenterUQCommand = '"{}" "{}" "{}" {} {} {} {}'.format( # noqa: N806
+ python,
+ os.path.join(myScriptDir, plom).replace('\\', '/'), # noqa: PTH118
+ workdir_main.replace('\\', '/'),
+ inputFile,
+ workflowDriver,
+ osType,
+ runType,
+ )
+
# if uq_data['uqType'] == 'Train GP Surrogate Model':
# simCenterUQCommand = '"{}" "{}/{}" "{}" {} {} {} {} 1> logFileSimUQ.txt 2> dakota.err2'.format(python,myScriptDir,surrogate,workdir_main,inputFile, workflowDriver, osType, runType)
# elif uq_data['uqType'] == 'Sensitivity Analysis':
@@ -118,7 +123,7 @@ def main(args):
# # KZ: training calling runPLoM.py to launch the model training
# elif uq_data['uqType'] == 'PLoM Model':
# simCenterUQCommand = '"{}" "{}" "{}" {} {} {} {} 1> logFileSimUQ.txt 2> dakota.err2'.format(python, os.path.join(myScriptDir,plom).replace('\\','/'),workdir_main.replace('\\','/'),inputFile,workflowDriver,osType,runType)
-
+
# if uq_data['uqType'] == 'Train GP Surrogate Model':
# simCenterUQCommand = '"{}" "{}/{}" "{}" {} {} {} {} 1> logFileSimUQ.txt 2>&1'.format(python,myScriptDir,surrogate,workdir_main,inputFile, workflowDriver, osType, runType)
# elif uq_data['uqType'] == 'Sensitivity Analysis':
@@ -128,20 +133,22 @@ def main(args):
# # KZ: training calling runPLoM.py to launch the model training
# elif uq_data['uqType'] == 'PLoM Model':
# simCenterUQCommand = '"{}" "{}" "{}" {} {} {} {}'.format(python, os.path.join(myScriptDir,plom).replace('\\','/'),workdir_main.replace('\\','/'),inputFile,workflowDriver,osType,runType)
-
- print('running SimCenterUQ: ', simCenterUQCommand)
+
+ print('running SimCenterUQ: ', simCenterUQCommand) # noqa: T201
# subprocess.Popen(simCenterUQCommand, shell=True).wait()
-
+
try:
- result = subprocess.check_output(simCenterUQCommand, stderr=subprocess.STDOUT, shell=True)
+ result = subprocess.check_output( # noqa: S602
+ simCenterUQCommand, stderr=subprocess.STDOUT, shell=True
+ )
returncode = 0
- print('DONE SUCESS')
+        print('DONE SUCCESS')  # noqa: T201
except subprocess.CalledProcessError as e:
- result = e.output
- returncode = e.returncode
- print('DONE FAIL')
+ result = e.output # noqa: F841
+ returncode = e.returncode # noqa: F841
+ print('DONE FAIL') # noqa: T201
-if __name__ == '__main__':
- main(sys.argv[1:])
+if __name__ == '__main__':
+ main(sys.argv[1:])
diff --git a/modules/performUQ/SimCenterUQ/UQengine.py b/modules/performUQ/SimCenterUQ/UQengine.py
index e2ffd4d3a..200608003 100644
--- a/modules/performUQ/SimCenterUQ/UQengine.py
+++ b/modules/performUQ/SimCenterUQ/UQengine.py
@@ -1,105 +1,101 @@
-import glob
+import glob # noqa: CPY001, D100, INP001
+import json
import os
import shutil
import stat
-import subprocess
+import subprocess # noqa: S404
import sys
import time
-import pandas as pd
-import os
-import json
import numpy as np
+import pandas as pd
-class UQengine:
- def __init__(self, inputArgs):
-
- self.work_dir = inputArgs[1].replace(os.sep, "/")
+class UQengine: # noqa: D101
+ def __init__(self, inputArgs): # noqa: N803
+ self.work_dir = inputArgs[1].replace(os.sep, '/')
self.inputFile = inputArgs[2]
self.workflowDriver = inputArgs[3]
self.os_type = inputArgs[4]
self.run_type = inputArgs[5]
- self.IM_names = [] # used in EEUQ
-
-
+ self.IM_names = [] # used in EEUQ
- jsonPath = self.inputFile
- if not os.path.isabs(jsonPath):
- jsonPath = self.work_dir + "/templatedir/" + self.inputFile # for quoFEM
+ jsonPath = self.inputFile # noqa: N806
+ if not os.path.isabs(jsonPath): # noqa: PTH117
+ # for quoFEM
+ jsonPath = self.work_dir + '/templatedir/' + self.inputFile # noqa: N806
- # temporary for EEUQ....
- jsonDir, jsonName = os.path.split(jsonPath)
- eeJsonPath = os.path.join(jsonDir,"sc_"+jsonName)
+ # temporary for EEUQ....
+ jsonDir, jsonName = os.path.split(jsonPath) # noqa: N806
+ eeJsonPath = os.path.join(jsonDir, 'sc_' + jsonName) # noqa: PTH118, N806
- if os.path.exists(eeJsonPath):
+ if os.path.exists(eeJsonPath): # noqa: PTH110
self.inputFile = eeJsonPath
- jsonPath = eeJsonPath
+ jsonPath = eeJsonPath # noqa: N806
- with open(jsonPath) as f:
- dakotaJson = json.load(f)
+ with open(jsonPath) as f: # noqa: PLW1514, PTH123
+ dakotaJson = json.load(f) # noqa: N806, F841
# self.workflowDriver = "workflow_driver"
# if self.os_type.lower().startswith('win'):
# self.workflowDriver = "workflow_driver.bat"
- def cleanup_workdir(self):
+ def cleanup_workdir(self): # noqa: C901, D102
# if template dir already contains results.out, give an error
# Cleanup working directory if needed
- del_paths = glob.glob(os.path.join(self.work_dir, "workdir*"))
+ del_paths = glob.glob(os.path.join(self.work_dir, 'workdir*')) # noqa: PTH118, PTH207
for del_path in del_paths:
# change permission for workflow_driver.bat
- self.workflowDriver_path = os.path.join(del_path, self.workflowDriver)
+ self.workflowDriver_path = os.path.join(del_path, self.workflowDriver) # noqa: PTH118
# if os.path.exists(self.workflowDriver_path):
# os.chmod(self.workflowDriver_path, stat.S_IRWXU | stat.S_IRWXG | stat.S_IRWXO)
# Change permission
for root, dirs, files in os.walk(del_path):
for d in dirs:
- os.chmod(
- os.path.join(root, d),
- stat.S_IRWXU | stat.S_IRWXG | stat.S_IRWXO,
+ os.chmod( # noqa: PTH101
+ os.path.join(root, d), # noqa: PTH118
+ stat.S_IRWXU | stat.S_IRWXG | stat.S_IRWXO, # noqa: S103
)
for f in files:
- os.chmod(
- os.path.join(root, f),
- stat.S_IRWXU | stat.S_IRWXG | stat.S_IRWXO,
+ os.chmod( # noqa: PTH101
+ os.path.join(root, f), # noqa: PTH118
+ stat.S_IRWXU | stat.S_IRWXG | stat.S_IRWXO, # noqa: S103
)
try:
shutil.rmtree(del_path)
- except Exception as msg:
+ except Exception as msg: # noqa: BLE001
self.exit(str(msg))
- del_outputs = glob.glob(os.path.join(self.work_dir, "*out"))
+ del_outputs = glob.glob(os.path.join(self.work_dir, '*out')) # noqa: PTH118, PTH207
for del_out in del_outputs:
- os.remove(del_out)
+ os.remove(del_out) # noqa: PTH107
- del_pkls = glob.glob(os.path.join(self.work_dir, "*pkl"))
+ del_pkls = glob.glob(os.path.join(self.work_dir, '*pkl')) # noqa: PTH118, PTH207
for del_pkl in del_pkls:
- os.remove(del_pkl)
+ os.remove(del_pkl) # noqa: PTH107
try:
- del_errs = glob.glob(os.path.join(self.work_dir, "*err"))
+ del_errs = glob.glob(os.path.join(self.work_dir, '*err')) # noqa: PTH118, PTH207
for del_err in del_errs:
- os.remove(del_err)
- except:
+ os.remove(del_err) # noqa: PTH107
+ except: # noqa: S110, E722
pass
-
- if glob.glob(os.path.join(self.work_dir, "templatedir","results.out")):
+ if glob.glob(os.path.join(self.work_dir, 'templatedir', 'results.out')): # noqa: PTH118, PTH207
try:
- os.remove(os.path.join(self.work_dir, "templatedir","results.out"))
- except:
- msg = "Your main folder (where the main FEM script is located) already contains results.out. To prevent any confusion, please delete this file first"
+ os.remove(os.path.join(self.work_dir, 'templatedir', 'results.out')) # noqa: PTH107, PTH118
+ except: # noqa: E722
+ msg = 'Your main folder (where the main FEM script is located) already contains results.out. To prevent any confusion, please delete this file first'
self.exit(msg)
- print("working directory cleared")
+ print('working directory cleared') # noqa: T201
- def set_FEM(self, rv_name, do_parallel, y_dim, t_init, t_thr):
+ def set_FEM(self, rv_name, do_parallel, y_dim, t_init, t_thr): # noqa: N802, D102
self.rv_name = rv_name
self.do_parallel = do_parallel
self.y_dim = y_dim
@@ -107,22 +103,21 @@ def set_FEM(self, rv_name, do_parallel, y_dim, t_init, t_thr):
self.t_thr = t_thr
self.total_sim_time = 0
- def run_FEM_batch(self, X, id_sim, runIdx=0, alterInput=[]):
-
+ def run_FEM_batch(self, X, id_sim, runIdx=0, alterInput=[]): # noqa: B006, C901, N802, N803, D102
if runIdx == -1:
# dummy run
return X, np.zeros((0, self.y_dim)), id_sim
- workflowDriver = self.workflowDriver
+ workflowDriver = self.workflowDriver # noqa: N806
#
# serial run
#
- X = np.atleast_2d(X)
+ X = np.atleast_2d(X) # noqa: N806
nsamp = X.shape[0]
if not self.do_parallel:
- Y = np.zeros((nsamp, self.y_dim))
+ Y = np.zeros((nsamp, self.y_dim)) # noqa: N806
for ns in range(nsamp):
- Y_tmp, id_sim_current = run_FEM(
+ Y_tmp, id_sim_current = run_FEM( # noqa: N806
X[ns, :],
id_sim + ns,
self.rv_name,
@@ -131,26 +126,22 @@ def run_FEM_batch(self, X, id_sim, runIdx=0, alterInput=[]):
runIdx,
)
if Y_tmp.shape[0] != self.y_dim:
-
- msg = "model output in sample {} contains {} value(s) while the number of QoIs specified is {}".format(
- ns, Y_tmp.shape[0], y_dim
- )
-
+                    msg = f'model output in sample {ns} contains {Y_tmp.shape[0]} value(s) while the number of QoIs specified is {self.y_dim}'
self.exit(msg)
Y[ns, :] = Y_tmp
if time.time() - self.t_init > self.t_thr:
- X = X[:ns, :]
- Y = Y[:ns, :]
+ X = X[:ns, :] # noqa: N806
+ Y = Y[:ns, :] # noqa: N806
break
- Nsim = id_sim_current - id_sim + 1
+ Nsim = id_sim_current - id_sim + 1 # noqa: N806
#
# parallel run
#
if self.do_parallel:
- print("Running {} simulations in parallel".format(nsamp))
+ print(f'Running {nsamp} simulations in parallel') # noqa: T201
tmp = time.time()
iterables = (
(
@@ -165,37 +156,35 @@ def run_FEM_batch(self, X, id_sim, runIdx=0, alterInput=[]):
)
try:
result_objs = list(self.pool.starmap(run_FEM, iterables))
- print("Simulation time = {} s".format(time.time() - tmp))
+ print(f'Simulation time = {time.time() - tmp} s') # noqa: T201
except KeyboardInterrupt:
- print("Ctrl+c received, terminating and joining pool.")
+ print('Ctrl+c received, terminating and joining pool.') # noqa: T201
try:
self.pool.shutdown()
- except Exception:
+ except Exception: # noqa: BLE001
sys.exit()
- Nsim = len(list((result_objs)))
- Y = np.zeros((Nsim, self.y_dim))
- for val, id in result_objs:
+ Nsim = len(list(result_objs)) # noqa: N806
+ Y = np.zeros((Nsim, self.y_dim)) # noqa: N806
+ for val, id in result_objs: # noqa: A001
if isinstance(val, str):
self.exit(val)
elif val.shape[0]:
if val.shape[0] != self.y_dim:
- msg = "model output in sample {} contains {} value(s) while the number of QoIs specified is {}".format(
- id+1, val.shape[0], self.y_dim
- )
+ msg = f'model output in sample {id + 1} contains {val.shape[0]} value(s) while the number of QoIs specified is {self.y_dim}'
self.exit(msg)
if np.isnan(np.sum(val)):
- Nsim = id - id_sim
- X = X[:Nsim, :]
- Y = Y[:Nsim, :]
+ Nsim = id - id_sim # noqa: N806
+ X = X[:Nsim, :] # noqa: N806
+ Y = Y[:Nsim, :] # noqa: N806
else:
Y[id - id_sim, :] = val
- if len(alterInput)>0:
+ if len(alterInput) > 0:
idx = alterInput[0]
- X = np.hstack([X[:, :idx], X[:,idx+1:]])
-
+ X = np.hstack([X[:, :idx], X[:, idx + 1 :]]) # noqa: N806
+
# IM_vals = self.compute_IM(id_sim+1, id_sim + Nsim)
# IM_list = list(map(str, IM_vals))[1:]
# self.IM_names = IM_list
@@ -203,79 +192,89 @@ def run_FEM_batch(self, X, id_sim, runIdx=0, alterInput=[]):
# X_new = np.hstack([X[:,:idx],IM_vals.to_numpy()[:,1:]])
# X_new = np.hstack([X_new, X[:,idx+1:]])
# X = X_new.astype(np.double)
-
+
#
# In case EEUQ
#
-
- IM_vals = self.compute_IM(id_sim+1, id_sim + Nsim)
+
+ IM_vals = self.compute_IM(id_sim + 1, id_sim + Nsim) # noqa: N806
if IM_vals is None:
- X = X.astype(np.double)
+ X = X.astype(np.double) # noqa: N806
else:
self.IM_names = list(map(str, IM_vals))[1:]
- X_new = np.hstack([X,IM_vals.to_numpy()[:,1:]])
- X = X_new.astype(np.double)
+ X_new = np.hstack([X, IM_vals.to_numpy()[:, 1:]]) # noqa: N806
+ X = X_new.astype(np.double) # noqa: N806
-
return X, Y, id_sim + Nsim
- def compute_IM(self, i_begin, i_end):
- workdir_list = [os.path.join(self.work_dir,'workdir.{}'.format(int(i))) for i in range(i_begin,i_end+1)]
+ def compute_IM(self, i_begin, i_end): # noqa: N802, D102
+ workdir_list = [
+ os.path.join(self.work_dir, f'workdir.{int(i)}') # noqa: PTH118
+ for i in range(i_begin, i_end + 1)
+ ]
# intensity measure app
- computeIM = os.path.join(os.path.dirname(os.path.dirname(os.path.dirname(os.path.abspath(__file__)))),
- 'createEVENT', 'groundMotionIM', 'IntensityMeasureComputer.py')
+ computeIM = os.path.join( # noqa: PTH118, N806
+ os.path.dirname( # noqa: PTH120
+ os.path.dirname(os.path.dirname(os.path.abspath(__file__))) # noqa: PTH100, PTH120
+ ),
+ 'createEVENT',
+ 'groundMotionIM',
+ 'IntensityMeasureComputer.py',
+ )
- pythonEXE = sys.executable
+ pythonEXE = sys.executable # noqa: N806
# compute IMs
for cur_workdir in workdir_list:
os.chdir(cur_workdir)
- if os.path.exists('EVENT.json') and os.path.exists('AIM.json'):
- os.system(
- f"{pythonEXE} {computeIM} --filenameAIM AIM.json --filenameEVENT EVENT.json --filenameIM IM.json --geoMeanVar")
+ if os.path.exists('EVENT.json') and os.path.exists('AIM.json'): # noqa: PTH110
+ os.system( # noqa: S605
+ f'{pythonEXE} {computeIM} --filenameAIM AIM.json --filenameEVENT EVENT.json --filenameIM IM.json --geoMeanVar'
+ )
os.chdir(self.work_dir)
# collect IMs from different workdirs
for i, cur_workdir in enumerate(workdir_list):
cur_id = int(cur_workdir.split('.')[-1])
- if os.path.exists(os.path.join(cur_workdir, 'IM.csv')):
- print("IM.csv found in wordir.{}".format(cur_id))
- tmp1 = pd.read_csv(os.path.join(cur_workdir, 'IM.csv'), index_col=None)
+ if os.path.exists(os.path.join(cur_workdir, 'IM.csv')): # noqa: PTH110, PTH118
+                print(f'IM.csv found in workdir.{cur_id}')  # noqa: T201
+ tmp1 = pd.read_csv(
+ os.path.join(cur_workdir, 'IM.csv'), # noqa: PTH118
+ index_col=None,
+ )
if tmp1.empty:
- print("IM.csv in wordir.{} is empty.".format(cur_id))
- return
- tmp2 = pd.DataFrame({'%eval_id': [cur_id for x in range(len(tmp1.index))]})
+                    print(f'IM.csv in workdir.{cur_id} is empty.')  # noqa: T201
+ return None
+ tmp2 = pd.DataFrame(
+ {'%eval_id': [cur_id for x in range(len(tmp1.index))]}
+ )
if i == 0:
im_collector = pd.concat([tmp2, tmp1], axis=1)
else:
tmp3 = pd.concat([tmp2, tmp1], axis=1)
im_collector = pd.concat([im_collector, tmp3])
else:
- print("IM.csv NOT found in wordir.{}".format(cur_id))
- return
+                print(f'IM.csv NOT found in workdir.{cur_id}')  # noqa: T201
+ return None
im_collector = im_collector.sort_values(by=['%eval_id'])
- return im_collector
- #im_collector.to_csv('IM.csv', index=False)
-
-
-
+ return im_collector # noqa: RET504
+ # im_collector.to_csv('IM.csv', index=False)
-
- def readJson(self):
+ def readJson(self): # noqa: N802, D102
pass
- def make_pool(
+ def make_pool( # noqa: D102
self,
):
- if self.run_type.lower() == "runninglocal":
- from multiprocessing import Pool
+ if self.run_type.lower() == 'runninglocal':
+ from multiprocessing import Pool # noqa: PLC0415
n_processor = os.cpu_count()
pool = Pool(n_processor)
else:
- from mpi4py import MPI
- from mpi4py.futures import MPIPoolExecutor
+ from mpi4py import MPI # noqa: PLC0415
+ from mpi4py.futures import MPIPoolExecutor # noqa: PLC0415
self.world = MPI.COMM_WORLD
n_processor = self.world.Get_size()
@@ -286,19 +285,19 @@ def make_pool(
# Someplace to write down error messages
#
- def create_errLog(self):
- #self.errfile = open(os.path.join(self.work_dir, "dakota.err"), "a")
+ def create_errLog(self): # noqa: N802, D102
+ # self.errfile = open(os.path.join(self.work_dir, "dakota.err"), "a")
pass
- def exit(self, msg):
- print(msg, file=sys.stderr)
- print(msg)
+ def exit(self, msg): # noqa: D102, PLR6301
+ print(msg, file=sys.stderr) # noqa: T201
+ print(msg) # noqa: T201
# sys.stderr.write(msg)
# self.errfile.write(msg)
# self.errfile.close()
- exit(-1)
+ exit(-1) # noqa: PLR1722
- def terminate_errLog(self):
+ def terminate_errLog(self): # noqa: N802, D102
# self.errfile.close()
pass
@@ -307,20 +306,19 @@ def terminate_errLog(self):
#
-def run_FEM(X, id_sim, rv_name, work_dir, workflowDriver, runIdx=0):
-
+def run_FEM(X, id_sim, rv_name, work_dir, workflowDriver, runIdx=0): # noqa: C901, N802, N803, D103
if runIdx == 0:
- templatedirFolder = "/templatedir"
- workdirFolder = "/workdir." + str(id_sim + 1)
+ templatedirFolder = '/templatedir' # noqa: N806
+ workdirFolder = '/workdir.' + str(id_sim + 1) # noqa: N806
else:
- templatedirFolder = "/templatedir." + str(runIdx)
- workdirFolder = "/workdir." + str(runIdx) + "." + str(id_sim + 1)
+ templatedirFolder = '/templatedir.' + str(runIdx) # noqa: N806
+ workdirFolder = '/workdir.' + str(runIdx) + '.' + str(id_sim + 1) # noqa: N806
- X = np.atleast_2d(X)
+ X = np.atleast_2d(X) # noqa: N806
x_dim = X.shape[1]
if X.shape[0] > 1:
- msg = "do one simulation at a time"
+ msg = 'do one simulation at a time'
return msg, id_sim
#
# (1) create "workdir.idx " folder :need C++17 to use the files system namespace
@@ -329,104 +327,101 @@ def run_FEM(X, id_sim, rv_name, work_dir, workflowDriver, runIdx=0):
current_dir_i = work_dir + workdirFolder
try:
shutil.copytree(work_dir + templatedirFolder, current_dir_i)
- except Exception as ex:
+ except Exception: # noqa: BLE001
try:
-
shutil.copytree(work_dir + templatedirFolder, current_dir_i)
- except Exception as ex:
- msg = "Error running FEM: " + str(ex)
+ except Exception as ex: # noqa: BLE001
+ msg = 'Error running FEM: ' + str(ex)
return msg, id_sim
#
# (2) write param.in file
#
- outF = open(current_dir_i + "/params.in", "w")
- outF.write("{}\n".format(x_dim))
+ outF = open(current_dir_i + '/params.in', 'w') # noqa: N806, PLW1514, PTH123, SIM115
+ outF.write(f'{x_dim}\n')
for i in range(x_dim):
- outF.write("{} {}\n".format(rv_name[i], X[0, i]))
+ outF.write(f'{rv_name[i]} {X[0, i]}\n')
outF.close()
if runIdx == 0:
- print("RUNNING FEM: working directory {} created".format(id_sim + 1))
+ print(f'RUNNING FEM: working directory {id_sim + 1} created') # noqa: T201
else:
- print("RUNNING FEM: working directory {}-{} created".format(runIdx, id_sim + 1))
+ print(f'RUNNING FEM: working directory {runIdx}-{id_sim + 1} created') # noqa: T201
#
# (3) run workflow_driver.bat
#
os.chdir(current_dir_i)
- workflow_run_command = "{}/{} 1> workflow.log 2>&1".format(current_dir_i, workflowDriver)
- #subprocess.check_call(
+ workflow_run_command = f'{current_dir_i}/{workflowDriver} 1> workflow.log 2>&1'
+ # subprocess.check_call(
# workflow_run_command,
# shell=True,
# stdout=subprocess.DEVNULL,
# stderr=subprocess.STDOUT,
- #) # subprocess.check_call(workflow_run_command, shell=True, stdout=FNULL, stderr=subprocess.STDOUT)
+ # ) # subprocess.check_call(workflow_run_command, shell=True, stdout=FNULL, stderr=subprocess.STDOUT)
# => to end grasefully
- returnCode = subprocess.call(
- workflow_run_command,
- shell=True,
- stdout=subprocess.DEVNULL,
- stderr=subprocess.STDOUT,
- ) # subprocess.check_call(workflow_run_command, shell=True, stdout=FNULL, stderr=subprocess.STDOUT)
+ returnCode = subprocess.call( # noqa: S602, N806, F841
+ workflow_run_command,
+ shell=True,
+ stdout=subprocess.DEVNULL,
+ stderr=subprocess.STDOUT,
+ ) # subprocess.check_call(workflow_run_command, shell=True, stdout=FNULL, stderr=subprocess.STDOUT)
#
# (4) reading results
#
- if glob.glob("results.out"):
- g = np.loadtxt("results.out").flatten()
+ if glob.glob('results.out'): # noqa: PTH207
+ g = np.loadtxt('results.out').flatten()
else:
- msg = "Error running FEM: results.out missing at " + current_dir_i
- if glob.glob("ops.out"):
- with open("ops.out", "r") as text_file:
- error_FEM = text_file.read()
+ msg = 'Error running FEM: results.out missing at ' + current_dir_i
+ if glob.glob('ops.out'): # noqa: PTH207
+ with open('ops.out') as text_file: # noqa: FURB101, PLW1514, PTH123
+ error_FEM = text_file.read() # noqa: N806
- startingCharId = error_FEM.lower().find("error")
+ startingCharId = error_FEM.lower().find('error') # noqa: N806
- if startingCharId >0:
- startingCharId = max(0,startingCharId-20)
- endingID = max(len(error_FEM),startingCharId+200)
+ if startingCharId > 0:
+ startingCharId = max(0, startingCharId - 20) # noqa: N806
+ endingID = max(len(error_FEM), startingCharId + 200) # noqa: N806
errmsg = error_FEM[startingCharId:endingID]
- errmsg=errmsg.split(" ", 1)[1]
- errmsg=errmsg[0:errmsg.rfind(" ")]
- msg += "\n"
- msg += "your FEM model says...\n"
- msg += "........\n" + errmsg + "\n........ \n"
- msg += "to read more, see " + os.path.join(os. getcwd(),"ops.out")
+ errmsg = errmsg.split(' ', 1)[1]
+ errmsg = errmsg[0 : errmsg.rfind(' ')]
+ msg += '\n'
+ msg += 'your FEM model says...\n'
+ msg += '........\n' + errmsg + '\n........ \n'
+ msg += 'to read more, see ' + os.path.join(os.getcwd(), 'ops.out') # noqa: PTH109, PTH118
return msg, id_sim
if g.shape[0] == 0:
- msg = "Error running FEM: results.out is empty"
- if glob.glob("ops.out"):
- with open("ops.out", "r") as text_file:
- error_FEM = text_file.read()
+ msg = 'Error running FEM: results.out is empty'
+ if glob.glob('ops.out'): # noqa: PTH207
+ with open('ops.out') as text_file: # noqa: FURB101, PLW1514, PTH123
+ error_FEM = text_file.read() # noqa: N806
- startingCharId = error_FEM.lower().find("error")
+ startingCharId = error_FEM.lower().find('error') # noqa: N806
- if startingCharId >0:
- startingCharId = max(0,startingCharId-20)
- endingID = max(len(error_FEM),startingCharId+200)
+ if startingCharId > 0:
+ startingCharId = max(0, startingCharId - 20) # noqa: N806
+ endingID = max(len(error_FEM), startingCharId + 200) # noqa: N806
errmsg = error_FEM[startingCharId:endingID]
- errmsg=errmsg.split(" ", 1)[1]
- errmsg=errmsg[0:errmsg.rfind(" ")]
- msg += "\n"
- msg += "your FEM model says...\n"
- msg += "........\n" + errmsg + "\n........ \n"
- msg += "to read more, see " + os.path.join(os. getcwd(),"ops.out")
+ errmsg = errmsg.split(' ', 1)[1]
+ errmsg = errmsg[0 : errmsg.rfind(' ')]
+ msg += '\n'
+ msg += 'your FEM model says...\n'
+ msg += '........\n' + errmsg + '\n........ \n'
+ msg += 'to read more, see ' + os.path.join(os.getcwd(), 'ops.out') # noqa: PTH109, PTH118
return msg, id_sim
- os.chdir("../")
+ os.chdir('../')
if np.isnan(np.sum(g)):
- msg = "Error running FEM: Response value at workdir.{} is NaN".format(
- id_sim + 1
- )
+ msg = f'Error running FEM: Response value at workdir.{id_sim + 1} is NaN'
return msg, id_sim
return g, id_sim
@@ -439,17 +434,15 @@ def run_FEM(X, id_sim, rv_name, work_dir, workflowDriver, runIdx=0):
# def makePool(self):
# pass
+
#
-# When sampled X is different from surrogate input X. e.g. we sample ground motion parameters or indicies, but we use IM as input of GP
+# When sampled X is different from surrogate input X. e.g. we sample ground motion parameters or indices, but we use IM as input of GP
#
# def run_FEM_alterX(X, id_sim, rv_name, work_dir, workflowDriver, runIdx=0, alterIdx, alterFiles):
# g, id_sim = run_FEM(X, id_sim, rv_name, work_dir, workflowDriver, runIdx=0)
-
-
-
#
# class simcenterUQ(UQengine):
# def __init__(self):
diff --git a/modules/performUQ/SimCenterUQ/nataf_gsa/ARPACK/cnapps.f b/modules/performUQ/SimCenterUQ/nataf_gsa/ARPACK/cnapps.f
index 0c8c85b4d..397bc852f 100644
--- a/modules/performUQ/SimCenterUQ/nataf_gsa/ARPACK/cnapps.f
+++ b/modules/performUQ/SimCenterUQ/nataf_gsa/ARPACK/cnapps.f
@@ -208,9 +208,9 @@ subroutine cnapps
& cabs1
cabs1( cdum ) = abs( real( cdum ) ) + abs( aimag( cdum ) )
c
-c %----------------%
-c | Data statments |
-c %----------------%
+c %-----------------%
+c | Data statements |
+c %-----------------%
c
data first / .true. /
c
diff --git a/modules/performUQ/SimCenterUQ/nataf_gsa/ARPACK/cnaup2.f b/modules/performUQ/SimCenterUQ/nataf_gsa/ARPACK/cnaup2.f
index 072b10fff..a658c1f36 100644
--- a/modules/performUQ/SimCenterUQ/nataf_gsa/ARPACK/cnaup2.f
+++ b/modules/performUQ/SimCenterUQ/nataf_gsa/ARPACK/cnaup2.f
@@ -26,7 +26,7 @@
c The logic for adjusting is contained within the current
c subroutine.
c If ISHIFT=0, NP is the number of shifts the user needs
-c to provide via reverse comunication. 0 < NP < NCV-NEV.
+c to provide via reverse communication. 0 < NP < NCV-NEV.
c NP may be less than NCV-NEV since a leading block of the current
c upper Hessenberg matrix has split off and contains "unwanted"
c Ritz values.
diff --git a/modules/performUQ/SimCenterUQ/nataf_gsa/ARPACK/cneupd.f b/modules/performUQ/SimCenterUQ/nataf_gsa/ARPACK/cneupd.f
index cb11d790a..2006e163f 100644
--- a/modules/performUQ/SimCenterUQ/nataf_gsa/ARPACK/cneupd.f
+++ b/modules/performUQ/SimCenterUQ/nataf_gsa/ARPACK/cneupd.f
@@ -488,7 +488,7 @@ subroutine cneupd (rvec, howmny, select, d, z, ldz, sigma,
c | |
c | 2) If it is wanted, then check the corresponding Ritz |
c | estimate to see if it has converged. If it has, set |
-c | correponding entry in the logical array SELECT to |
+c | corresponding entry in the logical array SELECT to |
c | .TRUE.. |
c | |
c | If SELECT(j) = .TRUE. and j > NCONV, then there is a |
diff --git a/modules/performUQ/SimCenterUQ/nataf_gsa/ARPACK/dlaqrb.f b/modules/performUQ/SimCenterUQ/nataf_gsa/ARPACK/dlaqrb.f
index d851b8636..845966075 100644
--- a/modules/performUQ/SimCenterUQ/nataf_gsa/ARPACK/dlaqrb.f
+++ b/modules/performUQ/SimCenterUQ/nataf_gsa/ARPACK/dlaqrb.f
@@ -57,7 +57,7 @@
c Z Double precision array, dimension (N). (OUTPUT)
c On exit Z contains the last components of the Schur vectors.
c
-c INFO Integer. (OUPUT)
+c INFO Integer. (OUTPUT)
c = 0: successful exit
c > 0: SLAQRB failed to compute all the eigenvalues ILO to IHI
c in a total of 30*(IHI-ILO+1) iterations; if INFO = i,
diff --git a/modules/performUQ/SimCenterUQ/nataf_gsa/ARPACK/dnaitr.f b/modules/performUQ/SimCenterUQ/nataf_gsa/ARPACK/dnaitr.f
index fe799bfc3..456ce5ae5 100644
--- a/modules/performUQ/SimCenterUQ/nataf_gsa/ARPACK/dnaitr.f
+++ b/modules/performUQ/SimCenterUQ/nataf_gsa/ARPACK/dnaitr.f
@@ -377,11 +377,11 @@ subroutine dnaitr
& '_naitr: B-norm of the current residual is')
end if
c
-c %---------------------------------------------------%
-c | STEP 1: Check if the B norm of j-th residual |
-c | vector is zero. Equivalent to determing whether |
-c | an exact j-step Arnoldi factorization is present. |
-c %---------------------------------------------------%
+c %-----------------------------------------------------%
+c | STEP 1: Check if the B norm of j-th residual |
+c | vector is zero. Equivalent to determining whether |
+c | an exact j-step Arnoldi factorization is present. |
+c %-----------------------------------------------------%
c
betaj = rnorm
if (rnorm .gt. zero) go to 40
diff --git a/modules/performUQ/SimCenterUQ/nataf_gsa/ARPACK/dnapps.f b/modules/performUQ/SimCenterUQ/nataf_gsa/ARPACK/dnapps.f
index 5385c1b95..3276de50e 100644
--- a/modules/performUQ/SimCenterUQ/nataf_gsa/ARPACK/dnapps.f
+++ b/modules/performUQ/SimCenterUQ/nataf_gsa/ARPACK/dnapps.f
@@ -13,7 +13,7 @@
c A*(V_{k}*Q) - (V_{k}*Q)*(Q^T* H_{k}*Q) = r_{k+p}*e_{k+p}^T * Q
c
c where Q is an orthogonal matrix which is the product of rotations
-c and reflections resulting from the NP bulge chage sweeps.
+c and reflections resulting from the NP bulge change sweeps.
c The updated Arnoldi factorization becomes:
c
c A*VNEW_{k} - VNEW_{k}*HNEW_{k} = rnew_{k}*e_{k}^T.
@@ -30,7 +30,7 @@
c KEV Integer. (INPUT/OUTPUT)
c KEV+NP is the size of the input matrix H.
c KEV is the size of the updated matrix HNEW. KEV is only
-c updated on ouput when fewer than NP shifts are applied in
+c updated on output when fewer than NP shifts are applied in
c order to keep the conjugate pair together.
c
c NP Integer. (INPUT)
@@ -205,9 +205,9 @@ subroutine dnapps
c
intrinsic abs, max, min
c
-c %----------------%
-c | Data statments |
-c %----------------%
+c %-----------------%
+c | Data statements |
+c %-----------------%
c
data first / .true. /
c
@@ -384,9 +384,9 @@ subroutine dnapps
c
do 80 i = istart, iend-1
c
-c %-----------------------------------------------------%
-c | Contruct the plane rotation G to zero out the bulge |
-c %-----------------------------------------------------%
+c %------------------------------------------------------%
+c | Construct the plane rotation G to zero out the bulge |
+c %------------------------------------------------------%
c
call dlartg (f, g, c, s, r)
if (i .gt. istart) then
diff --git a/modules/performUQ/SimCenterUQ/nataf_gsa/ARPACK/dnaup2.f b/modules/performUQ/SimCenterUQ/nataf_gsa/ARPACK/dnaup2.f
index 1cdf09403..5e535a603 100644
--- a/modules/performUQ/SimCenterUQ/nataf_gsa/ARPACK/dnaup2.f
+++ b/modules/performUQ/SimCenterUQ/nataf_gsa/ARPACK/dnaup2.f
@@ -26,7 +26,7 @@
c The logic for adjusting is contained within the current
c subroutine.
c If ISHIFT=0, NP is the number of shifts the user needs
-c to provide via reverse comunication. 0 < NP < NCV-NEV.
+c to provide via reverse communication. 0 < NP < NCV-NEV.
c NP may be less than NCV-NEV for two reasons. The first, is
c to keep complex conjugate pairs of "wanted" Ritz values
c together. The second, is that a leading block of the current
@@ -598,7 +598,7 @@ subroutine dnaup2
c
c %----------------------------------------------------%
c | Sort the Ritz values according to the scaled Ritz |
-c | esitmates. This will push all the converged ones |
+c | estimates. This will push all the converged ones  |
c | towards the front of ritzr, ritzi, bounds |
c | (in the case when NCONV < NEV.) |
c %----------------------------------------------------%
@@ -697,7 +697,7 @@ subroutine dnaup2
if (ishift .eq. 0) then
c
c %-------------------------------------------------------%
-c | User specified shifts: reverse comminucation to |
+c | User specified shifts: reverse communication to |
c | compute the shifts. They are returned in the first |
c | 2*NP locations of WORKL. |
c %-------------------------------------------------------%
diff --git a/modules/performUQ/SimCenterUQ/nataf_gsa/ARPACK/dnaupd.f b/modules/performUQ/SimCenterUQ/nataf_gsa/ARPACK/dnaupd.f
index 93218c89f..f634de4ca 100644
--- a/modules/performUQ/SimCenterUQ/nataf_gsa/ARPACK/dnaupd.f
+++ b/modules/performUQ/SimCenterUQ/nataf_gsa/ARPACK/dnaupd.f
@@ -273,7 +273,7 @@
c = -8: Error return from LAPACK eigenvalue calculation;
c = -9: Starting vector is zero.
c = -10: IPARAM(7) must be 1,2,3,4.
-c = -11: IPARAM(7) = 1 and BMAT = 'G' are incompatable.
+c = -11: IPARAM(7) = 1 and BMAT = 'G' are incompatible.
c = -12: IPARAM(1) must be equal to 0 or 1.
c = -9999: Could not build an Arnoldi factorization.
c IPARAM(5) returns the size of the current Arnoldi
@@ -298,7 +298,7 @@
c L'z = x where x is a Ritz vector of OP.
c
c 4. At present there is no a-priori analysis to guide the selection
-c of NCV relative to NEV. The only formal requrement is that NCV > NEV + 2.
+c of NCV relative to NEV. The only formal requirement is that NCV > NEV + 2.
c However, it is recommended that NCV .ge. 2*NEV+1. If many problems of
c the same type are to be solved, one should experiment with increasing
c NCV while keeping NEV fixed for a given test problem. This will
diff --git a/modules/performUQ/SimCenterUQ/nataf_gsa/ARPACK/dneupd.f b/modules/performUQ/SimCenterUQ/nataf_gsa/ARPACK/dneupd.f
index 5fb63d9f3..d393906f0 100644
--- a/modules/performUQ/SimCenterUQ/nataf_gsa/ARPACK/dneupd.f
+++ b/modules/performUQ/SimCenterUQ/nataf_gsa/ARPACK/dneupd.f
@@ -548,7 +548,7 @@ subroutine dneupd (rvec, howmny, select, dr, di, z, ldz, sigmar,
c | |
c | 2) If it is wanted, then check the corresponding Ritz |
c | estimate to see if it has converged. If it has, set |
-c | correponding entry in the logical array SELECT to |
+c | corresponding entry in the logical array SELECT to |
c | .TRUE.. |
c | |
c | If SELECT(j) = .TRUE. and j > NCONV, then there is a |
@@ -968,9 +968,9 @@ subroutine dneupd (rvec, howmny, select, dr, di, z, ldz, sigmar,
c
if (type .eq. 'SHIFTI' .and. msglvl .gt. 1) then
call dvout (logfil, nconv, dr, ndigit,
- & '_neupd: Untransformed real part of the Ritz valuess.')
+ & '_neupd: Untransformed real part of the Ritz values.')
call dvout (logfil, nconv, di, ndigit,
- & '_neupd: Untransformed imag part of the Ritz valuess.')
+ & '_neupd: Untransformed imag part of the Ritz values.')
call dvout (logfil, nconv, workl(ihbds), ndigit,
& '_neupd: Ritz estimates of untransformed Ritz values.')
else if (type .eq. 'REGULR' .and. msglvl .gt. 1) then
diff --git a/modules/performUQ/SimCenterUQ/nataf_gsa/ARPACK/dsaitr.f b/modules/performUQ/SimCenterUQ/nataf_gsa/ARPACK/dsaitr.f
index 13b7126fb..579702f76 100644
--- a/modules/performUQ/SimCenterUQ/nataf_gsa/ARPACK/dsaitr.f
+++ b/modules/performUQ/SimCenterUQ/nataf_gsa/ARPACK/dsaitr.f
@@ -370,10 +370,10 @@ subroutine dsaitr
& '_saitr: B-norm of the current residual =')
end if
c
-c %---------------------------------------------------------%
-c | Check for exact zero. Equivalent to determing whether a |
-c | j-step Arnoldi factorization is present. |
-c %---------------------------------------------------------%
+c %-----------------------------------------------------------%
+c | Check for exact zero. Equivalent to determining whether a |
+c | j-step Arnoldi factorization is present. |
+c %-----------------------------------------------------------%
c
if (rnorm .gt. zero) go to 40
c
diff --git a/modules/performUQ/SimCenterUQ/nataf_gsa/ARPACK/dsapps.f b/modules/performUQ/SimCenterUQ/nataf_gsa/ARPACK/dsapps.f
index 5c9178055..9a91dfe48 100644
--- a/modules/performUQ/SimCenterUQ/nataf_gsa/ARPACK/dsapps.f
+++ b/modules/performUQ/SimCenterUQ/nataf_gsa/ARPACK/dsapps.f
@@ -192,9 +192,9 @@ subroutine dsapps
c
intrinsic abs
c
-c %----------------%
-c | Data statments |
-c %----------------%
+c %-----------------%
+c | Data statements |
+c %-----------------%
c
data first / .true. /
c
@@ -264,7 +264,7 @@ subroutine dsapps
call ivout (logfil, 1, i, ndigit,
& '_sapps: deflation at row/column no.')
call ivout (logfil, 1, jj, ndigit,
- & '_sapps: occured before shift number.')
+ & '_sapps: occurred before shift number.')
call dvout (logfil, 1, h(i+1,1), ndigit,
& '_sapps: the corresponding off diagonal element')
end if
diff --git a/modules/performUQ/SimCenterUQ/nataf_gsa/ARPACK/dsaup2.f b/modules/performUQ/SimCenterUQ/nataf_gsa/ARPACK/dsaup2.f
index 7b441dc92..2faa5383b 100644
--- a/modules/performUQ/SimCenterUQ/nataf_gsa/ARPACK/dsaup2.f
+++ b/modules/performUQ/SimCenterUQ/nataf_gsa/ARPACK/dsaup2.f
@@ -27,7 +27,7 @@
c The logic for adjusting is contained within the current
c subroutine.
c If ISHIFT=0, NP is the number of shifts the user needs
-c to provide via reverse comunication. 0 < NP < NCV-NEV.
+c to provide via reverse communication. 0 < NP < NCV-NEV.
c NP may be less than NCV-NEV since a leading block of the current
c upper Tridiagonal matrix has split off and contains "unwanted"
c Ritz values.
@@ -587,7 +587,7 @@ subroutine dsaup2
c
c %----------------------------------------------------%
c | Sort the Ritz values according to the scaled Ritz |
-c | esitmates. This will push all the converged ones |
+c | estimates. This will push all the converged ones |
c | towards the front of ritzr, ritzi, bounds |
c | (in the case when NCONV < NEV.) |
c %----------------------------------------------------%
diff --git a/modules/performUQ/SimCenterUQ/nataf_gsa/ARPACK/dsaupd.f b/modules/performUQ/SimCenterUQ/nataf_gsa/ARPACK/dsaupd.f
index c033bb4be..1f383cca0 100644
--- a/modules/performUQ/SimCenterUQ/nataf_gsa/ARPACK/dsaupd.f
+++ b/modules/performUQ/SimCenterUQ/nataf_gsa/ARPACK/dsaupd.f
@@ -264,12 +264,12 @@
c = -6: BMAT must be one of 'I' or 'G'.
c = -7: Length of private work array WORKL is not sufficient.
c = -8: Error return from trid. eigenvalue calculation;
-c Informatinal error from LAPACK routine dsteqr.
+c Informational error from LAPACK routine dsteqr.
c = -9: Starting vector is zero.
c = -10: IPARAM(7) must be 1,2,3,4,5.
-c = -11: IPARAM(7) = 1 and BMAT = 'G' are incompatable.
+c = -11: IPARAM(7) = 1 and BMAT = 'G' are incompatible.
c = -12: IPARAM(1) must be equal to 0 or 1.
-c = -13: NEV and WHICH = 'BE' are incompatable.
+c = -13: NEV and WHICH = 'BE' are incompatible.
c = -9999: Could not build an Arnoldi factorization.
c IPARAM(5) returns the size of the current Arnoldi
c factorization. The user is advised to check that
@@ -297,7 +297,7 @@
c L'z = x where x is a Ritz vector of OP.
c
c 4. At present there is no a-priori analysis to guide the selection
-c of NCV relative to NEV. The only formal requrement is that NCV > NEV.
+c of NCV relative to NEV. The only formal requirement is that NCV > NEV.
c However, it is recommended that NCV .ge. 2*NEV. If many problems of
c the same type are to be solved, one should experiment with increasing
c NCV while keeping NEV fixed for a given test problem. This will
@@ -306,7 +306,7 @@
c basis vectors. The optimal "cross-over" with respect to CPU time
c is problem dependent and must be determined empirically.
c
-c 5. If IPARAM(7) = 2 then in the Reverse commuication interface the user
+c 5. If IPARAM(7) = 2 then in the Reverse communication interface the user
c must do the following. When IDO = 1, Y = OP * X is to be computed.
c When IPARAM(7) = 2 OP = inv(B)*A. After computing A*X the user
c must overwrite X with A*X. Y is then the solution to the linear set
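The dsaupd notes above (the NCV versus NEV requirement and the reverse-communication protocol) are easiest to see in a driver loop. The following is a minimal, hedged C++ sketch for the standard symmetric case (BMAT = 'I', IPARAM(7) = 1); the extern "C" prototype assumes the conventional trailing-underscore Fortran binding with hidden character-length arguments omitted, and applyOP is an illustrative placeholder for the user's matrix-vector product, not anything defined in this repository.

    #include <cstddef>
    #include <vector>

    extern "C" void dsaupd_(int* ido, const char* bmat, int* n, const char* which,
                            int* nev, double* tol, double* resid, int* ncv,
                            double* v, int* ldv, int* iparam, int* ipntr,
                            double* workd, double* workl, int* lworkl, int* info);

    void applyOP(const double* x, double* y, int n);  // placeholder: y = OP * x

    int arnoldiLoop(int n, int nev, int ncv)
    {
        std::vector<double> resid(n), v(static_cast<std::size_t>(n) * ncv);
        std::vector<double> workd(3 * n), workl(ncv * (ncv + 8));  // dsaupd needs LWORKL >= NCV*(NCV+8)
        int iparam[11] = {0}, ipntr[11] = {0};
        iparam[0] = 1;    // ISHIFT = 1: exact shifts
        iparam[2] = 300;  // maximum Arnoldi iterations
        iparam[6] = 1;    // MODE 1: standard problem A*x = lambda*x
        int ido = 0, info = 0, ldv = n, lworkl = static_cast<int>(workl.size());
        double tol = 0.0;  // TOL <= 0 selects machine precision

        do {
            dsaupd_(&ido, "I", &n, "LM", &nev, &tol, resid.data(), &ncv, v.data(),
                    &ldv, iparam, ipntr, workd.data(), workl.data(), &lworkl, &info);
            if (ido == -1 || ido == 1) {
                // reverse communication: compute Y = OP * X using the WORKD slots (1-based IPNTR)
                applyOP(&workd[ipntr[0] - 1], &workd[ipntr[1] - 1], n);
            }
        } while (ido == -1 || ido == 1);
        return info;  // info == 0: dseupd may now extract the converged Ritz pairs
    }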
diff --git a/modules/performUQ/SimCenterUQ/nataf_gsa/ARPACK/dseupd.f b/modules/performUQ/SimCenterUQ/nataf_gsa/ARPACK/dseupd.f
index cf6a0d03c..a4567cf02 100644
--- a/modules/performUQ/SimCenterUQ/nataf_gsa/ARPACK/dseupd.f
+++ b/modules/performUQ/SimCenterUQ/nataf_gsa/ARPACK/dseupd.f
@@ -444,31 +444,31 @@ subroutine dseupd (rvec, howmny, select, d, z, ldz, sigma, bmat,
c
if (rvec) then
c
-c %------------------------------------------------%
-c | Get the converged Ritz value on the boundary. |
-c | This value will be used to dermine whether we |
-c | need to reorder the eigenvalues and |
-c | eigenvectors comupted by _steqr, and is |
-c | referred to as the "threshold" value. |
-c | |
-c | A Ritz value gamma is said to be a wanted |
-c | one, if |
-c | abs(gamma) .ge. threshold, when WHICH = 'LM'; |
-c | abs(gamma) .le. threshold, when WHICH = 'SM'; |
-c | gamma .ge. threshold, when WHICH = 'LA'; |
-c | gamma .le. threshold, when WHICH = 'SA'; |
-c | gamma .le. thres1 .or. gamma .ge. thres2 |
-c | when WHICH = 'BE'; |
-c | |
-c | Note: converged Ritz values and associated |
-c | Ritz estimates have been placed in the first |
-c | NCONV locations in workl(ritz) and |
-c | workl(bounds) respectively. They have been |
-c | sorted (in _saup2) according to the WHICH |
-c | selection criterion. (Except in the case |
-c | WHICH = 'BE', they are sorted in an increasing |
-c | order.) |
-c %------------------------------------------------%
+c %-------------------------------------------------%
+c | Get the converged Ritz value on the boundary. |
+c | This value will be used to determine whether we |
+c | need to reorder the eigenvalues and |
+c | eigenvectors computed by _steqr, and is |
+c | referred to as the "threshold" value. |
+c | |
+c | A Ritz value gamma is said to be a wanted |
+c | one, if |
+c | abs(gamma) .ge. threshold, when WHICH = 'LM'; |
+c | abs(gamma) .le. threshold, when WHICH = 'SM'; |
+c | gamma .ge. threshold, when WHICH = 'LA'; |
+c | gamma .le. threshold, when WHICH = 'SA'; |
+c | gamma .le. thres1 .or. gamma .ge. thres2 |
+c | when WHICH = 'BE'; |
+c | |
+c | Note: converged Ritz values and associated |
+c | Ritz estimates have been placed in the first |
+c | NCONV locations in workl(ritz) and |
+c | workl(bounds) respectively. They have been |
+c | sorted (in _saup2) according to the WHICH |
+c | selection criterion. (Except in the case |
+c | WHICH = 'BE', they are sorted in an increasing |
+c | order.) |
+c %-------------------------------------------------%
c
if (which .eq. 'LM' .or. which .eq. 'SM'
& .or. which .eq. 'LA' .or. which .eq. 'SA' ) then
@@ -514,7 +514,7 @@ subroutine dseupd (rvec, howmny, select, d, z, ldz, sigma, bmat,
c | |
c | 2) If it is wanted, then check the corresponding Ritz |
c | estimate to see if it has converged. If it has, set |
-c | correponding entry in the logical array SELECT to |
+c | corresponding entry in the logical array SELECT to |
c | .TRUE.. |
c | |
c | If SELECT(j) = .TRUE. and j > NCONV, then there is a |
diff --git a/modules/performUQ/SimCenterUQ/nataf_gsa/ARPACK/dstqrb.f b/modules/performUQ/SimCenterUQ/nataf_gsa/ARPACK/dstqrb.f
index 9fef543ba..6e66a84c0 100644
--- a/modules/performUQ/SimCenterUQ/nataf_gsa/ARPACK/dstqrb.f
+++ b/modules/performUQ/SimCenterUQ/nataf_gsa/ARPACK/dstqrb.f
@@ -93,7 +93,7 @@
c\Remarks
c 1. Starting with version 2.5, this routine is a modified version
c of LAPACK version 2.0 subroutine SSTEQR. No lines are deleted,
-c only commeted out and new lines inserted.
+c only commented out and new lines inserted.
c All lines commented out have "c$$$" at the beginning.
c Note that the LAPACK version 1.0 subroutine SSTEQR contained
c bugs.
diff --git a/modules/performUQ/SimCenterUQ/nataf_gsa/ARPACK/slaqrb.f b/modules/performUQ/SimCenterUQ/nataf_gsa/ARPACK/slaqrb.f
index e967b18e4..f07ccd820 100644
--- a/modules/performUQ/SimCenterUQ/nataf_gsa/ARPACK/slaqrb.f
+++ b/modules/performUQ/SimCenterUQ/nataf_gsa/ARPACK/slaqrb.f
@@ -57,7 +57,7 @@
c Z Real array, dimension (N). (OUTPUT)
c On exit Z contains the last components of the Schur vectors.
c
-c INFO Integer. (OUPUT)
+c INFO Integer. (OUTPUT)
c = 0: successful exit
c > 0: SLAQRB failed to compute all the eigenvalues ILO to IHI
c in a total of 30*(IHI-ILO+1) iterations; if INFO = i,
diff --git a/modules/performUQ/SimCenterUQ/nataf_gsa/ARPACK/snaitr.f b/modules/performUQ/SimCenterUQ/nataf_gsa/ARPACK/snaitr.f
index 8d9b1ecfb..abfca98d5 100644
--- a/modules/performUQ/SimCenterUQ/nataf_gsa/ARPACK/snaitr.f
+++ b/modules/performUQ/SimCenterUQ/nataf_gsa/ARPACK/snaitr.f
@@ -379,7 +379,7 @@ subroutine snaitr
c
c %---------------------------------------------------%
c | STEP 1: Check if the B norm of j-th residual |
-c | vector is zero. Equivalent to determing whether |
+c | vector is zero. Equivalent to determining whether |
c | an exact j-step Arnoldi factorization is present. |
c %---------------------------------------------------%
c
diff --git a/modules/performUQ/SimCenterUQ/nataf_gsa/ARPACK/snapps.f b/modules/performUQ/SimCenterUQ/nataf_gsa/ARPACK/snapps.f
index 0ae94bf84..8a1fe7bca 100644
--- a/modules/performUQ/SimCenterUQ/nataf_gsa/ARPACK/snapps.f
+++ b/modules/performUQ/SimCenterUQ/nataf_gsa/ARPACK/snapps.f
@@ -13,7 +13,7 @@
c A*(V_{k}*Q) - (V_{k}*Q)*(Q^T* H_{k}*Q) = r_{k+p}*e_{k+p}^T * Q
c
c where Q is an orthogonal matrix which is the product of rotations
-c and reflections resulting from the NP bulge chage sweeps.
+c and reflections resulting from the NP bulge chase sweeps.
c The updated Arnoldi factorization becomes:
c
c A*VNEW_{k} - VNEW_{k}*HNEW_{k} = rnew_{k}*e_{k}^T.
@@ -30,7 +30,7 @@
c KEV Integer. (INPUT/OUTPUT)
c KEV+NP is the size of the input matrix H.
c KEV is the size of the updated matrix HNEW. KEV is only
-c updated on ouput when fewer than NP shifts are applied in
+c updated on output when fewer than NP shifts are applied in
c order to keep the conjugate pair together.
c
c NP Integer. (INPUT)
@@ -205,9 +205,9 @@ subroutine snapps
c
intrinsic abs, max, min
c
-c %----------------%
-c | Data statments |
-c %----------------%
+c %-----------------%
+c | Data statements |
+c %-----------------%
c
data first / .true. /
c
@@ -384,9 +384,9 @@ subroutine snapps
c
do 80 i = istart, iend-1
c
-c %-----------------------------------------------------%
-c | Contruct the plane rotation G to zero out the bulge |
-c %-----------------------------------------------------%
+c %------------------------------------------------------%
+c | Construct the plane rotation G to zero out the bulge |
+c %------------------------------------------------------%
c
call slartg (f, g, c, s, r)
if (i .gt. istart) then
diff --git a/modules/performUQ/SimCenterUQ/nataf_gsa/ARPACK/snaup2.f b/modules/performUQ/SimCenterUQ/nataf_gsa/ARPACK/snaup2.f
index 01be472f6..d66ac3acd 100644
--- a/modules/performUQ/SimCenterUQ/nataf_gsa/ARPACK/snaup2.f
+++ b/modules/performUQ/SimCenterUQ/nataf_gsa/ARPACK/snaup2.f
@@ -26,7 +26,7 @@
c The logic for adjusting is contained within the current
c subroutine.
c If ISHIFT=0, NP is the number of shifts the user needs
-c to provide via reverse comunication. 0 < NP < NCV-NEV.
+c to provide via reverse communication. 0 < NP < NCV-NEV.
c NP may be less than NCV-NEV for two reasons. The first, is
c to keep complex conjugate pairs of "wanted" Ritz values
c together. The second, is that a leading block of the current
@@ -598,7 +598,7 @@ subroutine snaup2
c
c %----------------------------------------------------%
c | Sort the Ritz values according to the scaled Ritz |
-c | esitmates. This will push all the converged ones |
+c | estimates. This will push all the converged ones |
c | towards the front of ritzr, ritzi, bounds |
c | (in the case when NCONV < NEV.) |
c %----------------------------------------------------%
@@ -697,7 +697,7 @@ subroutine snaup2
if (ishift .eq. 0) then
c
c %-------------------------------------------------------%
-c | User specified shifts: reverse comminucation to |
+c | User specified shifts: reverse communication to |
c | compute the shifts. They are returned in the first |
c | 2*NP locations of WORKL. |
c %-------------------------------------------------------%
diff --git a/modules/performUQ/SimCenterUQ/nataf_gsa/ARPACK/snaupd.f b/modules/performUQ/SimCenterUQ/nataf_gsa/ARPACK/snaupd.f
index a1a83bb78..47cdeb68b 100644
--- a/modules/performUQ/SimCenterUQ/nataf_gsa/ARPACK/snaupd.f
+++ b/modules/performUQ/SimCenterUQ/nataf_gsa/ARPACK/snaupd.f
@@ -273,7 +273,7 @@
c = -8: Error return from LAPACK eigenvalue calculation;
c = -9: Starting vector is zero.
c = -10: IPARAM(7) must be 1,2,3,4.
-c = -11: IPARAM(7) = 1 and BMAT = 'G' are incompatable.
+c = -11: IPARAM(7) = 1 and BMAT = 'G' are incompatible.
c = -12: IPARAM(1) must be equal to 0 or 1.
c = -9999: Could not build an Arnoldi factorization.
c IPARAM(5) returns the size of the current Arnoldi
@@ -298,7 +298,7 @@
c L'z = x where x is a Ritz vector of OP.
c
c 4. At present there is no a-priori analysis to guide the selection
-c of NCV relative to NEV. The only formal requrement is that NCV > NEV + 2.
+c of NCV relative to NEV. The only formal requirement is that NCV > NEV + 2.
c However, it is recommended that NCV .ge. 2*NEV+1. If many problems of
c the same type are to be solved, one should experiment with increasing
c NCV while keeping NEV fixed for a given test problem. This will
diff --git a/modules/performUQ/SimCenterUQ/nataf_gsa/ARPACK/sneupd.f b/modules/performUQ/SimCenterUQ/nataf_gsa/ARPACK/sneupd.f
index 102b53a77..f94cb8b42 100644
--- a/modules/performUQ/SimCenterUQ/nataf_gsa/ARPACK/sneupd.f
+++ b/modules/performUQ/SimCenterUQ/nataf_gsa/ARPACK/sneupd.f
@@ -547,7 +547,7 @@ subroutine sneupd (rvec, howmny, select, dr, di, z, ldz, sigmar,
c | |
c | 2) If it is wanted, then check the corresponding Ritz |
c | estimate to see if it has converged. If it has, set |
-c | correponding entry in the logical array SELECT to |
+c | corresponding entry in the logical array SELECT to |
c | .TRUE.. |
c | |
c | If SELECT(j) = .TRUE. and j > NCONV, then there is a |
@@ -967,9 +967,9 @@ subroutine sneupd (rvec, howmny, select, dr, di, z, ldz, sigmar,
c
if (type .eq. 'SHIFTI' .and. msglvl .gt. 1) then
call svout (logfil, nconv, dr, ndigit,
- & '_neupd: Untransformed real part of the Ritz valuess.')
+ & '_neupd: Untransformed real part of the Ritz values.')
call svout (logfil, nconv, di, ndigit,
- & '_neupd: Untransformed imag part of the Ritz valuess.')
+ & '_neupd: Untransformed imag part of the Ritz values.')
call svout (logfil, nconv, workl(ihbds), ndigit,
& '_neupd: Ritz estimates of untransformed Ritz values.')
else if (type .eq. 'REGULR' .and. msglvl .gt. 1) then
diff --git a/modules/performUQ/SimCenterUQ/nataf_gsa/ARPACK/ssaitr.f b/modules/performUQ/SimCenterUQ/nataf_gsa/ARPACK/ssaitr.f
index a1c810e9f..96ff7fca3 100644
--- a/modules/performUQ/SimCenterUQ/nataf_gsa/ARPACK/ssaitr.f
+++ b/modules/performUQ/SimCenterUQ/nataf_gsa/ARPACK/ssaitr.f
@@ -370,10 +370,10 @@ subroutine ssaitr
& '_saitr: B-norm of the current residual =')
end if
c
-c %---------------------------------------------------------%
-c | Check for exact zero. Equivalent to determing whether a |
-c | j-step Arnoldi factorization is present. |
-c %---------------------------------------------------------%
+c %-----------------------------------------------------------%
+c | Check for exact zero. Equivalent to determining whether a |
+c | j-step Arnoldi factorization is present. |
+c %-----------------------------------------------------------%
c
if (rnorm .gt. zero) go to 40
c
diff --git a/modules/performUQ/SimCenterUQ/nataf_gsa/ARPACK/ssapps.f b/modules/performUQ/SimCenterUQ/nataf_gsa/ARPACK/ssapps.f
index b1eb5e343..2534c8102 100644
--- a/modules/performUQ/SimCenterUQ/nataf_gsa/ARPACK/ssapps.f
+++ b/modules/performUQ/SimCenterUQ/nataf_gsa/ARPACK/ssapps.f
@@ -192,9 +192,9 @@ subroutine ssapps
c
intrinsic abs
c
-c %----------------%
-c | Data statments |
-c %----------------%
+c %-----------------%
+c | Data statements |
+c %-----------------%
c
data first / .true. /
c
@@ -264,7 +264,7 @@ subroutine ssapps
call ivout (logfil, 1, i, ndigit,
& '_sapps: deflation at row/column no.')
call ivout (logfil, 1, jj, ndigit,
- & '_sapps: occured before shift number.')
+ & '_sapps: occurred before shift number.')
call svout (logfil, 1, h(i+1,1), ndigit,
& '_sapps: the corresponding off diagonal element')
end if
diff --git a/modules/performUQ/SimCenterUQ/nataf_gsa/ARPACK/ssaup2.f b/modules/performUQ/SimCenterUQ/nataf_gsa/ARPACK/ssaup2.f
index 42fd76895..f3c701f65 100644
--- a/modules/performUQ/SimCenterUQ/nataf_gsa/ARPACK/ssaup2.f
+++ b/modules/performUQ/SimCenterUQ/nataf_gsa/ARPACK/ssaup2.f
@@ -27,7 +27,7 @@
c The logic for adjusting is contained within the current
c subroutine.
c If ISHIFT=0, NP is the number of shifts the user needs
-c to provide via reverse comunication. 0 < NP < NCV-NEV.
+c to provide via reverse communication. 0 < NP < NCV-NEV.
c NP may be less than NCV-NEV since a leading block of the current
c upper Tridiagonal matrix has split off and contains "unwanted"
c Ritz values.
@@ -587,7 +587,7 @@ subroutine ssaup2
c
c %----------------------------------------------------%
c | Sort the Ritz values according to the scaled Ritz |
-c | esitmates. This will push all the converged ones |
+c | estimates. This will push all the converged ones |
c | towards the front of ritzr, ritzi, bounds |
c | (in the case when NCONV < NEV.) |
c %----------------------------------------------------%
diff --git a/modules/performUQ/SimCenterUQ/nataf_gsa/ARPACK/ssaupd.f b/modules/performUQ/SimCenterUQ/nataf_gsa/ARPACK/ssaupd.f
index 0fa6a9d09..7c66d19de 100644
--- a/modules/performUQ/SimCenterUQ/nataf_gsa/ARPACK/ssaupd.f
+++ b/modules/performUQ/SimCenterUQ/nataf_gsa/ARPACK/ssaupd.f
@@ -264,12 +264,12 @@
c = -6: BMAT must be one of 'I' or 'G'.
c = -7: Length of private work array WORKL is not sufficient.
c = -8: Error return from trid. eigenvalue calculation;
-c Informatinal error from LAPACK routine ssteqr.
+c Informational error from LAPACK routine ssteqr.
c = -9: Starting vector is zero.
c = -10: IPARAM(7) must be 1,2,3,4,5.
-c = -11: IPARAM(7) = 1 and BMAT = 'G' are incompatable.
+c = -11: IPARAM(7) = 1 and BMAT = 'G' are incompatible.
c = -12: IPARAM(1) must be equal to 0 or 1.
-c = -13: NEV and WHICH = 'BE' are incompatable.
+c = -13: NEV and WHICH = 'BE' are incompatible.
c = -9999: Could not build an Arnoldi factorization.
c IPARAM(5) returns the size of the current Arnoldi
c factorization. The user is advised to check that
@@ -297,7 +297,7 @@
c L'z = x where x is a Ritz vector of OP.
c
c 4. At present there is no a-priori analysis to guide the selection
-c of NCV relative to NEV. The only formal requrement is that NCV > NEV.
+c of NCV relative to NEV. The only formal requirement is that NCV > NEV.
c However, it is recommended that NCV .ge. 2*NEV. If many problems of
c the same type are to be solved, one should experiment with increasing
c NCV while keeping NEV fixed for a given test problem. This will
@@ -306,7 +306,7 @@
c basis vectors. The optimal "cross-over" with respect to CPU time
c is problem dependent and must be determined empirically.
c
-c 5. If IPARAM(7) = 2 then in the Reverse commuication interface the user
+c 5. If IPARAM(7) = 2 then in the Reverse communication interface the user
c must do the following. When IDO = 1, Y = OP * X is to be computed.
c When IPARAM(7) = 2 OP = inv(B)*A. After computing A*X the user
c must overwrite X with A*X. Y is then the solution to the linear set
diff --git a/modules/performUQ/SimCenterUQ/nataf_gsa/ARPACK/sseupd.f b/modules/performUQ/SimCenterUQ/nataf_gsa/ARPACK/sseupd.f
index 12aec54ba..b8c0c0a5a 100644
--- a/modules/performUQ/SimCenterUQ/nataf_gsa/ARPACK/sseupd.f
+++ b/modules/performUQ/SimCenterUQ/nataf_gsa/ARPACK/sseupd.f
@@ -443,31 +443,31 @@ subroutine sseupd (rvec, howmny, select, d, z, ldz, sigma, bmat,
c
if (rvec) then
c
-c %------------------------------------------------%
-c | Get the converged Ritz value on the boundary. |
-c | This value will be used to dermine whether we |
-c | need to reorder the eigenvalues and |
-c | eigenvectors comupted by _steqr, and is |
-c | referred to as the "threshold" value. |
-c | |
-c | A Ritz value gamma is said to be a wanted |
-c | one, if |
-c | abs(gamma) .ge. threshold, when WHICH = 'LM'; |
-c | abs(gamma) .le. threshold, when WHICH = 'SM'; |
-c | gamma .ge. threshold, when WHICH = 'LA'; |
-c | gamma .le. threshold, when WHICH = 'SA'; |
-c | gamma .le. thres1 .or. gamma .ge. thres2 |
-c | when WHICH = 'BE'; |
-c | |
-c | Note: converged Ritz values and associated |
-c | Ritz estimates have been placed in the first |
-c | NCONV locations in workl(ritz) and |
-c | workl(bounds) respectively. They have been |
-c | sorted (in _saup2) according to the WHICH |
-c | selection criterion. (Except in the case |
-c | WHICH = 'BE', they are sorted in an increasing |
-c | order.) |
-c %------------------------------------------------%
+c %--------------------------------------------------%
+c | Get the converged Ritz value on the boundary. |
+c | This value will be used to determine whether we |
+c | need to reorder the eigenvalues and |
+c | eigenvectors computed by _steqr, and is |
+c | referred to as the "threshold" value. |
+c | |
+c | A Ritz value gamma is said to be a wanted |
+c | one, if |
+c | abs(gamma) .ge. threshold, when WHICH = 'LM'; |
+c | abs(gamma) .le. threshold, when WHICH = 'SM'; |
+c | gamma .ge. threshold, when WHICH = 'LA'; |
+c | gamma .le. threshold, when WHICH = 'SA'; |
+c | gamma .le. thres1 .or. gamma .ge. thres2 |
+c | when WHICH = 'BE'; |
+c | |
+c | Note: converged Ritz values and associated |
+c | Ritz estimates have been placed in the first |
+c | NCONV locations in workl(ritz) and |
+c | workl(bounds) respectively. They have been |
+c | sorted (in _saup2) according to the WHICH |
+c | selection criterion. (Except in the case |
+c | WHICH = 'BE', they are sorted in an increasing |
+c | order.) |
+c %--------------------------------------------------%
c
if (which .eq. 'LM' .or. which .eq. 'SM'
& .or. which .eq. 'LA' .or. which .eq. 'SA' ) then
@@ -513,7 +513,7 @@ subroutine sseupd (rvec, howmny, select, d, z, ldz, sigma, bmat,
c | |
c | 2) If it is wanted, then check the corresponding Ritz |
c | estimate to see if it has converged. If it has, set |
-c | correponding entry in the logical array SELECT to |
+c | corresponding entry in the logical array SELECT to |
c | .TRUE.. |
c | |
c | If SELECT(j) = .TRUE. and j > NCONV, then there is a |
diff --git a/modules/performUQ/SimCenterUQ/nataf_gsa/ARPACK/sstqrb.f b/modules/performUQ/SimCenterUQ/nataf_gsa/ARPACK/sstqrb.f
index 9fd1e1925..d92933e37 100644
--- a/modules/performUQ/SimCenterUQ/nataf_gsa/ARPACK/sstqrb.f
+++ b/modules/performUQ/SimCenterUQ/nataf_gsa/ARPACK/sstqrb.f
@@ -93,7 +93,7 @@
c\Remarks
c 1. Starting with version 2.5, this routine is a modified version
c of LAPACK version 2.0 subroutine SSTEQR. No lines are deleted,
-c only commeted out and new lines inserted.
+c only commented out and new lines inserted.
c All lines commented out have "c$$$" at the beginning.
c Note that the LAPACK version 1.0 subroutine SSTEQR contained
c bugs.
diff --git a/modules/performUQ/SimCenterUQ/nataf_gsa/ARPACK/znapps.f b/modules/performUQ/SimCenterUQ/nataf_gsa/ARPACK/znapps.f
index 95bbce425..93675693a 100644
--- a/modules/performUQ/SimCenterUQ/nataf_gsa/ARPACK/znapps.f
+++ b/modules/performUQ/SimCenterUQ/nataf_gsa/ARPACK/znapps.f
@@ -208,9 +208,9 @@ subroutine znapps
& zabs1
zabs1( cdum ) = abs( dble( cdum ) ) + abs( dimag( cdum ) )
c
-c %----------------%
-c | Data statments |
-c %----------------%
+c %-----------------%
+c | Data statements |
+c %-----------------%
c
data first / .true. /
c
diff --git a/modules/performUQ/SimCenterUQ/nataf_gsa/ARPACK/znaup2.f b/modules/performUQ/SimCenterUQ/nataf_gsa/ARPACK/znaup2.f
index b862f68f1..0e590a6f1 100644
--- a/modules/performUQ/SimCenterUQ/nataf_gsa/ARPACK/znaup2.f
+++ b/modules/performUQ/SimCenterUQ/nataf_gsa/ARPACK/znaup2.f
@@ -26,7 +26,7 @@
c The logic for adjusting is contained within the current
c subroutine.
c If ISHIFT=0, NP is the number of shifts the user needs
-c to provide via reverse comunication. 0 < NP < NCV-NEV.
+c to provide via reverse communication. 0 < NP < NCV-NEV.
c NP may be less than NCV-NEV since a leading block of the current
c upper Hessenberg matrix has split off and contains "unwanted"
c Ritz values.
diff --git a/modules/performUQ/SimCenterUQ/nataf_gsa/ARPACK/zneupd.f b/modules/performUQ/SimCenterUQ/nataf_gsa/ARPACK/zneupd.f
index 369c58f20..cdc0891a9 100644
--- a/modules/performUQ/SimCenterUQ/nataf_gsa/ARPACK/zneupd.f
+++ b/modules/performUQ/SimCenterUQ/nataf_gsa/ARPACK/zneupd.f
@@ -488,7 +488,7 @@ subroutine zneupd (rvec, howmny, select, d, z, ldz, sigma,
c | |
c | 2) If it is wanted, then check the corresponding Ritz |
c | estimate to see if it has converged. If it has, set |
-c | correponding entry in the logical array SELECT to |
+c | corresponding entry in the logical array SELECT to |
c | .TRUE.. |
c | |
c | If SELECT(j) = .TRUE. and j > NCONV, then there is a |
diff --git a/modules/performUQ/SimCenterUQ/nataf_gsa/CMakeLists.txt b/modules/performUQ/SimCenterUQ/nataf_gsa/CMakeLists.txt
index d74086719..0cf9837f5 100644
--- a/modules/performUQ/SimCenterUQ/nataf_gsa/CMakeLists.txt
+++ b/modules/performUQ/SimCenterUQ/nataf_gsa/CMakeLists.txt
@@ -2,7 +2,7 @@ cmake_minimum_required(VERSION 3.10)
if(CMAKE_SOURCE_DIR STREQUAL CMAKE_CURRENT_SOURCE_DIR)
#
- # Only for debuging... I need mkl
+ # Only for debugging... I need mkl
#
project(nataf_gsa_project)
diff --git a/modules/performUQ/SimCenterUQ/nataf_gsa/CMakeLists.txt.UBUNTU b/modules/performUQ/SimCenterUQ/nataf_gsa/CMakeLists.txt.UBUNTU
index 812ac02e3..7e6b11052 100644
--- a/modules/performUQ/SimCenterUQ/nataf_gsa/CMakeLists.txt.UBUNTU
+++ b/modules/performUQ/SimCenterUQ/nataf_gsa/CMakeLists.txt.UBUNTU
@@ -2,7 +2,7 @@ cmake_minimum_required(VERSION 3.10)
if(CMAKE_SOURCE_DIR STREQUAL CMAKE_CURRENT_SOURCE_DIR)
#
- # Only for debuging... I need mkl
+ # Only for debugging... I need mkl
#
project(nataf_gsa_project)
enable_language(Fortran)
diff --git a/modules/performUQ/SimCenterUQ/nataf_gsa/ERANataf.cpp b/modules/performUQ/SimCenterUQ/nataf_gsa/ERANataf.cpp
index 29b0de58a..97755be42 100644
--- a/modules/performUQ/SimCenterUQ/nataf_gsa/ERANataf.cpp
+++ b/modules/performUQ/SimCenterUQ/nataf_gsa/ERANataf.cpp
@@ -593,20 +593,20 @@ void ERANataf::simulateAppBatch(string workflowDriver,
std::cerr << "copyDir:" << copyDir << "\n";
std::cerr << "runningFEM analysis.." << "\n\n";
//
- // If we find result.out in the templete dir. emit error;
+ // If we find result.out in the template dir. emit error;
//
std::string existingResultsFile = inp.workDir + "/templatedir/results.out";
if (std::filesystem::exists(existingResultsFile)) {
//*ERROR*
- std::string errMsg = "Error running SimCenterUQ: your templete directory already contains results.out file. Please clean up the directory where input file is located.";
+ std::string errMsg = "Error running SimCenterUQ: your template directory already contains results.out file. Please clean up the directory where input file is located.";
theErrorFile.write(errMsg);
}
std::string existingParamsFile = inp.workDir + "/templatedir/params.in";
if (std::filesystem::exists(existingParamsFile)) {
//*ERROR*
- std::string errMsg = "Error running SimCenterUQ: your templete directory already contains params.in file. Please clean up the directory where input file is located.";
+ std::string errMsg = "Error running SimCenterUQ: your template directory already contains params.in file. Please clean up the directory where input file is located.";
theErrorFile.write(errMsg);
}
}
@@ -984,20 +984,20 @@ void ERANataf::simulateAppBatchSurrogate(string workflowDriver,
std::cout << "runningFEM analysis.." << "\n\n";
//
- // If we find result.out in the templete dir. emit error;
+ // If we find result.out in the template dir. emit error;
//
std::string existingResultsFile = inp.workDir + "/templatedir/results.out";
if (std::filesystem::exists(existingResultsFile)) {
//*ERROR*
- std::string errMsg = "Error running SimCenterUQ: your templete directory already contains results.out file. Please clean up the directory where input file is located.";
+ std::string errMsg = "Error running SimCenterUQ: your template directory already contains results.out file. Please clean up the directory where input file is located.";
theErrorFile.write(errMsg);
}
std::string existingParamsFile = inp.workDir + "/templatedir/params.in";
if (std::filesystem::exists(existingParamsFile)) {
//*ERROR*
- std::string errMsg = "Error running SimCenterUQ: your templete directory already contains params.in file. Please clean up the directory where input file is located.";
+ std::string errMsg = "Error running SimCenterUQ: your template directory already contains params.in file. Please clean up the directory where input file is located.";
theErrorFile.write(errMsg);
}
}
@@ -1048,7 +1048,7 @@ void ERANataf::readBin(string filename,int ndim, vector> &mat, in
}
else {
nsamp = num_elements / ndim;
- std::cout << " - Found " << num_elements << " values from the bianry file\n";
+ std::cout << " - Found " << num_elements << " values from the binary file\n";
std::cout << " - Number of samples: " << nsamp << "\n";
std::cout << " - Dimension: " << ndim << "\n";
}
@@ -1100,13 +1100,13 @@ void ERANataf::readCSV(string filename, int ndim, vector>& mat, i
 vector<double> mattmp;
mattmp.reserve(ndim);
- // split string by delimeter
+ // split string by delimiter
int start = 0U;
int end = line.find(delimiter);
j = 0;
try {
- // if comma seperated
+ // if comma separated
while (end != std::string::npos) {
fileIsCsv = true;
if (start != end)
@@ -1118,7 +1118,7 @@ void ERANataf::readCSV(string filename, int ndim, vector>& mat, i
end = line.find(delimiter, start);
}
- // if space seperated
+ // if space separated
if (j == 0) {
end = line.find(delimiter2);
while (end != std::string::npos) {
@@ -1133,7 +1133,7 @@ void ERANataf::readCSV(string filename, int ndim, vector>& mat, i
}
}
- // if tab seperated
+ // if tab separated
if (j == 0) {
end = line.find(delimiter3);
while (end != std::string::npos) {
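The readCSV hunks above document a fallback order when splitting each line: comma first, then space, then tab. A hedged sketch of that idea follows; parseLine is an illustrative stand-in, not the actual member function in ERANataf.cpp.

    #include <initializer_list>
    #include <sstream>
    #include <string>
    #include <vector>

    std::vector<double> parseLine(const std::string& line)
    {
        // try the delimiters in the order described above: comma, space, tab
        for (const char delim : {',', ' ', '\t'}) {
            if (line.find(delim) == std::string::npos) continue;
            std::vector<double> values;
            std::istringstream ss(line);
            std::string token;
            while (std::getline(ss, token, delim)) {
                if (!token.empty()) values.push_back(std::stod(token));
            }
            if (!values.empty()) return values;
        }
        // no delimiter found: treat the whole line as a single value (if any)
        return line.empty() ? std::vector<double>{} : std::vector<double>{std::stod(line)};
    }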
diff --git a/modules/performUQ/SimCenterUQ/nataf_gsa/conanfile.py b/modules/performUQ/SimCenterUQ/nataf_gsa/conanfile.py
index 23dbae743..ed26f300f 100644
--- a/modules/performUQ/SimCenterUQ/nataf_gsa/conanfile.py
+++ b/modules/performUQ/SimCenterUQ/nataf_gsa/conanfile.py
@@ -1,56 +1,61 @@
-from conans import ConanFile, CMake, tools
-import os
-
-class simCenterBackendApps(ConanFile):
- name = "nataf_gsa_cpp_mpi"
- version = "1.0.0"
- description = "Software for creating nataf_gsa"
- license = "BSD 2-Clause"
- settings = {"os": None, "build_type": None, "compiler": None, "arch": ["x86_64"]}
- options = {"shared": [True, False]}
- default_options = {"mkl-static:threaded": False, "ipp-static:simcenter_backend": True}
- generators = "cmake"
- build_policy = "missing"
- requires = "eigen/3.3.7", \
- "jsonformoderncpp/3.7.0", \
- "mkl-static/2019.4@simcenter/stable", \
- "boost/1.74.0", \
- "nlopt/2.6.2",\
+import os # noqa: CPY001, D100, INP001
+from conans import CMake, ConanFile
+
+
+class simCenterBackendApps(ConanFile): # noqa: D101
+ name = 'nataf_gsa_cpp_mpi'
+ version = '1.0.0'
+ description = 'Software for creating nataf_gsa'
+ license = 'BSD 2-Clause'
+ settings = {'os': None, 'build_type': None, 'compiler': None, 'arch': ['x86_64']} # noqa: RUF012
+ options = {'shared': [True, False]} # noqa: RUF012
+ default_options = { # noqa: RUF012
+ 'mkl-static:threaded': False,
+ 'ipp-static:simcenter_backend': True,
+ }
+ generators = 'cmake'
+ build_policy = 'missing'
+ requires = (
+ 'eigen/3.3.7',
+ 'jsonformoderncpp/3.7.0',
+ 'mkl-static/2019.4@simcenter/stable',
+ 'boost/1.74.0',
+ 'nlopt/2.6.2',
+ )
# Custom attributes for Bincrafters recipe conventions
- _source_subfolder = "source_subfolder"
- _build_subfolder = "build_subfolder"
+ _source_subfolder = 'source_subfolder'
+ _build_subfolder = 'build_subfolder'
# Set short paths for Windows
- short_paths = True
- scm = {
- "type": "git", # Use "type": "svn", if local repo is managed using SVN
- "subfolder": _source_subfolder,
- "url": "auto",
- "revision": "auto"
+ short_paths = True
+ scm = { # noqa: RUF012
+ 'type': 'git', # Use "type": "svn", if local repo is managed using SVN
+ 'subfolder': _source_subfolder,
+ 'url': 'auto',
+ 'revision': 'auto',
}
-
- def configure(self):
+ def configure(self): # noqa: D102
self.options.shared = False
- if self.settings.os == "Windows":
- self.options["libcurl"].with_winssl = True
- self.options["libcurl"].with_openssl = False
+ if self.settings.os == 'Windows':
+ self.options['libcurl'].with_winssl = True
+ self.options['libcurl'].with_openssl = False
- def configure_cmake(self):
+ def configure_cmake(self): # noqa: D102
cmake = CMake(self)
cmake.configure(source_folder=self._source_subfolder)
return cmake
-
- def build(self):
+
+ def build(self): # noqa: D102
cmake = self.configure_cmake()
cmake.build()
- def package(self):
- self.copy(pattern="LICENSE", dst="licenses", src=self._source_subfolder)
+ def package(self): # noqa: D102
+ self.copy(pattern='LICENSE', dst='licenses', src=self._source_subfolder)
cmake = self.configure_cmake()
cmake.install()
- self.copy("*", dst="bin", src=self._source_subfolder + "/applications")
+ self.copy('*', dst='bin', src=self._source_subfolder + '/applications')
- def package_info(self):
- self.env_info.PATH.append(os.path.join(self.package_folder, "bin"))
+ def package_info(self): # noqa: D102
+ self.env_info.PATH.append(os.path.join(self.package_folder, 'bin')) # noqa: PTH118
diff --git a/modules/performUQ/SimCenterUQ/nataf_gsa/jsonInput.cpp b/modules/performUQ/SimCenterUQ/nataf_gsa/jsonInput.cpp
index 3c20569ec..080e64862 100644
--- a/modules/performUQ/SimCenterUQ/nataf_gsa/jsonInput.cpp
+++ b/modules/performUQ/SimCenterUQ/nataf_gsa/jsonInput.cpp
@@ -59,7 +59,7 @@ jsonInput::jsonInput(string workDir, string inpFile, int procno)
//
// sy & frank - designsafe shell will always pass me scInput.json as input file name which is not good for non-quofem apps
- // check if the sc_Input.json exists and if so overwirte the input file name
+ // check if the sc_Input.json exists and if so overwrite the input file name
std::string sc_jsonPath = workDir + "/templatedir/sc_scInput.json";
bool sc_exists = std::filesystem::exists(sc_jsonPath);
@@ -757,7 +757,7 @@ jsonInput::fromTextToId(string groupTxt, vector& groupPool, vector groupString;
while (ss.good()) {
std::string substr;
- getline(ss, substr, ','); // incase we have multiple strings inside {}
+ getline(ss, substr, ','); // in case we have multiple strings inside {}
groupString.push_back(substr);
 std::vector<std::string>::iterator itr = std::find(groupPool.begin(), groupPool.end(), substr);
@@ -797,7 +797,7 @@ jsonInput::fromTextToStr(string groupTxt, vector>& groupStringVec
 std::vector<std::string> groupString;
while (ss.good()) {
std::string substr;
- getline(ss, substr, ','); // incase we have multiple strings inside {}
+ getline(ss, substr, ','); // in case we have multiple strings inside {}
groupString.push_back(substr);
flattenStringVect.push_back(substr);
}
diff --git a/modules/performUQ/SimCenterUQ/nataf_gsa/lib_armadillo/armadillo-10.1.0/include/armadillo_bits/Cube_meat.hpp b/modules/performUQ/SimCenterUQ/nataf_gsa/lib_armadillo/armadillo-10.1.0/include/armadillo_bits/Cube_meat.hpp
index 3c6837491..7a6107236 100644
--- a/modules/performUQ/SimCenterUQ/nataf_gsa/lib_armadillo/armadillo-10.1.0/include/armadillo_bits/Cube_meat.hpp
+++ b/modules/performUQ/SimCenterUQ/nataf_gsa/lib_armadillo/armadillo-10.1.0/include/armadillo_bits/Cube_meat.hpp
@@ -819,7 +819,7 @@ Cube::operator-=(const subview_cube& X)
-//! in-place element-wise cube mutiplication (using a subcube on the right-hand-side)
+//! in-place element-wise cube multiplication (using a subcube on the right-hand-side)
 template<typename eT>
 inline
 Cube<eT>&
diff --git a/modules/performUQ/SimCenterUQ/nataf_gsa/lib_armadillo/armadillo-10.1.0/include/armadillo_bits/GenCube_meat.hpp b/modules/performUQ/SimCenterUQ/nataf_gsa/lib_armadillo/armadillo-10.1.0/include/armadillo_bits/GenCube_meat.hpp
index 182d80ab5..f06b3a39d 100644
--- a/modules/performUQ/SimCenterUQ/nataf_gsa/lib_armadillo/armadillo-10.1.0/include/armadillo_bits/GenCube_meat.hpp
+++ b/modules/performUQ/SimCenterUQ/nataf_gsa/lib_armadillo/armadillo-10.1.0/include/armadillo_bits/GenCube_meat.hpp
@@ -78,7 +78,7 @@ GenCube::apply(Cube& out) const
arma_extra_debug_sigprint();
// NOTE: we're assuming that the cube has already been set to the correct size;
- // this is done by either the Cube contructor or operator=()
+ // this is done by either the Cube constructor or operator=()
 if(is_same_type<gen_type, gen_ones>::yes) { out.ones(); }
 else if(is_same_type<gen_type, gen_zeros>::yes) { out.zeros(); }
diff --git a/modules/performUQ/SimCenterUQ/nataf_gsa/lib_armadillo/armadillo-10.1.0/include/armadillo_bits/Gen_meat.hpp b/modules/performUQ/SimCenterUQ/nataf_gsa/lib_armadillo/armadillo-10.1.0/include/armadillo_bits/Gen_meat.hpp
index 6c3da3979..2f279d061 100644
--- a/modules/performUQ/SimCenterUQ/nataf_gsa/lib_armadillo/armadillo-10.1.0/include/armadillo_bits/Gen_meat.hpp
+++ b/modules/performUQ/SimCenterUQ/nataf_gsa/lib_armadillo/armadillo-10.1.0/include/armadillo_bits/Gen_meat.hpp
@@ -95,7 +95,7 @@ Gen::apply(Mat& out) const
arma_extra_debug_sigprint();
// NOTE: we're assuming that the matrix has already been set to the correct size;
- // this is done by either the Mat contructor or operator=()
+ // this is done by either the Mat constructor or operator=()
 if(is_same_type<gen_type, gen_eye>::yes) { out.eye(); }
 else if(is_same_type<gen_type, gen_ones>::yes) { out.ones(); }
diff --git a/modules/performUQ/SimCenterUQ/nataf_gsa/lib_armadillo/armadillo-10.1.0/include/armadillo_bits/MapMat_meat.hpp b/modules/performUQ/SimCenterUQ/nataf_gsa/lib_armadillo/armadillo-10.1.0/include/armadillo_bits/MapMat_meat.hpp
index 57fce451b..5bcfeee0e 100644
--- a/modules/performUQ/SimCenterUQ/nataf_gsa/lib_armadillo/armadillo-10.1.0/include/armadillo_bits/MapMat_meat.hpp
+++ b/modules/performUQ/SimCenterUQ/nataf_gsa/lib_armadillo/armadillo-10.1.0/include/armadillo_bits/MapMat_meat.hpp
@@ -1018,7 +1018,7 @@ MapMat_val::operator/=(const eT in_val)
}
else
{
- // silly operation, but included for completness
+ // silly operation, but included for completeness
const eT val = eT(0) / in_val;
diff --git a/modules/performUQ/SimCenterUQ/nataf_gsa/lib_armadillo/armadillo-10.1.0/include/armadillo_bits/Mat_meat.hpp b/modules/performUQ/SimCenterUQ/nataf_gsa/lib_armadillo/armadillo-10.1.0/include/armadillo_bits/Mat_meat.hpp
index 1b69180c0..dbb6d7a9b 100644
--- a/modules/performUQ/SimCenterUQ/nataf_gsa/lib_armadillo/armadillo-10.1.0/include/armadillo_bits/Mat_meat.hpp
+++ b/modules/performUQ/SimCenterUQ/nataf_gsa/lib_armadillo/armadillo-10.1.0/include/armadillo_bits/Mat_meat.hpp
@@ -1924,7 +1924,7 @@ Mat::operator-=(const subview& X)
-//! in-place matrix mutiplication (using a submatrix on the right-hand-side)
+//! in-place matrix multiplication (using a submatrix on the right-hand-side)
 template<typename eT>
 inline
 Mat<eT>&
@@ -1939,7 +1939,7 @@ Mat::operator*=(const subview& X)
-//! in-place element-wise matrix mutiplication (using a submatrix on the right-hand-side)
+//! in-place element-wise matrix multiplication (using a submatrix on the right-hand-side)
 template<typename eT>
 inline
 Mat<eT>&
@@ -2114,7 +2114,7 @@ Mat::operator-=(const subview_cube& X)
-//! in-place matrix mutiplication (using a single-slice subcube on the right-hand-side)
+//! in-place matrix multiplication (using a single-slice subcube on the right-hand-side)
 template<typename eT>
 inline
 Mat<eT>&
@@ -2130,7 +2130,7 @@ Mat::operator*=(const subview_cube& X)
-//! in-place element-wise matrix mutiplication (using a single-slice subcube on the right-hand-side)
+//! in-place element-wise matrix multiplication (using a single-slice subcube on the right-hand-side)
 template<typename eT>
 inline
 Mat<eT>&
@@ -2239,7 +2239,7 @@ Mat::operator-=(const diagview& X)
-//! in-place matrix mutiplication (using a diagview on the right-hand-side)
+//! in-place matrix multiplication (using a diagview on the right-hand-side)
 template<typename eT>
 inline
 Mat<eT>&
@@ -2254,7 +2254,7 @@ Mat::operator*=(const diagview& X)
-//! in-place element-wise matrix mutiplication (using a diagview on the right-hand-side)
+//! in-place element-wise matrix multiplication (using a diagview on the right-hand-side)
 template<typename eT>
 inline
 Mat<eT>&
@@ -6080,7 +6080,7 @@ Mat::is_colvec() const
-//! returns true if the object has the same number of non-zero rows and columnns
+//! returns true if the object has the same number of non-zero rows and columns
 template<typename eT>
arma_inline
arma_warn_unused
diff --git a/modules/performUQ/SimCenterUQ/nataf_gsa/lib_armadillo/armadillo-10.1.0/include/armadillo_bits/SpMat_meat.hpp b/modules/performUQ/SimCenterUQ/nataf_gsa/lib_armadillo/armadillo-10.1.0/include/armadillo_bits/SpMat_meat.hpp
index 2354e9c76..4fe7c59db 100644
--- a/modules/performUQ/SimCenterUQ/nataf_gsa/lib_armadillo/armadillo-10.1.0/include/armadillo_bits/SpMat_meat.hpp
+++ b/modules/performUQ/SimCenterUQ/nataf_gsa/lib_armadillo/armadillo-10.1.0/include/armadillo_bits/SpMat_meat.hpp
@@ -3095,7 +3095,7 @@ SpMat::shed_rows(const uword in_row1, const uword in_row2)
arma_debug_check
(
(in_row1 > in_row2) || (in_row2 >= n_rows),
- "SpMat::shed_rows(): indices out of bounds or incorectly used"
+ "SpMat::shed_rows(): indices out of bounds or incorrectly used"
);
sync_csc();
@@ -3253,7 +3253,7 @@ SpMat::shed_cols(const uword in_col1, const uword in_col2)
/**
- * Element access; acces the i'th element (works identically to the Mat accessors).
+ * Element access; access the i'th element (works identically to the Mat accessors).
* If there is nothing at element i, 0 is returned.
*/
@@ -3439,7 +3439,7 @@ SpMat::is_colvec() const
-//! returns true if the object has the same number of non-zero rows and columnns
+//! returns true if the object has the same number of non-zero rows and columns
 template<typename eT>
arma_inline
arma_warn_unused
@@ -5328,7 +5328,7 @@ SpMat::init(const MapMat& x)
const uword x_index = x_entry.first;
const eT x_val = x_entry.second;
- // have we gone past the curent column?
+ // have we gone past the current column?
if(x_index >= x_col_index_endp1)
{
x_col = x_index / x_n_rows;
@@ -5597,7 +5597,7 @@ SpMat::init_batch_add(const Mat& locs, const Mat& vals, const boo
uvec sorted_indices = sort_index(abslocs); // Ascending sort.
- // work out the number of unique elments
+ // work out the number of unique elements
uword n_unique = 1; // first element is unique
for(uword i=1; i < sorted_indices.n_elem; ++i)
@@ -5652,7 +5652,7 @@ SpMat::init_batch_add(const Mat& locs, const Mat& vals, const boo
if( (sort_locations == false) || (actually_sorted == true) )
{
- // work out the number of unique elments
+ // work out the number of unique elements
uword n_unique = 1; // first element is unique
for(uword i=1; i < locs.n_cols; ++i)
@@ -6025,7 +6025,7 @@ SpMat::init_xform_mt(const SpBase& A, const Functor& func)
{
eT& t_values_i = t_values[i];
- t_values_i = func(x_values[i]); // NOTE: func() must produce a value of type eT (ie. act as a convertor between eT2 and eT)
+ t_values_i = func(x_values[i]); // NOTE: func() must produce a value of type eT (ie. act as a converter between eT2 and eT)
if(t_values_i == eT(0)) { has_zero = true; }
}
@@ -6043,7 +6043,7 @@ SpMat::init_xform_mt(const SpBase& A, const Functor& func)
while(it != it_end)
{
- const eT val = func(*it); // NOTE: func() must produce a value of type eT (ie. act as a convertor between eT2 and eT)
+ const eT val = func(*it); // NOTE: func() must produce a value of type eT (ie. act as a converter between eT2 and eT)
if(val == eT(0)) { has_zero = true; }
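The init_batch_add hunks above rely on a simple invariant: once the location indices are sorted ascending, the first entry is unique and every later entry is unique exactly when it differs from its predecessor. A small illustrative sketch of that counting step (countUnique is a made-up helper, not Armadillo code):

    #include <cstddef>
    #include <cstdint>
    #include <vector>

    std::size_t countUnique(const std::vector<std::uint64_t>& sorted_locs)
    {
        if (sorted_locs.empty()) return 0;
        std::size_t n_unique = 1;  // first element is unique
        for (std::size_t i = 1; i < sorted_locs.size(); ++i) {
            if (sorted_locs[i] != sorted_locs[i - 1]) ++n_unique;
        }
        return n_unique;
    }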
diff --git a/modules/performUQ/SimCenterUQ/nataf_gsa/lib_armadillo/armadillo-10.1.0/include/armadillo_bits/SpSubview_iterators_meat.hpp b/modules/performUQ/SimCenterUQ/nataf_gsa/lib_armadillo/armadillo-10.1.0/include/armadillo_bits/SpSubview_iterators_meat.hpp
index 52cf83573..90f4f5bda 100644
--- a/modules/performUQ/SimCenterUQ/nataf_gsa/lib_armadillo/armadillo-10.1.0/include/armadillo_bits/SpSubview_iterators_meat.hpp
+++ b/modules/performUQ/SimCenterUQ/nataf_gsa/lib_armadillo/armadillo-10.1.0/include/armadillo_bits/SpSubview_iterators_meat.hpp
@@ -832,7 +832,7 @@ SpSubview::const_row_iterator::operator++()
else if((*pos_ptr) == next_min_row + aux_row && col < next_min_col && (*pos_ptr) < aux_row + iterator_base::M->n_rows)
{
// The first element in this column is in a subsequent row that we
- // already have another elemnt for, but the column index is less so
+ // already have another element for, but the column index is less so
// this element will come first.
next_min_col = col;
next_actual_pos = col_offset + (pos_ptr - start_ptr);
diff --git a/modules/performUQ/SimCenterUQ/nataf_gsa/lib_armadillo/armadillo-10.1.0/include/armadillo_bits/arma_forward.hpp b/modules/performUQ/SimCenterUQ/nataf_gsa/lib_armadillo/armadillo-10.1.0/include/armadillo_bits/arma_forward.hpp
index 7c177043a..7e3797019 100644
--- a/modules/performUQ/SimCenterUQ/nataf_gsa/lib_armadillo/armadillo-10.1.0/include/armadillo_bits/arma_forward.hpp
+++ b/modules/performUQ/SimCenterUQ/nataf_gsa/lib_armadillo/armadillo-10.1.0/include/armadillo_bits/arma_forward.hpp
@@ -363,7 +363,7 @@ enum struct file_type : unsigned int
ppm_binary, //!< Portable Pixel Map (colour image), used by the field and cube classes
hdf5_binary, //!< HDF5: open binary format, not specific to Armadillo, which can store arbitrary data
hdf5_binary_trans, //!< [DO NOT USE - deprecated] as per hdf5_binary, but save/load the data with columns transposed to rows
- coord_ascii //!< simple co-ordinate format for sparse matrices (indices start at zero)
+ coord_ascii //!< simple coordinate format for sparse matrices (indices start at zero)
};
diff --git a/modules/performUQ/SimCenterUQ/nataf_gsa/lib_armadillo/armadillo-10.1.0/include/armadillo_bits/band_helper.hpp b/modules/performUQ/SimCenterUQ/nataf_gsa/lib_armadillo/armadillo-10.1.0/include/armadillo_bits/band_helper.hpp
index f4621b98f..e16e84008 100644
--- a/modules/performUQ/SimCenterUQ/nataf_gsa/lib_armadillo/armadillo-10.1.0/include/armadillo_bits/band_helper.hpp
+++ b/modules/performUQ/SimCenterUQ/nataf_gsa/lib_armadillo/armadillo-10.1.0/include/armadillo_bits/band_helper.hpp
@@ -240,7 +240,7 @@ compress(Mat& AB, const Mat& A, const uword KL, const uword KU, const bo
// http://www.netlib.org/lapack/lug/node124.html
// for ?gbsv, matrix AB size: 2*KL+KU+1 x N; band representation of A stored in rows KL+1 to 2*KL+KU+1 (note: fortran counts from 1)
- // for ?gbsvx, matrix AB size: KL+KU+1 x N; band representaiton of A stored in rows 1 to KL+KU+1 (note: fortran counts from 1)
+ // for ?gbsvx, matrix AB size: KL+KU+1 x N; band representation of A stored in rows 1 to KL+KU+1 (note: fortran counts from 1)
//
// the +1 in the above formulas is to take into account the main diagonal
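The band_helper comment above summarizes LAPACK's banded storage: for ?gbsvx the array AB has KL+KU+1 rows and, in 1-based terms, column j of A maps to AB(KU+1+i-j, j). Below is a hedged 0-based sketch of that packing; packBand is an illustrative helper under those assumptions, not band_helper::compress itself.

    #include <algorithm>
    #include <cstddef>
    #include <vector>

    // Pack a dense column-major N x N matrix A (KL sub-, KU super-diagonals)
    // into (KL+KU+1) x N banded storage: AB(KU + i - j, j) = A(i, j), 0-based.
    std::vector<double> packBand(const std::vector<double>& A, int N, int KL, int KU)
    {
        const int ldab = KL + KU + 1;
        std::vector<double> AB(static_cast<std::size_t>(ldab) * N, 0.0);
        for (int j = 0; j < N; ++j) {
            const int ilo = std::max(0, j - KU);
            const int ihi = std::min(N - 1, j + KL);
            for (int i = ilo; i <= ihi; ++i) {
                AB[static_cast<std::size_t>(j) * ldab + (KU + i - j)] =
                    A[static_cast<std::size_t>(j) * N + i];
            }
        }
        return AB;
    }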
diff --git a/modules/performUQ/SimCenterUQ/nataf_gsa/lib_armadillo/armadillo-10.1.0/include/armadillo_bits/diagview_meat.hpp b/modules/performUQ/SimCenterUQ/nataf_gsa/lib_armadillo/armadillo-10.1.0/include/armadillo_bits/diagview_meat.hpp
index ea8d78f4d..09e8b8f8d 100644
--- a/modules/performUQ/SimCenterUQ/nataf_gsa/lib_armadillo/armadillo-10.1.0/include/armadillo_bits/diagview_meat.hpp
+++ b/modules/performUQ/SimCenterUQ/nataf_gsa/lib_armadillo/armadillo-10.1.0/include/armadillo_bits/diagview_meat.hpp
@@ -548,7 +548,7 @@ diagview::extract(Mat& out, const diagview& in)
arma_extra_debug_sigprint();
// NOTE: we're assuming that the matrix has already been set to the correct size and there is no aliasing;
- // size setting and alias checking is done by either the Mat contructor or operator=()
+ // size setting and alias checking is done by either the Mat constructor or operator=()
 const Mat<eT>& in_m = in.m;
diff --git a/modules/performUQ/SimCenterUQ/nataf_gsa/lib_armadillo/armadillo-10.1.0/include/armadillo_bits/diskio_meat.hpp b/modules/performUQ/SimCenterUQ/nataf_gsa/lib_armadillo/armadillo-10.1.0/include/armadillo_bits/diskio_meat.hpp
index 60ec56548..0c52537e4 100644
--- a/modules/performUQ/SimCenterUQ/nataf_gsa/lib_armadillo/armadillo-10.1.0/include/armadillo_bits/diskio_meat.hpp
+++ b/modules/performUQ/SimCenterUQ/nataf_gsa/lib_armadillo/armadillo-10.1.0/include/armadillo_bits/diskio_meat.hpp
@@ -3035,7 +3035,7 @@ diskio::load_arma_binary(SpMat& x, std::istream& f, std::string& err_msg)
{
arma_extra_debug_print("detected inconsistent data while loading; re-reading integer parts as u32");
- // inconstency could be due to a different uword size used during saving,
+ // inconsistency could be due to a different uword size used during saving,
// so try loading the row_indices and col_ptrs under the assumption of 32 bit unsigned integers
f.clear();
@@ -3820,7 +3820,7 @@ diskio::load_hdf5_binary(Cube& x, const hdf5_name& spec, std::string& err_ms
return false;
}
- if(ndims == 1) { dims[1] = 1; dims[2] = 1; } // Vector case; one row/colum, several slices
+ if(ndims == 1) { dims[1] = 1; dims[2] = 1; } // Vector case; one row/column, several slices
if(ndims == 2) { dims[2] = 1; } // Matrix case; one column, several rows/slices
x.set_size(dims[2], dims[1], dims[0]);
diff --git a/modules/performUQ/SimCenterUQ/nataf_gsa/lib_armadillo/armadillo-10.1.0/include/armadillo_bits/eglue_core_meat.hpp b/modules/performUQ/SimCenterUQ/nataf_gsa/lib_armadillo/armadillo-10.1.0/include/armadillo_bits/eglue_core_meat.hpp
index ecdd08900..707b33adf 100644
--- a/modules/performUQ/SimCenterUQ/nataf_gsa/lib_armadillo/armadillo-10.1.0/include/armadillo_bits/eglue_core_meat.hpp
+++ b/modules/performUQ/SimCenterUQ/nataf_gsa/lib_armadillo/armadillo-10.1.0/include/armadillo_bits/eglue_core_meat.hpp
@@ -266,7 +266,7 @@ eglue_core::apply(outT& out, const eGlue& x)
 const bool use_mp = (Proxy<T1>::use_mp || Proxy<T2>::use_mp) && (arma_config::openmp);
// NOTE: we're assuming that the matrix has already been set to the correct size and there is no aliasing;
- // size setting and alias checking is done by either the Mat contructor or operator=()
+ // size setting and alias checking is done by either the Mat constructor or operator=()
eT* out_mem = out.memptr();
@@ -763,7 +763,7 @@ eglue_core::apply(Cube& out, const eGlueCube
 const bool use_mp = (ProxyCube<T1>::use_mp || ProxyCube<T2>::use_mp) && (arma_config::openmp);
// NOTE: we're assuming that the cube has already been set to the correct size and there is no aliasing;
- // size setting and alias checking is done by either the Cube contructor or operator=()
+ // size setting and alias checking is done by either the Cube constructor or operator=()
eT* out_mem = out.memptr();
diff --git a/modules/performUQ/SimCenterUQ/nataf_gsa/lib_armadillo/armadillo-10.1.0/include/armadillo_bits/eop_aux.hpp b/modules/performUQ/SimCenterUQ/nataf_gsa/lib_armadillo/armadillo-10.1.0/include/armadillo_bits/eop_aux.hpp
index 6d3decaf1..d250986b9 100644
--- a/modules/performUQ/SimCenterUQ/nataf_gsa/lib_armadillo/armadillo-10.1.0/include/armadillo_bits/eop_aux.hpp
+++ b/modules/performUQ/SimCenterUQ/nataf_gsa/lib_armadillo/armadillo-10.1.0/include/armadillo_bits/eop_aux.hpp
@@ -148,7 +148,7 @@ class eop_aux
{
//arma_extra_debug_sigprint();
- // acording to IEEE Standard for Floating-Point Arithmetic (IEEE 754)
+ // according to IEEE Standard for Floating-Point Arithmetic (IEEE 754)
 // the mantissa length for double is 53 bits = std::numeric_limits<double>::digits
 // the mantissa length for float is 24 bits = std::numeric_limits<float>::digits
diff --git a/modules/performUQ/SimCenterUQ/nataf_gsa/lib_armadillo/armadillo-10.1.0/include/armadillo_bits/eop_core_meat.hpp b/modules/performUQ/SimCenterUQ/nataf_gsa/lib_armadillo/armadillo-10.1.0/include/armadillo_bits/eop_core_meat.hpp
index a3594e0e6..e3b8fdbdc 100644
--- a/modules/performUQ/SimCenterUQ/nataf_gsa/lib_armadillo/armadillo-10.1.0/include/armadillo_bits/eop_core_meat.hpp
+++ b/modules/performUQ/SimCenterUQ/nataf_gsa/lib_armadillo/armadillo-10.1.0/include/armadillo_bits/eop_core_meat.hpp
@@ -249,7 +249,7 @@ eop_core::apply(outT& out, const eOp& x)
typedef typename T1::elem_type eT;
// NOTE: we're assuming that the matrix has already been set to the correct size and there is no aliasing;
- // size setting and alias checking is done by either the Mat contructor or operator=()
+ // size setting and alias checking is done by either the Mat constructor or operator=()
const eT k = x.aux;
eT* out_mem = out.memptr();
@@ -630,7 +630,7 @@ eop_core::apply(Cube& out, const eOpCube::rows(const uword in_row1, const uword in_row2)
arma_debug_check
(
( (in_row1 > in_row2) || (in_row2 >= n_rows) ),
- "field::rows(): indicies out of bounds or incorrectly used"
+ "field::rows(): indices out of bounds or incorrectly used"
);
const uword sub_n_rows = in_row2 - in_row1 + 1;
@@ -759,7 +759,7 @@ field::rows(const uword in_row1, const uword in_row2) const
arma_debug_check
(
( (in_row1 > in_row2) || (in_row2 >= n_rows) ),
- "field::rows(): indicies out of bounds or incorrectly used"
+ "field::rows(): indices out of bounds or incorrectly used"
);
const uword sub_n_rows = in_row2 - in_row1 + 1;
@@ -782,7 +782,7 @@ field::cols(const uword in_col1, const uword in_col2)
arma_debug_check
(
( (in_col1 > in_col2) || (in_col2 >= n_cols) ),
- "field::cols(): indicies out of bounds or incorrectly used"
+ "field::cols(): indices out of bounds or incorrectly used"
);
const uword sub_n_cols = in_col2 - in_col1 + 1;
@@ -805,7 +805,7 @@ field::cols(const uword in_col1, const uword in_col2) const
arma_debug_check
(
( (in_col1 > in_col2) || (in_col2 >= n_cols) ),
- "field::cols(): indicies out of bounds or incorrectly used"
+ "field::cols(): indices out of bounds or incorrectly used"
);
const uword sub_n_cols = in_col2 - in_col1 + 1;
@@ -826,7 +826,7 @@ field::slices(const uword in_slice1, const uword in_slice2)
arma_debug_check
(
( (in_slice1 > in_slice2) || (in_slice2 >= n_slices) ),
- "field::slices(): indicies out of bounds or incorrectly used"
+ "field::slices(): indices out of bounds or incorrectly used"
);
const uword sub_n_slices = in_slice2 - in_slice1 + 1;
@@ -847,7 +847,7 @@ field::slices(const uword in_slice1, const uword in_slice2) const
arma_debug_check
(
( (in_slice1 > in_slice2) || (in_slice2 >= n_slices) ),
- "field::slices(): indicies out of bounds or incorrectly used"
+ "field::slices(): indices out of bounds or incorrectly used"
);
const uword sub_n_slices = in_slice2 - in_slice1 + 1;
diff --git a/modules/performUQ/SimCenterUQ/nataf_gsa/lib_armadillo/armadillo-10.1.0/include/armadillo_bits/fn_interp1.hpp b/modules/performUQ/SimCenterUQ/nataf_gsa/lib_armadillo/armadillo-10.1.0/include/armadillo_bits/fn_interp1.hpp
index ccae59a20..06b5f0b48 100644
--- a/modules/performUQ/SimCenterUQ/nataf_gsa/lib_armadillo/armadillo-10.1.0/include/armadillo_bits/fn_interp1.hpp
+++ b/modules/performUQ/SimCenterUQ/nataf_gsa/lib_armadillo/armadillo-10.1.0/include/armadillo_bits/fn_interp1.hpp
@@ -307,7 +307,7 @@ interp1
else if(c1 == 'l') { sig = 20; } // linear
else
{
- if( (c1 == '*') && (c2 == 'n') ) { sig = 11; } // nearest neighour, assume monotonic increase in X and XI
+ if( (c1 == '*') && (c2 == 'n') ) { sig = 11; } // nearest neighbour, assume monotonic increase in X and XI
if( (c1 == '*') && (c2 == 'l') ) { sig = 21; } // linear, assume monotonic increase in X and XI
}
}
diff --git a/modules/performUQ/SimCenterUQ/nataf_gsa/lib_armadillo/armadillo-10.1.0/include/armadillo_bits/fn_size.hpp b/modules/performUQ/SimCenterUQ/nataf_gsa/lib_armadillo/armadillo-10.1.0/include/armadillo_bits/fn_size.hpp
index 507bc6bbb..8a5da9112 100644
--- a/modules/performUQ/SimCenterUQ/nataf_gsa/lib_armadillo/armadillo-10.1.0/include/armadillo_bits/fn_size.hpp
+++ b/modules/performUQ/SimCenterUQ/nataf_gsa/lib_armadillo/armadillo-10.1.0/include/armadillo_bits/fn_size.hpp
@@ -46,7 +46,7 @@ size(const Base& X)
-// explicit overload to workround ADL issues with C++17 std::size()
+// explicit overload to workaround ADL issues with C++17 std::size()
template
arma_warn_unused
inline
@@ -60,7 +60,7 @@ size(const Mat& X)
-// explicit overload to workround ADL issues with C++17 std::size()
+// explicit overload to workaround ADL issues with C++17 std::size()
template
arma_warn_unused
inline
@@ -74,7 +74,7 @@ size(const Row& X)
-// explicit overload to workround ADL issues with C++17 std::size()
+// explicit overload to workaround ADL issues with C++17 std::size()
template
arma_warn_unused
inline
@@ -130,7 +130,7 @@ size(const BaseCube& X)
-// explicit overload to workround ADL issues with C++17 std::size()
+// explicit overload to workaround ADL issues with C++17 std::size()
template
arma_warn_unused
inline
@@ -174,7 +174,7 @@ size(const SpBase& X)
-// explicit overload to workround ADL issues with C++17 std::size()
+// explicit overload to workaround ADL issues with C++17 std::size()
template
arma_warn_unused
inline
diff --git a/modules/performUQ/SimCenterUQ/nataf_gsa/lib_armadillo/armadillo-10.1.0/include/armadillo_bits/gmm_diag_meat.hpp b/modules/performUQ/SimCenterUQ/nataf_gsa/lib_armadillo/armadillo-10.1.0/include/armadillo_bits/gmm_diag_meat.hpp
index 0e1e8d904..50c750348 100644
--- a/modules/performUQ/SimCenterUQ/nataf_gsa/lib_armadillo/armadillo-10.1.0/include/armadillo_bits/gmm_diag_meat.hpp
+++ b/modules/performUQ/SimCenterUQ/nataf_gsa/lib_armadillo/armadillo-10.1.0/include/armadillo_bits/gmm_diag_meat.hpp
@@ -2235,7 +2235,7 @@ gmm_diag::km_iterate(const Mat& X, const uword max_iter, const bool verb
}
else
{
- // recover by using a randomly seleced sample (last resort)
+ // recover by using a randomly selected sample (last resort)
proposed_i = as_scalar(randi(1, distr_param(0,X_n_cols-1)));
}
diff --git a/modules/performUQ/SimCenterUQ/nataf_gsa/lib_armadillo/armadillo-10.1.0/include/armadillo_bits/gmm_full_meat.hpp b/modules/performUQ/SimCenterUQ/nataf_gsa/lib_armadillo/armadillo-10.1.0/include/armadillo_bits/gmm_full_meat.hpp
index d20292ca4..0cb23481c 100644
--- a/modules/performUQ/SimCenterUQ/nataf_gsa/lib_armadillo/armadillo-10.1.0/include/armadillo_bits/gmm_full_meat.hpp
+++ b/modules/performUQ/SimCenterUQ/nataf_gsa/lib_armadillo/armadillo-10.1.0/include/armadillo_bits/gmm_full_meat.hpp
@@ -2264,7 +2264,7 @@ gmm_full::km_iterate(const Mat& X, const uword max_iter, const bool verb
}
else
{
- // recover by using a randomly seleced sample (last resort)
+ // recover by using a randomly selected sample (last resort)
proposed_i = as_scalar(randi(1, distr_param(0,X_n_cols-1)));
}
diff --git a/modules/performUQ/SimCenterUQ/nataf_gsa/lib_armadillo/armadillo-10.1.0/include/armadillo_bits/include_superlu.hpp b/modules/performUQ/SimCenterUQ/nataf_gsa/lib_armadillo/armadillo-10.1.0/include/armadillo_bits/include_superlu.hpp
index 577fc165f..2bfa6395a 100644
--- a/modules/performUQ/SimCenterUQ/nataf_gsa/lib_armadillo/armadillo-10.1.0/include/armadillo_bits/include_superlu.hpp
+++ b/modules/performUQ/SimCenterUQ/nataf_gsa/lib_armadillo/armadillo-10.1.0/include/armadillo_bits/include_superlu.hpp
@@ -55,7 +55,7 @@
#if defined(ARMA_USE_SUPERLU_HEADERS) || defined(ARMA_SUPERLU_INCLUDE_DIR)
-// Since we need to suport float, double, cx_float and cx_double,
+// Since we need to support float, double, cx_float and cx_double,
// as well as preserve the sanity of the user,
// we cannot simply include all the SuperLU headers due to their messy state
// (duplicate definitions, pollution of global namespace, bizarro defines).
diff --git a/modules/performUQ/SimCenterUQ/nataf_gsa/lib_armadillo/armadillo-10.1.0/include/armadillo_bits/op_fft_meat.hpp b/modules/performUQ/SimCenterUQ/nataf_gsa/lib_armadillo/armadillo-10.1.0/include/armadillo_bits/op_fft_meat.hpp
index ed69f2ea1..72e177fc6 100644
--- a/modules/performUQ/SimCenterUQ/nataf_gsa/lib_armadillo/armadillo-10.1.0/include/armadillo_bits/op_fft_meat.hpp
+++ b/modules/performUQ/SimCenterUQ/nataf_gsa/lib_armadillo/armadillo-10.1.0/include/armadillo_bits/op_fft_meat.hpp
@@ -96,7 +96,7 @@ op_fft_real::apply( Mat< std::complex >& out, const mtOp<
}
else
{
- // process each column seperately
+ // process each column separately
out.set_size(N_user, n_cols);
@@ -220,7 +220,7 @@ op_fft_cx::apply_noalias(Mat& out, const Proxy& P, c
}
else
{
- // process each column seperately
+ // process each column separately
out.set_size(N_user, n_cols);
diff --git a/modules/performUQ/SimCenterUQ/nataf_gsa/lib_armadillo/armadillo-10.1.0/include/armadillo_bits/sp_auxlib_bones.hpp b/modules/performUQ/SimCenterUQ/nataf_gsa/lib_armadillo/armadillo-10.1.0/include/armadillo_bits/sp_auxlib_bones.hpp
index c7dcd384a..0ba00d9ef 100644
--- a/modules/performUQ/SimCenterUQ/nataf_gsa/lib_armadillo/armadillo-10.1.0/include/armadillo_bits/sp_auxlib_bones.hpp
+++ b/modules/performUQ/SimCenterUQ/nataf_gsa/lib_armadillo/armadillo-10.1.0/include/armadillo_bits/sp_auxlib_bones.hpp
@@ -18,7 +18,7 @@
//! @{
-//! wrapper for accesing external functions in ARPACK and SuperLU
+//! wrapper for accessing external functions in ARPACK and SuperLU
class sp_auxlib
{
public:
diff --git a/modules/performUQ/SimCenterUQ/nataf_gsa/lib_armadillo/armadillo-10.1.0/include/armadillo_bits/sp_auxlib_meat.hpp b/modules/performUQ/SimCenterUQ/nataf_gsa/lib_armadillo/armadillo-10.1.0/include/armadillo_bits/sp_auxlib_meat.hpp
index 1667f5862..053ed870e 100644
--- a/modules/performUQ/SimCenterUQ/nataf_gsa/lib_armadillo/armadillo-10.1.0/include/armadillo_bits/sp_auxlib_meat.hpp
+++ b/modules/performUQ/SimCenterUQ/nataf_gsa/lib_armadillo/armadillo-10.1.0/include/armadillo_bits/sp_auxlib_meat.hpp
@@ -1473,7 +1473,7 @@ sp_auxlib::spsolve_refine(Mat& X, typename T1::pod_type&
if(dn == nullptr) { return false; }
dn->lda = A.n_rows;
- dn->nzval = (void*) A.memptr(); // re-use memory instead of copying
+ dn->nzval = (void*) A.memptr(); // reuse memory instead of copying
out.nrow = A.n_rows;
out.ncol = A.n_cols;
@@ -1505,7 +1505,7 @@ sp_auxlib::spsolve_refine(Mat& X, typename T1::pod_type&
{
// superlu::destroy_dense_mat(&out);
- // since dn->nzval is set to re-use memory from a Mat object (which manages its own memory),
+ // since dn->nzval is set to reuse memory from a Mat object (which manages its own memory),
// we cannot simply call superlu::destroy_dense_mat().
// Only the out.Store structure can be freed.
@@ -1579,11 +1579,11 @@ sp_auxlib::run_aupd
resid.set_size(n);
- // Two contraints on NCV: (NCV > NEV) for sym problems or
+ // Two constraints on NCV: (NCV > NEV) for sym problems or
// (NCV > NEV + 2) for gen problems and (NCV <= N)
//
// We're calling either arpack::saupd() or arpack::naupd(),
- // which have slighly different minimum constraint and recommended value for NCV:
+ // which have slightly different minimum constraint and recommended value for NCV:
// http://www.caam.rice.edu/software/ARPACK/UG/node136.html
// http://www.caam.rice.edu/software/ARPACK/UG/node138.html
diff --git a/modules/performUQ/SimCenterUQ/nataf_gsa/lib_armadillo/armadillo-10.1.0/include/armadillo_bits/spdiagview_meat.hpp b/modules/performUQ/SimCenterUQ/nataf_gsa/lib_armadillo/armadillo-10.1.0/include/armadillo_bits/spdiagview_meat.hpp
index 4071cb00a..9c03fd2a5 100644
--- a/modules/performUQ/SimCenterUQ/nataf_gsa/lib_armadillo/armadillo-10.1.0/include/armadillo_bits/spdiagview_meat.hpp
+++ b/modules/performUQ/SimCenterUQ/nataf_gsa/lib_armadillo/armadillo-10.1.0/include/armadillo_bits/spdiagview_meat.hpp
@@ -727,7 +727,7 @@ spdiagview::extract(Mat& out, const spdiagview& in)
arma_extra_debug_sigprint();
// NOTE: we're assuming that the 'out' matrix has already been set to the correct size;
- // size setting is done by either the Mat contructor or Mat::operator=()
+ // size setting is done by either the Mat constructor or Mat::operator=()
const SpMat& in_m = in.m;
diff --git a/modules/performUQ/SimCenterUQ/nataf_gsa/lib_armadillo/armadillo-10.1.0/include/armadillo_bits/subview_cube_meat.hpp b/modules/performUQ/SimCenterUQ/nataf_gsa/lib_armadillo/armadillo-10.1.0/include/armadillo_bits/subview_cube_meat.hpp
index 99d391b1f..11e33fdc0 100644
--- a/modules/performUQ/SimCenterUQ/nataf_gsa/lib_armadillo/armadillo-10.1.0/include/armadillo_bits/subview_cube_meat.hpp
+++ b/modules/performUQ/SimCenterUQ/nataf_gsa/lib_armadillo/armadillo-10.1.0/include/armadillo_bits/subview_cube_meat.hpp
@@ -1676,7 +1676,7 @@ subview_cube::extract(Cube& out, const subview_cube& in)
arma_extra_debug_sigprint();
// NOTE: we're assuming that the cube has already been set to the correct size and there is no aliasing;
- // size setting and alias checking is done by either the Cube contructor or operator=()
+ // size setting and alias checking is done by either the Cube constructor or operator=()
const uword n_rows = in.n_rows;
const uword n_cols = in.n_cols;
diff --git a/modules/performUQ/SimCenterUQ/nataf_gsa/lib_armadillo/armadillo-10.1.0/include/armadillo_bits/subview_meat.hpp b/modules/performUQ/SimCenterUQ/nataf_gsa/lib_armadillo/armadillo-10.1.0/include/armadillo_bits/subview_meat.hpp
index d11117664..9fee80560 100644
--- a/modules/performUQ/SimCenterUQ/nataf_gsa/lib_armadillo/armadillo-10.1.0/include/armadillo_bits/subview_meat.hpp
+++ b/modules/performUQ/SimCenterUQ/nataf_gsa/lib_armadillo/armadillo-10.1.0/include/armadillo_bits/subview_meat.hpp
@@ -1429,7 +1429,7 @@ subview::extract(Mat& out, const subview& in)
arma_extra_debug_sigprint();
// NOTE: we're assuming that the matrix has already been set to the correct size and there is no aliasing;
- // size setting and alias checking is done by either the Mat contructor or operator=()
+ // size setting and alias checking is done by either the Mat constructor or operator=()
const uword n_rows = in.n_rows; // number of rows in the subview
const uword n_cols = in.n_cols; // number of columns in the subview
diff --git a/modules/performUQ/SimCenterUQ/nataf_gsa/main.cpp b/modules/performUQ/SimCenterUQ/nataf_gsa/main.cpp
index a9c2e80b0..76bea33ba 100644
--- a/modules/performUQ/SimCenterUQ/nataf_gsa/main.cpp
+++ b/modules/performUQ/SimCenterUQ/nataf_gsa/main.cpp
@@ -91,7 +91,7 @@ int main(int argc, char** argv) {
#endif
if ((argc != 6) && (procno == 0)) {
- std::string errMsg = "Number of the additional commend line arguments is " + std::to_string(argc - 1) +
+ std::string errMsg = "The number of additional command line arguments is " + std::to_string(argc - 1) +
", but 5 is required. The arguments should always include the working directory / input file name / workflow driver name / os type / run type";
std::cerr << errMsg << std::endl;
theErrorFile.abort();
diff --git a/modules/performUQ/SimCenterUQ/nataf_gsa/normalDist.cpp b/modules/performUQ/SimCenterUQ/nataf_gsa/normalDist.cpp
index 48d2e5303..8f0451e54 100644
--- a/modules/performUQ/SimCenterUQ/nataf_gsa/normalDist.cpp
+++ b/modules/performUQ/SimCenterUQ/nataf_gsa/normalDist.cpp
@@ -39,7 +39,7 @@ UPDATES, ENHANCEMENTS, OR MODIFICATIONS.
* @author Sang-ri Yi
* @date 8/2021
* @section DESCRIPTION
- * Noraml distribution class
+ * Normal distribution class
*/
#include "normalDist.h"
diff --git a/modules/performUQ/SimCenterUQ/nataf_gsa/normalDist.h b/modules/performUQ/SimCenterUQ/nataf_gsa/normalDist.h
index 5369d3ea5..6c891a4e6 100644
--- a/modules/performUQ/SimCenterUQ/nataf_gsa/normalDist.h
+++ b/modules/performUQ/SimCenterUQ/nataf_gsa/normalDist.h
@@ -41,7 +41,7 @@ UPDATES, ENHANCEMENTS, OR MODIFICATIONS.
* @author Sang-ri Yi
* @date 8/2021
* @section DESCRIPTION
- * Noraml distribution class
+ * Normal distribution class
*/
#include "RVDist.h"
diff --git a/modules/performUQ/SimCenterUQ/nataf_gsa/runForward.cpp b/modules/performUQ/SimCenterUQ/nataf_gsa/runForward.cpp
index 05811f2f2..819cfa38f 100644
--- a/modules/performUQ/SimCenterUQ/nataf_gsa/runForward.cpp
+++ b/modules/performUQ/SimCenterUQ/nataf_gsa/runForward.cpp
@@ -39,7 +39,7 @@ UPDATES, ENHANCEMENTS, OR MODIFICATIONS.
* @author Sang-ri Yi
* @date 8/2021
* @section DESCRIPTION
- * Calcualtes the moments of QoIs and writes the results at dakotaTab.out
+ * Calculates the moments of QoIs and writes the results to dakotaTab.out
*/
#include "runForward.h"
diff --git a/modules/performUQ/SimCenterUQ/nataf_gsa/runForward.h b/modules/performUQ/SimCenterUQ/nataf_gsa/runForward.h
index df85113e5..846634b30 100644
--- a/modules/performUQ/SimCenterUQ/nataf_gsa/runForward.h
+++ b/modules/performUQ/SimCenterUQ/nataf_gsa/runForward.h
@@ -41,7 +41,7 @@ UPDATES, ENHANCEMENTS, OR MODIFICATIONS.
* @author Sang-ri Yi
* @date 8/2021
* @section DESCRIPTION
- * Calcualtes the moments of QoIs and writes the results at dakotaTab.out
+ * Calculates the moments of QoIs and writes the results to dakotaTab.out
*/
#include
diff --git a/modules/performUQ/SimCenterUQ/nataf_gsa/runGSA.cpp b/modules/performUQ/SimCenterUQ/nataf_gsa/runGSA.cpp
index 0f821c151..b182a804d 100644
--- a/modules/performUQ/SimCenterUQ/nataf_gsa/runGSA.cpp
+++ b/modules/performUQ/SimCenterUQ/nataf_gsa/runGSA.cpp
@@ -592,6 +592,7 @@ void runGSA::runSingleCombGSA(vector> gmat, int Ko, vector c
const int endm = comb.size(); // (nx+ng)-1
const int endx = endm - 1; // (nx)-1
+ // no need for GSA
if (endm == 0)
{
if (Opt == 'T')
@@ -676,8 +677,18 @@ void runGSA::runSingleCombGSA(vector> gmat, int Ko, vector c
}
while (1) {
- status = model.learn(data, Kos, maha_dist, static_subset, 1000, 1000, V * 1.e-12, false);// max kmeans iter = 100, max EM iter = 200, convergence variance = V*1.e-15
- logL = model.sum_log_p(data);
+
+ try
+ {
+ status = model.learn(data, Kos, maha_dist, static_subset, 1000, 1000, V * 1.e-12, false); // max kmeans iter = 1000, max EM iter = 1000, convergence variance = V*1.e-12
+ logL = model.sum_log_p(data);
+ }
+ catch (std::exception& e)
+ {
+ std::string errMsg = "GSA engine failed to fit a Gaussian mixture model. Check that the input and output random variables are continuous. If they are, increase the number of samples.";
+ theErrorFile.write(errMsg);
+ }
+
if ((logL < oldLogL) || (Kos >= Kthres)) {
break;
}
@@ -796,7 +807,7 @@ void runGSA::runSingleCombGSA(vector> gmat, int Ko, vector c
}
if (performPCA) {
- std::cout << " - Converting PCA sobol incides (" << nqoi_red << ") to orginal domain values (" << nqoi << ")...\n";
+ std::cout << " - Converting PCA Sobol indices (" << nqoi_red << ") to original domain values (" << nqoi << ")...\n";
auto readStart = std::chrono::high_resolution_clock::now();
@@ -1182,7 +1193,7 @@ void runGSA::runPCA(vector> gmat, vector>& gmat_re
vec Lvece;
eig_sym(Lvece, U_matrix, gramMat, "dc"); // gramMat = U*L*U^T
//std::cout << "lambda is " << Lvece << std::endl;
- svec = sqrt(reverse(Lvece)); // becasue eigenvalues are ascending order
+ svec = sqrt(reverse(Lvece)); // because eigenvalues are ascending order
U_matrix = fliplr((U_matrix)); // because eigenvalues are ascending order
svec.replace(datum::nan, min(svec));
@@ -1678,4 +1689,4 @@ void runGSA::writeTabOutputs(jsonInput inp, int procno)
bool runGSA::isInteger(double a) {
double b = round(a), epsilon = 1e-9; //some small range of error
return (a <= b + epsilon && a >= b - epsilon);
-}
\ No newline at end of file
+}
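The hunk above grows the number of mixture components (Kos) until the data log-likelihood stops improving or the Kthres cap is reached, and now traps fitting failures instead of crashing. A minimal sketch of the same component-selection idea, using scikit-learn's GaussianMixture as a stand-in for armadillo's gmm_full; the function name, cap, and error message below are illustrative only and are not part of the patch:

    import numpy as np
    from sklearn.mixture import GaussianMixture

    def fit_gmm_growing_k(X, k_start=1, k_max=10):
        """Add mixture components until the total log-likelihood stops improving."""
        best_model, best_logl = None, -np.inf
        for k in range(k_start, k_max + 1):
            try:
                model = GaussianMixture(n_components=k, covariance_type='full',
                                        random_state=0).fit(X)
            except ValueError as err:
                # analogue of the new catch block: report instead of crashing
                raise RuntimeError('GMM fit failed; check that the variables are '
                                   'continuous or increase the sample size') from err
            logl = model.score(X) * X.shape[0]  # score() is the per-sample average
            if logl < best_logl:
                break  # one more component no longer improves the fit
            best_model, best_logl = model, logl
        return best_model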
diff --git a/modules/performUQ/SimCenterUQ/nataf_gsa/runMFMC.cpp b/modules/performUQ/SimCenterUQ/nataf_gsa/runMFMC.cpp
index dceb30278..59f8af3f4 100644
--- a/modules/performUQ/SimCenterUQ/nataf_gsa/runMFMC.cpp
+++ b/modules/performUQ/SimCenterUQ/nataf_gsa/runMFMC.cpp
@@ -670,7 +670,7 @@ double CB = (CB_init - time_passed); //seconds
speedUp_list.push_back(speedUpNum/speedUpDenom);
//
- // Find the id of the most conservative QoI - that simulates HF largest amout
+ // Find the id of the most conservative QoI - the one that requires the largest amount of HF simulations
//
diff --git a/modules/performUQ/SimCenterUQ/nataf_gsa/test/Examples/EE_Test1/templatedir/MRF_4Story_Concentrated_model.tcl b/modules/performUQ/SimCenterUQ/nataf_gsa/test/Examples/EE_Test1/templatedir/MRF_4Story_Concentrated_model.tcl
index 0b19d369c..e13540d3c 100644
--- a/modules/performUQ/SimCenterUQ/nataf_gsa/test/Examples/EE_Test1/templatedir/MRF_4Story_Concentrated_model.tcl
+++ b/modules/performUQ/SimCenterUQ/nataf_gsa/test/Examples/EE_Test1/templatedir/MRF_4Story_Concentrated_model.tcl
@@ -246,7 +246,7 @@ proc rotSpring2DModIKModel {eleID nodeR nodeC K asPos asNeg MyPos MyNeg LS LK LA
equalDOF 15 45 $dof1; # Floor 5: Pier 1 to Pier 4
equalDOF 15 55 $dof1; # Floor 5: Pier 1 to Pier 5
-# assign boundary condidtions
+# assign boundary conditions
# command: fix nodeID dxFixity dyFixity rzFixity
# fixity values: 1 = constrained; 0 = unconstrained
# fix the base of the building; pin P-delta column at base
diff --git a/modules/performUQ/SimCenterUQ/nataf_gsa/test/Examples/EE_Test1/templatedir/postprocess.py b/modules/performUQ/SimCenterUQ/nataf_gsa/test/Examples/EE_Test1/templatedir/postprocess.py
index 75f19f8fd..78115d96c 100644
--- a/modules/performUQ/SimCenterUQ/nataf_gsa/test/Examples/EE_Test1/templatedir/postprocess.py
+++ b/modules/performUQ/SimCenterUQ/nataf_gsa/test/Examples/EE_Test1/templatedir/postprocess.py
@@ -1,53 +1,52 @@
-#!/usr/bin/python
+#!/usr/bin/python # noqa: CPY001, D100, EXE001
# written: fmk, adamzs 01/18
# import functions for Python 2.X support
-from __future__ import division, print_function
import sys
-if sys.version.startswith('2'):
- range=xrange
- string_types = basestring
+
+if sys.version.startswith('2'):
+ range = xrange # noqa: A001, F821
+ string_types = basestring # noqa: F821
else:
string_types = str
import sys
-def process_results(inputArgs):
+def process_results(inputArgs): # noqa: N803, D103
#
# process output file "node.out" for nodal displacements
#
- with open ('node.out', 'rt') as inFile:
+ with open('node.out') as inFile: # noqa: N806, PLW1514, PTH123
line = inFile.readline()
line = inFile.readline()
line = inFile.readline()
displ = line.split()
- numNode = len(displ)
+ numNode = len(displ) # noqa: N806
- inFile.close
+ inFile.close # noqa: B018
# now process the input args and write the results file
- outFile = open('results.out','w')
+ outFile = open('results.out', 'w') # noqa: N806, PLW1514, PTH123, SIM115
# note for now assuming no ERROR in user data
for i in inputArgs:
+ theList = i.split('_') # noqa: N806
- theList=i.split('_')
-
- if (len(theList) == 4):
+ if len(theList) == 4: # noqa: PLR2004
dof = int(theList[3])
else:
dof = 1
-
- if (theList[0] == "Node"):
- nodeTag = int(theList[1])
- if (nodeTag > 0 and nodeTag <= numNode):
- if (theList[2] == "Disp"):
- nodeDisp = abs(float(displ[((nodeTag-1)*2)+dof-1]))
+ if theList[0] == 'Node':
+ nodeTag = int(theList[1]) # noqa: N806
+
+ if nodeTag > 0 and nodeTag <= numNode:
+ if theList[2] == 'Disp':
+ nodeDisp = abs(float(displ[((nodeTag - 1) * 2) + dof - 1])) # noqa: N806
outFile.write(str(nodeDisp))
outFile.write(' ')
else:
@@ -57,13 +56,13 @@ def process_results(inputArgs):
else:
outFile.write('0. ')
- outFile.close
+ outFile.close # noqa: B018
-if __name__ == "__main__":
+if __name__ == '__main__':
n = len(sys.argv)
responses = []
- for i in range(1,n):
- responses.append(sys.argv[i])
+ for i in range(1, n):
+ responses.append(sys.argv[i]) # noqa: PERF401
process_results(responses)
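Most of the noqa markers added above (SIM115, PTH123, PLW1514, B018) trace back to the manual open()/close pattern. A minimal sketch, assuming the same node.out layout (displacements on the third line) and the same Node_<tag>_Disp_<dof> response naming, of how the post-processing could be written with context managers so those suppressions would not be needed; this is an illustration, not part of the patch:

    import sys

    def process_results(responses, node_file='node.out', out_file='results.out'):
        # displacements are on the third line of node.out, as in the script above
        with open(node_file, encoding='utf-8') as fin:
            displ = fin.readlines()[2].split()
        num_node = len(displ)

        with open(out_file, 'w', encoding='utf-8') as fout:
            for item in responses:
                parts = item.split('_')
                dof = int(parts[3]) if len(parts) == 4 else 1
                value = '0.'
                if parts[0] == 'Node':
                    node_tag = int(parts[1])
                    if 0 < node_tag <= num_node and parts[2] == 'Disp':
                        value = str(abs(float(displ[(node_tag - 1) * 2 + dof - 1])))
                fout.write(value + ' ')

    if __name__ == '__main__':
        process_results(sys.argv[1:])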
diff --git a/modules/performUQ/SimCenterUQ/nataf_gsa/test/Examples/EE_Test1/templatedir/postprocess.tcl b/modules/performUQ/SimCenterUQ/nataf_gsa/test/Examples/EE_Test1/templatedir/postprocess.tcl
index 644557015..520dc90bd 100644
--- a/modules/performUQ/SimCenterUQ/nataf_gsa/test/Examples/EE_Test1/templatedir/postprocess.tcl
+++ b/modules/performUQ/SimCenterUQ/nataf_gsa/test/Examples/EE_Test1/templatedir/postprocess.tcl
@@ -9,7 +9,7 @@ puts $maxDisplacement
set resultFile [open results.out w]
set results []
-# for each quanity in list of QoI passed
+# for each quantity in list of QoI passed
# - get nodeTag
# - get nodal displacement if valid node, output 0.0 if not
# - for valid node output displacement, note if dof not provided output 1'st dof
diff --git a/modules/performUQ/SimCenterUQ/nataf_gsa/test/Examples/EE_Test1/templatedir/writeParam.py b/modules/performUQ/SimCenterUQ/nataf_gsa/test/Examples/EE_Test1/templatedir/writeParam.py
index 315a2ec33..2cbf2c9a6 100644
--- a/modules/performUQ/SimCenterUQ/nataf_gsa/test/Examples/EE_Test1/templatedir/writeParam.py
+++ b/modules/performUQ/SimCenterUQ/nataf_gsa/test/Examples/EE_Test1/templatedir/writeParam.py
@@ -1,34 +1,33 @@
+import os # noqa: CPY001, D100, INP001
import sys
-import os
-from subprocess import Popen, PIPE
-import subprocess
-def main():
- paramsIn = sys.argv[1]
- paramsOut = sys.argv[2]
- if not os.path.isfile(paramsIn):
- print('Input param file {} does not exist. Exiting...'.format(paramsIn))
- sys.exit()
+def main(): # noqa: D103
+ paramsIn = sys.argv[1] # noqa: N806
+ paramsOut = sys.argv[2] # noqa: N806
- outFILE = open(paramsOut, 'w')
+ if not os.path.isfile(paramsIn): # noqa: PTH113
+ print(f'Input param file {paramsIn} does not exist. Exiting...') # noqa: T201
+ sys.exit()
- with open(paramsIn) as inFILE:
+ outFILE = open(paramsOut, 'w') # noqa: N806, PLW1514, PTH123, SIM115
+ with open(paramsIn) as inFILE: # noqa: N806, PLW1514, PTH123
line = inFILE.readline()
- splitLine = line.split()
- numRV = int(splitLine[3])
+ splitLine = line.split() # noqa: N806
+ numRV = int(splitLine[3]) # noqa: N806
print(numRV, file=outFILE)
- for i in range(numRV):
+ for i in range(numRV): # noqa: B007
line = inFILE.readline()
- splitLine = line.split()
- nameRV = splitLine[1]
- valueRV = splitLine[3]
- print('{} {}'.format(nameRV, valueRV), file=outFILE)
+ splitLine = line.split() # noqa: N806
+ nameRV = splitLine[1] # noqa: N806
+ valueRV = splitLine[3] # noqa: N806
+ print(f'{nameRV} {valueRV}', file=outFILE)
+
+ outFILE.close # noqa: B018
+ inFILE.close # noqa: B018
- outFILE.close
- inFILE.close
if __name__ == '__main__':
- main()
+ main()
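The same open()/close pattern drives the SIM115/PTH123/B018 markers in writeParam.py. A minimal sketch, assuming the same parameter-file layout as the script above (RV count in the fourth column of the first line, then one record per RV with the name in the second column and the value in the fourth), written with pathlib and a context manager; illustrative only:

    import sys
    from pathlib import Path

    def translate_params(params_in, params_out):
        """Rewrite the UQ parameter file as 'name value' pairs."""
        lines = Path(params_in).read_text(encoding='utf-8').splitlines()
        num_rv = int(lines[0].split()[3])
        with open(params_out, 'w', encoding='utf-8') as fout:
            print(num_rv, file=fout)
            for line in lines[1:1 + num_rv]:
                tokens = line.split()
                print(f'{tokens[1]} {tokens[3]}', file=fout)

    if __name__ == '__main__':
        translate_params(sys.argv[1], sys.argv[2])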
diff --git a/modules/performUQ/SimCenterUQ/nataf_gsa/test/Examples/Test1/templatedir/TrussPost.py b/modules/performUQ/SimCenterUQ/nataf_gsa/test/Examples/Test1/templatedir/TrussPost.py
index bc2d53968..edc392fc4 100644
--- a/modules/performUQ/SimCenterUQ/nataf_gsa/test/Examples/Test1/templatedir/TrussPost.py
+++ b/modules/performUQ/SimCenterUQ/nataf_gsa/test/Examples/Test1/templatedir/TrussPost.py
@@ -1,51 +1,50 @@
-#!/usr/bin/python
+#!/usr/bin/python # noqa: CPY001, D100, EXE001
# written: fmk, adamzs 01/18
# import functions for Python 2.X support
-from __future__ import division, print_function
import sys
-if sys.version.startswith('2'):
- range=xrange
- string_types = basestring
+
+if sys.version.startswith('2'):
+ range = xrange # noqa: A001, F821
+ string_types = basestring # noqa: F821
else:
string_types = str
import sys
-def process_results(inputArgs):
+def process_results(inputArgs): # noqa: N803, D103
#
# process output file "node.out" for nodal displacements
#
- with open ('node.out', 'rt') as inFile:
+ with open('node.out') as inFile: # noqa: N806, PLW1514, PTH123
line = inFile.readline()
displ = line.split()
- numNode = len(displ)
+ numNode = len(displ) # noqa: N806
- inFile.close
+ inFile.close # noqa: B018
# now process the input args and write the results file
- outFile = open('results.out','w')
+ outFile = open('results.out', 'w') # noqa: N806, PLW1514, PTH123, SIM115
# note for now assuming no ERROR in user data
for i in inputArgs:
+ theList = i.split('_') # noqa: N806
- theList=i.split('_')
-
- if (len(theList) == 4):
+ if len(theList) == 4: # noqa: PLR2004
dof = int(theList[3])
else:
dof = 1
-
- if (theList[0] == "Node"):
- nodeTag = int(theList[1])
- if (nodeTag > 0 and nodeTag <= numNode):
- if (theList[2] == "Disp"):
- nodeDisp = abs(float(displ[((nodeTag-1)*2)+dof-1]))
+ if theList[0] == 'Node':
+ nodeTag = int(theList[1]) # noqa: N806
+
+ if nodeTag > 0 and nodeTag <= numNode:
+ if theList[2] == 'Disp':
+ nodeDisp = abs(float(displ[((nodeTag - 1) * 2) + dof - 1])) # noqa: N806
outFile.write(str(nodeDisp))
outFile.write(' ')
else:
@@ -55,13 +54,13 @@ def process_results(inputArgs):
else:
outFile.write('0. ')
- outFile.close
+ outFile.close # noqa: B018
-if __name__ == "__main__":
+if __name__ == '__main__':
n = len(sys.argv)
responses = []
- for i in range(1,n):
- responses.append(sys.argv[i])
+ for i in range(1, n):
+ responses.append(sys.argv[i]) # noqa: PERF401
process_results(responses)
diff --git a/modules/performUQ/SimCenterUQ/nataf_gsa/test/Examples/Test1/templatedir/TrussPost.tcl b/modules/performUQ/SimCenterUQ/nataf_gsa/test/Examples/Test1/templatedir/TrussPost.tcl
index 0c6069cce..729ff4097 100644
--- a/modules/performUQ/SimCenterUQ/nataf_gsa/test/Examples/Test1/templatedir/TrussPost.tcl
+++ b/modules/performUQ/SimCenterUQ/nataf_gsa/test/Examples/Test1/templatedir/TrussPost.tcl
@@ -5,7 +5,7 @@ set results []
# get list of valid nodeTags
set nodeTags [getNodeTags]
-# for each quanity in list of QoI passed
+# for each quantity in list of QoI passed
# - get nodeTag
# - get nodal displacement if valid node, output 0.0 if not
# - for valid node output displacement, note if dof not provided output 1'st dof
diff --git a/modules/performUQ/SimCenterUQ/nataf_gsa/test/Examples/Test2/templatedir/TrussModel.py b/modules/performUQ/SimCenterUQ/nataf_gsa/test/Examples/Test2/templatedir/TrussModel.py
index c3c3e9b8e..74cbfbefd 100644
--- a/modules/performUQ/SimCenterUQ/nataf_gsa/test/Examples/Test2/templatedir/TrussModel.py
+++ b/modules/performUQ/SimCenterUQ/nataf_gsa/test/Examples/Test2/templatedir/TrussModel.py
@@ -1,95 +1,86 @@
-#!/usr/bin/python
+#!/usr/bin/python # noqa: CPY001, D100, EXE001
# written: fmk, adamzs 06/20
# units kN & mm
import sys
+
import openseespy.opensees as ops
ops.wipe()
-from TrussParams import *
+from TrussParams import * # noqa: E402, F403
-def run_analysis():
- # build the model
+def run_analysis(): # noqa: D103
+ # build the model
- ops.model('basic', '-ndm', 2, '-ndf', 2)
+ ops.model('basic', '-ndm', 2, '-ndf', 2)
- ops.node(1, 0, 0)
- ops.node(2, 4000, 0)
- ops.node(3, 8000, 0)
- ops.node(4, 12000, 0)
- ops.node(5, 4000, 4000)
- ops.node(6, 8000, 4000)
+ ops.node(1, 0, 0)
+ ops.node(2, 4000, 0)
+ ops.node(3, 8000, 0)
+ ops.node(4, 12000, 0)
+ ops.node(5, 4000, 4000)
+ ops.node(6, 8000, 4000)
- ops.fix(1, 1, 1)
- ops.fix(4, 0, 1)
+ ops.fix(1, 1, 1)
+ ops.fix(4, 0, 1)
- ops.uniaxialMaterial('Elastic', 1, E)
+ ops.uniaxialMaterial('Elastic', 1, E) # noqa: F405
- ops.element('truss', 1, 1, 2, Ao, 1)
- ops.element('truss', 2, 2, 3, Ao, 1)
- ops.element('truss', 3, 3, 4, Ao, 1)
- ops.element('truss', 4, 1, 5, Au, 1)
- ops.element('truss', 5, 5, 6, Au, 1)
- ops.element('truss', 6, 6, 4, Au, 1)
- ops.element('truss', 7, 2, 5, Ao, 1)
- ops.element('truss', 8, 3, 6, Ao, 1)
- ops.element('truss', 9, 5, 3, Ao, 1)
+ ops.element('truss', 1, 1, 2, Ao, 1) # noqa: F405
+ ops.element('truss', 2, 2, 3, Ao, 1) # noqa: F405
+ ops.element('truss', 3, 3, 4, Ao, 1) # noqa: F405
+ ops.element('truss', 4, 1, 5, Au, 1) # noqa: F405
+ ops.element('truss', 5, 5, 6, Au, 1) # noqa: F405
+ ops.element('truss', 6, 6, 4, Au, 1) # noqa: F405
+ ops.element('truss', 7, 2, 5, Ao, 1) # noqa: F405
+ ops.element('truss', 8, 3, 6, Ao, 1) # noqa: F405
+ ops.element('truss', 9, 5, 3, Ao, 1) # noqa: F405
- ops.timeSeries('Linear', 1)
- ops.pattern('Plain', 1, 1)
+ ops.timeSeries('Linear', 1)
+ ops.pattern('Plain', 1, 1)
- ops.load(2, 0, -P)
- ops.load(3, 0, -P)
+ ops.load(2, 0, -P) # noqa: F405
+ ops.load(3, 0, -P) # noqa: F405
- # build and perform the analysis
+ # build and perform the analysis
- ops.algorithm('Linear')
- ops.integrator('LoadControl', 1.0)
- ops.system('ProfileSPD')
- ops.numberer('RCM')
- ops.constraints('Plain')
- ops.analysis('Static')
- ops.analyze(1)
+ ops.algorithm('Linear')
+ ops.integrator('LoadControl', 1.0)
+ ops.system('ProfileSPD')
+ ops.numberer('RCM')
+ ops.constraints('Plain')
+ ops.analysis('Static')
+ ops.analyze(1)
- node_disp = [[ops.nodeDisp(node_i, dof_j)
- for dof_j in [1,2]] for node_i in range(1, 7)]
+ node_disp = [
+ [ops.nodeDisp(node_i, dof_j) for dof_j in [1, 2]] for node_i in range(1, 7)
+ ]
- return node_disp
+ return node_disp # noqa: RET504
-def process_results(responses, node_disp):
- # identify the responses of interest
- nodes = [int(r.split('_')[1]) for r in responses]
- dofs = [int(r.split('_')[3]) if len(r.split('_'))>2 else 1
- for r in responses]
+def process_results(responses, node_disp): # noqa: D103
+ # identify the responses of interest
+ nodes = [int(r.split('_')[1]) for r in responses]
+ dofs = [int(r.split('_')[3]) if len(r.split('_')) > 2 else 1 for r in responses] # noqa: PLR2004
- # get the results
- results = []
- for n_i, d_i in zip(nodes, dofs):
- try:
- results.append(str(node_disp[n_i-1][d_i-1]))
- except:
- results.append('0.0')
+ # get the results
+ results = []
+ for n_i, d_i in zip(nodes, dofs):
+ try:
+ results.append(str(node_disp[n_i - 1][d_i - 1]))
+ except: # noqa: PERF203, E722
+ results.append('0.0')
- # save the results
- with open('results.out', 'w') as f:
- f.write(' '.join(results))
+ # save the results
+ with open('results.out', 'w') as f: # noqa: FURB103, PLW1514, PTH123
+ f.write(' '.join(results))
-if __name__ == "__main__":
+if __name__ == '__main__':
node_disp = run_analysis()
process_results(sys.argv[1:], node_disp)
-
-
-
-
-
-
-
-
-
-
diff --git a/modules/performUQ/SimCenterUQ/nataf_gsa/test/Examples/Test2/templatedir/TrussParams.py b/modules/performUQ/SimCenterUQ/nataf_gsa/test/Examples/Test2/templatedir/TrussParams.py
index 9be59d1b1..980e794f7 100644
--- a/modules/performUQ/SimCenterUQ/nataf_gsa/test/Examples/Test2/templatedir/TrussParams.py
+++ b/modules/performUQ/SimCenterUQ/nataf_gsa/test/Examples/Test2/templatedir/TrussParams.py
@@ -1,6 +1,6 @@
-# set some parameters
+# set some parameters # noqa: CPY001, D100, INP001
-E = "RV.E"
-P = "RV.P"
-Ao = "RV.Ao"
-Au = "RV.Au"
+E = 'RV.E'
+P = 'RV.P'
+Ao = 'RV.Ao'
+Au = 'RV.Au'
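TrussParams.py only holds the 'RV.*' placeholders that the UQ driver substitutes into the template before each run; TrussModel.py pulls them in through a star import, which is why the F403/F405 suppressions appear there. A small sketch of the explicit-import alternative that would make the substituted names visible to the linter; illustrative only, the template files themselves keep the star import:

    import openseespy.opensees as ops
    from TrussParams import Ao, Au, E, P  # explicit names instead of 'import *'

    def define_material_and_loads():
        """Use the substituted parameter values exactly as TrussModel.py does
        (assumes the nodes and load pattern have already been defined)."""
        ops.uniaxialMaterial('Elastic', 1, E)
        ops.element('truss', 1, 1, 2, Ao, 1)
        ops.element('truss', 4, 1, 5, Au, 1)
        ops.load(2, 0, -P)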
diff --git a/modules/performUQ/SimCenterUQ/nataf_gsa/test/Examples/Test3/templatedir/TrussPost.py b/modules/performUQ/SimCenterUQ/nataf_gsa/test/Examples/Test3/templatedir/TrussPost.py
index bc2d53968..edc392fc4 100644
--- a/modules/performUQ/SimCenterUQ/nataf_gsa/test/Examples/Test3/templatedir/TrussPost.py
+++ b/modules/performUQ/SimCenterUQ/nataf_gsa/test/Examples/Test3/templatedir/TrussPost.py
@@ -1,51 +1,50 @@
-#!/usr/bin/python
+#!/usr/bin/python # noqa: CPY001, D100, EXE001
# written: fmk, adamzs 01/18
# import functions for Python 2.X support
-from __future__ import division, print_function
import sys
-if sys.version.startswith('2'):
- range=xrange
- string_types = basestring
+
+if sys.version.startswith('2'):
+ range = xrange # noqa: A001, F821
+ string_types = basestring # noqa: F821
else:
string_types = str
import sys
-def process_results(inputArgs):
+def process_results(inputArgs): # noqa: N803, D103
#
# process output file "node.out" for nodal displacements
#
- with open ('node.out', 'rt') as inFile:
+ with open('node.out') as inFile: # noqa: N806, PLW1514, PTH123
line = inFile.readline()
displ = line.split()
- numNode = len(displ)
+ numNode = len(displ) # noqa: N806
- inFile.close
+ inFile.close # noqa: B018
# now process the input args and write the results file
- outFile = open('results.out','w')
+ outFile = open('results.out', 'w') # noqa: N806, PLW1514, PTH123, SIM115
# note for now assuming no ERROR in user data
for i in inputArgs:
+ theList = i.split('_') # noqa: N806
- theList=i.split('_')
-
- if (len(theList) == 4):
+ if len(theList) == 4: # noqa: PLR2004
dof = int(theList[3])
else:
dof = 1
-
- if (theList[0] == "Node"):
- nodeTag = int(theList[1])
- if (nodeTag > 0 and nodeTag <= numNode):
- if (theList[2] == "Disp"):
- nodeDisp = abs(float(displ[((nodeTag-1)*2)+dof-1]))
+ if theList[0] == 'Node':
+ nodeTag = int(theList[1]) # noqa: N806
+
+ if nodeTag > 0 and nodeTag <= numNode:
+ if theList[2] == 'Disp':
+ nodeDisp = abs(float(displ[((nodeTag - 1) * 2) + dof - 1])) # noqa: N806
outFile.write(str(nodeDisp))
outFile.write(' ')
else:
@@ -55,13 +54,13 @@ def process_results(inputArgs):
else:
outFile.write('0. ')
- outFile.close
+ outFile.close # noqa: B018
-if __name__ == "__main__":
+if __name__ == '__main__':
n = len(sys.argv)
responses = []
- for i in range(1,n):
- responses.append(sys.argv[i])
+ for i in range(1, n):
+ responses.append(sys.argv[i]) # noqa: PERF401
process_results(responses)
diff --git a/modules/performUQ/SimCenterUQ/nataf_gsa/test/Examples/Test3/templatedir/TrussPost.tcl b/modules/performUQ/SimCenterUQ/nataf_gsa/test/Examples/Test3/templatedir/TrussPost.tcl
index 0c6069cce..729ff4097 100644
--- a/modules/performUQ/SimCenterUQ/nataf_gsa/test/Examples/Test3/templatedir/TrussPost.tcl
+++ b/modules/performUQ/SimCenterUQ/nataf_gsa/test/Examples/Test3/templatedir/TrussPost.tcl
@@ -5,7 +5,7 @@ set results []
# get list of valid nodeTags
set nodeTags [getNodeTags]
-# for each quanity in list of QoI passed
+# for each quantity in list of QoI passed
# - get nodeTag
# - get nodal displacement if valid node, output 0.0 if not
# - for valid node output displacement, note if dof not provided output 1'st dof
diff --git a/modules/performUQ/SimCenterUQ/nataf_gsa/test/Examples/Test4/templatedir/TrussPost.tcl b/modules/performUQ/SimCenterUQ/nataf_gsa/test/Examples/Test4/templatedir/TrussPost.tcl
index 0c6069cce..729ff4097 100644
--- a/modules/performUQ/SimCenterUQ/nataf_gsa/test/Examples/Test4/templatedir/TrussPost.tcl
+++ b/modules/performUQ/SimCenterUQ/nataf_gsa/test/Examples/Test4/templatedir/TrussPost.tcl
@@ -5,7 +5,7 @@ set results []
# get list of valid nodeTags
set nodeTags [getNodeTags]
-# for each quanity in list of QoI passed
+# for each quantity in list of QoI passed
# - get nodeTag
# - get nodal displacement if valid node, output 0.0 if not
# - for valid node output displacement, note if dof not provided output 1'st dof
diff --git a/modules/performUQ/SimCenterUQ/nataf_gsa/weibullDist.cpp b/modules/performUQ/SimCenterUQ/nataf_gsa/weibullDist.cpp
index 05a551230..546c59488 100644
--- a/modules/performUQ/SimCenterUQ/nataf_gsa/weibullDist.cpp
+++ b/modules/performUQ/SimCenterUQ/nataf_gsa/weibullDist.cpp
@@ -51,7 +51,7 @@ UPDATES, ENHANCEMENTS, OR MODIFICATIONS.
#include
// bet is lamb, an, - follow ERA notation
// alp is k
-// boost recieves in order (k,an)
+// boost receives in order (k,an)
using std::vector;
@@ -276,4 +276,4 @@ double paramWeibObj(unsigned n, const double* x, double* grad, void* my_func_dat
}
return abs(std::sqrt(tgamma(1 + 2 / x[0]) - (tgamma(1 + 1 / x[0]))* tgamma(1 + 1 / x[0])) / tgamma(1 + 1 / x[0]) - val[1] / val[0]);
-}
\ No newline at end of file
+}
diff --git a/modules/performUQ/SimCenterUQ/notBeingUsed/SimCenterUQFEM.py b/modules/performUQ/SimCenterUQ/notBeingUsed/SimCenterUQFEM.py
index 2f2c231cc..6c232e67e 100644
--- a/modules/performUQ/SimCenterUQ/notBeingUsed/SimCenterUQFEM.py
+++ b/modules/performUQ/SimCenterUQ/notBeingUsed/SimCenterUQFEM.py
@@ -1,91 +1,104 @@
-# import functions for Python 2.X support
-from __future__ import division, print_function
-import sys, os
-if sys.version.startswith('2'):
- range=xrange
- string_types = basestring
+# import functions for Python 2.X support # noqa: CPY001, D100, INP001
+import os
+import sys
+
+if sys.version.startswith('2'):
+ range = xrange # noqa: A001, F821
+ string_types = basestring # noqa: F821
else:
string_types = str
-sys.path.insert(0, os.path.dirname(os.path.realpath(__file__)))
+sys.path.insert(0, os.path.dirname(os.path.realpath(__file__))) # noqa: PTH120
-import json
-import numpy as np
+import argparse
import platform
import shutil
-import subprocess
import stat
-import argparse
+import subprocess # noqa: S404
+
from preprocessJSON import preProcessDakota
-def main(args):
+def main(args): # noqa: D103
parser = argparse.ArgumentParser()
parser.add_argument('--workflowInput')
- parser.add_argument('--workflowOutput')
+ parser.add_argument('--workflowOutput')
parser.add_argument('--driverFile')
parser.add_argument('--runType')
- parser.add_argument('--filesWithRV',nargs='*')
- parser.add_argument('--filesWithEDP',nargs='*')
- parser.add_argument('--workdir')
+ parser.add_argument('--filesWithRV', nargs='*')
+ parser.add_argument('--filesWithEDP', nargs='*')
+ parser.add_argument('--workdir')
- args,unknowns = parser.parse_known_args()
+ args, unknowns = parser.parse_known_args() # noqa: F841
- inputFile = args.workflowInput
- runType = args.runType
- workflow_driver = args.driverFile
- outputFile = args.workflowOutput
- rvFiles = args.filesWithRV
- edpFiles = args.filesWithEDP
+ inputFile = args.workflowInput # noqa: N806, F841
+ runType = args.runType # noqa: N806, F841
+ workflow_driver = args.driverFile # noqa: F841
+ outputFile = args.workflowOutput # noqa: N806, F841
+ rvFiles = args.filesWithRV # noqa: N806, F841
+ edpFiles = args.filesWithEDP # noqa: N806, F841
- myScriptDir = os.path.dirname(os.path.realpath(__file__))
+ myScriptDir = os.path.dirname(os.path.realpath(__file__)) # noqa: PTH120, N806
# desktop applications
- if uqData['samples'] is None: # this happens with new applications, workflow to change
-
- print("RUNNING PREPROCESSOR\n")
- osType = platform.system()
- preprocessorCommand = '"{}/preprocessDakota" {} {} {} {} {} {} {} {}'.format(myScriptDir, bimName, samName, evtName, edpName, simName, driverFile, runDakota, osType)
- subprocess.Popen(preprocessorCommand, shell=True).wait()
- print("DONE RUNNING PREPROCESSOR\n")
+ if (
+ uqData['samples'] is None # noqa: F821
+ ): # this happens with new applications, workflow to change
+ print('RUNNING PREPROCESSOR\n') # noqa: T201
+ osType = platform.system() # noqa: N806
+ preprocessorCommand = f'"{myScriptDir}/preprocessDakota" {bimName} {samName} {evtName} {edpName} {simName} {driverFile} {runDakota} {osType}' # noqa: N806, F821
+ subprocess.Popen(preprocessorCommand, shell=True).wait() # noqa: S602
+ print('DONE RUNNING PREPROCESSOR\n') # noqa: T201
else:
-
- scriptDir = os.path.dirname(os.path.realpath(__file__))
- numRVs = preProcessDakota(bimName, evtName, samName, edpName, simName, driverFile, runDakota, uqData)
-
- shutil.move(bimName, "bim.j")
- shutil.move(evtName, "evt.j")
- if os.path.isfile(samName): shutil.move(samName, "sam.j")
- shutil.move(edpName, "edp.j")
-
- #Setting Workflow Driver Name
- workflowDriverName = 'workflow_driver'
- if ((platform.system() == 'Windows') and (runDakota == 'run')):
- workflowDriverName = 'workflow_driver.bat'
-
- #Change permision of workflow driver
- st = os.stat(workflowDriverName)
- os.chmod(workflowDriverName, st.st_mode | stat.S_IEXEC)
+ scriptDir = os.path.dirname(os.path.realpath(__file__)) # noqa: PTH120, N806, F841
+ numRVs = preProcessDakota( # noqa: N806, F841
+ bimName, # noqa: F821
+ evtName, # noqa: F821
+ samName, # noqa: F821
+ edpName, # noqa: F821
+ simName, # noqa: F821
+ driverFile, # noqa: F821
+ runDakota, # noqa: F821
+ uqData, # noqa: F821
+ )
+
+ shutil.move(bimName, 'bim.j') # noqa: F821
+ shutil.move(evtName, 'evt.j') # noqa: F821
+ if os.path.isfile(samName): # noqa: PTH113, F821
+ shutil.move(samName, 'sam.j') # noqa: F821
+ shutil.move(edpName, 'edp.j') # noqa: F821
+
+ # Setting Workflow Driver Name
+ workflowDriverName = 'workflow_driver' # noqa: N806
+ if (platform.system() == 'Windows') and (runDakota == 'run'): # noqa: F821
+ workflowDriverName = 'workflow_driver.bat' # noqa: N806
+
+ # Change permission of workflow driver
+ st = os.stat(workflowDriverName) # noqa: PTH116
+ os.chmod(workflowDriverName, st.st_mode | stat.S_IEXEC) # noqa: PTH101
# copy the dakota input file to the main working dir for the structure
- shutil.move("dakota.in", "../")
+ shutil.move('dakota.in', '../')
# change dir to the main working dir for the structure
- os.chdir("../")
+ os.chdir('../')
- if runDakota == "run":
-
- dakotaCommand = "dakota -input dakota.in -output dakota.out -error dakota.err"
- print('running Dakota: ', dakotaCommand)
+ if runDakota == 'run': # noqa: F821
+ dakotaCommand = ( # noqa: N806
+ 'dakota -input dakota.in -output dakota.out -error dakota.err'
+ )
+ print('running Dakota: ', dakotaCommand) # noqa: T201
try:
- result = subprocess.check_output(dakotaCommand, stderr=subprocess.STDOUT, shell=True)
+ result = subprocess.check_output( # noqa: S602
+ dakotaCommand, stderr=subprocess.STDOUT, shell=True
+ )
returncode = 0
except subprocess.CalledProcessError as e:
- result = e.output
- returncode = e.returncode
+ result = e.output # noqa: F841
+ returncode = e.returncode # noqa: F841
-if __name__ == '__main__':
+if __name__ == '__main__':
main(sys.argv[1:])
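The PTH116/PTH101 markers above flag the os.stat/os.chmod pair used to make the workflow driver executable. A pathlib equivalent is sketched below, assuming the same driver file name; shown only as an alternative, not as a change to the script:

    import stat
    from pathlib import Path

    def make_executable(driver='workflow_driver'):
        """Set the owner execute bit on the workflow driver."""
        path = Path(driver)
        path.chmod(path.stat().st_mode | stat.S_IEXEC)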
diff --git a/modules/performUQ/SimCenterUQ/notBeingUsed/parseSimCenterUQ.py b/modules/performUQ/SimCenterUQ/notBeingUsed/parseSimCenterUQ.py
index fec98482c..6584d5cd6 100644
--- a/modules/performUQ/SimCenterUQ/notBeingUsed/parseSimCenterUQ.py
+++ b/modules/performUQ/SimCenterUQ/notBeingUsed/parseSimCenterUQ.py
@@ -1,152 +1,153 @@
-# written: UQ team @ SimCenter
+# written: UQ team @ SimCenter # noqa: CPY001, D100, INP001
# import functions for Python 2.X support
-from __future__ import division, print_function
import sys
-if sys.version.startswith('2'):
- range=xrange
- string_types = basestring
+
+if sys.version.startswith('2'):
+ range = xrange # noqa: A001, F821
+ string_types = basestring # noqa: F821
else:
string_types = str
-import shutil
import json
import os
+import platform
import stat
+import subprocess # noqa: S404
import sys
-import platform
-from subprocess import Popen, PIPE
-import subprocess
-
-inputArgs = sys.argv
+inputArgs = sys.argv # noqa: N816
workdir_main = inputArgs[1]
workdir_temp = inputArgs[2]
run_type = inputArgs[3]
-# Replace the PATH TO strings with the path to the given executable in your
-# computer. The 'darwin' part corresponds to Mac, the 'else' clause corresponds
-# to Windows. You only need the path to either Feap or OpenSees depending on
-# which one you plan to use for the analysis.
+# Replace the PATH TO strings with the path to the given executable in your
+# computer. The 'darwin' part corresponds to Mac, the 'else' clause corresponds
+# to Windows. You only need the path to either Feap or OpenSees depending on
+# which one you plan to use for the analysis.
# run on local computer
-if run_type in ['runningLocal',]:
+if run_type == 'runningLocal':
# MAC
- if (sys.platform == 'darwin'):
+ if sys.platform == 'darwin':
OpenSees = 'OpenSees'
- surrogate = 'surrogateBuild.py'
- natafExe = 'nataf_gsa'
+ surrogate = 'surrogateBuild.py'
+ natafExe = 'nataf_gsa' # noqa: N816
Feap = 'feappv'
Dakota = 'dakota'
- plomScript = 'runPLoM.py'
+ plomScript = 'runPLoM.py' # noqa: N816
workflow_driver = 'workflow_driver'
- osType = 'Darwin'
+ osType = 'Darwin' # noqa: N816
# Windows
else:
OpenSees = 'OpenSees'
Feap = 'Feappv41.exe'
- surrogate = 'surrogateBuild.py'
- natafExe = 'nataf_gsa.exe'
+ surrogate = 'surrogateBuild.py'
+ natafExe = 'nataf_gsa.exe' # noqa: N816
Dakota = 'dakota'
- plomScript = 'runPLoM.py'
+ plomScript = 'runPLoM.py' # noqa: N816
workflow_driver = 'workflow_driver.bat'
- osType = 'Windows'
+ osType = 'Windows' # noqa: N816
# Stampede @ DesignSafe, DON'T EDIT
-elif run_type in ['runningRemote',]:
+elif run_type == 'runningRemote':
OpenSees = '/home1/00477/tg457427/bin/OpenSees'
Feap = '/home1/00477/tg457427/bin/feappv'
Dakota = 'dakota'
workflow_driver = 'workflow_driver'
- osType = 'Linux'
+ osType = 'Linux' # noqa: N816
# change workdir to the templatedir
os.chdir(workdir_temp)
-cwd = os.getcwd()
+cwd = os.getcwd() # noqa: PTH109
-print(cwd)
+print(cwd) # noqa: T201
# open the dakota json file
-with open('dakota.json') as data_file:
+with open('dakota.json') as data_file: # noqa: PLW1514, PTH123
data = json.load(data_file)
-uq_data = data["UQ_Method"]
-fem_data = data["fem"]
-rnd_data = data["randomVariables"]
-my_edps = data["EDP"]
+uq_data = data['UQ_Method']
+fem_data = data['fem']
+rnd_data = data['randomVariables']
+my_edps = data['EDP']
-myScriptDir = os.path.dirname(os.path.realpath(__file__))
-inputFile = "dakota.json"
+myScriptDir = os.path.dirname(os.path.realpath(__file__)) # noqa: PTH120, N816
+inputFile = 'dakota.json' # noqa: N816
-osType = platform.system()
-#preprocessorCommand = '"{}/preprocessSimCenterUQ" {} {} {} {}'.format(myScriptDir, inputFile, workflow_driver, run_type, osType)
-#subprocess.Popen(preprocessorCommand, shell=True).wait()
-#print("DONE RUNNING PREPROCESSOR\n")
+osType = platform.system() # noqa: N816
+# preprocessorCommand = '"{}/preprocessSimCenterUQ" {} {} {} {}'.format(myScriptDir, inputFile, workflow_driver, run_type, osType)
+# subprocess.Popen(preprocessorCommand, shell=True).wait()
+# print("DONE RUNNING PREPROCESSOR\n")
# edps = samplingData["edps"]
-numResponses = 0
-responseDescriptors=[]
+numResponses = 0 # noqa: N816
+responseDescriptors = [] # noqa: N816
for edp in my_edps:
- responseDescriptors.append(edp["name"])
- numResponses += 1
+ responseDescriptors.append(edp['name'])
+ numResponses += 1 # noqa: SIM113, N816
-femProgram = fem_data["program"]
-print(femProgram)
+femProgram = fem_data['program'] # noqa: N816
+print(femProgram) # noqa: T201
-if run_type in ['runningLocal']:
- os.chmod(workflow_driver, stat.S_IXUSR | stat.S_IRUSR | stat.S_IXOTH)
+if run_type == 'runningLocal':
+ os.chmod(workflow_driver, stat.S_IXUSR | stat.S_IRUSR | stat.S_IXOTH) # noqa: PTH101
-#command = Dakota + ' -input dakota.in -output dakota.out -error dakota.err'
+# command = Dakota + ' -input dakota.in -output dakota.out -error dakota.err'
-#Change permision of workflow driver
-st = os.stat(workflow_driver)
-os.chmod(workflow_driver, st.st_mode | stat.S_IEXEC)
+# Change permission of workflow driver
+st = os.stat(workflow_driver) # noqa: PTH116
+os.chmod(workflow_driver, st.st_mode | stat.S_IEXEC) # noqa: PTH101
# change dir to the main working dir for the structure
-os.chdir("../")
+os.chdir('../')
-cwd = os.getcwd()
-print(cwd)
+cwd = os.getcwd() # noqa: PTH109
+print(cwd) # noqa: T201
-if run_type in ['runningLocal']:
+if run_type == 'runningLocal':
+ # p = Popen(command, stdout=PIPE, stderr=PIPE, shell=True)
+ # for line in p.stdout:
+ # print(str(line))
-# p = Popen(command, stdout=PIPE, stderr=PIPE, shell=True)
-# for line in p.stdout:
-# print(str(line))
+ # dakotaCommand = "dakota -input dakota.in -output dakota.out -error dakota.err"
-# dakotaCommand = "dakota -input dakota.in -output dakota.out -error dakota.err"
-
- '''
+ """
LATER, CHANGE THE LOCATION
- '''
- #
+ """
if uq_data['uqType'] == 'Train GP Surrogate Model':
# simCenterUQCommand = 'python "{}/{}" {} {} {}'.format(myScriptDir,surrogate,workdir_main,osType,run_type)
- simCenterUQCommand = '"{}" "{}/{}" "{}" {} {}'.format(data['python'],myScriptDir,surrogate,workdir_main,osType,run_type)
- elif uq_data['uqType'] == 'Sensitivity Analysis':
- simCenterUQCommand = '"{}/{}" "{}" {} {}'.format(myScriptDir,natafExe,workdir_main,osType,run_type)
- elif uq_data['uqType'] == 'Forward Propagation':
- simCenterUQCommand = '"{}/{}" "{}" {} {}'.format(myScriptDir,natafExe,workdir_main,osType,run_type)
+ simCenterUQCommand = '"{}" "{}/{}" "{}" {} {}'.format( # noqa: N816
+ data['python'], myScriptDir, surrogate, workdir_main, osType, run_type
+ )
+ elif (
+ uq_data['uqType'] == 'Sensitivity Analysis'
+ or uq_data['uqType'] == 'Forward Propagation'
+ ):
+ simCenterUQCommand = ( # noqa: N816
+ f'"{myScriptDir}/{natafExe}" "{workdir_main}" {osType} {run_type}'
+ )
elif uq_data['uqType'] == 'Train PLoM Model':
- simCenterUQCommand = '"{}" "{}/{}" "{}" {} {}'.format(data['python'],myScriptDir,plomScript,workdir_main,osType,run_type)
-
+ simCenterUQCommand = '"{}" "{}/{}" "{}" {} {}'.format( # noqa: N816
+ data['python'], myScriptDir, plomScript, workdir_main, osType, run_type
+ )
-
- print('running SimCenterUQ: ', simCenterUQCommand)
+ print('running SimCenterUQ: ', simCenterUQCommand) # noqa: T201
# subprocess.Popen(simCenterUQCommand, shell=True).wait()
-
+
try:
- result = subprocess.check_output(simCenterUQCommand, stderr=subprocess.STDOUT, shell=True)
+ result = subprocess.check_output( # noqa: S602
+ simCenterUQCommand, stderr=subprocess.STDOUT, shell=True
+ )
returncode = 0
- print('DONE SUCESS')
+        print('DONE SUCCESS')  # noqa: T201
except subprocess.CalledProcessError as e:
result = e.output
returncode = e.returncode
- print('DONE FAIL')
-
+ print('DONE FAIL') # noqa: T201
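The S602 markers above flag the shell=True invocations of the UQ engines. A minimal sketch of the list-argument form with subprocess.run, assuming the same executable location and argument order as simCenterUQCommand; shown only as an alternative the editor would suggest, while the script itself keeps the quoted shell command:

    import subprocess

    def run_nataf(script_dir, nataf_exe, workdir_main, os_type, run_type):
        """Invoke the nataf_gsa engine without going through a shell."""
        cmd = [f'{script_dir}/{nataf_exe}', workdir_main, os_type, run_type]
        try:
            completed = subprocess.run(cmd, capture_output=True, text=True, check=True)
            return 0, completed.stdout
        except subprocess.CalledProcessError as err:
            return err.returncode, err.output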
diff --git a/modules/performUQ/SimCenterUQ/notBeingUsed/surrogateBuild.py b/modules/performUQ/SimCenterUQ/notBeingUsed/surrogateBuild.py
index 221af28e6..f995fc0d0 100644
--- a/modules/performUQ/SimCenterUQ/notBeingUsed/surrogateBuild.py
+++ b/modules/performUQ/SimCenterUQ/notBeingUsed/surrogateBuild.py
@@ -1,31 +1,38 @@
-import time
-import shutil
+import glob # noqa: CPY001, D100, INP001
+import json
+import math
import os
+import pickle # noqa: S403
+import random
+import shutil
+import subprocess # noqa: S404
import sys
-import subprocess
-import math
-import pickle
-import glob
-import json
-from scipy.stats import lognorm, norm
-import numpy as np
-import GPy as GPy
-
-from copy import deepcopy
-from pyDOE import lhs
+import time
import warnings
-import random
-
-from multiprocessing import Pool
+from copy import deepcopy
import emukit.multi_fidelity as emf
+import GPy as GPy # noqa: PLC0414
+import numpy as np
from emukit.model_wrappers.gpy_model_wrappers import GPyMultiOutputWrapper
-from emukit.multi_fidelity.convert_lists_to_array import convert_x_list_to_array, convert_xy_lists_to_arrays
-
-class GpFromModel(object):
+from emukit.multi_fidelity.convert_lists_to_array import (
+ convert_x_list_to_array,
+)
+from pyDOE import lhs
+from scipy.stats import lognorm, norm
- def __init__(self, work_dir, inputFile, workflowDriver, run_type, os_type, inp, errlog):
+class GpFromModel: # noqa: D101
+ def __init__( # noqa: C901, PLR0912, PLR0914, PLR0915
+ self,
+ work_dir,
+ inputFile, # noqa: N803
+ workflowDriver, # noqa: N803
+ run_type,
+ os_type,
+ inp,
+ errlog,
+ ):
t_init = time.time()
self.errlog = errlog
self.work_dir = work_dir
@@ -38,12 +45,12 @@ def __init__(self, work_dir, inputFile, workflowDriver, run_type, os_type, inp,
# From external READ JSON FILE
#
- rv_name = list()
- self.g_name = list()
+ rv_name = list() # noqa: C408
+ self.g_name = list() # noqa: C408
x_dim = 0
y_dim = 0
for rv in inp['randomVariables']:
- rv_name = rv_name + [rv['name']]
+ rv_name = rv_name + [rv['name']] # noqa: PLR6104, RUF005
x_dim += 1
if x_dim == 0:
@@ -51,12 +58,12 @@ def __init__(self, work_dir, inputFile, workflowDriver, run_type, os_type, inp,
errlog.exit(msg)
for g in inp['EDP']:
- if g['length']==1: # scalar
- self.g_name = self.g_name + [g['name']]
+ if g['length'] == 1: # scalar
+ self.g_name = self.g_name + [g['name']] # noqa: PLR6104, RUF005
y_dim += 1
- else: # vector
+ else: # vector
for nl in range(g['length']):
- self.g_name = self.g_name + ["{}_{}".format(g['name'],nl+1)]
+ self.g_name = self.g_name + ['{}_{}'.format(g['name'], nl + 1)] # noqa: PLR6104, RUF005
y_dim += 1
if y_dim == 0:
@@ -72,87 +79,92 @@ def __init__(self, work_dir, inputFile, workflowDriver, run_type, os_type, inp,
self.do_predictive = False
automate_doe = False
- surrogateInfo = inp["UQ_Method"]["surrogateMethodInfo"]
+ surrogateInfo = inp['UQ_Method']['surrogateMethodInfo'] # noqa: N806
try:
- self.do_parallel = surrogateInfo["parallelExecution"]
- except:
+ self.do_parallel = surrogateInfo['parallelExecution']
+ except: # noqa: E722
self.do_parallel = True
if self.do_parallel:
if self.run_type.lower() == 'runninglocal':
self.n_processor = os.cpu_count()
- from multiprocessing import Pool
+ from multiprocessing import Pool # noqa: PLC0415
+
self.pool = Pool(2)
else:
# Always
- from mpi4py import MPI
- from mpi4py.futures import MPIPoolExecutor
+ from mpi4py import MPI # noqa: PLC0415
+ from mpi4py.futures import MPIPoolExecutor # noqa: PLC0415
+
self.world = MPI.COMM_WORLD
self.pool = MPIPoolExecutor()
self.n_processor = self.world.Get_size()
- #self.n_processor =20
- print("nprocessor :")
- print(self.n_processor)
- #self.cal_interval = 5
+ # self.n_processor =20
+ print('nprocessor :') # noqa: T201
+ print(self.n_processor) # noqa: T201
+ # self.cal_interval = 5
self.cal_interval = self.n_processor
else:
self.pool = 0
self.cal_interval = 5
- if surrogateInfo["method"] == "Sampling and Simulation":
+ if surrogateInfo['method'] == 'Sampling and Simulation':
self.do_mf = False
do_sampling = True
do_simulation = True
- self.use_existing = surrogateInfo["existingDoE"]
+ self.use_existing = surrogateInfo['existingDoE']
if self.use_existing:
- self.inpData = os.path.join(work_dir, "templatedir/inpFile.in")
- self.outData = os.path.join(work_dir, "templatedir/outFile.in")
+ self.inpData = os.path.join(work_dir, 'templatedir/inpFile.in') # noqa: PTH118
+ self.outData = os.path.join(work_dir, 'templatedir/outFile.in') # noqa: PTH118
thr_count = surrogateInfo['samples'] # number of samples
-
- if surrogateInfo["advancedOpt"]:
- self.doe_method = surrogateInfo["DoEmethod"]
- if surrogateInfo["DoEmethod"] == "None":
+ if surrogateInfo['advancedOpt']:
+ self.doe_method = surrogateInfo['DoEmethod']
+ if surrogateInfo['DoEmethod'] == 'None':
do_doe = False
user_init = thr_count
else:
do_doe = True
- user_init = surrogateInfo["initialDoE"]
+ user_init = surrogateInfo['initialDoE']
else:
- self.doe_method = "pareto" #default
+ self.doe_method = 'pareto' # default
do_doe = True
user_init = -100
- elif surrogateInfo["method"] == "Import Data File":
+ elif surrogateInfo['method'] == 'Import Data File':
self.do_mf = False
do_sampling = False
- do_simulation = not surrogateInfo["outputData"]
- self.doe_method = "None" # default
+ do_simulation = not surrogateInfo['outputData']
+ self.doe_method = 'None' # default
do_doe = False
# self.inpData = surrogateInfo['inpFile']
- self.inpData = os.path.join(work_dir, "templatedir/inpFile.in")
+ self.inpData = os.path.join(work_dir, 'templatedir/inpFile.in') # noqa: PTH118
if not do_simulation:
# self.outData = surrogateInfo['outFile']
- self.outData = os.path.join(work_dir, "templatedir/outFile.in")
+ self.outData = os.path.join(work_dir, 'templatedir/outFile.in') # noqa: PTH118
- elif surrogateInfo["method"] == "Import Multi-fidelity Data File":
+ elif surrogateInfo['method'] == 'Import Multi-fidelity Data File':
self.do_mf = True
- self.doe_method = "None" # default
+ self.doe_method = 'None' # default
self.hf_is_model = surrogateInfo['HFfromModel']
self.lf_is_model = surrogateInfo['LFfromModel']
- if self. hf_is_model:
- self.use_existing_hf = surrogateInfo["existingDoE_HF"]
- self.samples_hf = surrogateInfo["samples_HF"]
+ if self.hf_is_model:
+ self.use_existing_hf = surrogateInfo['existingDoE_HF']
+ self.samples_hf = surrogateInfo['samples_HF']
if self.use_existing_hf:
- self.inpData = os.path.join(work_dir, "templatedir/inpFile_HF.in")
- self.outData = os.path.join(work_dir, "templatedir/outFile_HF.in")
+ self.inpData = os.path.join( # noqa: PTH118
+ work_dir, 'templatedir/inpFile_HF.in'
+ )
+ self.outData = os.path.join( # noqa: PTH118
+ work_dir, 'templatedir/outFile_HF.in'
+ )
else:
- self.inpData_hf = os.path.join(work_dir, "templatedir/inpFile_HF.in")
- self.outData_hf = os.path.join(work_dir, "templatedir/outFile_HF.in")
+ self.inpData_hf = os.path.join(work_dir, 'templatedir/inpFile_HF.in') # noqa: PTH118
+ self.outData_hf = os.path.join(work_dir, 'templatedir/outFile_HF.in') # noqa: PTH118
self.X_hf = read_txt(self.inpData_hf, errlog)
self.Y_hf = read_txt(self.outData_hf, errlog)
if self.X_hf.shape[0] != self.Y_hf.shape[0]:
@@ -160,14 +172,18 @@ def __init__(self, work_dir, inputFile, workflowDriver, run_type, os_type, inp,
errlog.exit(msg)
if self.lf_is_model:
- self.use_existing_lf = surrogateInfo["existingDoE_LF"]
- self.samples_lf = surrogateInfo["samples_LF"]
+ self.use_existing_lf = surrogateInfo['existingDoE_LF']
+ self.samples_lf = surrogateInfo['samples_LF']
if self.use_existing_lf:
- self.inpData = os.path.join(work_dir, "templatedir/inpFile_LF.in")
- self.outData = os.path.join(work_dir, "templatedir/outFile_LF.in")
+ self.inpData = os.path.join( # noqa: PTH118
+ work_dir, 'templatedir/inpFile_LF.in'
+ )
+ self.outData = os.path.join( # noqa: PTH118
+ work_dir, 'templatedir/outFile_LF.in'
+ )
else:
- self.inpData_lf = os.path.join(work_dir, "templatedir/inpFile_LF.in")
- self.outData_lf = os.path.join(work_dir, "templatedir/outFile_LF.in")
+ self.inpData_lf = os.path.join(work_dir, 'templatedir/inpFile_LF.in') # noqa: PTH118
+ self.outData_lf = os.path.join(work_dir, 'templatedir/outFile_LF.in') # noqa: PTH118
self.X_lf = read_txt(self.inpData_lf, errlog)
self.Y_lf = read_txt(self.outData_lf, errlog)
if self.X_lf.shape[0] != self.Y_lf.shape[0]:
@@ -175,10 +191,10 @@ def __init__(self, work_dir, inputFile, workflowDriver, run_type, os_type, inp,
errlog.exit(msg)
if (not self.hf_is_model) and self.lf_is_model:
- self.mf_case = "data-model"
+ self.mf_case = 'data-model'
do_sampling = True
do_simulation = True
- do_doe = surrogateInfo["doDoE"]
+ do_doe = surrogateInfo['doDoE']
self.use_existing = self.use_existing_lf
if self.lf_is_model:
if self.use_existing_lf:
@@ -194,10 +210,10 @@ def __init__(self, work_dir, inputFile, workflowDriver, run_type, os_type, inp,
thr_count = self.samples_lf # number of samples
elif self.hf_is_model and (not self.lf_is_model):
- self.mf_case = "model-data"
+ self.mf_case = 'model-data'
do_sampling = True
do_simulation = True
- do_doe = surrogateInfo["doDoE"]
+ do_doe = surrogateInfo['doDoE']
self.use_existing = self.use_existing_hf
if self.hf_is_model:
if self.use_existing_hf:
@@ -213,13 +229,13 @@ def __init__(self, work_dir, inputFile, workflowDriver, run_type, os_type, inp,
thr_count = self.samples_hf # number of samples
elif self.hf_is_model and self.lf_is_model:
- self.mf_case = "model-model"
+ self.mf_case = 'model-model'
do_sampling = True
do_simulation = True
- do_doe = surrogateInfo["doDoE"]
+ do_doe = surrogateInfo['doDoE']
elif (not self.hf_is_model) and (not self.lf_is_model):
- self.mf_case = "data-data"
+ self.mf_case = 'data-data'
do_sampling = False
do_simulation = False
do_doe = False
@@ -230,38 +246,43 @@ def __init__(self, work_dir, inputFile, workflowDriver, run_type, os_type, inp,
msg = 'Error reading json: either select "Import Data File" or "Sampling and Simulation"'
errlog.exit(msg)
- if surrogateInfo["advancedOpt"]:
- self.do_logtransform = surrogateInfo["logTransform"]
- kernel = surrogateInfo["kernel"]
- do_linear = surrogateInfo["linear"]
- nugget_opt = surrogateInfo["nuggetOpt"]
+ if surrogateInfo['advancedOpt']:
+ self.do_logtransform = surrogateInfo['logTransform']
+ kernel = surrogateInfo['kernel']
+ do_linear = surrogateInfo['linear']
+ nugget_opt = surrogateInfo['nuggetOpt']
try:
- self.nuggetVal = np.array(json.loads("[{}]".format(surrogateInfo["nuggetString"])))
+ self.nuggetVal = np.array(
+ json.loads('[{}]'.format(surrogateInfo['nuggetString']))
+ )
except json.decoder.JSONDecodeError:
msg = 'Error reading json: improper format of nugget values/bounds. Provide nugget values/bounds of each QoI with comma delimiter'
errlog.exit(msg)
- if self.nuggetVal.shape[0]!=self.y_dim and self.nuggetVal.shape[0]!=0 :
- msg = 'Error reading json: Number of nugget quantities ({}) does not match # QoIs ({})'.format(self.nuggetVal.shape[0],self.y_dim)
+ if (
+ self.nuggetVal.shape[0] != self.y_dim
+ and self.nuggetVal.shape[0] != 0
+ ):
+ msg = f'Error reading json: Number of nugget quantities ({self.nuggetVal.shape[0]}) does not match # QoIs ({self.y_dim})'
errlog.exit(msg)
- if nugget_opt == "Fixed Values":
- for Vals in self.nuggetVal:
- if (not np.isscalar(Vals)):
+ if nugget_opt == 'Fixed Values':
+ for Vals in self.nuggetVal: # noqa: N806
+ if not np.isscalar(Vals):
msg = 'Error reading json: provide nugget values of each QoI with comma delimiter'
errlog.exit(msg)
- elif nugget_opt == "Fixed Bounds":
- for Bous in self.nuggetVal:
- if (np.isscalar(Bous)):
+ elif nugget_opt == 'Fixed Bounds':
+ for Bous in self.nuggetVal: # noqa: N806
+ if np.isscalar(Bous):
msg = 'Error reading json: provide nugget bounds of each QoI in brackets with comma delimiter, e.g. [0.0,1.0],[0.0,2.0],...'
errlog.exit(msg)
- elif (isinstance(Bous,list)):
+ elif isinstance(Bous, list):
msg = 'Error reading json: provide both lower and upper bounds of nugget'
errlog.exit(msg)
- elif Bous.shape[0]!=2:
+ elif Bous.shape[0] != 2: # noqa: PLR2004
msg = 'Error reading json: provide nugget bounds of each QoI in brackets with comma delimiter, e.g. [0.0,1.0],[0.0,2.0],...'
errlog.exit(msg)
- elif Bous[0]>Bous[1]:
+ elif Bous[0] > Bous[1]:
msg = 'Error reading json: the lower bound of a nugget value should be smaller than its upper bound'
errlog.exit(msg)
@@ -277,10 +298,10 @@ def __init__(self, work_dir, inputFile, workflowDriver, run_type, os_type, inp,
self.do_logtransform = False
kernel = 'Matern 5/2'
do_linear = False
- #do_nugget = True
- nugget_opt = "optimize"
+ # do_nugget = True
+ nugget_opt = 'optimize'
- #if not self.do_mf:
+ # if not self.do_mf:
# if do_simulation:
# femInfo = inp["fem"]
# self.inpFile = femInfo["inputFile"]
@@ -292,17 +313,19 @@ def __init__(self, work_dir, inputFile, workflowDriver, run_type, os_type, inp,
#
if do_sampling:
- thr_NRMSE = surrogateInfo["accuracyLimit"]
- thr_t = surrogateInfo["timeLimit"] * 60
+ thr_NRMSE = surrogateInfo['accuracyLimit'] # noqa: N806
+ thr_t = surrogateInfo['timeLimit'] * 60
np.random.seed(surrogateInfo['seed'])
random.seed(surrogateInfo['seed'])
self.xrange = np.empty((0, 2), float)
for rv in inp['randomVariables']:
- if "lowerbound" not in rv:
+ if 'lowerbound' not in rv:
msg = 'Error in input RV: all RV should be set to Uniform distribution'
errlog.exit(msg)
- self.xrange = np.vstack((self.xrange, [rv['lowerbound'], rv['upperbound']]))
+ self.xrange = np.vstack(
+ (self.xrange, [rv['lowerbound'], rv['upperbound']])
+ )
self.len = np.abs(np.diff(self.xrange).T[0])
if sum(self.len == 0) > 0:
@@ -314,71 +337,89 @@ def __init__(self, work_dir, inputFile, workflowDriver, run_type, os_type, inp,
#
if self.use_existing:
- X_tmp = read_txt(self.inpData,errlog)
- Y_tmp = read_txt(self.outData,errlog)
+ X_tmp = read_txt(self.inpData, errlog) # noqa: N806
+ Y_tmp = read_txt(self.outData, errlog) # noqa: N806
n_ex = X_tmp.shape[0]
if self.do_mf:
if X_tmp.shape[1] != self.X_hf.shape[1]:
- msg = 'Error importing input data: dimension inconsistent: high fidelity data have {} RV column(s) but low fidelity model have {}.'.format(
- self.X_hf.shape[1], X_tmp.shape[1])
+ msg = f'Error importing input data: dimension inconsistent: high-fidelity data have {self.X_hf.shape[1]} RV column(s) but the low-fidelity model has {X_tmp.shape[1]}.'
errlog.exit(msg)
if Y_tmp.shape[1] != self.Y_hf.shape[1]:
- msg = 'Error importing input data: dimension inconsistent: high fidelity data have {} QoI column(s) but low fidelity model have {}.'.format(
- self.Y_hf.shape[1], Y_tmp.shape[1])
+ msg = f'Error importing input data: dimension inconsistent: high-fidelity data have {self.Y_hf.shape[1]} QoI column(s) but the low-fidelity model has {Y_tmp.shape[1]}.'
errlog.exit(msg)
if X_tmp.shape[1] != x_dim:
- msg = 'Error importing input data: dimension inconsistent: have {} RV(s) but have {} column(s).'.format(
- x_dim, X_tmp.shape[1])
+ msg = f'Error importing input data: dimension inconsistent: expected {x_dim} RV(s) but found {X_tmp.shape[1]} column(s).'
errlog.exit(msg)
if Y_tmp.shape[1] != y_dim:
- msg = 'Error importing input data: dimension inconsistent: have {} QoI(s) but have {} column(s).'.format(
- y_dim, Y_tmp.shape[1])
+ msg = f'Error importing input data: dimension inconsistent: expected {y_dim} QoI(s) but found {Y_tmp.shape[1]} column(s).'
errlog.exit(msg)
if n_ex != Y_tmp.shape[0]:
- msg = 'Error importing input data: numbers of samples of inputs ({}) and outputs ({}) are inconsistent'.format(n_ex, Y_tmp.shape[0])
+ msg = f'Error importing input data: numbers of samples of inputs ({n_ex}) and outputs ({Y_tmp.shape[0]}) are inconsistent'
errlog.exit(msg)
else:
n_ex = 0
- if user_init ==0:
- #msg = 'Error reading json: # of initial DoE should be greater than 0'
- #errlog.exit(msg)
- user_init = -1;
- X_tmp = np.zeros((0, x_dim))
- Y_tmp = np.zeros((0, y_dim))
+ if user_init == 0:
+ # msg = 'Error reading json: # of initial DoE should be greater than 0'
+ # errlog.exit(msg)
+ user_init = -1
+ X_tmp = np.zeros((0, x_dim)) # noqa: N806
+ Y_tmp = np.zeros((0, y_dim)) # noqa: N806
if user_init < 0:
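+ # default initial DoE size: 4 points per input dimension, capped by the sample budget and 500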
n_init_ref = min(4 * x_dim, thr_count + n_ex - 1, 500)
if self.do_parallel:
- n_init_ref = int(np.ceil(n_init_ref/self.n_processor)*self.n_processor) # Let's not waste resource
+ n_init_ref = int(
+ np.ceil(n_init_ref / self.n_processor) * self.n_processor
+ ) # round up to a multiple of the processor count so no workers sit idle
if n_init_ref > n_ex:
n_init = n_init_ref - n_ex
else:
n_init = 0
-
+
else:
n_init = user_init
n_iter = thr_count - n_init
- def FEM_batch(Xs, id_sim):
- return run_FEM_batch(Xs, id_sim, self.rv_name, self.do_parallel, self.y_dim, self.os_type, self.run_type, self.pool, t_init, thr_t, self.workflowDriver)
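+ # thin wrapper so later calls only need to pass the sample batch and the running simulation id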
+ def FEM_batch(Xs, id_sim): # noqa: N802, N803
+ return run_FEM_batch(
+ Xs,
+ id_sim,
+ self.rv_name,
+ self.do_parallel,
+ self.y_dim,
+ self.os_type,
+ self.run_type,
+ self.pool,
+ t_init,
+ thr_t,
+ self.workflowDriver,
+ )
# check validity of datafile
if n_ex > 0:
- #Y_test, self.id_sim = FEM_batch(X_tmp[0, :][np.newaxis], self.id_sim)
- # TODO : Fix this
- print(X_tmp[0, :][np.newaxis].shape)
- X_test, Y_test ,self.id_sim= FEM_batch(X_tmp[0, :][np.newaxis] ,self.id_sim)
- if np.sum(abs((Y_test - Y_tmp[0, :][np.newaxis]) / Y_test) > 0.01, axis=1) > 0:
+ # Y_test, self.id_sim = FEM_batch(X_tmp[0, :][np.newaxis], self.id_sim)
+ # TODO : Fix this # noqa: TD002
+ print(X_tmp[0, :][np.newaxis].shape) # noqa: T201
+ X_test, Y_test, self.id_sim = FEM_batch( # noqa: F841, N806
+ X_tmp[0, :][np.newaxis], self.id_sim
+ )
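+ # consistency check: re-run the first imported sample and flag a mismatch above 1% in any QoI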
+ if (
+ np.sum(
+ abs((Y_test - Y_tmp[0, :][np.newaxis]) / Y_test) > 0.01, # noqa: PLR2004
+ axis=1,
+ )
+ > 0
+ ):
msg = 'Consistency check failed. Your data is not consistent with your model response.'
errlog.exit(msg)
- if n_init>0:
+ if n_init > 0:
n_init -= 1
else:
n_iter -= 1
@@ -387,41 +428,41 @@ def FEM_batch(Xs, id_sim):
# generate initial samples
#
- if n_init>0:
- U = lhs(x_dim, samples=(n_init))
- X = np.vstack([X_tmp, np.zeros((n_init, x_dim))])
+ if n_init > 0:
+ U = lhs(x_dim, samples=(n_init)) # noqa: N806
+ X = np.vstack([X_tmp, np.zeros((n_init, x_dim))]) # noqa: N806
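+ # map the unit-hypercube LHS samples onto each RV's [lowerbound, upperbound] range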
for nx in range(x_dim):
- X[n_ex:n_ex+n_init, nx] = U[:, nx] * (self.xrange[nx, 1] - self.xrange[nx, 0]) + self.xrange[nx, 0]
+ X[n_ex : n_ex + n_init, nx] = (
+ U[:, nx] * (self.xrange[nx, 1] - self.xrange[nx, 0])
+ + self.xrange[nx, 0]
+ )
else:
- X = X_tmp
+ X = X_tmp # noqa: N806
- if sum(abs(self.len / self.xrange[:, 0]) < 1.e-7) > 1:
+ if sum(abs(self.len / self.xrange[:, 0]) < 1.0e-7) > 1: # noqa: PLR2004
msg = 'Error : upperbound and lowerbound should not be the same'
errlog.exit(msg)
n_iter = thr_count - n_init
else:
-
n_ex = 0
- thr_NRMSE = 0.02 # default
+ thr_NRMSE = 0.02 # default # noqa: N806
thr_t = float('inf')
#
# Read sample locations from directory
#
- X = read_txt(self.inpData,errlog)
+ X = read_txt(self.inpData, errlog) # noqa: N806
if self.do_mf:
if X.shape[1] != self.X_hf.shape[1]:
- msg = 'Error importing input data: dimension inconsistent: high fidelity data have {} RV column(s) but low fidelity model have {}.'.format(
- self.X_hf.shape[1], X.shape[1])
+ msg = f'Error importing input data: dimension inconsistent: high-fidelity data have {self.X_hf.shape[1]} RV column(s) but the low-fidelity model has {X.shape[1]}.'
errlog.exit(msg)
if X.shape[1] != x_dim:
- msg = 'Error importing input data: Number of dimension inconsistent: have {} RV(s) but {} column(s).' \
- .format(x_dim, X.shape[1])
+ msg = f'Error importing input data: number of dimensions inconsistent: expected {x_dim} RV(s) but found {X.shape[1]} column(s).'
errlog.exit(msg)
self.xrange = np.vstack([np.min(X, axis=0), np.max(X, axis=0)]).T
@@ -431,14 +472,12 @@ def FEM_batch(Xs, id_sim):
n_init = thr_count
n_iter = 0
-
# give error
- if thr_count <= 2:
+ if thr_count <= 2: # noqa: PLR2004
msg = 'Number of samples should be greater than 2.'
errlog.exit(msg)
-
if do_doe:
ac = 1 # pre-screening time = time*ac
ar = 1 # cluster
@@ -450,8 +489,8 @@ def FEM_batch(Xs, id_sim):
else:
ac = 1 # pre-screening time = time*ac
ar = 1 # cluster
- n_candi = 1 # candidate points
- n_integ = 1 # integration points
+ n_candi = 1 # candidate points
+ n_integ = 1 # integration points
user_init = thr_count
#
@@ -462,51 +501,55 @@ def FEM_batch(Xs, id_sim):
#
# SimCenter workflow setting
#
- if os.path.exists('{}/workdir.1'.format(work_dir)):
+ if os.path.exists(f'{work_dir}/workdir.1'): # noqa: PTH110
is_left = True
idx = 0
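+ # make the old workdir.* trees writable so they can be deleted before a new run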
def change_permissions_recursive(path, mode):
- for root, dirs, files in os.walk(path, topdown=False):
- for dir in [os.path.join(root, d) for d in dirs]:
- os.chmod(dir, mode)
- for file in [os.path.join(root, f) for f in files]:
- os.chmod(file, mode)
+ for root, dirs, files in os.walk(path, topdown=False): # noqa: B007
+ for dir in [os.path.join(root, d) for d in dirs]: # noqa: A001, PTH118
+ os.chmod(dir, mode) # noqa: PTH101
+ for file in [os.path.join(root, f) for f in files]: # noqa: PTH118
+ os.chmod(file, mode) # noqa: PTH101
while is_left:
- idx = idx + 1
+ idx = idx + 1 # noqa: PLR6104
try:
- if os.path.exists('{}/workdir.{}/{}'.format(work_dir, idx, workflowDriver)):
- #os.chmod('{}/workdir.{}'.format(work_dir, idx), 777)
- change_permissions_recursive('{}/workdir.{}'.format(work_dir, idx), 0o777)
- my_dir = '{}/workdir.{}'.format(work_dir, idx)
- os.chmod(my_dir, 0o777)
+ if os.path.exists( # noqa: PTH110
+ f'{work_dir}/workdir.{idx}/{workflowDriver}'
+ ):
+ # os.chmod('{}/workdir.{}'.format(work_dir, idx), 777)
+ change_permissions_recursive(
+ f'{work_dir}/workdir.{idx}', 0o777
+ )
+ my_dir = f'{work_dir}/workdir.{idx}'
+ os.chmod(my_dir, 0o777) # noqa: S103, PTH101
shutil.rmtree(my_dir)
- #shutil.rmtree('{}/workdir.{}'.format(work_dir, idx), ignore_errors=False, onerror=handleRemoveReadonly)
+ # shutil.rmtree('{}/workdir.{}'.format(work_dir, idx), ignore_errors=False, onerror=handleRemoveReadonly)
- except Exception as ex:
- print(ex)
+ except Exception as ex: # noqa: BLE001
+ print(ex) # noqa: T201
is_left = True
break
- print("Cleaned the working directory")
+ print('Cleaned the working directory') # noqa: T201
else:
- print("Work directory is clean")
+ print('Work directory is clean') # noqa: T201
- if os.path.exists('{}/dakotaTab.out'.format(work_dir)):
- os.remove('{}/dakotaTab.out'.format(work_dir))
+ if os.path.exists(f'{work_dir}/dakotaTab.out'): # noqa: PTH110
+ os.remove(f'{work_dir}/dakotaTab.out') # noqa: PTH107
- if os.path.exists('{}/inputTab.out'.format(work_dir)):
- os.remove('{}/inputTab.out'.format(work_dir))
+ if os.path.exists(f'{work_dir}/inputTab.out'): # noqa: PTH110
+ os.remove(f'{work_dir}/inputTab.out') # noqa: PTH107
- if os.path.exists('{}/outputTab.out'.format(work_dir)):
- os.remove('{}/outputTab.out'.format(work_dir))
+ if os.path.exists(f'{work_dir}/outputTab.out'): # noqa: PTH110
+ os.remove(f'{work_dir}/outputTab.out') # noqa: PTH107
- if os.path.exists('{}/SimGpModel.pkl'.format(work_dir)):
- os.remove('{}/SimGpModel.pkl'.format(work_dir))
+ if os.path.exists(f'{work_dir}/SimGpModel.pkl'): # noqa: PTH110
+ os.remove(f'{work_dir}/SimGpModel.pkl') # noqa: PTH107
- if os.path.exists('{}/verif.out'.format(work_dir)):
- os.remove('{}/verif.out'.format(work_dir))
+ if os.path.exists(f'{work_dir}/verif.out'): # noqa: PTH110
+ os.remove(f'{work_dir}/verif.out') # noqa: PTH107
# func = self.__run_FEM(X,self.id_sim, self.rv_name)
@@ -515,58 +558,58 @@ def change_permissions_recursive(path, mode):
#
t_tmp = time.time()
- X_fem, Y_fem ,self.id_sim= FEM_batch(X[n_ex:, :],self.id_sim)
- Y = np.vstack((Y_tmp,Y_fem))
- X = np.vstack((X[0:n_ex, :],X_fem))
+ X_fem, Y_fem, self.id_sim = FEM_batch(X[n_ex:, :], self.id_sim) # noqa: N806
+ Y = np.vstack((Y_tmp, Y_fem)) # noqa: N806
+ X = np.vstack((X[0:n_ex, :], X_fem)) # noqa: N806
t_sim_all = time.time() - t_tmp
if automate_doe:
self.t_sim_each = t_sim_all / n_init
else:
- self.t_sim_each = float("inf")
+ self.t_sim_each = float('inf')
#
# Generate predictive samples
#
if self.do_predictive:
n_pred = 100
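+ # extra LHS points used only to track predictive NRMSE during the DoE loop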
- Xt = np.zeros((n_pred, x_dim))
- U = lhs(x_dim, samples=n_pred)
+ Xt = np.zeros((n_pred, x_dim)) # noqa: N806
+ U = lhs(x_dim, samples=n_pred) # noqa: N806
for nx in range(x_dim):
- Xt[:, nx] = U[:, nx] * (self.xrange[nx, 1] - self.xrange[nx, 0]) + self.xrange[nx, 0]
+ Xt[:, nx] = (
+ U[:, nx] * (self.xrange[nx, 1] - self.xrange[nx, 0])
+ + self.xrange[nx, 0]
+ )
#
# Yt = np.zeros((n_pred, y_dim))
# for ns in range(n_pred):
# Yt[ns, :],self.id_sim = run_FEM(Xt[ns, :][np.newaxis],self.id_sim, self.rv_name)
- Yt = np.zeros((n_pred, y_dim))
- Xt, Yt ,self.id_sim= FEM_batch(Xt,self.id_sim)
+ Yt = np.zeros((n_pred, y_dim)) # noqa: N806
+ Xt, Yt, self.id_sim = FEM_batch(Xt, self.id_sim) # noqa: N806
else:
-
#
# READ SAMPLES FROM DIRECTORY
#
- Y = read_txt(self.outData,errlog)
+ Y = read_txt(self.outData, errlog) # noqa: N806
if self.do_mf:
if Y.shape[1] != self.Y_hf.shape[1]:
- msg = 'Error importing input data: dimension inconsistent: high fidelity data have {} QoI column(s) but low fidelity model have {}.'.format(
- self.Y_hf.shape[1], Y.shape[1])
+ msg = f'Error importing input data: dimension inconsistent: high-fidelity data have {self.Y_hf.shape[1]} QoI column(s) but the low-fidelity model has {Y.shape[1]}.'
errlog.exit(msg)
if Y.shape[1] != y_dim:
- msg = 'Error importing input data: Number of dimension inconsistent: have {} QoI(s) but {} column(s).' \
- .format(y_dim, Y.shape[1])
+ msg = f'Error importing input data: number of dimensions inconsistent: expected {y_dim} QoI(s) but found {Y.shape[1]} column(s).'
errlog.exit(msg)
if X.shape[0] != Y.shape[0]:
- msg = 'Error importing input data: numbers of samples of inputs ({}) and outputs ({}) are inconsistent'.format(X.shape[0], Y.shape[0])
+ msg = f'Error importing input data: numbers of samples of inputs ({X.shape[0]}) and outputs ({Y.shape[0]}) are inconsistent'
errlog.exit(msg)
- thr_count = 0
- self.t_sim_each = float("inf")
+ thr_count = 0
+ self.t_sim_each = float('inf')
#
# GP function
#
@@ -581,39 +624,61 @@ def change_permissions_recursive(path, mode):
kr = GPy.kern.Matern52(input_dim=x_dim, ARD=True)
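+ # optionally add a linear kernel on top of the ARD Matern 5/2 base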
if do_linear:
- kr = kr + GPy.kern.Linear(input_dim=x_dim, ARD=True)
+ kr = kr + GPy.kern.Linear(input_dim=x_dim, ARD=True) # noqa: PLR6104
if not self.do_mf:
kg = kr
- self.m_list = list()
+ self.m_list = list() # noqa: C408
for i in range(y_dim):
- self.m_list = self.m_list + [GPy.models.GPRegression(X, Y[:, i][np.newaxis].transpose(), kernel=kg.copy(),normalizer=True)]
+ self.m_list = self.m_list + [ # noqa: PLR6104, RUF005
+ GPy.models.GPRegression(
+ X,
+ Y[:, i][np.newaxis].transpose(),
+ kernel=kg.copy(),
+ normalizer=True,
+ )
+ ]
for parname in self.m_list[i].parameter_names():
if parname.endswith('lengthscale'):
- exec('self.m_list[i].' + parname + '=self.len')
+ exec('self.m_list[i].' + parname + '=self.len') # noqa: S102
else:
kgs = emf.kernels.LinearMultiFidelityKernel([kr.copy(), kr.copy()])
if not self.hf_is_model:
- if not X.shape[1]==self.X_hf.shape[1]:
- msg = 'Error importing input data: dimension of low ({}) and high ({}) fidelity models (datasets) are inconsistent'.format(X.shape[1], self.X_hf.shape[1])
+ if X.shape[1] != self.X_hf.shape[1]:
+ msg = f'Error importing input data: dimensions of the low ({X.shape[1]}) and high ({self.X_hf.shape[1]}) fidelity models (datasets) are inconsistent'
errlog.exit(msg)
if not self.lf_is_model:
- if not X.shape[1]==self.X_lf.shape[1]:
- msg = 'Error importing input data: dimension of low ({}) and high ({}) fidelity models (datasets) are inconsistent'.format(X.shape[1], self.X_hf.shape[1])
+ if X.shape[1] != self.X_lf.shape[1]:
+ msg = f'Error importing input data: dimensions of the low ({self.X_lf.shape[1]}) and high ({X.shape[1]}) fidelity models (datasets) are inconsistent'
errlog.exit(msg)
- if self.mf_case == 'data-model' or self.mf_case=='data-data':
- X_list, Y_list = emf.convert_lists_to_array.convert_xy_lists_to_arrays([X, self.X_hf], [Y, self.Y_hf])
+ if self.mf_case == 'data-model' or self.mf_case == 'data-data': # noqa: PLR1714
+ X_list, Y_list = ( # noqa: N806
+ emf.convert_lists_to_array.convert_xy_lists_to_arrays(
+ [X, self.X_hf], [Y, self.Y_hf]
+ )
+ )
elif self.mf_case == 'model-data':
- X_list, Y_list = emf.convert_lists_to_array.convert_xy_lists_to_arrays([self.X_lf, X], [self.Y_lf, Y])
-
- self.m_list = list()
- for i in range(y_dim):
- self.m_list = self.m_list + [GPyMultiOutputWrapper(emf.models.GPyLinearMultiFidelityModel(X_list, Y_list, kernel=kgs.copy(), n_fidelities=2), 2, n_optimization_restarts=15)]
-
+ X_list, Y_list = ( # noqa: N806
+ emf.convert_lists_to_array.convert_xy_lists_to_arrays(
+ [self.X_lf, X], [self.Y_lf, Y]
+ )
+ )
+
+ self.m_list = list() # noqa: C408
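+ # one linear multi-fidelity GP per QoI, each optimized with 15 restarts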
+ for i in range(y_dim): # noqa: B007
+ self.m_list = self.m_list + [ # noqa: PLR6104, RUF005
+ GPyMultiOutputWrapper(
+ emf.models.GPyLinearMultiFidelityModel(
+ X_list, Y_list, kernel=kgs.copy(), n_fidelities=2
+ ),
+ 2,
+ n_optimization_restarts=15,
+ )
+ ]
#
# Verification measures
@@ -621,7 +686,7 @@ def change_permissions_recursive(path, mode):
self.NRMSE_hist = np.zeros((1, y_dim), float)
self.NRMSE_idx = np.zeros((1, 1), int)
- #leng_hist = np.zeros((1, self.m_list[0]._param_array_.shape[0]), int)
+ # leng_hist = np.zeros((1, self.m_list[0]._param_array_.shape[0]), int)
if self.do_predictive:
self.NRMSE_pred_hist = np.empty((1, y_dim), float)
@@ -631,61 +696,72 @@ def change_permissions_recursive(path, mode):
break_doe = False
- print("======== RUNNING GP DoE ===========")
+ print('======== RUNNING GP DoE ===========') # noqa: T201
exit_code = 'count' # num iter
i = 0
- x_new = np.zeros((0,x_dim))
+ x_new = np.zeros((0, x_dim))
n_new = 0
- doe_off = False # false if true
+ doe_off = False # set to True when the adaptive DoE loop should stop
while not doe_off:
-
- t = time.time()
- if self.doe_method == "random":
- do_cal = True
- elif self.doe_method == "pareto":
- do_cal = True
- elif np.mod(i, self.cal_interval) == 0:
+ t = time.time() # noqa: F841
+ if (
+ self.doe_method == 'random' # noqa: PLR1714
+ or self.doe_method == 'pareto'
+ or np.mod(i, self.cal_interval) == 0
+ ):
do_cal = True
else:
do_cal = False
t_tmp = time.time()
- [x_new, self.m_list, err, idx, Y_cv, Y_cv_var] = self.__design_of_experiments(X, Y, ac, ar, n_candi,
- n_integ, self.m_list,
- do_cal, nugget_opt, do_doe)
+ [x_new, self.m_list, err, idx, Y_cv, Y_cv_var] = ( # noqa: F841, N806
+ self.__design_of_experiments(
+ X,
+ Y,
+ ac,
+ ar,
+ n_candi,
+ n_integ,
+ self.m_list,
+ do_cal,
+ nugget_opt,
+ do_doe,
+ )
+ )
t_doe = time.time() - t_tmp
- print('DoE Time: {:.2f} s'.format(t_doe))
+ print(f'DoE Time: {t_doe:.2f} s') # noqa: T201
if automate_doe:
if t_doe > self.t_sim_each:
break_doe = True
- print('========>> DOE OFF')
+ print('========>> DOE OFF') # noqa: T201
n_left = n_iter - i
break
if not self.do_mf:
- NRMSE_val = self.__normalized_mean_sq_error(Y_cv, Y)
- else:
- if self.mf_case == 'data-model' or self.mf_case == 'data-data':
- NRMSE_val = self.__normalized_mean_sq_error(Y_cv, self.Y_hf)
- elif self.mf_case == 'model-data' :
- NRMSE_val = self.__normalized_mean_sq_error(Y_cv, Y)
+ NRMSE_val = self.__normalized_mean_sq_error(Y_cv, Y) # noqa: N806
+ elif self.mf_case == 'data-model' or self.mf_case == 'data-data': # noqa: PLR1714
+ NRMSE_val = self.__normalized_mean_sq_error(Y_cv, self.Y_hf) # noqa: N806
+ elif self.mf_case == 'model-data':
+ NRMSE_val = self.__normalized_mean_sq_error(Y_cv, Y) # noqa: N806
self.NRMSE_hist = np.vstack((self.NRMSE_hist, np.array(NRMSE_val)))
self.NRMSE_idx = np.vstack((self.NRMSE_idx, i))
if self.do_predictive:
- Yt_pred = np.zeros((n_pred, y_dim))
+ Yt_pred = np.zeros((n_pred, y_dim)) # noqa: N806
for ny in range(y_dim):
- y_pred_tmp, dummy = self.__predict(self.m_list[ny],Xt)
+ y_pred_tmp, dummy = self.__predict(self.m_list[ny], Xt) # noqa: F841
Yt_pred[:, ny] = y_pred_tmp.transpose()
if self.do_logtransform:
- Yt_pred = np.exp(Yt_pred)
- NRMSE_pred_val = self.__normalized_mean_sq_error(Yt_pred, Yt)
- self.NRMSE_pred_hist = np.vstack((self.NRMSE_pred_hist, np.array(NRMSE_pred_val)))
+ Yt_pred = np.exp(Yt_pred) # noqa: N806
+ NRMSE_pred_val = self.__normalized_mean_sq_error(Yt_pred, Yt) # noqa: N806
+ self.NRMSE_pred_hist = np.vstack(
+ (self.NRMSE_pred_hist, np.array(NRMSE_pred_val))
+ )
if self.id_sim >= thr_count:
n_iter = i
@@ -715,7 +791,7 @@ def change_permissions_recursive(path, mode):
break
n_new = x_new.shape[0]
- if not (n_new + self.id_sim < n_init + n_iter +1):
+ if not (n_new + self.id_sim < n_init + n_iter + 1):
n_new = n_init + n_iter - self.id_sim
x_new = x_new[0:n_new, :]
@@ -724,25 +800,27 @@ def change_permissions_recursive(path, mode):
# y_new = np.zeros((n_new, y_dim))
# for ny in range(n_new):
# y_new[ny, :],self.id_sim = run_FEM(x_new[ny, :][np.newaxis],self.id_sim, self.rv_name)
- x_new, y_new, self.id_sim = FEM_batch(x_new,self.id_sim)
-
- #print(">> {:.2f} s".format(time.time() - t_init))
- X = np.vstack([X, x_new])
- Y = np.vstack([Y, y_new])
+ x_new, y_new, self.id_sim = FEM_batch(x_new, self.id_sim)
+ # print(">> {:.2f} s".format(time.time() - t_init))
+ X = np.vstack([X, x_new]) # noqa: N806
+ Y = np.vstack([Y, y_new]) # noqa: N806
- print("======== RUNNING GP Calibration ===========")
+ print('======== RUNNING GP Calibration ===========') # noqa: T201
# not used
if break_doe:
- X_tmp = np.zeros((n_left, x_dim))
- Y_tmp = np.zeros((n_left, y_dim))
- U = lhs(x_dim, samples=n_left)
+ X_tmp = np.zeros((n_left, x_dim)) # noqa: N806
+ Y_tmp = np.zeros((n_left, y_dim)) # noqa: N806
+ U = lhs(x_dim, samples=n_left) # noqa: N806
for nx in range(x_dim):
# X[:,nx] = np.random.uniform(xrange[nx,0], xrange[nx,1], (1, n_init))
- X_tmp[:, nx] = U[:, nx] * (self.xrange[nx, 1] - self.xrange[nx, 0]) + self.xrange[nx, 0]
+ X_tmp[:, nx] = (
+ U[:, nx] * (self.xrange[nx, 1] - self.xrange[nx, 0])
+ + self.xrange[nx, 0]
+ )
- X_tmp, Y_tmp, self.id_sim = FEM_batch(X_tmp,self.id_sim)
+ X_tmp, Y_tmp, self.id_sim = FEM_batch(X_tmp, self.id_sim) # noqa: N806
# for ns in np.arange(n_left):
# Y_tmp[ns, :],self.id_sim = run_FEM(X_tmp[ns, :][np.newaxis],self.id_sim, self.rv_name)
@@ -752,8 +830,8 @@ def change_permissions_recursive(path, mode):
# Y_tmp = Y_tmp[:ns, :]
# break
- X = np.vstack((X, X_tmp))
- Y = np.vstack((Y, Y_tmp))
+ X = np.vstack((X, X_tmp)) # noqa: N806
+ Y = np.vstack((Y, Y_tmp)) # noqa: N806
do_doe = False
# if not do_doe:
@@ -768,8 +846,6 @@ def change_permissions_recursive(path, mode):
# else:
# NRMSE_val = self.__normalized_mean_sq_error(Y_cv, self.Y_hf)
-
-
sim_time = time.time() - t_init
n_samp = Y.shape[0]
@@ -810,18 +886,21 @@ def change_permissions_recursive(path, mode):
# plt.show()
# plt.plot(Y_cv[:,1], Y[:,1], 'x')
# plt.show()
- print('my exit code = {}'.format(exit_code))
- print('1. count = {}'.format(self.id_sim))
- print('2. max(NRMSE) = {}'.format(np.max(NRMSE_val)))
- print('3. time = {:.2f} s'.format(sim_time))
+ print(f'my exit code = {exit_code}') # noqa: T201
+ print(f'1. count = {self.id_sim}') # noqa: T201
+ print(f'2. max(NRMSE) = {np.max(NRMSE_val)}') # noqa: T201
+ print(f'3. time = {sim_time:.2f} s') # noqa: T201
# for user information
if do_simulation:
n_err = 1000
- Xerr = np.zeros((n_err, x_dim))
- U = lhs(x_dim, samples=n_err)
+ Xerr = np.zeros((n_err, x_dim)) # noqa: N806
+ U = lhs(x_dim, samples=n_err) # noqa: N806
for nx in range(x_dim):
- Xerr[:, nx] = U[:, nx] * (self.xrange[nx, 1] - self.xrange[nx, 0]) + self.xrange[nx, 0]
+ Xerr[:, nx] = (
+ U[:, nx] * (self.xrange[nx, 1] - self.xrange[nx, 0])
+ + self.xrange[nx, 0]
+ )
y_pred_var = np.zeros((n_err, y_dim))
y_data_var = np.zeros((n_err, y_dim))
@@ -830,50 +909,59 @@ def change_permissions_recursive(path, mode):
# m_tmp = self.m_list[ny].copy()
m_tmp = self.m_list[ny]
if self.do_logtransform:
- #y_var_val = np.var(np.log(Y[:, ny]))
+ # y_var_val = np.var(np.log(Y[:, ny]))
log_mean = np.mean(np.log(Y[:, ny]))
log_var = np.var(np.log(Y[:, ny]))
- y_var_val = np.exp(2*log_mean+log_var)*(np.exp(log_var)-1) # in linear space
+ y_var_val = np.exp(2 * log_mean + log_var) * (
+ np.exp(log_var) - 1
+ ) # in linear space
else:
y_var_val = np.var(Y[:, ny])
for ns in range(n_err):
- y_pred_tmp, y_pred_var_tmp = self.__predict(m_tmp,Xerr[ns, :][np.newaxis])
+ y_pred_tmp, y_pred_var_tmp = self.__predict(
+ m_tmp, Xerr[ns, :][np.newaxis]
+ )
if self.do_logtransform:
- y_pred_var[ns, ny] = np.exp(2 * y_pred_tmp + y_pred_var_tmp) * (np.exp(y_pred_var_tmp) - 1)
+ y_pred_var[ns, ny] = np.exp(
+ 2 * y_pred_tmp + y_pred_var_tmp
+ ) * (np.exp(y_pred_var_tmp) - 1)
else:
y_pred_var[ns, ny] = y_pred_var_tmp
y_data_var[ns, ny] = y_var_val
- #for parname in m_tmp.parameter_names():
+ # for parname in m_tmp.parameter_names():
# if ('Mat52' in parname) and parname.endswith('variance'):
# exec('y_pred_prior_var[ns,ny]=m_tmp.' + parname)
- #error_ratio1_Pr = (y_pred_var / y_pred_prior_var)
- error_ratio2_Pr = (y_pred_var / y_data_var)
- #np.max(error_ratio1_Pr, axis=0)
+ # error_ratio1_Pr = (y_pred_var / y_pred_prior_var)
+ error_ratio2_Pr = y_pred_var / y_data_var # noqa: N806
+ # np.max(error_ratio1_Pr, axis=0)
np.max(error_ratio2_Pr, axis=0)
- self.perc_thr = np.hstack([np.array([1]), np.arange(10, 1000, 50), np.array([999])])
+ self.perc_thr = np.hstack(
+ [np.array([1]), np.arange(10, 1000, 50), np.array([999])]
+ )
error_sorted = np.sort(np.max(error_ratio2_Pr, axis=1), axis=0)
self.perc_val = error_sorted[self.perc_thr] # criteria
self.perc_thr = 1 - (self.perc_thr) * 0.001 # ratio=simulation/sampling
corr_val = np.zeros((y_dim,))
- R2_val = np.zeros((y_dim,))
+ R2_val = np.zeros((y_dim,)) # noqa: N806
for ny in range(y_dim):
if not self.do_mf:
- Y_ex = Y[:, ny]
- else:
- if self.mf_case == 'data-model' or self.mf_case == 'data-data':
- Y_ex = self.Y_hf[:, ny]
- elif self.mf_case == 'model-data':
- Y_ex = Y[:, ny]
+ Y_ex = Y[:, ny] # noqa: N806
+ elif self.mf_case == 'data-model' or self.mf_case == 'data-data': # noqa: PLR1714
+ Y_ex = self.Y_hf[:, ny] # noqa: N806
+ elif self.mf_case == 'model-data':
+ Y_ex = Y[:, ny] # noqa: N806
corr_val[ny] = np.corrcoef(Y_ex, Y_cv[:, ny])[0, 1]
- R2_val[ny] = 1 - np.sum(pow(Y_cv[:, ny] - Y_ex, 2)) / np.sum(pow(Y_cv[:, ny] - np.mean(Y_cv[:, ny]), 2))
- if np.var(Y_ex)==0:
+ R2_val[ny] = 1 - np.sum(pow(Y_cv[:, ny] - Y_ex, 2)) / np.sum(
+ pow(Y_cv[:, ny] - np.mean(Y_cv[:, ny]), 2)
+ )
+ if np.var(Y_ex) == 0:
corr_val[ny] = 1
R2_val[ny] = 0
@@ -908,24 +996,24 @@ def change_permissions_recursive(path, mode):
self.rvDist = []
self.rvVal = []
for nx in range(x_dim):
- rvInfo = inp["randomVariables"][nx]
- self.rvName = self.rvName + [rvInfo["name"]]
- self.rvDist = self.rvDist + [rvInfo["distribution"]]
+ rvInfo = inp['randomVariables'][nx] # noqa: N806
+ self.rvName = self.rvName + [rvInfo['name']] # noqa: PLR6104, RUF005
+ self.rvDist = self.rvDist + [rvInfo['distribution']] # noqa: PLR6104, RUF005
if do_sampling:
- self.rvVal = self.rvVal + [(rvInfo["upperbound"] + rvInfo["lowerbound"]) / 2]
+ self.rvVal = self.rvVal + [ # noqa: PLR6104, RUF005
+ (rvInfo['upperbound'] + rvInfo['lowerbound']) / 2
+ ]
else:
- self.rvVal = self.rvVal + [np.mean(X[:, nx])]
-
- def __parameter_calibration(self, m_tmp_list, x_dim, nugget_opt):
+ self.rvVal = self.rvVal + [np.mean(X[:, nx])] # noqa: PLR6104, RUF005
- warnings.filterwarnings("ignore")
+ def __parameter_calibration(self, m_tmp_list, x_dim, nugget_opt): # noqa: ARG002, C901
+ warnings.filterwarnings('ignore')
t_opt = time.time()
- m_list = list()
+ m_list = list() # noqa: C408
- for ny in range(self.y_dim):
-
- print("y dimension {}:".format(ny))
+ for ny in range(self.y_dim): # noqa: PLR1702
+ print(f'y dimension {ny}:') # noqa: T201
nopt = 10
#
@@ -935,7 +1023,7 @@ def __parameter_calibration(self, m_tmp_list, x_dim, nugget_opt):
if not self.do_mf:
if np.var(m_tmp_list[ny].Y) == 0:
- nugget_opt_tmp = "Zero"
+ nugget_opt_tmp = 'Zero'
for parname in m_tmp_list[ny].parameter_names():
if parname.endswith('variance'):
m_tmp_list[ny][parname].constrain_fixed(0)
@@ -943,13 +1031,17 @@ def __parameter_calibration(self, m_tmp_list, x_dim, nugget_opt):
m_init = m_tmp_list[ny]
m_tmp = m_init
- if nugget_opt_tmp == "Optimize":
+ if nugget_opt_tmp == 'Optimize':
m_tmp['Gaussian_noise.variance'].unfix()
- elif nugget_opt_tmp == "Fixed Values":
- m_tmp['Gaussian_noise.variance'].constrain_fixed(self.nuggetVal[ny])
- elif nugget_opt_tmp == "Fixed Bounds":
- m_tmp['Gaussian_noise.variance'].constrain_bounded(self.nuggetVal[ny][0], self.nuggetVal[ny][1])
- elif nugget_opt_tmp == "Zero":
+ elif nugget_opt_tmp == 'Fixed Values':
+ m_tmp['Gaussian_noise.variance'].constrain_fixed(
+ self.nuggetVal[ny]
+ )
+ elif nugget_opt_tmp == 'Fixed Bounds':
+ m_tmp['Gaussian_noise.variance'].constrain_bounded(
+ self.nuggetVal[ny][0], self.nuggetVal[ny][1]
+ )
+ elif nugget_opt_tmp == 'Zero':
m_tmp['Gaussian_noise.variance'].constrain_fixed(0)
m_tmp.optimize(clear_after_finish=True)
@@ -960,8 +1052,8 @@ def __parameter_calibration(self, m_tmp_list, x_dim, nugget_opt):
m = m_tmp.copy()
id_opt = 1
- print('{} among {} Log-Likelihood: {}'.format(1, nopt, m_tmp.log_likelihood()))
- #print(' Calibration time for each: {:.2f} s'.format(time.time() - t_unfix))
+ print(f'{1} among {nopt} Log-Likelihood: {m_tmp.log_likelihood()}') # noqa: T201
+ # print(' Calibration time for each: {:.2f} s'.format(time.time() - t_unfix))
if time.time() - t_unfix > self.t_sim_each:
nopt = 1
@@ -972,15 +1064,19 @@ def __parameter_calibration(self, m_tmp_list, x_dim, nugget_opt):
for parname in m_tmp.parameter_names():
if parname.endswith('lengthscale'):
- exec('m_tmp.' + parname + '=self.len')
+ exec('m_tmp.' + parname + '=self.len') # noqa: S102
- if nugget_opt_tmp == "Optimize":
+ if nugget_opt_tmp == 'Optimize':
m_tmp['Gaussian_noise.variance'].unfix()
- elif nugget_opt_tmp == "Fixed Values":
- m_tmp['Gaussian_noise.variance'].constrain_fixed(self.nuggetVal[ny])
- elif nugget_opt_tmp == "Fixed Bounds":
- m_tmp['Gaussian_noise.variance'].constrain_bounded(self.nuggetVal[ny][0], self.nuggetVal[ny][1])
- elif nugget_opt_tmp == "Zero":
+ elif nugget_opt_tmp == 'Fixed Values':
+ m_tmp['Gaussian_noise.variance'].constrain_fixed(
+ self.nuggetVal[ny]
+ )
+ elif nugget_opt_tmp == 'Fixed Bounds':
+ m_tmp['Gaussian_noise.variance'].constrain_bounded(
+ self.nuggetVal[ny][0], self.nuggetVal[ny][1]
+ )
+ elif nugget_opt_tmp == 'Zero':
m_tmp['Gaussian_noise.variance'].constrain_fixed(0)
m_tmp.optimize(clear_after_finish=True)
@@ -992,8 +1088,8 @@ def __parameter_calibration(self, m_tmp_list, x_dim, nugget_opt):
m = m_tmp.copy()
id_opt = 1
- print('{} among {} Log-Likelihood: {}'.format(2, nopt, m_tmp.log_likelihood()))
- #print(' Calibration time for each: {:.2f} s'.format(time.time() - t_unfix))
+ print(f'{2} among {nopt} Log-Likelihood: {m_tmp.log_likelihood()}') # noqa: T201
+ # print(' Calibration time for each: {:.2f} s'.format(time.time() - t_unfix))
if time.time() - t_unfix > self.t_sim_each:
nopt = 1
@@ -1004,29 +1100,45 @@ def __parameter_calibration(self, m_tmp_list, x_dim, nugget_opt):
for parname in m_tmp.parameter_names():
if parname.endswith('lengthscale'):
if math.isnan(m.log_likelihood()):
- exec('m_tmp.' + parname + '=np.random.exponential(1, (1, x_dim)) * m_init.' + parname)
+ exec( # noqa: S102
+ 'm_tmp.'
+ + parname
+ + '=np.random.exponential(1, (1, x_dim)) * m_init.'
+ + parname
+ )
else:
- exec('m_tmp.' + parname + '=np.random.exponential(1, (1, x_dim)) * m.' + parname)
-
- if nugget_opt_tmp == "Optimize":
+ exec( # noqa: S102
+ 'm_tmp.'
+ + parname
+ + '=np.random.exponential(1, (1, x_dim)) * m.'
+ + parname
+ )
+
+ if nugget_opt_tmp == 'Optimize':
m_tmp['Gaussian_noise.variance'].unfix()
- elif nugget_opt_tmp == "Fixed Values":
- m_tmp['Gaussian_noise.variance'].constrain_fixed(self.nuggetVal[ny])
- elif nugget_opt_tmp == "Fixed Bounds":
- m_tmp['Gaussian_noise.variance'].constrain_bounded(self.nuggetVal[ny][0], self.nuggetVal[ny][1])
- elif nugget_opt_tmp == "Zero":
+ elif nugget_opt_tmp == 'Fixed Values':
+ m_tmp['Gaussian_noise.variance'].constrain_fixed(
+ self.nuggetVal[ny]
+ )
+ elif nugget_opt_tmp == 'Fixed Bounds':
+ m_tmp['Gaussian_noise.variance'].constrain_bounded(
+ self.nuggetVal[ny][0], self.nuggetVal[ny][1]
+ )
+ elif nugget_opt_tmp == 'Zero':
m_tmp['Gaussian_noise.variance'].constrain_fixed(0)
- t_fix = time.time()
+ t_fix = time.time() # noqa: F841
try:
m_tmp.optimize()
# m_tmp.optimize_restarts(5)
- except Exception as ex:
- print("OS error: {0}".format(ex))
+ except Exception as ex: # noqa: BLE001
+ print(f'OS error: {ex}') # noqa: T201
- print('{} among {} Log-Likelihood: {}'.format(no + 3, nopt, m_tmp.log_likelihood()))
- #print(' Calibration time for each: {:.2f} s'.format(time.time() - t_fix))
+ print( # noqa: T201
+ f'{no + 3} among {nopt} Log-Likelihood: {m_tmp.log_likelihood()}'
+ )
+ # print(' Calibration time for each: {:.2f} s'.format(time.time() - t_fix))
if m_tmp.log_likelihood() > max_log_likli:
max_log_likli = m_tmp.log_likelihood()
@@ -1038,31 +1150,49 @@ def __parameter_calibration(self, m_tmp_list, x_dim, nugget_opt):
break
if math.isinf(-max_log_likli) or math.isnan(-max_log_likli):
- #msg = "Error GP optimization failed, perhaps QoI values are zero."
+ # msg = "Error GP optimization failed, perhaps QoI values are zero."
if np.var(m_tmp.Y) != 0:
- msg = "Error GP optimization failed for QoI #{}".format(ny+1)
+ msg = f'Error GP optimization failed for QoI #{ny + 1}'
self.errlog.exit(msg)
- m_list = m_list + [m]
- print(m)
+ m_list = m_list + [m] # noqa: PLR6104, RUF005
+ print(m) # noqa: T201
else:
-
-
- if nugget_opt_tmp == "Optimize":
+ if nugget_opt_tmp == 'Optimize':
m_tmp_list[ny].gpy_model.mixed_noise.Gaussian_noise.unfix()
m_tmp_list[ny].gpy_model.mixed_noise.Gaussian_noise_1.unfix()
- elif nugget_opt_tmp == "Fixed Values":
- m_tmp_list[ny].gpy_model.mixed_noise.Gaussian_noise.constrain_fixed(self.nuggetVal[ny])
- m_tmp_list[ny].gpy_model.mixed_noise.Gaussian_noise_1.constrain_fixed(self.nuggetVal[ny])
-
- elif nugget_opt_tmp == "Fixed Bounds":
- m_tmp_list[ny].gpy_model.mixed_noise.Gaussian_noise.constrain_bounded(self.nuggetVal[ny][0], self.nuggetVal[ny][1])
- m_tmp_list[ny].gpy_model.mixed_noise.Gaussian_noise_1.constrain_bounded(self.nuggetVal[ny][0], self.nuggetVal[ny][1])
-
- elif nugget_opt_tmp == "Zero":
- m_tmp_list[ny].gpy_model.mixed_noise.Gaussian_noise.constrain_fixed(0)
- m_tmp_list[ny].gpy_model.mixed_noise.Gaussian_noise_1.constrain_fixed(0)
+ elif nugget_opt_tmp == 'Fixed Values':
+ m_tmp_list[
+ ny
+ ].gpy_model.mixed_noise.Gaussian_noise.constrain_fixed(
+ self.nuggetVal[ny]
+ )
+ m_tmp_list[
+ ny
+ ].gpy_model.mixed_noise.Gaussian_noise_1.constrain_fixed(
+ self.nuggetVal[ny]
+ )
+
+ elif nugget_opt_tmp == 'Fixed Bounds':
+ m_tmp_list[
+ ny
+ ].gpy_model.mixed_noise.Gaussian_noise.constrain_bounded(
+ self.nuggetVal[ny][0], self.nuggetVal[ny][1]
+ )
+ m_tmp_list[
+ ny
+ ].gpy_model.mixed_noise.Gaussian_noise_1.constrain_bounded(
+ self.nuggetVal[ny][0], self.nuggetVal[ny][1]
+ )
+
+ elif nugget_opt_tmp == 'Zero':
+ m_tmp_list[
+ ny
+ ].gpy_model.mixed_noise.Gaussian_noise.constrain_fixed(0)
+ m_tmp_list[
+ ny
+ ].gpy_model.mixed_noise.Gaussian_noise_1.constrain_fixed(0)
#
# if not do_nugget:
# m_tmp_list[ny].gpy_model.mixed_noise.Gaussian_noise.fix(0)
@@ -1073,31 +1203,40 @@ def __parameter_calibration(self, m_tmp_list, x_dim, nugget_opt):
id_opt = 0
self.calib_time = (time.time() - t_opt) * round(10 / nopt)
- print(' Calibration time: {:.2f} s, id_opt={}'.format(self.calib_time, id_opt))
+ print(f' Calibration time: {self.calib_time:.2f} s, id_opt={id_opt}') # noqa: T201
return m_tmp_list
- def __design_of_experiments(self, X, Y, ac, ar, n_candi, n_integ, pre_m_list, do_cal, nugget_opt, do_doe):
-
+ def __design_of_experiments( # noqa: C901, PLR0914, PLR0915
+ self,
+ X, # noqa: N803
+ Y, # noqa: N803
+ ac,
+ ar, # noqa: ARG002
+ n_candi,
+ n_integ,
+ pre_m_list,
+ do_cal,
+ nugget_opt,
+ do_doe,
+ ):
# do log transform
if self.do_logtransform:
-
- if np.min(Y)<0:
+ if np.min(Y) < 0:
msg = 'Error running SimCenterUQ. Response contains negative values. Please uncheck the log-transform option in the UQ tab'
- errlog.exit(msg)
- Y = np.log(Y)
+ self.errlog.exit(msg)
+ Y = np.log(Y) # noqa: N806
if self.do_mf:
-
- if self.mf_case == 'data-model' or self.mf_case == 'data-data':
- if np.min(self.Y_hf)<0:
+ if self.mf_case == 'data-model' or self.mf_case == 'data-data': # noqa: PLR1714
+ if np.min(self.Y_hf) < 0:
msg = 'Error running SimCenterUQ. Response contains negative values. Please uncheck the log-transform option in the UQ tab'
- errlog.exit(msg)
+ self.errlog.exit(msg)
self.Y_hf = np.log(self.Y_hf)
elif self.mf_case == 'model-data':
if np.min(self.Y_lf) < 0:
msg = 'Error running SimCenterUQ. Response contains negative values. Please uncheck the log-transform option in the UQ tab'
- errlog.exit(msg)
+ self.errlog.exit(msg)
self.Y_lf = np.log(self.Y_lf)
r = 1 # adaptively
@@ -1111,15 +1250,28 @@ def __design_of_experiments(self, X, Y, ac, ar, n_candi, n_integ, pre_m_list, do
if not self.do_mf:
m_tmp_list[i].set_XY(X, Y[:, i][np.newaxis].transpose())
else:
-
- if self.mf_case == 'data-model' or self.mf_case == 'data-data':
- X_list_tmp, Y_list_tmp = emf.convert_lists_to_array.convert_xy_lists_to_arrays([X, self.X_hf],
- [Y[:, i][np.newaxis].transpose(), self.Y_hf[:, i][np.newaxis].transpose()])
+ if self.mf_case == 'data-model' or self.mf_case == 'data-data': # noqa: PLR1714
+ X_list_tmp, Y_list_tmp = ( # noqa: N806
+ emf.convert_lists_to_array.convert_xy_lists_to_arrays(
+ [X, self.X_hf],
+ [
+ Y[:, i][np.newaxis].transpose(),
+ self.Y_hf[:, i][np.newaxis].transpose(),
+ ],
+ )
+ )
elif self.mf_case == 'model-data':
- X_list_tmp, Y_list_tmp = emf.convert_lists_to_array.convert_xy_lists_to_arrays([self.X_lf, X],
- [self.Y_lf[:, i][np.newaxis].transpose(),Y[:, i][np.newaxis].transpose()])
-
- m_tmp_list[i].set_data(X=X_list_tmp,Y=Y_list_tmp)
+ X_list_tmp, Y_list_tmp = ( # noqa: N806
+ emf.convert_lists_to_array.convert_xy_lists_to_arrays(
+ [self.X_lf, X],
+ [
+ self.Y_lf[:, i][np.newaxis].transpose(),
+ Y[:, i][np.newaxis].transpose(),
+ ],
+ )
+ )
+
+ m_tmp_list[i].set_data(X=X_list_tmp, Y=Y_list_tmp)
if do_cal:
m_list = self.__parameter_calibration(m_tmp_list, x_dim, nugget_opt)
@@ -1129,23 +1281,21 @@ def __design_of_experiments(self, X, Y, ac, ar, n_candi, n_integ, pre_m_list, do
#
# cross validation errors
#
- Y_pred, Y_pred_var, e2 = self.__get_cross_validation(X,Y,m_list)
-
+ Y_pred, Y_pred_var, e2 = self.__get_cross_validation(X, Y, m_list) # noqa: N806
if self.do_logtransform:
-
mu = Y_pred
sig2 = Y_pred_var
median = np.exp(mu)
- mean = np.exp(mu + sig2/2)
- var = np.exp(2*mu + sig2)*(np.exp(sig2)-1)
+ mean = np.exp(mu + sig2 / 2) # noqa: F841
+ var = np.exp(2 * mu + sig2) * (np.exp(sig2) - 1)
- Y_pred = median
- Y_pred_var = var
+ Y_pred = median # noqa: N806
+ Y_pred_var = var # noqa: N806
if self.do_mf:
- if self.mf_case == 'data-model' or self.mf_case == 'data-data':
+ if self.mf_case == 'data-model' or self.mf_case == 'data-data': # noqa: PLR1714
self.Y_hf = np.exp(self.Y_hf)
elif self.mf_case == 'model-data':
self.Y_lf = np.exp(self.Y_lf)
@@ -1168,91 +1318,101 @@ def __design_of_experiments(self, X, Y, ac, ar, n_candi, n_integ, pre_m_list, do
nc1 = round(n_candi)
self.doe_method = self.doe_method.lower()
- if self.doe_method == "pareto":
-
+ if self.doe_method == 'pareto':
#
# Initial candidates
#
xc1 = np.zeros((nc1, x_dim))
for nx in range(x_dim):
- xc1[:, nx] = np.random.uniform(self.xrange[nx, 0], self.xrange[nx, 1], (1, nc1)) # LHS
+ xc1[:, nx] = np.random.uniform(
+ self.xrange[nx, 0], self.xrange[nx, 1], (1, nc1)
+ ) # LHS
nq = round(n_integ)
xq = np.zeros((nq, x_dim))
for nx in range(x_dim):
- xq[:, nx] = np.random.uniform(self.xrange[nx, 0], self.xrange[nx, 1], (1, nq))
+ xq[:, nx] = np.random.uniform(
+ self.xrange[nx, 0], self.xrange[nx, 1], (1, nq)
+ )
#
# Lets Do Pareto
#
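+ # cri1: predictive variance scaled by a local volume-of-influence term; cri2: cross-validation error weighted by closeness to existing samples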
yc1_pred, yc1_var = self.__predict(m_idx, xc1) # use only variance
- score1 = np.zeros(yc1_pred.shape)
+ score1 = np.zeros(yc1_pred.shape) # noqa: F841
cri1 = np.zeros(yc1_pred.shape)
cri2 = np.zeros(yc1_pred.shape)
- # TODO: is this the best?
+ # TODO: is this the best? # noqa: TD002
ll = self.xrange[:, 1] - self.xrange[:, 0]
for i in range(nc1):
if not self.do_mf:
wei = self.weights_node2(xc1[i, :], X, ll)
- #phi = e2[closest_node(xc1[i, :], X, ll)]
- #phi = e2[self.__closest_node(xc1[i, :], X)]
- else:
- if self.mf_case == 'data-model' or self.mf_case == 'data-data':
- wei = self.weights_node2(xc1[i, :], self.X_hf, ll)
- #phi = e2[closest_node(xc1[i, :], self.X_hf, ll)]
- #phi = e2[self.__closest_node(xc1[i, :], self.X_hf)]
- elif self.mf_case == 'model-data':
- wei = self.weights_node2(xc1[i, :], X, ll)
- #phi = e2[closest_node(xc1[i, :], X, ll)]
- #phi = e2[self.__closest_node(xc1[i, :], X)]
-
- #cri1[i] = yc1_var[i]
+ # phi = e2[closest_node(xc1[i, :], X, ll)]
+ # phi = e2[self.__closest_node(xc1[i, :], X)]
+ elif self.mf_case == 'data-model' or self.mf_case == 'data-data': # noqa: PLR1714
+ wei = self.weights_node2(xc1[i, :], self.X_hf, ll)
+ # phi = e2[closest_node(xc1[i, :], self.X_hf, ll)]
+ # phi = e2[self.__closest_node(xc1[i, :], self.X_hf)]
+ elif self.mf_case == 'model-data':
+ wei = self.weights_node2(xc1[i, :], X, ll)
+ # phi = e2[closest_node(xc1[i, :], X, ll)]
+ # phi = e2[self.__closest_node(xc1[i, :], X)]
+
+ # cri1[i] = yc1_var[i]
cri2[i] = sum(e2[:, y_idx] / Y_pred_var[:, y_idx] * wei.T)
- #cri2[i] = pow(phi[y_idx],r)
+ # cri2[i] = pow(phi[y_idx],r)
- VOI = np.zeros(yc1_pred.shape)
+ VOI = np.zeros(yc1_pred.shape) # noqa: N806
for i in range(nc1):
- pdfvals = m_idx.kern.K(np.array([xq[i]]), xq)**2/m_idx.kern.K(np.array([xq[0]]))**2
- VOI[i] = np.mean(pdfvals)*np.prod(np.diff(self.xrange,axis=1)) # * np.prod(np.diff(self.xrange))
+ pdfvals = (
+ m_idx.kern.K(np.array([xq[i]]), xq) ** 2
+ / m_idx.kern.K(np.array([xq[0]])) ** 2
+ )
+ VOI[i] = np.mean(pdfvals) * np.prod(
+ np.diff(self.xrange, axis=1)
+ ) # * np.prod(np.diff(self.xrange))
cri1[i] = yc1_var[i] * VOI[i]
- cri1 = (cri1-np.min(cri1))/(np.max(cri1)-np.min(cri1))
- cri2 = (cri2-np.min(cri2))/(np.max(cri2)-np.min(cri2))
+ cri1 = (cri1 - np.min(cri1)) / (np.max(cri1) - np.min(cri1))
+ cri2 = (cri2 - np.min(cri2)) / (np.max(cri2) - np.min(cri2))
logcrimi1 = np.log(cri1[:, 0])
logcrimi2 = np.log(cri2[:, 0])
-
- idx_pareto_front = list()
+ idx_pareto_front = list() # noqa: C408, F841
rankid = np.zeros(nc1)
- varRank = np.zeros(nc1)
- biasRank = np.zeros(nc1)
- for id in range(nc1):
- idx_tmp = np.argwhere((logcrimi1 >= logcrimi1[id]) * (logcrimi2 >= logcrimi2[id]))
- varRank[id] = np.sum((logcrimi1 >= logcrimi1[id]))
- biasRank[id] = np.sum((logcrimi2 >= logcrimi2[id]))
+ varRank = np.zeros(nc1) # noqa: N806
+ biasRank = np.zeros(nc1) # noqa: N806
+ for id in range(nc1): # noqa: A001
+ idx_tmp = np.argwhere(
+ (logcrimi1 >= logcrimi1[id]) * (logcrimi2 >= logcrimi2[id])
+ )
+ varRank[id] = np.sum(logcrimi1 >= logcrimi1[id])
+ biasRank[id] = np.sum(logcrimi2 >= logcrimi2[id])
rankid[id] = idx_tmp.size
- idx_rank = np.argsort(rankid)
- sort_rank = np.sort(rankid)
- num_1rank = np.sum(rankid==1)
- idx_1rank = list((np.argwhere(rankid==1)).flatten())
- npareto = 4
+ idx_rank = np.argsort(rankid) # noqa: F841
+ sort_rank = np.sort(rankid) # noqa: F841
+ num_1rank = np.sum(rankid == 1)
+ idx_1rank = list((np.argwhere(rankid == 1)).flatten())
+ npareto = 4 # noqa: F841
if num_1rank < self.cal_interval:
prob = np.ones((nc1,))
- prob[list(rankid==1)]=0
- prob=prob/sum(prob)
- idx_pareto = idx_1rank + list(np.random.choice(nc1, self.cal_interval-num_1rank, p=prob))
+ prob[list(rankid == 1)] = 0
+ prob = prob / sum(prob) # noqa: PLR6104
+ idx_pareto = idx_1rank + list(
+ np.random.choice(nc1, self.cal_interval - num_1rank, p=prob)
+ )
else:
idx_pareto_candi = idx_1rank.copy()
- X_tmp = X
- Y_tmp = Y[:,y_idx][np.newaxis].T
+ X_tmp = X # noqa: N806
+ Y_tmp = Y[:, y_idx][np.newaxis].T # noqa: N806
m_tmp = m_idx.copy()
# get MMSEw
- score = np.squeeze(cri1*cri2)
+ score = np.squeeze(cri1 * cri2)
score_candi = score[idx_pareto_candi]
best_local = np.argsort(-score_candi)[0]
best_global = idx_1rank[best_local]
@@ -1260,37 +1420,40 @@ def __design_of_experiments(self, X, Y, ac, ar, n_candi, n_integ, pre_m_list, do
idx_pareto_new = [best_global]
del idx_pareto_candi[best_local]
- for i in range(self.cal_interval-1):
- X_tmp = np.vstack([X_tmp, xc1[best_global, :][np.newaxis]])
- Y_tmp = np.vstack([Y_tmp, np.array([[0]]) ]) # any variables
+ for i in range(self.cal_interval - 1): # noqa: B007
+ X_tmp = np.vstack([X_tmp, xc1[best_global, :][np.newaxis]]) # noqa: N806
+ # placeholder response values; only the input locations affect the predictive variance
+ Y_tmp = np.vstack([Y_tmp, np.array([[0]])]) # noqa: N806
m_tmp.set_XY(X=X_tmp, Y=Y_tmp)
- dummy, Yq_var = m_tmp.predict(xc1[idx_pareto_candi, :])
+ dummy, Yq_var = m_tmp.predict(xc1[idx_pareto_candi, :]) # noqa: N806
cri1 = Yq_var * VOI[idx_pareto_candi]
cri1 = (cri1 - np.min(cri1)) / (np.max(cri1) - np.min(cri1))
- score_tmp = cri1 * cri2[idx_pareto_candi] # only update the variance
+ score_tmp = (
+ cri1 * cri2[idx_pareto_candi]
+ ) # only update the variance
best_local = np.argsort(-np.squeeze(score_tmp))[0]
best_global = idx_pareto_candi[best_local]
- idx_pareto_new = idx_pareto_new + [best_global]
+ idx_pareto_new = idx_pareto_new + [best_global] # noqa: PLR6104, RUF005
del idx_pareto_candi[best_local]
- #score_tmp = Yq_var * cri2[idx_pareto_left]/Y_pred_var[closest_node(xc1[i, :], X, self.m_list, self.xrange)]
+ # score_tmp = Yq_var * cri2[idx_pareto_left]/Y_pred_var[closest_node(xc1[i, :], X, self.m_list, self.xrange)]
- #idx_pareto = list(idx_rank[0:self.cal_interval])
+ # idx_pareto = list(idx_rank[0:self.cal_interval])
idx_pareto = idx_pareto_new
update_point = xc1[idx_pareto, :]
- update_IMSE = 0
-
+ update_IMSE = 0 # noqa: N806
+
# import matplotlib.pyplot as plt
# plt.plot(logcrimi1, logcrimi2, 'x');plt.plot(logcrimi1[idx_pareto], logcrimi2[idx_pareto], 'x'); plt.show()
# plt.plot(m_idx.X[:,0], m_idx.X[:,1], 'x'); plt.show()
- # plt.plot(X[:, 0],X[:, 1], 'ro');
+ # plt.plot(X[:, 0],X[:, 1], 'ro');
# plt.scatter(xc1[:,0], xc1[:,1], c=cri2); plt.plot(xc1[rankid==0,0], xc1[rankid==0,1], 'rx'); plt.show()
- # plt.scatter(xc1[:,0], xc1[:,1], c=cri2); plt.plot(update_point[:,0], update_point[:,1], 'rx'); plt.show()
+ # plt.scatter(xc1[:,0], xc1[:,1], c=cri2); plt.plot(update_point[:,0], update_point[:,1], 'rx'); plt.show()
# plt.scatter(xc1[:, 0], xc1[:, 1], c=cri2); plt.show()
#
- '''
+ """
idx_pareto = list()
for id in range(nc1):
idx_tmp = np.argwhere(logcrimi2 >= logcrimi2[id])
@@ -1305,73 +1468,85 @@ def __design_of_experiments(self, X, Y, ac, ar, n_candi, n_integ, pre_m_list, do
idx_pareto2 = np.asarray(random_indices)
idx_pareto = np.asarray(idx_pareto)
idx_pareto = list(idx_pareto[idx_pareto2[0:self.cal_interval]])
- '''
-
- elif self.doe_method == "imsew":
-
+ """ # noqa: W293
+ elif self.doe_method == 'imsew':
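+ # IMSEw: tentatively add each candidate to the GP and keep the one that minimizes the error-weighted integrated variance over the integration points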
nq = round(n_integ)
m_stack = m_idx.copy()
- X_stack = X
- Y_stack = Y
+ X_stack = X # noqa: N806
+ Y_stack = Y # noqa: N806
- update_point = np.zeros((self.cal_interval,self.x_dim))
- update_IMSE = np.zeros((self.cal_interval,1))
+ update_point = np.zeros((self.cal_interval, self.x_dim))
+ update_IMSE = np.zeros((self.cal_interval, 1)) # noqa: N806
#
# Initial candidates
#
for ni in range(self.cal_interval):
-
#
# Initial candidates
#
xc1 = np.zeros((nc1, x_dim))
for nx in range(x_dim):
- xc1[:, nx] = np.random.uniform(self.xrange[nx, 0], self.xrange[nx, 1], (1, nc1)) # LHS
+ xc1[:, nx] = np.random.uniform(
+ self.xrange[nx, 0], self.xrange[nx, 1], (1, nc1)
+ ) # LHS
xq = np.zeros((nq, x_dim))
for nx in range(x_dim):
- xq[:, nx] = np.random.uniform(self.xrange[nx, 0], self.xrange[nx, 1], (1, nq))
+ xq[:, nx] = np.random.uniform(
+ self.xrange[nx, 0], self.xrange[nx, 1], (1, nq)
+ )
- #TODO: is diff(xrange) the best?
+ # TODO: is diff(xrange) the best? # noqa: TD002
ll = self.xrange[:, 1] - self.xrange[:, 0]
phiq = np.zeros((nq, y_dim))
for i in range(nq):
- phiq[i,:] = e2[closest_node(xq[i, :], X, ll)]
+ phiq[i, :] = e2[closest_node(xq[i, :], X, ll)]
phiqr = pow(phiq[:, y_idx], r)
if self.do_parallel:
tmp = time.time()
- iterables = ((m_stack.copy(), xc1[i,:][np.newaxis], xq, phiqr, i) for i in range(nc1))
+ iterables = (
+ (m_stack.copy(), xc1[i, :][np.newaxis], xq, phiqr, i)
+ for i in range(nc1)
+ )
result_objs = list(self.pool.starmap(imse, iterables))
- IMSEc1 = np.zeros(nc1)
- for IMSE_val, idx in result_objs:
+ IMSEc1 = np.zeros(nc1) # noqa: N806
+ for IMSE_val, idx in result_objs: # noqa: N806
IMSEc1[idx] = IMSE_val
- print("IMSE: finding the next DOE {} in a parallel way.. time = {}".format(ni,time.time() -tmp)) # 7s # 3-4s
+ print( # noqa: T201
+ f'IMSE: finding the next DoE point {ni} in parallel... time = {time.time() - tmp}'
+ ) # 7s # 3-4s
else:
tmp = time.time()
phiqr = pow(phiq[:, y_idx], r)
- IMSEc1 = np.zeros(nc1)
+ IMSEc1 = np.zeros(nc1) # noqa: N806
for i in range(nc1):
- IMSEc1[i], dummy = imse(m_stack.copy(), xc1[i,:][np.newaxis], xq, phiqr, i)
- print("IMSE: finding the next DOE {} in a serial way.. time = {}".format(ni,time.time() -tmp)) # 4s
+ IMSEc1[i], dummy = imse( # noqa: F841
+ m_stack.copy(), xc1[i, :][np.newaxis], xq, phiqr, i
+ )
+ print( # noqa: T201
+ f'IMSE: finding the next DoE point {ni} serially... time = {time.time() - tmp}'
+ ) # 4s
new_idx = np.argmin(IMSEc1, axis=0)
x_point = xc1[new_idx, :][np.newaxis]
- X_stack = np.vstack([X_stack, x_point])
- Y_stack = np.zeros((Y_stack.shape[0] + 1, Y.shape[1])) # any variables
+ X_stack = np.vstack([X_stack, x_point]) # noqa: N806
+ Y_stack = np.zeros( # noqa: N806
+ (Y_stack.shape[0] + 1, Y.shape[1])
+ ) # placeholder responses; only the input locations affect the predictive variance
m_stack.set_XY(X=X_stack, Y=Y_stack)
update_point[ni, :] = x_point
update_IMSE[ni, :] = IMSEc1[new_idx]
# import matplotlib.pyplot as plt; plt.scatter(xc1[:,0],xc1[:,1],c = IMSEc1); plt.show()
- # import matplotlib.pyplot as plt; plt.scatter(xc1[:,0],xc1[:,1],c = IMSEc1); plt.plot(update_point[:,0],update_point[:,1],'x'); plt.show()
+ # import matplotlib.pyplot as plt; plt.scatter(xc1[:,0],xc1[:,1],c = IMSEc1); plt.plot(update_point[:,0],update_point[:,1],'x'); plt.show()
# import matplotlib.pyplot as plt; plt.scatter(X_stack[:,0],X_stack[:,1]); plt.show()
- '''
+ """
nc1 = round(n_candi)
xc1 = np.zeros((nc1, x_dim))
@@ -1452,29 +1627,31 @@ def __design_of_experiments(self, X, Y, ac, ar, n_candi, n_integ, pre_m_list, do
update_point = xc3[new_idx, :][np.newaxis]
update_IMSE = IMSE[new_idx]
- '''
+ """ # noqa: W293
- elif self.doe_method == "random":
+ elif self.doe_method == 'random':
+ update_point = xc1[0 : self.cal_interval, :]
+ update_IMSE = 0 # noqa: N806
- update_point = xc1[0:self.cal_interval, :]
- update_IMSE = 0
-
- elif self.doe_method == "mmse":
- sort_idx_score1 = np.argsort(-cri1.T) # (-) sign to make it descending order
+ elif self.doe_method == 'mmse':
+ sort_idx_score1 = np.argsort(
+ -cri1.T
+ ) # (-) sign to make it descending order
nc2 = round(nc1 * ac)
xc2 = xc1[sort_idx_score1[0, 0:nc2], :]
update_point = xc2[0:1, :]
- update_IMSE = 0
-
- elif self.doe_method == "mmsew":
+ update_IMSE = 0 # noqa: N806
+ elif self.doe_method == 'mmsew':
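+ # MMSEw: greedily pick the candidates with the largest error-weighted predictive variance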
#
# Initial candidates
#
xc1 = np.zeros((nc1, x_dim))
for nx in range(x_dim):
- xc1[:, nx] = np.random.uniform(self.xrange[nx, 0], self.xrange[nx, 1], (1, nc1)) # LHS
+ xc1[:, nx] = np.random.uniform(
+ self.xrange[nx, 0], self.xrange[nx, 1], (1, nc1)
+ ) # LHS
m_stack = m_idx.copy()
ll = self.xrange[:, 1] - self.xrange[:, 0]
@@ -1484,38 +1661,43 @@ def __design_of_experiments(self, X, Y, ac, ar, n_candi, n_integ, pre_m_list, do
phicr = pow(phic[:, y_idx], r)
- X_stack = X
- Y_stack = Y
+ X_stack = X # noqa: N806
+ Y_stack = Y # noqa: N806
- update_point = np.zeros((self.cal_interval,self.x_dim))
- update_IMSE = np.zeros((self.cal_interval,1))
+ update_point = np.zeros((self.cal_interval, self.x_dim))
+ update_IMSE = np.zeros((self.cal_interval, 1)) # noqa: N806
for ni in range(self.cal_interval):
yc1_pred, yc1_var = m_stack.predict(xc1) # use only variance
- MMSEc1 = yc1_var.flatten() * phicr.flatten()
-
+ MMSEc1 = yc1_var.flatten() * phicr.flatten() # noqa: N806
new_idx = np.argmax(MMSEc1, axis=0)
x_point = xc1[new_idx, :][np.newaxis]
- X_stack = np.vstack([X_stack, x_point])
- Y_stack = np.zeros((Y_stack.shape[0] + 1, Y.shape[1])) # any variables
+ X_stack = np.vstack([X_stack, x_point]) # noqa: N806
+ Y_stack = np.zeros( # noqa: N806
+ (Y_stack.shape[0] + 1, Y.shape[1])
+ ) # any variables
m_stack.set_XY(X=X_stack, Y=Y_stack)
update_point[ni, :] = x_point
update_IMSE[ni, :] = MMSEc1[new_idx]
else:
- msg = 'Error running SimCenterUQ: cannot identify the doe method <' + self.doe_method + '>'
- errlog.exit(msg)
+ msg = (
+ 'Error running SimCenterUQ: cannot identify the doe method <'
+ + self.doe_method
+ + '>'
+ )
+ errlog.exit(msg) # noqa: F821
return update_point, m_list, update_IMSE, y_idx, Y_pred, Y_pred_var
- def __normalized_mean_sq_error(self, yp, ye):
+ def __normalized_mean_sq_error(self, yp, ye): # noqa: PLR6301
nt = yp.shape[0]
- data_bound = (np.max(ye, axis=0) - np.min(ye, axis=0))
- RMSE = np.sqrt(1 / nt * np.sum(pow(yp - ye, 2), axis=0))
- NRMSE =RMSE/data_bound
- NRMSE[np.argwhere((data_bound ==0))]=0
+ data_bound = np.max(ye, axis=0) - np.min(ye, axis=0)
+ RMSE = np.sqrt(1 / nt * np.sum(pow(yp - ye, 2), axis=0)) # noqa: N806
+ NRMSE = RMSE / data_bound # noqa: N806
+ NRMSE[np.argwhere(data_bound == 0)] = 0
return NRMSE
def __closest_node(self, node, nodes):
@@ -1524,216 +1706,294 @@ def __closest_node(self, node, nodes):
deltas_norm = np.zeros(deltas.shape)
for nx in range(self.x_dim):
- deltas_norm[:, nx] = (deltas[:, nx]) / (self.xrange[nx, 1] - self.xrange[nx, 0]) # additional weights?
+ deltas_norm[:, nx] = (deltas[:, nx]) / (
+ self.xrange[nx, 1] - self.xrange[nx, 0]
+ ) # additional weights?
# np.argmin(np.sum(pow(deltas_norm,2),axis=1))
dist_2 = np.einsum('ij,ij->i', deltas_norm, deltas_norm)
return np.argmin(dist_2)
- def __from_XY_into_list(self, X, Y):
- x_list = list()
- y_list = list()
+ def __from_XY_into_list(self, X, Y): # noqa: N802, N803, PLR6301
+ x_list = list() # noqa: C408
+ y_list = list() # noqa: C408
for i in range(Y.shape[1]):
- x_list = x_list + [X, ]
- y_list = y_list + [Y[:, [i, ]], ]
+ x_list = x_list + [ # noqa: PLR6104, RUF005
+ X,
+ ]
+ y_list = y_list + [ # noqa: PLR6104, RUF005
+ Y[
+ :,
+ [
+ i,
+ ],
+ ],
+ ]
return x_list, y_list
- def __predict(self, m, X):
-
- if not self.do_mf:
+ def __predict(self, m, X): # noqa: N803
+ if not self.do_mf: # noqa: RET503
return m.predict(X)
- else:
- if self.mf_case == 'data-model' or self.mf_case == 'data-data':
- X_list = convert_x_list_to_array([X, X])
- X_list_l = X_list[:X.shape[0]]
- X_list_h = X_list[X.shape[0]:]
- return m.predict(X_list_h)
- elif self.mf_case == 'model-data':
- #return m.predict(X)
- X_list = convert_x_list_to_array([X, X])
- X_list_l = X_list[:X.shape[0]]
- X_list_h = X_list[X.shape[0]:]
- return m.predict(X_list_h)
-
-
-
- def __get_cross_validation(self,X,Y,m_list):
-
+ elif self.mf_case == 'data-model' or self.mf_case == 'data-data': # noqa: RET505, PLR1714
+ X_list = convert_x_list_to_array([X, X]) # noqa: N806
+ X_list_l = X_list[: X.shape[0]] # noqa: N806
+ X_list_h = X_list[X.shape[0] :] # noqa: N806
+ return m.predict(X_list_h)
+ elif self.mf_case == 'model-data':
+ # return m.predict(X)
+ X_list = convert_x_list_to_array([X, X]) # noqa: N806
+ X_list_l = X_list[: X.shape[0]] # noqa: N806, F841
+ X_list_h = X_list[X.shape[0] :] # noqa: N806
+ return m.predict(X_list_h)
+
+ def __get_cross_validation(self, X, Y, m_list): # noqa: N803
if not self.do_mf:
e2 = np.zeros(Y.shape)
- Y_pred = np.zeros(Y.shape)
- Y_pred_var = np.zeros(Y.shape)
+ Y_pred = np.zeros(Y.shape) # noqa: N806
+ Y_pred_var = np.zeros(Y.shape) # noqa: N806
for ny in range(Y.shape[1]):
m_tmp = m_list[ny].copy()
for ns in range(X.shape[0]):
- X_tmp = np.delete(X, ns, axis=0)
- Y_tmp = np.delete(Y, ns, axis=0)
+ X_tmp = np.delete(X, ns, axis=0) # noqa: N806
+ Y_tmp = np.delete(Y, ns, axis=0) # noqa: N806
m_tmp.set_XY(X=X_tmp, Y=Y_tmp[:, ny][np.newaxis].transpose())
x_loo = X[ns, :][np.newaxis]
# Y_pred_tmp, Y_err_tmp = m_tmp.predict(x_loo)
- Y_pred_tmp, Y_err_tmp = self.__predict(m_tmp,x_loo)
+ Y_pred_tmp, Y_err_tmp = self.__predict(m_tmp, x_loo) # noqa: N806
Y_pred[ns, ny] = Y_pred_tmp
Y_pred_var[ns, ny] = Y_err_tmp
- e2[ns, ny] = pow((Y_pred[ns, ny] - Y[ns, ny]), 2) # for nD outputs
+ e2[ns, ny] = pow(
+ (Y_pred[ns, ny] - Y[ns, ny]), 2
+ ) # for nD outputs
+ elif self.mf_case == 'data-model' or self.mf_case == 'data-data': # noqa: PLR1714
+ e2 = np.zeros(self.Y_hf.shape)
+ Y_pred = np.zeros(self.Y_hf.shape) # noqa: N806
+ Y_pred_var = np.zeros(self.Y_hf.shape) # noqa: N806
- else:
- if self.mf_case == 'data-model' or self.mf_case == 'data-data':
- e2 = np.zeros(self.Y_hf.shape)
- Y_pred = np.zeros(self.Y_hf.shape)
- Y_pred_var = np.zeros(self.Y_hf.shape)
-
- for ny in range(Y.shape[1]):
- m_tmp = deepcopy(m_list[ny])
- for ns in range(self.X_hf.shape[0]):
- X_hf_tmp = np.delete(self.X_hf, ns, axis=0)
- Y_hf_tmp = np.delete(self.Y_hf, ns, axis=0)
- X_list_tmp, Y_list_tmp = emf.convert_lists_to_array.convert_xy_lists_to_arrays([X, X_hf_tmp],
- [Y[:, ny][np.newaxis].transpose(), Y_hf_tmp[:, ny][np.newaxis].transpose()])
- m_tmp.set_data(X=X_list_tmp, Y=Y_list_tmp)
- x_loo = self.X_hf[ns][np.newaxis]
- Y_pred_tmp, Y_err_tmp = self.__predict(m_tmp,x_loo)
- Y_pred[ns,ny] = Y_pred_tmp
- Y_pred_var[ns,ny] = Y_err_tmp
- e2[ns,ny] = pow((Y_pred[ns,ny] - self.Y_hf[ns,ny]), 2) # for nD outputs
+ for ny in range(Y.shape[1]):
+ m_tmp = deepcopy(m_list[ny])
+ for ns in range(self.X_hf.shape[0]):
+ X_hf_tmp = np.delete(self.X_hf, ns, axis=0) # noqa: N806
+ Y_hf_tmp = np.delete(self.Y_hf, ns, axis=0) # noqa: N806
+ X_list_tmp, Y_list_tmp = ( # noqa: N806
+ emf.convert_lists_to_array.convert_xy_lists_to_arrays(
+ [X, X_hf_tmp],
+ [
+ Y[:, ny][np.newaxis].transpose(),
+ Y_hf_tmp[:, ny][np.newaxis].transpose(),
+ ],
+ )
+ )
+ m_tmp.set_data(X=X_list_tmp, Y=Y_list_tmp)
+ x_loo = self.X_hf[ns][np.newaxis]
+ Y_pred_tmp, Y_err_tmp = self.__predict(m_tmp, x_loo) # noqa: N806
+ Y_pred[ns, ny] = Y_pred_tmp
+ Y_pred_var[ns, ny] = Y_err_tmp
+ e2[ns, ny] = pow(
+ (Y_pred[ns, ny] - self.Y_hf[ns, ny]), 2
+ ) # for nD outputs
- elif self.mf_case == 'model-data':
- e2 = np.zeros(Y.shape)
- Y_pred = np.zeros(Y.shape)
- Y_pred_var = np.zeros(Y.shape)
-
- for ny in range(Y.shape[1]):
- m_tmp = deepcopy(m_list[ny])
- for ns in range(X.shape[0]):
- X_tmp = np.delete(X, ns, axis=0)
- Y_tmp = np.delete(Y, ns, axis=0)
- X_list_tmp, Y_list_tmp = emf.convert_lists_to_array.convert_xy_lists_to_arrays([self.X_lf, X_tmp],
- [self.Y_lf[:, ny][np.newaxis].transpose(), Y_tmp[:, ny][np.newaxis].transpose()])
- m_tmp.set_data(X=X_list_tmp, Y=Y_list_tmp)
- #x_loo = np.hstack((X[ns], 1))[np.newaxis]
- x_loo = self.X_hf[ns][np.newaxis]
- Y_pred_tmp, Y_err_tmp = self.__predict(m_tmp,x_loo)
- Y_pred[ns,ny] = Y_pred_tmp
- Y_pred_var[ns,ny] = Y_err_tmp
- e2[ns,ny] = pow((Y_pred[ns,ny] - Y[ns,ny]), 2) # for nD outputs
+ elif self.mf_case == 'model-data':
+ e2 = np.zeros(Y.shape)
+ Y_pred = np.zeros(Y.shape) # noqa: N806
+ Y_pred_var = np.zeros(Y.shape) # noqa: N806
+
+ for ny in range(Y.shape[1]):
+ m_tmp = deepcopy(m_list[ny])
+ for ns in range(X.shape[0]):
+ X_tmp = np.delete(X, ns, axis=0) # noqa: N806
+ Y_tmp = np.delete(Y, ns, axis=0) # noqa: N806
+ X_list_tmp, Y_list_tmp = ( # noqa: N806
+ emf.convert_lists_to_array.convert_xy_lists_to_arrays(
+ [self.X_lf, X_tmp],
+ [
+ self.Y_lf[:, ny][np.newaxis].transpose(),
+ Y_tmp[:, ny][np.newaxis].transpose(),
+ ],
+ )
+ )
+ m_tmp.set_data(X=X_list_tmp, Y=Y_list_tmp)
+ # x_loo = np.hstack((X[ns], 1))[np.newaxis]
+ x_loo = self.X_hf[ns][np.newaxis]
+ Y_pred_tmp, Y_err_tmp = self.__predict(m_tmp, x_loo) # noqa: N806
+ Y_pred[ns, ny] = Y_pred_tmp
+ Y_pred_var[ns, ny] = Y_err_tmp
+ e2[ns, ny] = pow(
+ (Y_pred[ns, ny] - Y[ns, ny]), 2
+ ) # for nD outputs
return Y_pred, Y_pred_var, e2
- def term(self):
+ def term(self): # noqa: D102
if self.do_parallel:
- if self.run_type != "runningLocal":
- print("RUNNING SUCCESSFUL")
- self.world.Abort(0) # to prevent deadlock
-
+ if self.run_type != 'runningLocal':
+ print('RUNNING SUCCESSFUL') # noqa: T201
+ self.world.Abort(0) # to prevent deadlock
- def save_model(self, filename):
- import json
+ def save_model(self, filename): # noqa: C901, D102, PLR0915
+ import json # noqa: PLC0415
- with open(self.work_dir + '/' + filename + '.pkl', 'wb') as file:
+ with open(self.work_dir + '/' + filename + '.pkl', 'wb') as file: # noqa: PTH123
pickle.dump(self.m_list, file)
# json.dump(self.m_list, file)
-
header_string_x = ' ' + ' '.join([str(elem) for elem in self.rv_name]) + ' '
header_string_y = ' ' + ' '.join([str(elem) for elem in self.g_name])
header_string = header_string_x + header_string_y
if not self.do_mf:
- xy_data = np.concatenate((np.asmatrix(np.arange(1, self.X.shape[0] + 1)).T, self.X, self.Y), axis=1)
+ xy_data = np.concatenate(
+ (np.asmatrix(np.arange(1, self.X.shape[0] + 1)).T, self.X, self.Y),
+ axis=1,
+ )
+ elif not self.hf_is_model:
+ xy_data = np.concatenate(
+ (
+ np.asmatrix(np.arange(1, self.X_hf.shape[0] + 1)).T,
+ self.X_hf,
+ self.Y_hf,
+ ),
+ axis=1,
+ )
else:
- if not self.hf_is_model:
- xy_data = np.concatenate((np.asmatrix(np.arange(1, self.X_hf.shape[0] + 1)).T, self.X_hf, self.Y_hf), axis=1)
- else:
- xy_data = np.concatenate((np.asmatrix(np.arange(1, self.X.shape[0] + 1)).T, self.X, self.Y), axis=1)
- np.savetxt(self.work_dir + '/dakotaTab.out', xy_data, header=header_string, fmt='%1.4e', comments='%')
- np.savetxt(self.work_dir + '/inputTab.out', self.X, header=header_string_x, fmt='%1.4e', comments='%')
- np.savetxt(self.work_dir + '/outputTab.out', self.Y, header=header_string_y, fmt='%1.4e', comments='%')
+ xy_data = np.concatenate(
+ (
+ np.asmatrix(np.arange(1, self.X.shape[0] + 1)).T,
+ self.X,
+ self.Y,
+ ),
+ axis=1,
+ )
+ np.savetxt(
+ self.work_dir + '/dakotaTab.out',
+ xy_data,
+ header=header_string,
+ fmt='%1.4e',
+ comments='%',
+ )
+ np.savetxt(
+ self.work_dir + '/inputTab.out',
+ self.X,
+ header=header_string_x,
+ fmt='%1.4e',
+ comments='%',
+ )
+ np.savetxt(
+ self.work_dir + '/outputTab.out',
+ self.Y,
+ header=header_string_y,
+ fmt='%1.4e',
+ comments='%',
+ )
y_ub = np.zeros(self.Y_loo.shape)
y_lb = np.zeros(self.Y_loo.shape)
-
if not self.do_logtransform:
for ny in range(self.y_dim):
- y_lb[:,ny] = norm.ppf(0.05, loc=self.Y_loo[:, ny],
- scale=np.sqrt(self.Y_loo_var[:, ny])).tolist()
- y_ub[:, ny] = norm.ppf(0.95, loc=self.Y_loo[:, ny],
- scale=np.sqrt(self.Y_loo_var[:, ny])).tolist()
+ y_lb[:, ny] = norm.ppf(
+ 0.05, loc=self.Y_loo[:, ny], scale=np.sqrt(self.Y_loo_var[:, ny])
+ ).tolist()
+ y_ub[:, ny] = norm.ppf(
+ 0.95, loc=self.Y_loo[:, ny], scale=np.sqrt(self.Y_loo_var[:, ny])
+ ).tolist()
else:
for ny in range(self.y_dim):
mu = np.log(self.Y_loo[:, ny])
- sig = np.sqrt(np.log(self.Y_loo_var[:, ny] / pow(self.Y_loo[:, ny], 2) + 1))
- y_lb[:,ny] = lognorm.ppf(0.05, s=sig, scale=np.exp(mu)).tolist()
+ sig = np.sqrt(
+ np.log(self.Y_loo_var[:, ny] / pow(self.Y_loo[:, ny], 2) + 1)
+ )
+ y_lb[:, ny] = lognorm.ppf(0.05, s=sig, scale=np.exp(mu)).tolist()
y_ub[:, ny] = lognorm.ppf(0.95, s=sig, scale=np.exp(mu)).tolist()
- xy_sur_data = np.hstack((xy_data,self.Y_loo,y_lb,y_ub,self.Y_loo_var))
- g_name_sur = self.g_name
- header_string_sur = header_string + " " + ".median ".join(
- g_name_sur) + ".median " + ".q5 ".join(g_name_sur) + ".q5 " + ".q95 ".join(
- g_name_sur) + ".q95 " + ".var ".join(g_name_sur) + ".var"
-
- np.savetxt(self.work_dir + '/surrogateTab.out', xy_sur_data, header=header_string_sur, fmt='%1.4e', comments='%')
-
-
+ xy_sur_data = np.hstack((xy_data, self.Y_loo, y_lb, y_ub, self.Y_loo_var))
+ g_name_sur = self.g_name
+ header_string_sur = (
+ header_string
+ + ' '
+ + '.median '.join(g_name_sur)
+ + '.median '
+ + '.q5 '.join(g_name_sur)
+ + '.q5 '
+ + '.q95 '.join(g_name_sur)
+ + '.q95 '
+ + '.var '.join(g_name_sur)
+ + '.var'
+ )
+
+ np.savetxt(
+ self.work_dir + '/surrogateTab.out',
+ xy_sur_data,
+ header=header_string_sur,
+ fmt='%1.4e',
+ comments='%',
+ )
results = {}
- results["doSampling"] = self.do_sampling
- results["doSimulation"] = self.do_simulation
- results["doDoE"] = self.do_doe
- results["doLogtransform"] = self.do_logtransform
- results["doLinear"] = self.do_linear
- results["doMultiFidelity"] = self.do_mf
- results["kernName"] = self.kernel
- results["terminationCode"] = self.exit_code
- results["thrNRMSE"] = self.thr_NRMSE
- results["valSamp"] = self.n_samp
- results["valSim"] = self.n_sim
- results["valTime"] = self.sim_time
- results["xdim"] = self.x_dim
- results["ydim"] = self.y_dim
- results["xlabels"] = self.rv_name
- results["ylabels"] = self.g_name
- results["yExact"] = {}
- results["yPredict"] = {}
- results["valNugget"] = {}
- results["valNRMSE"] = {}
- results["valR2"] = {}
- results["valCorrCoeff"] = {}
- results["yPredict_CI_lb"] = {}
- results["yPredict_CI_ub"] = {}
- results["xExact"] = {}
+ results['doSampling'] = self.do_sampling
+ results['doSimulation'] = self.do_simulation
+ results['doDoE'] = self.do_doe
+ results['doLogtransform'] = self.do_logtransform
+ results['doLinear'] = self.do_linear
+ results['doMultiFidelity'] = self.do_mf
+ results['kernName'] = self.kernel
+ results['terminationCode'] = self.exit_code
+ results['thrNRMSE'] = self.thr_NRMSE
+ results['valSamp'] = self.n_samp
+ results['valSim'] = self.n_sim
+ results['valTime'] = self.sim_time
+ results['xdim'] = self.x_dim
+ results['ydim'] = self.y_dim
+ results['xlabels'] = self.rv_name
+ results['ylabels'] = self.g_name
+ results['yExact'] = {}
+ results['yPredict'] = {}
+ results['valNugget'] = {}
+ results['valNRMSE'] = {}
+ results['valR2'] = {}
+ results['valCorrCoeff'] = {}
+ results['yPredict_CI_lb'] = {}
+ results['yPredict_CI_ub'] = {}
+ results['xExact'] = {}
for nx in range(self.x_dim):
- results["xExact"][self.rv_name[nx]] = self.X[:, nx].tolist()
+ results['xExact'][self.rv_name[nx]] = self.X[:, nx].tolist()
for ny in range(self.y_dim):
if not self.do_mf:
- results["yExact"][self.g_name[ny]] = self.Y[:, ny].tolist()
- else:
- if self.mf_case == 'data-model' or self.mf_case == 'data-data':
- results["yExact"][self.g_name[ny]] = self.Y_hf[:, ny].tolist()
- elif self.mf_case == 'model-data':
- results["yExact"][self.g_name[ny]] = self.Y[:, ny].tolist()
+ results['yExact'][self.g_name[ny]] = self.Y[:, ny].tolist()
+ elif self.mf_case == 'data-model' or self.mf_case == 'data-data': # noqa: PLR1714
+ results['yExact'][self.g_name[ny]] = self.Y_hf[:, ny].tolist()
+ elif self.mf_case == 'model-data':
+ results['yExact'][self.g_name[ny]] = self.Y[:, ny].tolist()
- results["yPredict"][self.g_name[ny]] = self.Y_loo[:, ny].tolist()
+ results['yPredict'][self.g_name[ny]] = self.Y_loo[:, ny].tolist()
if not self.do_logtransform:
- #results["yPredict_CI_lb"][self.g_name[ny]] = self.Y_loo[:, ny].tolist()+2*np.sqrt(self.Y_loo_var[:, ny]).tolist()
- #results["yPredict_CI_lb"][self.g_name[ny]] = self.Y_loo[:, ny].tolist()-2*np.sqrt(self.Y_loo_var[:, ny]).tolist()
+ # results["yPredict_CI_lb"][self.g_name[ny]] = self.Y_loo[:, ny].tolist()+2*np.sqrt(self.Y_loo_var[:, ny]).tolist()
+ # results["yPredict_CI_lb"][self.g_name[ny]] = self.Y_loo[:, ny].tolist()-2*np.sqrt(self.Y_loo_var[:, ny]).tolist()
- results["yPredict_CI_lb"][self.g_name[ny]] = norm.ppf(0.25, loc = self.Y_loo[:, ny] , scale = np.sqrt(self.Y_loo_var[:, ny])).tolist()
- results["yPredict_CI_ub"][self.g_name[ny]] = norm.ppf(0.75, loc = self.Y_loo[:, ny] , scale = np.sqrt(self.Y_loo_var[:, ny])).tolist()
+ results['yPredict_CI_lb'][self.g_name[ny]] = norm.ppf(
+ 0.25, loc=self.Y_loo[:, ny], scale=np.sqrt(self.Y_loo_var[:, ny])
+ ).tolist()
+ results['yPredict_CI_ub'][self.g_name[ny]] = norm.ppf(
+ 0.75, loc=self.Y_loo[:, ny], scale=np.sqrt(self.Y_loo_var[:, ny])
+ ).tolist()
else:
+ mu = np.log(self.Y_loo[:, ny])
+ sig = np.sqrt(
+ np.log(self.Y_loo_var[:, ny] / pow(self.Y_loo[:, ny], 2) + 1)
+ )
- mu = np.log(self.Y_loo[:, ny] )
- sig = np.sqrt(np.log(self.Y_loo_var[:, ny]/pow(self.Y_loo[:, ny] ,2)+1))
-
- results["yPredict_CI_lb"][self.g_name[ny]] = lognorm.ppf(0.25, s = sig, scale = np.exp(mu)).tolist()
- results["yPredict_CI_ub"][self.g_name[ny]] = lognorm.ppf(0.75, s = sig, scale = np.exp(mu)).tolist()
-
-
+ results['yPredict_CI_lb'][self.g_name[ny]] = lognorm.ppf(
+ 0.25, s=sig, scale=np.exp(mu)
+ ).tolist()
+ results['yPredict_CI_ub'][self.g_name[ny]] = lognorm.ppf(
+ 0.75, s=sig, scale=np.exp(mu)
+ ).tolist()
# if self.do_logtransform:
# log_mean = 0
@@ -1741,13 +2001,15 @@ def save_model(self, filename):
# nuggetVal_linear = np.exp(2*log_mean+log_var)*(np.exp(log_var)-1) # in linear space
if self.do_mf:
- #results["valNugget"][self.g_name[ny]] = float(self.m_list[ny].gpy_model['Gaussian_noise.variance'])
+ # results["valNugget"][self.g_name[ny]] = float(self.m_list[ny].gpy_model['Gaussian_noise.variance'])
pass
else:
- results["valNugget"][self.g_name[ny]] = float(self.m_list[ny]['Gaussian_noise.variance'])
- results["valNRMSE"][self.g_name[ny]] = self.NRMSE_val[ny]
- results["valR2"][self.g_name[ny]] = self.R2_val[ny]
- results["valCorrCoeff"][self.g_name[ny]] = self.corr_val[ny]
+ results['valNugget'][self.g_name[ny]] = float(
+ self.m_list[ny]['Gaussian_noise.variance']
+ )
+ results['valNRMSE'][self.g_name[ny]] = self.NRMSE_val[ny]
+ results['valR2'][self.g_name[ny]] = self.R2_val[ny]
+ results['valCorrCoeff'][self.g_name[ny]] = self.corr_val[ny]
# if np.isnan(self.NRMSE_val[ny]):
# results["valNRMSE"][self.g_name[ny]] = 0
@@ -1757,118 +2019,124 @@ def save_model(self, filename):
# results["valCorrCoeff"][self.g_name[ny]] = 0
if self.do_simulation:
- results["predError"] = {}
- results["predError"]["percent"] = self.perc_thr.tolist()
- results["predError"]["value"] = self.perc_val.tolist()
- results["fem"] = {}
- results["fem"]["workflow_driver"] = self.workflowDriver
- #results["fem"]["postprocessScript"] = self.postFile
- #results["fem"]["program"] = self.appName
+ results['predError'] = {}
+ results['predError']['percent'] = self.perc_thr.tolist()
+ results['predError']['value'] = self.perc_val.tolist()
+ results['fem'] = {}
+ results['fem']['workflow_driver'] = self.workflowDriver
+ # results["fem"]["postprocessScript"] = self.postFile
+ # results["fem"]["program"] = self.appName
if self.do_sampling:
if self.use_existing:
- results["inpData"] = self.inpData
- results["outData"] = self.outData
+ results['inpData'] = self.inpData
+ results['outData'] = self.outData
else:
- results["inpData"] = self.inpData
+ results['inpData'] = self.inpData
if not self.do_simulation:
- results["outData"] = self.outData
+ results['outData'] = self.outData
if self.do_mf:
- if self.mf_case == 'data-model' or self.mf_case == 'data-data':
- results["inpData_HF"] = self.inpData_hf
- results["outData_HF"] = self.outData_hf
- results["valSamp_HF"] = self.X_hf.shape[0]
+ if self.mf_case == 'data-model' or self.mf_case == 'data-data': # noqa: PLR1714
+ results['inpData_HF'] = self.inpData_hf
+ results['outData_HF'] = self.outData_hf
+ results['valSamp_HF'] = self.X_hf.shape[0]
elif self.mf_case == 'model-data':
- results["inpData_LF"] = self.inpData_lf
- results["outData_LF"] = self.outData_lf
- results["valSamp_LF"] = self.X_lf.shape[0]
+ results['inpData_LF'] = self.inpData_lf
+ results['outData_LF'] = self.outData_lf
+ results['valSamp_LF'] = self.X_lf.shape[0]
rv_list = []
for nx in range(self.x_dim):
rvs = {}
- rvs["name"] = self.rvName[nx]
- rvs["distribution"] = self.rvDist[nx]
- rvs["value"] = self.rvVal[nx]
- rv_list = rv_list + [rvs]
- results["randomVariables"] = rv_list
-
+ rvs['name'] = self.rvName[nx]
+ rvs['distribution'] = self.rvDist[nx]
+ rvs['value'] = self.rvVal[nx]
+ rv_list = rv_list + [rvs] # noqa: PLR6104, RUF005
+ results['randomVariables'] = rv_list
-
- ### Used for surrogate
- results["modelInfo"] = {}
+ # Used for surrogate
+ results['modelInfo'] = {}
if not self.do_mf:
for ny in range(self.y_dim):
- results["modelInfo"][self.g_name[ny]] = {}
+ results['modelInfo'][self.g_name[ny]] = {}
for parname in self.m_list[ny].parameter_names():
- results["modelInfo"][self.g_name[ny]][parname] = list(eval('self.m_list[ny].' + parname))
-
+ results['modelInfo'][self.g_name[ny]][parname] = list(
+ eval('self.m_list[ny].' + parname) # noqa: S307
+ )
- with open(self.work_dir + '/dakota.out', 'w') as fp:
+ with open(self.work_dir + '/dakota.out', 'w') as fp: # noqa: PLW1514, PTH123
json.dump(results, fp, indent=1)
- with open(self.work_dir + '/GPresults.out', 'w') as file:
-
+ with open(self.work_dir + '/GPresults.out', 'w') as file: # noqa: PLR1702, PLW1514, PTH123
file.write('* Problem setting\n')
- file.write(' - dimension of x : {}\n'.format(self.x_dim))
- file.write(' - dimension of y : {}\n'.format(self.y_dim))
- file.write(" - sampling : {}\n".format(self.do_sampling))
- file.write(" - simulation : {}\n".format(self.do_simulation))
+ file.write(f' - dimension of x : {self.x_dim}\n')
+ file.write(f' - dimension of y : {self.y_dim}\n')
+ file.write(f' - sampling : {self.do_sampling}\n')
+ file.write(f' - simulation : {self.do_simulation}\n')
if self.do_doe:
- file.write(" - design of experiments : {} \n".format(self.do_doe))
+ file.write(f' - design of experiments : {self.do_doe} \n')
if not self.do_doe:
if self.do_simulation and self.do_sampling:
file.write(
- " - design of experiments (DoE) turned off - DoE evaluation time exceeds the model simulation time \n")
+ ' - design of experiments (DoE) turned off - DoE evaluation time exceeds the model simulation time \n'
+ )
file.write('\n')
file.write('* Convergence\n')
- file.write(' - exit code : "{}"\n'.format(self.exit_code))
+ file.write(f' - exit code : "{self.exit_code}"\n')
file.write(' simulation terminated as ')
if self.exit_code == 'count':
- file.write('number of counts reached the maximum (max={})"\n'.format(self.thr_count))
+ file.write(
+ f'number of counts reached the maximum (max={self.thr_count})"\n'
+ )
elif self.exit_code == 'accuracy':
- file.write('minimum accuracy level (NRMSE={:.2f}) is achieved"\n'.format(self.thr_NRMSE))
+ file.write(
+ f'minimum accuracy level (NRMSE={self.thr_NRMSE:.2f}) is achieved"\n'
+ )
elif self.exit_code == 'time':
- file.write('maximum running time (t={:.1f}s) reached"\n'.format(self.thr_t))
+ file.write(f'maximum running time (t={self.thr_t:.1f}s) reached"\n')
else:
file.write('cannot identify the exit code\n')
- file.write(' - number of simulations (count) : {}\n'.format(self.n_samp))
+ file.write(f' - number of simulations (count) : {self.n_samp}\n')
file.write(
- ' - maximum normalized root-mean-squared error (NRMSE): {:.5f}\n'.format(np.max(self.NRMSE_val)))
+ f' - maximum normalized root-mean-squared error (NRMSE): {np.max(self.NRMSE_val):.5f}\n'
+ )
for ny in range(self.y_dim):
- file.write(' {} : {:.2f}\n'.format(self.g_name[ny], self.NRMSE_val[ny]))
- file.write(' - analysis time : {:.1f} sec\n'.format(self.sim_time))
- file.write(' - calibration interval : {}\n'.format(self.cal_interval))
+ file.write(f' {self.g_name[ny]} : {self.NRMSE_val[ny]:.2f}\n')
+ file.write(f' - analysis time : {self.sim_time:.1f} sec\n')
+ file.write(f' - calibration interval : {self.cal_interval}\n')
file.write('\n')
- file.write('* GP parameters\n'.format(self.y_dim))
- file.write(' - Kernel : {}\n'.format(self.kernel))
- file.write(' - Linear : {}\n\n'.format(self.do_linear))
+ file.write('* GP parameters\n'.format())
+ file.write(f' - Kernel : {self.kernel}\n')
+ file.write(f' - Linear : {self.do_linear}\n\n')
if not self.do_mf:
for ny in range(self.y_dim):
- file.write(' [{}]\n'.format(self.g_name[ny]))
+ file.write(f' [{self.g_name[ny]}]\n')
m_tmp = self.m_list[ny]
for parname in m_tmp.parameter_names():
- file.write(' - {} '.format(parname))
- parvals = eval('m_tmp.' + parname)
+ file.write(f' - {parname} ')
+ parvals = eval('m_tmp.' + parname) # noqa: S307
if len(parvals) == self.x_dim:
file.write('\n')
for nx in range(self.x_dim):
- file.write(' {} : {:.2e}\n'.format(self.rv_name[nx], parvals[nx]))
+ file.write(
+ f' {self.rv_name[nx]} : {parvals[nx]:.2e}\n'
+ )
else:
- file.write(' : {:.2e}\n'.format(parvals[0]))
- file.write('\n'.format(self.g_name[ny]))
+ file.write(f' : {parvals[0]:.2e}\n')
+ file.write('\n'.format())
file.close()
- print("Results Saved")
+ print('Results Saved') # noqa: T201
return 0
- def weights_node2(self, node, nodes, ls):
+ def weights_node2(self, node, nodes, ls): # noqa: D102, PLR6301
nodes = np.asarray(nodes)
deltas = nodes - node
@@ -1878,239 +2146,267 @@ def weights_node2(self, node, nodes, ls):
dist_ls = np.sqrt(np.sum(pow(deltas_norm, 2), axis=1))
- weig = np.exp(-pow(dist_ls,2))
- if (sum(weig)==0):
+ weig = np.exp(-pow(dist_ls, 2))
+ if sum(weig) == 0:
weig = np.ones(nodes.shape[0])
- return weig/sum(weig)
+ return weig / sum(weig)
-def run_FEM(X, id_sim, rv_name, work_dir, workflowDriver):
- X = np.atleast_2d(X)
+def run_FEM(X, id_sim, rv_name, work_dir, workflowDriver): # noqa: N802, N803, D103
+ X = np.atleast_2d(X) # noqa: N806
x_dim = X.shape[1]
if X.shape[0] > 1:
- errlog = errorLog(work_dir)
+ errlog = errorLog(work_dir) # noqa: F821
msg = 'do one simulation at a time'
errlog.exit(msg)
-
# (1) create "workdir.idx " folder :need C++17 to use the files system namespace
current_dir_i = work_dir + '/workdir.' + str(id_sim + 1)
- print(id_sim)
+ print(id_sim) # noqa: T201
try:
shutil.copytree(work_dir + '/templatedir', current_dir_i)
- except Exception as ex:
+ except Exception as ex: # noqa: BLE001
errlog = errorLog_in_pool(work_dir)
msg = 'Error running FEM: ' + str(ex)
errlog.exit(msg)
-
# (2) write param.in file
- outF = open(current_dir_i + '/params.in', 'w')
+ outF = open(current_dir_i + '/params.in', 'w') # noqa: N806, PLW1514, PTH123, SIM115
- outF.write('{}\n'.format(x_dim))
+ outF.write(f'{x_dim}\n')
for i in range(x_dim):
- outF.write('{} {}\n'.format(rv_name[i], X[0, i]))
+ outF.write(f'{rv_name[i]} {X[0, i]}\n')
outF.close()
# (3) run workflow_driver.bat
os.chdir(current_dir_i)
- workflow_run_command = '{}/{}'.format(current_dir_i, workflowDriver)
- subprocess.check_call(workflow_run_command, shell=True)
+ workflow_run_command = f'{current_dir_i}/{workflowDriver}'
+ subprocess.check_call(workflow_run_command, shell=True) # noqa: S602
# (4) reading results
- if glob.glob('results.out'):
+ if glob.glob('results.out'): # noqa: PTH207
g = np.loadtxt('results.out').flatten()
else:
errlog = errorLog_in_pool(work_dir)
msg = 'Error running FEM: results.out missing at ' + current_dir_i
errlog.exit(msg)
- if g.shape[0]==0:
+ if g.shape[0] == 0:
errlog = errorLog_in_pool(work_dir)
msg = 'Error running FEM: results.out is empty'
errlog.exit(msg)
- os.chdir("../")
+ os.chdir('../')
if np.isnan(np.sum(g)):
errlog = errorLog_in_pool(work_dir)
- msg = 'Error running FEM: Response value at workdir.{} is NaN'.format(id_sim+1)
+ msg = f'Error running FEM: Response value at workdir.{id_sim + 1} is NaN'
errlog.exit(msg)
return g, id_sim
-def run_FEM_batch(X,id_sim, rv_name, do_parallel, y_dim, os_type, run_type, pool, t_init, t_thr, workflowDriver):
- X = np.atleast_2d(X)
+def run_FEM_batch( # noqa: N802, D103
+ X, # noqa: N803
+ id_sim,
+ rv_name,
+ do_parallel,
+ y_dim,
+ os_type, # noqa: ARG001
+ run_type, # noqa: ARG001
+ pool,
+ t_init,
+ t_thr,
+ workflowDriver, # noqa: N803
+):
+ X = np.atleast_2d(X) # noqa: N806
# Windows
- #if os_type.lower().startswith('win'):
+ # if os_type.lower().startswith('win'):
# workflowDriver = "workflow_driver.bat"
- #else:
+ # else:
# workflowDriver = "workflow_driver"
nsamp = X.shape[0]
if not do_parallel:
- Y = np.zeros((nsamp,y_dim))
+ Y = np.zeros((nsamp, y_dim)) # noqa: N806
for ns in range(nsamp):
- Y[ns,:], id_sim_current = run_FEM(X[ns,:],id_sim+ns,rv_name, work_dir, workflowDriver)
+ Y[ns, :], id_sim_current = run_FEM(
+ X[ns, :], id_sim + ns, rv_name, work_dir, workflowDriver
+ )
if time.time() - t_init > t_thr:
- X = X[:ns, :]
- Y = Y[:ns, :]
- break
+ X = X[:ns, :] # noqa: N806
+ Y = Y[:ns, :] # noqa: N806
+ break
- return X, Y, id_sim_current+1
+ return X, Y, id_sim_current + 1
if do_parallel:
- print("Running {} simulations in parallel".format(nsamp))
+ print(f'Running {nsamp} simulations in parallel') # noqa: T201
tmp = time.time()
- iterables = ((X[i, :][np.newaxis], id_sim + i, rv_name, work_dir, workflowDriver) for i in range(nsamp))
+ iterables = (
+ (X[i, :][np.newaxis], id_sim + i, rv_name, work_dir, workflowDriver)
+ for i in range(nsamp)
+ )
try:
result_objs = list(pool.starmap(run_FEM, iterables))
- print("Simulation time = {} s".format(time.time() - tmp)); tmp = time.time();
+ print(f'Simulation time = {time.time() - tmp} s') # noqa: T201
+ tmp = time.time()
except KeyboardInterrupt:
- print("Ctrl+c received, terminating and joining pool.")
+ print('Ctrl+c received, terminating and joining pool.') # noqa: T201
try:
pool.shutdown()
- except Exception:
+ except Exception: # noqa: BLE001
sys.exit()
- tmp = time.time();
- print("=====================================")
- Nsim = len(list((result_objs)))
- Y = np.zeros((Nsim, y_dim))
+ tmp = time.time()
+ print('=====================================') # noqa: T201
+ Nsim = len(list(result_objs)) # noqa: N806
+ Y = np.zeros((Nsim, y_dim)) # noqa: N806
- for val, id in result_objs:
+ for val, id in result_objs: # noqa: A001
if np.isnan(np.sum(val)):
- Nsim = id - id_sim
- X = X[:Nsim, :]
- Y = Y[:Nsim, :]
+ Nsim = id - id_sim # noqa: N806
+ X = X[:Nsim, :] # noqa: N806
+ Y = Y[:Nsim, :] # noqa: N806
else:
Y[id - id_sim, :] = val
return X, Y, id_sim + Nsim
-def read_txt(text_dir, errlog):
- if not os.path.exists(text_dir):
- msg = "Error: file does not exist: " + text_dir
+
+def read_txt(text_dir, errlog): # noqa: D103
+ if not os.path.exists(text_dir): # noqa: PTH110
+ msg = 'Error: file does not exist: ' + text_dir
errlog.exit(msg)
- with open(text_dir) as f:
+ with open(text_dir) as f: # noqa: PLW1514, PTH123
# Iterate through the file until the table starts
header_count = 0
for line in f:
if line.startswith('%'):
- header_count = header_count + 1
- print(line)
+ header_count = header_count + 1 # noqa: PLR6104
+ print(line) # noqa: T201
-
# X = np.loadtxt(f, skiprows=header_count, delimiter=',')
try:
- with open(text_dir) as f:
- X = np.loadtxt(f, skiprows=header_count)
+ with open(text_dir) as f: # noqa: PLW1514, PLW2901, PTH123
+ X = np.loadtxt(f, skiprows=header_count) # noqa: N806
except ValueError:
- with open(text_dir) as f:
+ with open(text_dir) as f: # noqa: PLW1514, PLW2901, PTH123
try:
- X = np.genfromtxt(f, skip_header=header_count, delimiter=',')
+ X = np.genfromtxt(f, skip_header=header_count, delimiter=',') # noqa: N806
# if there are extra delimiter, remove nan
if np.isnan(X[-1, -1]):
- X = np.delete(X, -1, 1)
+ X = np.delete(X, -1, 1) # noqa: N806
# X = np.loadtxt(f, skiprows=header_count, delimiter=',')
except ValueError:
- msg = "Error: file format is not supported " + text_dir
+ msg = 'Error: file format is not supported ' + text_dir
errlog.exit(msg)
if X.ndim == 1:
- X = np.array([X]).transpose()
+ X = np.array([X]).transpose() # noqa: N806
return X
-def closest_node(node, nodes, ll):
+def closest_node(node, nodes, ll): # noqa: D103
nodes = np.asarray(nodes)
deltas = nodes - node
deltas_norm = np.zeros(deltas.shape)
for nx in range(nodes.shape[1]):
- deltas_norm[:, nx] = deltas[:, nx] / ll[nx]
+ deltas_norm[:, nx] = deltas[:, nx] / ll[nx]
- dist_2 = np.einsum('ij,ij->i', deltas_norm, deltas_norm) # square sum
+ dist_2 = np.einsum('ij,ij->i', deltas_norm, deltas_norm) # square sum
return np.argmin(dist_2)
-def imse(m_tmp, xcandi, xq, phiqr, i):
- X = m_tmp.X
- Y = m_tmp.Y
- X_tmp = np.vstack([X, xcandi])
- Y_tmp = np.zeros((Y.shape[0] + 1, Y.shape[1])) # any variables
+
+def imse(m_tmp, xcandi, xq, phiqr, i): # noqa: D103
+ X = m_tmp.X # noqa: N806
+ Y = m_tmp.Y # noqa: N806
+ X_tmp = np.vstack([X, xcandi]) # noqa: N806
+ Y_tmp = np.zeros((Y.shape[0] + 1, Y.shape[1])) # any variables # noqa: N806
m_tmp.set_XY(X=X_tmp, Y=Y_tmp)
- dummy, Yq_var = m_tmp.predict(xq)
- IMSEc1 = 1 / xq.shape[0] * sum(phiqr.flatten() *Yq_var.flatten())
-
+ dummy, Yq_var = m_tmp.predict(xq) # noqa: F841, N806
+ IMSEc1 = 1 / xq.shape[0] * sum(phiqr.flatten() * Yq_var.flatten()) # noqa: N806
+
return IMSEc1, i
# ==========================================================================================
-class errorLog_in_pool(object):
+class errorLog_in_pool: # noqa: D101
def __init__(self, work_dir):
- self.file = open('{}/dakota.err'.format(work_dir), "w")
+ self.file = open(f'{work_dir}/dakota.err', 'w') # noqa: PLW1514, PTH123, SIM115
- def write(self, msg):
- print(msg)
+ def write(self, msg): # noqa: D102
+ print(msg) # noqa: T201
self.file.write(msg)
self.file.close()
- raise WorkerStopException()
- #exit(-1)
+ raise WorkerStopException() # noqa: RSE102, F821
+ # exit(-1)
- def terminate(self):
+ def terminate(self): # noqa: D102
self.file.close()
-def build_surrogate(work_dir, inputFile, workflowDriver, os_type, run_type):
+def build_surrogate(work_dir, inputFile, workflowDriver, os_type, run_type): # noqa: N803, D103
# t_total = time.process_time()
filename = 'SimGpModel'
- print('FILE: ' + work_dir + '/templatedir/' + inputFile)
- f = open(work_dir + '/templatedir/' + inputFile)
+ print('FILE: ' + work_dir + '/templatedir/' + inputFile) # noqa: T201
+ f = open(work_dir + '/templatedir/' + inputFile) # noqa: PLW1514, PTH123, SIM115
try:
inp = json.load(f)
except ValueError:
msg = 'invalid json format - ' + inputFile
- errlog.exit(msg)
+ errlog.exit(msg) # noqa: F821
f.close()
if inp['UQ_Method']['uqType'] != 'Train GP Surrogate Model':
- msg = 'UQ type inconsistency : user wanted <' + inp['UQ_Method'][
- 'uqType'] + '> but called program'
- errlog.exit(msg)
-
-
- gp = GpFromModel(work_dir, inputFile, workflowDriver, run_type, os_type, inp, errlog)
+ msg = (
+ 'UQ type inconsistency : user wanted <'
+ + inp['UQ_Method']['uqType']
+ + '> but called program'
+ )
+ errlog.exit(msg) # noqa: F821
+
+ gp = GpFromModel(
+ work_dir,
+ inputFile,
+ workflowDriver,
+ run_type,
+ os_type,
+ inp,
+ errlog, # noqa: F821
+ )
gp.save_model(filename)
gp.term()
+
# the actual execution
# ==========================================================================================
# the actual execution
-if __name__ == "__main__":
- inputArgs = sys.argv
+if __name__ == '__main__':
+ inputArgs = sys.argv # noqa: N816
work_dir = inputArgs[1].replace(os.sep, '/')
- #errlog = errorLog(work_dir)
+ # errlog = errorLog(work_dir)
- inputFile = inputArgs[2]
- workflowDriver = inputArgs[3]
+ inputFile = inputArgs[2] # noqa: N816
+ workflowDriver = inputArgs[3] # noqa: N816
os_type = inputArgs[4]
run_type = inputArgs[5]
- result_file = "results.out"
-
- #sys.exit(build_surrogate(work_dir, os_type, run_type))
+ result_file = 'results.out'
+
+ # sys.exit(build_surrogate(work_dir, os_type, run_type))
build_surrogate(work_dir, inputFile, workflowDriver, os_type, run_type)
diff --git a/modules/performUQ/SimCenterUQ/notBeingUsed/surrogateBuild_old.py b/modules/performUQ/SimCenterUQ/notBeingUsed/surrogateBuild_old.py
index 6e8edf9c8..d2a355a2c 100644
--- a/modules/performUQ/SimCenterUQ/notBeingUsed/surrogateBuild_old.py
+++ b/modules/performUQ/SimCenterUQ/notBeingUsed/surrogateBuild_old.py
@@ -1,31 +1,38 @@
-import time
-import shutil
+import glob # noqa: CPY001, D100, INP001
+import json
+import math
import os
+import pickle # noqa: S403
+import random
+import shutil
+import subprocess # noqa: S404
import sys
-import subprocess
-import math
-import pickle
-import glob
-import json
-from scipy.stats import lognorm, norm
-import numpy as np
-import GPy as GPy
-
-from copy import deepcopy
-from pyDOE import lhs
+import time
import warnings
-import random
-
-from multiprocessing import Pool
+from copy import deepcopy
import emukit.multi_fidelity as emf
+import GPy as GPy # noqa: PLC0414
+import numpy as np
from emukit.model_wrappers.gpy_model_wrappers import GPyMultiOutputWrapper
-from emukit.multi_fidelity.convert_lists_to_array import convert_x_list_to_array, convert_xy_lists_to_arrays
-
-class GpFromModel(object):
+from emukit.multi_fidelity.convert_lists_to_array import (
+ convert_x_list_to_array,
+)
+from pyDOE import lhs
+from scipy.stats import lognorm, norm
- def __init__(self, work_dir, inputFile, workflowDriver, run_type, os_type, inp, errlog):
+class GpFromModel: # noqa: D101
+ def __init__( # noqa: C901, PLR0912, PLR0914, PLR0915
+ self,
+ work_dir,
+ inputFile, # noqa: N803
+ workflowDriver, # noqa: N803
+ run_type,
+ os_type,
+ inp,
+ errlog,
+ ):
t_init = time.time()
self.errlog = errlog
self.work_dir = work_dir
@@ -38,12 +45,12 @@ def __init__(self, work_dir, inputFile, workflowDriver, run_type, os_type, inp,
# From external READ JSON FILE
#
- rv_name = list()
- self.g_name = list()
+ rv_name = list() # noqa: C408
+ self.g_name = list() # noqa: C408
x_dim = 0
y_dim = 0
for rv in inp['randomVariables']:
- rv_name = rv_name + [rv['name']]
+ rv_name = rv_name + [rv['name']] # noqa: PLR6104, RUF005
x_dim += 1
if x_dim == 0:
@@ -51,12 +58,12 @@ def __init__(self, work_dir, inputFile, workflowDriver, run_type, os_type, inp,
errlog.exit(msg)
for g in inp['EDP']:
- if g['length']==1: # scalar
- self.g_name = self.g_name + [g['name']]
+ if g['length'] == 1: # scalar
+ self.g_name = self.g_name + [g['name']] # noqa: PLR6104, RUF005
y_dim += 1
- else: # vector
+ else: # vector
for nl in range(g['length']):
- self.g_name = self.g_name + ["{}_{}".format(g['name'],nl+1)]
+ self.g_name = self.g_name + ['{}_{}'.format(g['name'], nl + 1)] # noqa: PLR6104, RUF005
y_dim += 1
if y_dim == 0:
@@ -72,87 +79,92 @@ def __init__(self, work_dir, inputFile, workflowDriver, run_type, os_type, inp,
self.do_predictive = False
automate_doe = False
- surrogateInfo = inp["UQ_Method"]["surrogateMethodInfo"]
+ surrogateInfo = inp['UQ_Method']['surrogateMethodInfo'] # noqa: N806
try:
- self.do_parallel = surrogateInfo["parallelExecution"]
- except:
+ self.do_parallel = surrogateInfo['parallelExecution']
+ except: # noqa: E722
self.do_parallel = True
if self.do_parallel:
if self.run_type.lower() == 'runninglocal':
self.n_processor = os.cpu_count()
- from multiprocessing import Pool
+ from multiprocessing import Pool # noqa: PLC0415
+
self.pool = Pool(self.n_processor)
else:
# Always
- from mpi4py import MPI
- from mpi4py.futures import MPIPoolExecutor
+ from mpi4py import MPI # noqa: PLC0415
+ from mpi4py.futures import MPIPoolExecutor # noqa: PLC0415
+
self.world = MPI.COMM_WORLD
self.pool = MPIPoolExecutor()
self.n_processor = self.world.Get_size()
- #self.n_processor =20
- print("nprocessor :")
- print(self.n_processor)
- #self.cal_interval = 5
+ # self.n_processor =20
+ print('nprocessor :') # noqa: T201
+ print(self.n_processor) # noqa: T201
+ # self.cal_interval = 5
self.cal_interval = self.n_processor
else:
self.pool = 0
self.cal_interval = 5
- if surrogateInfo["method"] == "Sampling and Simulation":
+ if surrogateInfo['method'] == 'Sampling and Simulation':
self.do_mf = False
do_sampling = True
do_simulation = True
- self.use_existing = surrogateInfo["existingDoE"]
+ self.use_existing = surrogateInfo['existingDoE']
if self.use_existing:
- self.inpData = os.path.join(work_dir, "templatedir/inpFile.in")
- self.outData = os.path.join(work_dir, "templatedir/outFile.in")
+ self.inpData = os.path.join(work_dir, 'templatedir/inpFile.in') # noqa: PTH118
+ self.outData = os.path.join(work_dir, 'templatedir/outFile.in') # noqa: PTH118
thr_count = surrogateInfo['samples'] # number of samples
-
- if surrogateInfo["advancedOpt"]:
- self.doe_method = surrogateInfo["DoEmethod"]
- if surrogateInfo["DoEmethod"] == "None":
+ if surrogateInfo['advancedOpt']:
+ self.doe_method = surrogateInfo['DoEmethod']
+ if surrogateInfo['DoEmethod'] == 'None':
do_doe = False
user_init = thr_count
else:
do_doe = True
- user_init = surrogateInfo["initialDoE"]
+ user_init = surrogateInfo['initialDoE']
else:
- self.doe_method = "pareto" #default
+ self.doe_method = 'pareto' # default
do_doe = True
user_init = -100
- elif surrogateInfo["method"] == "Import Data File":
+ elif surrogateInfo['method'] == 'Import Data File':
self.do_mf = False
do_sampling = False
- do_simulation = not surrogateInfo["outputData"]
- self.doe_method = "None" # default
+ do_simulation = not surrogateInfo['outputData']
+ self.doe_method = 'None' # default
do_doe = False
# self.inpData = surrogateInfo['inpFile']
- self.inpData = os.path.join(work_dir, "templatedir/inpFile.in")
+ self.inpData = os.path.join(work_dir, 'templatedir/inpFile.in') # noqa: PTH118
if not do_simulation:
# self.outData = surrogateInfo['outFile']
- self.outData = os.path.join(work_dir, "templatedir/outFile.in")
+ self.outData = os.path.join(work_dir, 'templatedir/outFile.in') # noqa: PTH118
- elif surrogateInfo["method"] == "Import Multi-fidelity Data File":
+ elif surrogateInfo['method'] == 'Import Multi-fidelity Data File':
self.do_mf = True
- self.doe_method = "None" # default
+ self.doe_method = 'None' # default
self.hf_is_model = surrogateInfo['HFfromModel']
self.lf_is_model = surrogateInfo['LFfromModel']
- if self. hf_is_model:
- self.use_existing_hf = surrogateInfo["existingDoE_HF"]
- self.samples_hf = surrogateInfo["samples_HF"]
+ if self.hf_is_model:
+ self.use_existing_hf = surrogateInfo['existingDoE_HF']
+ self.samples_hf = surrogateInfo['samples_HF']
if self.use_existing_hf:
- self.inpData = os.path.join(work_dir, "templatedir/inpFile_HF.in")
- self.outData = os.path.join(work_dir, "templatedir/outFile_HF.in")
+ self.inpData = os.path.join( # noqa: PTH118
+ work_dir, 'templatedir/inpFile_HF.in'
+ )
+ self.outData = os.path.join( # noqa: PTH118
+ work_dir, 'templatedir/outFile_HF.in'
+ )
else:
- self.inpData_hf = os.path.join(work_dir, "templatedir/inpFile_HF.in")
- self.outData_hf = os.path.join(work_dir, "templatedir/outFile_HF.in")
+ self.inpData_hf = os.path.join(work_dir, 'templatedir/inpFile_HF.in') # noqa: PTH118
+ self.outData_hf = os.path.join(work_dir, 'templatedir/outFile_HF.in') # noqa: PTH118
self.X_hf = read_txt(self.inpData_hf, errlog)
self.Y_hf = read_txt(self.outData_hf, errlog)
if self.X_hf.shape[0] != self.Y_hf.shape[0]:
@@ -160,14 +172,18 @@ def __init__(self, work_dir, inputFile, workflowDriver, run_type, os_type, inp,
errlog.exit(msg)
if self.lf_is_model:
- self.use_existing_lf = surrogateInfo["existingDoE_LF"]
- self.samples_lf = surrogateInfo["samples_LF"]
+ self.use_existing_lf = surrogateInfo['existingDoE_LF']
+ self.samples_lf = surrogateInfo['samples_LF']
if self.use_existing_lf:
- self.inpData = os.path.join(work_dir, "templatedir/inpFile_LF.in")
- self.outData = os.path.join(work_dir, "templatedir/outFile_LF.in")
+ self.inpData = os.path.join( # noqa: PTH118
+ work_dir, 'templatedir/inpFile_LF.in'
+ )
+ self.outData = os.path.join( # noqa: PTH118
+ work_dir, 'templatedir/outFile_LF.in'
+ )
else:
- self.inpData_lf = os.path.join(work_dir, "templatedir/inpFile_LF.in")
- self.outData_lf = os.path.join(work_dir, "templatedir/outFile_LF.in")
+ self.inpData_lf = os.path.join(work_dir, 'templatedir/inpFile_LF.in') # noqa: PTH118
+ self.outData_lf = os.path.join(work_dir, 'templatedir/outFile_LF.in') # noqa: PTH118
self.X_lf = read_txt(self.inpData_lf, errlog)
self.Y_lf = read_txt(self.outData_lf, errlog)
if self.X_lf.shape[0] != self.Y_lf.shape[0]:
@@ -175,10 +191,10 @@ def __init__(self, work_dir, inputFile, workflowDriver, run_type, os_type, inp,
errlog.exit(msg)
if (not self.hf_is_model) and self.lf_is_model:
- self.mf_case = "data-model"
+ self.mf_case = 'data-model'
do_sampling = True
do_simulation = True
- do_doe = surrogateInfo["doDoE"]
+ do_doe = surrogateInfo['doDoE']
self.use_existing = self.use_existing_lf
if self.lf_is_model:
if self.use_existing_lf:
@@ -194,10 +210,10 @@ def __init__(self, work_dir, inputFile, workflowDriver, run_type, os_type, inp,
thr_count = self.samples_lf # number of samples
elif self.hf_is_model and (not self.lf_is_model):
- self.mf_case = "model-data"
+ self.mf_case = 'model-data'
do_sampling = True
do_simulation = True
- do_doe = surrogateInfo["doDoE"]
+ do_doe = surrogateInfo['doDoE']
self.use_existing = self.use_existing_hf
if self.hf_is_model:
if self.use_existing_hf:
@@ -213,13 +229,13 @@ def __init__(self, work_dir, inputFile, workflowDriver, run_type, os_type, inp,
thr_count = self.samples_hf # number of samples
elif self.hf_is_model and self.lf_is_model:
- self.mf_case = "model-model"
+ self.mf_case = 'model-model'
do_sampling = True
do_simulation = True
- do_doe = surrogateInfo["doDoE"]
+ do_doe = surrogateInfo['doDoE']
elif (not self.hf_is_model) and (not self.lf_is_model):
- self.mf_case = "data-data"
+ self.mf_case = 'data-data'
do_sampling = False
do_simulation = False
do_doe = False
@@ -230,38 +246,43 @@ def __init__(self, work_dir, inputFile, workflowDriver, run_type, os_type, inp,
msg = 'Error reading json: either select "Import Data File" or "Sampling and Simulation"'
errlog.exit(msg)
- if surrogateInfo["advancedOpt"]:
- self.do_logtransform = surrogateInfo["logTransform"]
- kernel = surrogateInfo["kernel"]
- do_linear = surrogateInfo["linear"]
- nugget_opt = surrogateInfo["nuggetOpt"]
+ if surrogateInfo['advancedOpt']:
+ self.do_logtransform = surrogateInfo['logTransform']
+ kernel = surrogateInfo['kernel']
+ do_linear = surrogateInfo['linear']
+ nugget_opt = surrogateInfo['nuggetOpt']
try:
- self.nuggetVal = np.array(json.loads("[{}]".format(surrogateInfo["nuggetString"])))
+ self.nuggetVal = np.array(
+ json.loads('[{}]'.format(surrogateInfo['nuggetString']))
+ )
except json.decoder.JSONDecodeError:
msg = 'Error reading json: improper format of nugget values/bounds. Provide nugget values/bounds of each QoI with comma delimiter'
errlog.exit(msg)
- if self.nuggetVal.shape[0]!=self.y_dim and self.nuggetVal.shape[0]!=0 :
- msg = 'Error reading json: Number of nugget quantities ({}) does not match # QoIs ({})'.format(self.nuggetVal.shape[0],self.y_dim)
+ if (
+ self.nuggetVal.shape[0] != self.y_dim
+ and self.nuggetVal.shape[0] != 0
+ ):
+ msg = f'Error reading json: Number of nugget quantities ({self.nuggetVal.shape[0]}) does not match # QoIs ({self.y_dim})'
errlog.exit(msg)
- if nugget_opt == "Fixed Values":
- for Vals in self.nuggetVal:
- if (not np.isscalar(Vals)):
+ if nugget_opt == 'Fixed Values':
+ for Vals in self.nuggetVal: # noqa: N806
+ if not np.isscalar(Vals):
msg = 'Error reading json: provide nugget values of each QoI with comma delimiter'
errlog.exit(msg)
- elif nugget_opt == "Fixed Bounds":
- for Bous in self.nuggetVal:
- if (np.isscalar(Bous)):
+ elif nugget_opt == 'Fixed Bounds':
+ for Bous in self.nuggetVal: # noqa: N806
+ if np.isscalar(Bous):
msg = 'Error reading json: provide nugget bounds of each QoI in brackets with comma delimiter, e.g. [0.0,1.0],[0.0,2.0],...'
errlog.exit(msg)
- elif (isinstance(Bous,list)):
+ elif isinstance(Bous, list):
msg = 'Error reading json: provide both lower and upper bounds of nugget'
errlog.exit(msg)
- elif Bous.shape[0]!=2:
+ elif Bous.shape[0] != 2: # noqa: PLR2004
msg = 'Error reading json: provide nugget bounds of each QoI in brackets with comma delimiter, e.g. [0.0,1.0],[0.0,2.0],...'
errlog.exit(msg)
- elif Bous[0]>Bous[1]:
+ elif Bous[0] > Bous[1]:
msg = 'Error reading json: the lower bound of a nugget value should be smaller than its upper bound'
errlog.exit(msg)
@@ -277,10 +298,10 @@ def __init__(self, work_dir, inputFile, workflowDriver, run_type, os_type, inp,
self.do_logtransform = False
kernel = 'Matern 5/2'
do_linear = False
- #do_nugget = True
- nugget_opt = "optimize"
+ # do_nugget = True
+ nugget_opt = 'optimize'
- #if not self.do_mf:
+ # if not self.do_mf:
# if do_simulation:
# femInfo = inp["fem"]
# self.inpFile = femInfo["inputFile"]
@@ -292,17 +313,19 @@ def __init__(self, work_dir, inputFile, workflowDriver, run_type, os_type, inp,
#
if do_sampling:
- thr_NRMSE = surrogateInfo["accuracyLimit"]
- thr_t = surrogateInfo["timeLimit"] * 60
+ thr_NRMSE = surrogateInfo['accuracyLimit'] # noqa: N806
+ thr_t = surrogateInfo['timeLimit'] * 60
np.random.seed(surrogateInfo['seed'])
random.seed(surrogateInfo['seed'])
self.xrange = np.empty((0, 2), float)
for rv in inp['randomVariables']:
- if "lowerbound" not in rv:
+ if 'lowerbound' not in rv:
msg = 'Error in input RV: all RV should be set to Uniform distribution'
errlog.exit(msg)
- self.xrange = np.vstack((self.xrange, [rv['lowerbound'], rv['upperbound']]))
+ self.xrange = np.vstack(
+ (self.xrange, [rv['lowerbound'], rv['upperbound']])
+ )
self.len = np.abs(np.diff(self.xrange).T[0])
if sum(self.len == 0) > 0:
@@ -314,71 +337,89 @@ def __init__(self, work_dir, inputFile, workflowDriver, run_type, os_type, inp,
#
if self.use_existing:
- X_tmp = read_txt(self.inpData,errlog)
- Y_tmp = read_txt(self.outData,errlog)
+ X_tmp = read_txt(self.inpData, errlog) # noqa: N806
+ Y_tmp = read_txt(self.outData, errlog) # noqa: N806
n_ex = X_tmp.shape[0]
if self.do_mf:
if X_tmp.shape[1] != self.X_hf.shape[1]:
- msg = 'Error importing input data: dimension inconsistent: high fidelity data have {} RV column(s) but low fidelity model have {}.'.format(
- self.X_hf.shape[1], X_tmp.shape[1])
+                        msg = f'Error importing input data: dimension inconsistent: high fidelity data have {self.X_hf.shape[1]} RV column(s) but the low fidelity model has {X_tmp.shape[1]}.'
errlog.exit(msg)
if Y_tmp.shape[1] != self.Y_hf.shape[1]:
- msg = 'Error importing input data: dimension inconsistent: high fidelity data have {} QoI column(s) but low fidelity model have {}.'.format(
- self.Y_hf.shape[1], Y_tmp.shape[1])
+                        msg = f'Error importing input data: dimension inconsistent: high fidelity data have {self.Y_hf.shape[1]} QoI column(s) but the low fidelity model has {Y_tmp.shape[1]}.'
errlog.exit(msg)
if X_tmp.shape[1] != x_dim:
- msg = 'Error importing input data: dimension inconsistent: have {} RV(s) but have {} column(s).'.format(
- x_dim, X_tmp.shape[1])
+                    msg = f'Error importing input data: dimension inconsistent: expected {x_dim} RV(s) but found {X_tmp.shape[1]} column(s).'
errlog.exit(msg)
if Y_tmp.shape[1] != y_dim:
- msg = 'Error importing input data: dimension inconsistent: have {} QoI(s) but have {} column(s).'.format(
- y_dim, Y_tmp.shape[1])
+                    msg = f'Error importing input data: dimension inconsistent: expected {y_dim} QoI(s) but found {Y_tmp.shape[1]} column(s).'
errlog.exit(msg)
if n_ex != Y_tmp.shape[0]:
- msg = 'Error importing input data: numbers of samples of inputs ({}) and outputs ({}) are inconsistent'.format(n_ex, Y_tmp.shape[0])
+                    msg = f'Error importing input data: the number of input samples ({n_ex}) does not match the number of output samples ({Y_tmp.shape[0]})'
errlog.exit(msg)
else:
n_ex = 0
- if user_init ==0:
- #msg = 'Error reading json: # of initial DoE should be greater than 0'
- #errlog.exit(msg)
- user_init = -1;
- X_tmp = np.zeros((0, x_dim))
- Y_tmp = np.zeros((0, y_dim))
+ if user_init == 0:
+ # msg = 'Error reading json: # of initial DoE should be greater than 0'
+ # errlog.exit(msg)
+ user_init = -1
+ X_tmp = np.zeros((0, x_dim)) # noqa: N806
+ Y_tmp = np.zeros((0, y_dim)) # noqa: N806
if user_init < 0:
n_init_ref = min(4 * x_dim, thr_count + n_ex - 1, 500)
if self.do_parallel:
- n_init_ref = int(np.ceil(n_init_ref/self.n_processor)*self.n_processor) # Let's not waste resource
+ n_init_ref = int(
+ np.ceil(n_init_ref / self.n_processor) * self.n_processor
+                    )  # Let's not waste resources
if n_init_ref > n_ex:
n_init = n_init_ref - n_ex
else:
n_init = 0
-
+
else:
n_init = user_init
n_iter = thr_count - n_init
- def FEM_batch(Xs, id_sim):
- return run_FEM_batch(Xs, id_sim, self.rv_name, self.do_parallel, self.y_dim, self.os_type, self.run_type, self.pool, t_init, thr_t, self.workflowDriver)
+ def FEM_batch(Xs, id_sim): # noqa: N802, N803
+ return run_FEM_batch(
+ Xs,
+ id_sim,
+ self.rv_name,
+ self.do_parallel,
+ self.y_dim,
+ self.os_type,
+ self.run_type,
+ self.pool,
+ t_init,
+ thr_t,
+ self.workflowDriver,
+ )
# check validity of datafile
if n_ex > 0:
- #Y_test, self.id_sim = FEM_batch(X_tmp[0, :][np.newaxis], self.id_sim)
- # TODO : Fix this
- print(X_tmp[0, :][np.newaxis].shape)
- X_test, Y_test ,self.id_sim= FEM_batch(X_tmp[0, :][np.newaxis] ,self.id_sim)
- if np.sum(abs((Y_test - Y_tmp[0, :][np.newaxis]) / Y_test) > 0.01, axis=1) > 0:
+ # Y_test, self.id_sim = FEM_batch(X_tmp[0, :][np.newaxis], self.id_sim)
+ # TODO : Fix this # noqa: TD002
+ print(X_tmp[0, :][np.newaxis].shape) # noqa: T201
+ X_test, Y_test, self.id_sim = FEM_batch( # noqa: F841, N806
+ X_tmp[0, :][np.newaxis], self.id_sim
+ )
+ if (
+ np.sum(
+ abs((Y_test - Y_tmp[0, :][np.newaxis]) / Y_test) > 0.01, # noqa: PLR2004
+ axis=1,
+ )
+ > 0
+ ):
msg = 'Consistency check failed. Your data is not consistent to your model response.'
errlog.exit(msg)
- if n_init>0:
+ if n_init > 0:
n_init -= 1
else:
n_iter -= 1
@@ -387,41 +428,41 @@ def FEM_batch(Xs, id_sim):
# generate initial samples
#
- if n_init>0:
- U = lhs(x_dim, samples=(n_init))
- X = np.vstack([X_tmp, np.zeros((n_init, x_dim))])
+ if n_init > 0:
+ U = lhs(x_dim, samples=(n_init)) # noqa: N806
+ X = np.vstack([X_tmp, np.zeros((n_init, x_dim))]) # noqa: N806
for nx in range(x_dim):
- X[n_ex:n_ex+n_init, nx] = U[:, nx] * (self.xrange[nx, 1] - self.xrange[nx, 0]) + self.xrange[nx, 0]
+ X[n_ex : n_ex + n_init, nx] = (
+ U[:, nx] * (self.xrange[nx, 1] - self.xrange[nx, 0])
+ + self.xrange[nx, 0]
+ )
else:
- X = X_tmp
+ X = X_tmp # noqa: N806
- if sum(abs(self.len / self.xrange[:, 0]) < 1.e-7) > 1:
+ if sum(abs(self.len / self.xrange[:, 0]) < 1.0e-7) > 1: # noqa: PLR2004
msg = 'Error : upperbound and lowerbound should not be the same'
errlog.exit(msg)
n_iter = thr_count - n_init
else:
-
n_ex = 0
- thr_NRMSE = 0.02 # default
+ thr_NRMSE = 0.02 # default # noqa: N806
thr_t = float('inf')
#
# Read sample locations from directory
#
- X = read_txt(self.inpData,errlog)
+ X = read_txt(self.inpData, errlog) # noqa: N806
if self.do_mf:
if X.shape[1] != self.X_hf.shape[1]:
- msg = 'Error importing input data: dimension inconsistent: high fidelity data have {} RV column(s) but low fidelity model have {}.'.format(
- self.X_hf.shape[1], X.shape[1])
+                    msg = f'Error importing input data: dimension inconsistent: high fidelity data have {self.X_hf.shape[1]} RV column(s) but the low fidelity model has {X.shape[1]}.'
errlog.exit(msg)
if X.shape[1] != x_dim:
- msg = 'Error importing input data: Number of dimension inconsistent: have {} RV(s) but {} column(s).' \
- .format(x_dim, X.shape[1])
+                msg = f'Error importing input data: number of dimensions inconsistent: expected {x_dim} RV(s) but found {X.shape[1]} column(s).'
errlog.exit(msg)
self.xrange = np.vstack([np.min(X, axis=0), np.max(X, axis=0)]).T
@@ -431,14 +472,12 @@ def FEM_batch(Xs, id_sim):
n_init = thr_count
n_iter = 0
-
# give error
- if thr_count <= 2:
+ if thr_count <= 2: # noqa: PLR2004
msg = 'Number of samples should be greater than 2.'
errlog.exit(msg)
-
if do_doe:
ac = 1 # pre-screening time = time*ac
ar = 1 # cluster
@@ -450,8 +489,8 @@ def FEM_batch(Xs, id_sim):
else:
ac = 1 # pre-screening time = time*ac
ar = 1 # cluster
- n_candi = 1 # candidate points
- n_integ = 1 # integration points
+ n_candi = 1 # candidate points
+ n_integ = 1 # integration points
user_init = thr_count
#
@@ -462,51 +501,55 @@ def FEM_batch(Xs, id_sim):
#
# SimCenter workflow setting
#
- if os.path.exists('{}/workdir.1'.format(work_dir)):
+ if os.path.exists(f'{work_dir}/workdir.1'): # noqa: PTH110
is_left = True
idx = 0
def change_permissions_recursive(path, mode):
- for root, dirs, files in os.walk(path, topdown=False):
- for dir in [os.path.join(root, d) for d in dirs]:
- os.chmod(dir, mode)
- for file in [os.path.join(root, f) for f in files]:
- os.chmod(file, mode)
+ for root, dirs, files in os.walk(path, topdown=False): # noqa: B007
+ for dir in [os.path.join(root, d) for d in dirs]: # noqa: A001, PTH118
+ os.chmod(dir, mode) # noqa: PTH101
+ for file in [os.path.join(root, f) for f in files]: # noqa: PTH118
+ os.chmod(file, mode) # noqa: PTH101
while is_left:
- idx = idx + 1
+ idx = idx + 1 # noqa: PLR6104
try:
- if os.path.exists('{}/workdir.{}/{}'.format(work_dir, idx, workflowDriver)):
- #os.chmod('{}/workdir.{}'.format(work_dir, idx), 777)
- change_permissions_recursive('{}/workdir.{}'.format(work_dir, idx), 0o777)
- my_dir = '{}/workdir.{}'.format(work_dir, idx)
- os.chmod(my_dir, 0o777)
+ if os.path.exists( # noqa: PTH110
+ f'{work_dir}/workdir.{idx}/{workflowDriver}'
+ ):
+ # os.chmod('{}/workdir.{}'.format(work_dir, idx), 777)
+ change_permissions_recursive(
+ f'{work_dir}/workdir.{idx}', 0o777
+ )
+ my_dir = f'{work_dir}/workdir.{idx}'
+ os.chmod(my_dir, 0o777) # noqa: S103, PTH101
shutil.rmtree(my_dir)
- #shutil.rmtree('{}/workdir.{}'.format(work_dir, idx), ignore_errors=False, onerror=handleRemoveReadonly)
+ # shutil.rmtree('{}/workdir.{}'.format(work_dir, idx), ignore_errors=False, onerror=handleRemoveReadonly)
- except Exception as ex:
- print(ex)
+ except Exception as ex: # noqa: BLE001
+ print(ex) # noqa: T201
is_left = True
break
- print("Cleaned the working directory")
+ print('Cleaned the working directory') # noqa: T201
else:
- print("Work directory is clean")
+ print('Work directory is clean') # noqa: T201
- if os.path.exists('{}/dakotaTab.out'.format(work_dir)):
- os.remove('{}/dakotaTab.out'.format(work_dir))
+ if os.path.exists(f'{work_dir}/dakotaTab.out'): # noqa: PTH110
+ os.remove(f'{work_dir}/dakotaTab.out') # noqa: PTH107
- if os.path.exists('{}/inputTab.out'.format(work_dir)):
- os.remove('{}/inputTab.out'.format(work_dir))
+ if os.path.exists(f'{work_dir}/inputTab.out'): # noqa: PTH110
+ os.remove(f'{work_dir}/inputTab.out') # noqa: PTH107
- if os.path.exists('{}/outputTab.out'.format(work_dir)):
- os.remove('{}/outputTab.out'.format(work_dir))
+ if os.path.exists(f'{work_dir}/outputTab.out'): # noqa: PTH110
+ os.remove(f'{work_dir}/outputTab.out') # noqa: PTH107
- if os.path.exists('{}/SimGpModel.pkl'.format(work_dir)):
- os.remove('{}/SimGpModel.pkl'.format(work_dir))
+ if os.path.exists(f'{work_dir}/SimGpModel.pkl'): # noqa: PTH110
+ os.remove(f'{work_dir}/SimGpModel.pkl') # noqa: PTH107
- if os.path.exists('{}/verif.out'.format(work_dir)):
- os.remove('{}/verif.out'.format(work_dir))
+ if os.path.exists(f'{work_dir}/verif.out'): # noqa: PTH110
+ os.remove(f'{work_dir}/verif.out') # noqa: PTH107
# func = self.__run_FEM(X,self.id_sim, self.rv_name)
@@ -515,58 +558,58 @@ def change_permissions_recursive(path, mode):
#
t_tmp = time.time()
- X_fem, Y_fem ,self.id_sim= FEM_batch(X[n_ex:, :],self.id_sim)
- Y = np.vstack((Y_tmp,Y_fem))
- X = np.vstack((X[0:n_ex, :],X_fem))
+ X_fem, Y_fem, self.id_sim = FEM_batch(X[n_ex:, :], self.id_sim) # noqa: N806
+ Y = np.vstack((Y_tmp, Y_fem)) # noqa: N806
+ X = np.vstack((X[0:n_ex, :], X_fem)) # noqa: N806
t_sim_all = time.time() - t_tmp
if automate_doe:
self.t_sim_each = t_sim_all / n_init
else:
- self.t_sim_each = float("inf")
+ self.t_sim_each = float('inf')
#
# Generate predictive samples
#
if self.do_predictive:
n_pred = 100
- Xt = np.zeros((n_pred, x_dim))
- U = lhs(x_dim, samples=n_pred)
+ Xt = np.zeros((n_pred, x_dim)) # noqa: N806
+ U = lhs(x_dim, samples=n_pred) # noqa: N806
for nx in range(x_dim):
- Xt[:, nx] = U[:, nx] * (self.xrange[nx, 1] - self.xrange[nx, 0]) + self.xrange[nx, 0]
+ Xt[:, nx] = (
+ U[:, nx] * (self.xrange[nx, 1] - self.xrange[nx, 0])
+ + self.xrange[nx, 0]
+ )
#
# Yt = np.zeros((n_pred, y_dim))
# for ns in range(n_pred):
# Yt[ns, :],self.id_sim = run_FEM(Xt[ns, :][np.newaxis],self.id_sim, self.rv_name)
- Yt = np.zeros((n_pred, y_dim))
- Xt, Yt ,self.id_sim= FEM_batch(Xt,self.id_sim)
+ Yt = np.zeros((n_pred, y_dim)) # noqa: N806
+ Xt, Yt, self.id_sim = FEM_batch(Xt, self.id_sim) # noqa: N806
else:
-
#
# READ SAMPLES FROM DIRECTORY
#
- Y = read_txt(self.outData,errlog)
+ Y = read_txt(self.outData, errlog) # noqa: N806
if self.do_mf:
if Y.shape[1] != self.Y_hf.shape[1]:
- msg = 'Error importing input data: dimension inconsistent: high fidelity data have {} QoI column(s) but low fidelity model have {}.'.format(
- self.Y_hf.shape[1], Y.shape[1])
+                    msg = f'Error importing input data: dimensions are inconsistent: high-fidelity data have {self.Y_hf.shape[1]} QoI column(s) but the low-fidelity model has {Y.shape[1]}.'
errlog.exit(msg)
if Y.shape[1] != y_dim:
- msg = 'Error importing input data: Number of dimension inconsistent: have {} QoI(s) but {} column(s).' \
- .format(y_dim, Y.shape[1])
+                msg = f'Error importing input data: number of dimensions is inconsistent: expected {y_dim} QoI(s) but found {Y.shape[1]} column(s).'
errlog.exit(msg)
if X.shape[0] != Y.shape[0]:
- msg = 'Error importing input data: numbers of samples of inputs ({}) and outputs ({}) are inconsistent'.format(X.shape[0], Y.shape[0])
+            msg = f'Error importing input data: the numbers of input samples ({X.shape[0]}) and output samples ({Y.shape[0]}) are inconsistent.'
errlog.exit(msg)
- thr_count = 0
- self.t_sim_each = float("inf")
+ thr_count = 0
+ self.t_sim_each = float('inf')
#
# GP function
#
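The next hunk builds one GPy regression model per output column, each with an ARD Matern 5/2 kernel (plus an optional linear kernel) and output normalization. A minimal standalone sketch of that pattern, assuming GPy is installed; the training data below are made up for illustration:

import numpy as np
import GPy

X = np.random.uniform(0.0, 1.0, (20, 2))           # placeholder inputs: 20 samples, 2 RVs
Y = np.sin(X[:, [0]]) + 0.1 * X[:, [1]]             # placeholder single-output response

kern = GPy.kern.Matern52(input_dim=2, ARD=True)     # one lengthscale per input dimension
m = GPy.models.GPRegression(X, Y, kernel=kern, normalizer=True)
m.optimize()                                        # maximize the log marginal likelihood
y_mean, y_var = m.predict(np.array([[0.5, 0.5]]))   # predictive mean and variance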
@@ -581,39 +624,61 @@ def change_permissions_recursive(path, mode):
kr = GPy.kern.Matern52(input_dim=x_dim, ARD=True)
if do_linear:
- kr = kr + GPy.kern.Linear(input_dim=x_dim, ARD=True)
+ kr = kr + GPy.kern.Linear(input_dim=x_dim, ARD=True) # noqa: PLR6104
if not self.do_mf:
kg = kr
- self.m_list = list()
+ self.m_list = list() # noqa: C408
for i in range(y_dim):
- self.m_list = self.m_list + [GPy.models.GPRegression(X, Y[:, i][np.newaxis].transpose(), kernel=kg.copy(),normalizer=True)]
+ self.m_list = self.m_list + [ # noqa: PLR6104, RUF005
+ GPy.models.GPRegression(
+ X,
+ Y[:, i][np.newaxis].transpose(),
+ kernel=kg.copy(),
+ normalizer=True,
+ )
+ ]
for parname in self.m_list[i].parameter_names():
if parname.endswith('lengthscale'):
- exec('self.m_list[i].' + parname + '=self.len')
+ exec('self.m_list[i].' + parname + '=self.len') # noqa: S102
else:
kgs = emf.kernels.LinearMultiFidelityKernel([kr.copy(), kr.copy()])
if not self.hf_is_model:
- if not X.shape[1]==self.X_hf.shape[1]:
- msg = 'Error importing input data: dimension of low ({}) and high ({}) fidelity models (datasets) are inconsistent'.format(X.shape[1], self.X_hf.shape[1])
+ if X.shape[1] != self.X_hf.shape[1]:
+                    msg = f'Error importing input data: dimensions of the low ({X.shape[1]}) and high ({self.X_hf.shape[1]}) fidelity models (datasets) are inconsistent'
errlog.exit(msg)
if not self.lf_is_model:
- if not X.shape[1]==self.X_lf.shape[1]:
- msg = 'Error importing input data: dimension of low ({}) and high ({}) fidelity models (datasets) are inconsistent'.format(X.shape[1], self.X_hf.shape[1])
+ if X.shape[1] != self.X_lf.shape[1]:
+                    msg = f'Error importing input data: dimensions of the low ({self.X_lf.shape[1]}) and high ({X.shape[1]}) fidelity models (datasets) are inconsistent'
errlog.exit(msg)
- if self.mf_case == 'data-model' or self.mf_case=='data-data':
- X_list, Y_list = emf.convert_lists_to_array.convert_xy_lists_to_arrays([X, self.X_hf], [Y, self.Y_hf])
+ if self.mf_case == 'data-model' or self.mf_case == 'data-data': # noqa: PLR1714
+ X_list, Y_list = ( # noqa: N806
+ emf.convert_lists_to_array.convert_xy_lists_to_arrays(
+ [X, self.X_hf], [Y, self.Y_hf]
+ )
+ )
elif self.mf_case == 'model-data':
- X_list, Y_list = emf.convert_lists_to_array.convert_xy_lists_to_arrays([self.X_lf, X], [self.Y_lf, Y])
-
- self.m_list = list()
- for i in range(y_dim):
- self.m_list = self.m_list + [GPyMultiOutputWrapper(emf.models.GPyLinearMultiFidelityModel(X_list, Y_list, kernel=kgs.copy(), n_fidelities=2), 2, n_optimization_restarts=15)]
-
+ X_list, Y_list = ( # noqa: N806
+ emf.convert_lists_to_array.convert_xy_lists_to_arrays(
+ [self.X_lf, X], [self.Y_lf, Y]
+ )
+ )
+
+ self.m_list = list() # noqa: C408
+ for i in range(y_dim): # noqa: B007
+ self.m_list = self.m_list + [ # noqa: PLR6104, RUF005
+ GPyMultiOutputWrapper(
+ emf.models.GPyLinearMultiFidelityModel(
+ X_list, Y_list, kernel=kgs.copy(), n_fidelities=2
+ ),
+ 2,
+ n_optimization_restarts=15,
+ )
+ ]
#
# Verification measures
@@ -621,7 +686,7 @@ def change_permissions_recursive(path, mode):
self.NRMSE_hist = np.zeros((1, y_dim), float)
self.NRMSE_idx = np.zeros((1, 1), int)
- #leng_hist = np.zeros((1, self.m_list[0]._param_array_.shape[0]), int)
+ # leng_hist = np.zeros((1, self.m_list[0]._param_array_.shape[0]), int)
if self.do_predictive:
self.NRMSE_pred_hist = np.empty((1, y_dim), float)
@@ -631,61 +696,72 @@ def change_permissions_recursive(path, mode):
break_doe = False
- print("======== RUNNING GP DoE ===========")
+ print('======== RUNNING GP DoE ===========') # noqa: T201
exit_code = 'count' # num iter
i = 0
- x_new = np.zeros((0,x_dim))
+ x_new = np.zeros((0, x_dim))
n_new = 0
- doe_off = False # false if true
+        doe_off = False  # DoE continues while this flag is False
while not doe_off:
-
- t = time.time()
- if self.doe_method == "random":
- do_cal = True
- elif self.doe_method == "pareto":
- do_cal = True
- elif np.mod(i, self.cal_interval) == 0:
+ t = time.time() # noqa: F841
+ if (
+ self.doe_method == 'random' # noqa: PLR1714
+ or self.doe_method == 'pareto'
+ or np.mod(i, self.cal_interval) == 0
+ ):
do_cal = True
else:
do_cal = False
t_tmp = time.time()
- [x_new, self.m_list, err, idx, Y_cv, Y_cv_var] = self.__design_of_experiments(X, Y, ac, ar, n_candi,
- n_integ, self.m_list,
- do_cal, nugget_opt, do_doe)
+ [x_new, self.m_list, err, idx, Y_cv, Y_cv_var] = ( # noqa: F841, N806
+ self.__design_of_experiments(
+ X,
+ Y,
+ ac,
+ ar,
+ n_candi,
+ n_integ,
+ self.m_list,
+ do_cal,
+ nugget_opt,
+ do_doe,
+ )
+ )
t_doe = time.time() - t_tmp
- print('DoE Time: {:.2f} s'.format(t_doe))
+ print(f'DoE Time: {t_doe:.2f} s') # noqa: T201
if automate_doe:
if t_doe > self.t_sim_each:
break_doe = True
- print('========>> DOE OFF')
+ print('========>> DOE OFF') # noqa: T201
n_left = n_iter - i
break
if not self.do_mf:
- NRMSE_val = self.__normalized_mean_sq_error(Y_cv, Y)
- else:
- if self.mf_case == 'data-model' or self.mf_case == 'data-data':
- NRMSE_val = self.__normalized_mean_sq_error(Y_cv, self.Y_hf)
- elif self.mf_case == 'model-data' :
- NRMSE_val = self.__normalized_mean_sq_error(Y_cv, Y)
+ NRMSE_val = self.__normalized_mean_sq_error(Y_cv, Y) # noqa: N806
+ elif self.mf_case == 'data-model' or self.mf_case == 'data-data': # noqa: PLR1714
+ NRMSE_val = self.__normalized_mean_sq_error(Y_cv, self.Y_hf) # noqa: N806
+ elif self.mf_case == 'model-data':
+ NRMSE_val = self.__normalized_mean_sq_error(Y_cv, Y) # noqa: N806
self.NRMSE_hist = np.vstack((self.NRMSE_hist, np.array(NRMSE_val)))
self.NRMSE_idx = np.vstack((self.NRMSE_idx, i))
if self.do_predictive:
- Yt_pred = np.zeros((n_pred, y_dim))
+ Yt_pred = np.zeros((n_pred, y_dim)) # noqa: N806
for ny in range(y_dim):
- y_pred_tmp, dummy = self.__predict(self.m_list[ny],Xt)
+ y_pred_tmp, dummy = self.__predict(self.m_list[ny], Xt) # noqa: F841
Yt_pred[:, ny] = y_pred_tmp.transpose()
if self.do_logtransform:
- Yt_pred = np.exp(Yt_pred)
- NRMSE_pred_val = self.__normalized_mean_sq_error(Yt_pred, Yt)
- self.NRMSE_pred_hist = np.vstack((self.NRMSE_pred_hist, np.array(NRMSE_pred_val)))
+ Yt_pred = np.exp(Yt_pred) # noqa: N806
+ NRMSE_pred_val = self.__normalized_mean_sq_error(Yt_pred, Yt) # noqa: N806
+ self.NRMSE_pred_hist = np.vstack(
+ (self.NRMSE_pred_hist, np.array(NRMSE_pred_val))
+ )
if self.id_sim >= thr_count:
n_iter = i
@@ -715,7 +791,7 @@ def change_permissions_recursive(path, mode):
break
n_new = x_new.shape[0]
- if not (n_new + self.id_sim < n_init + n_iter +1):
+ if not (n_new + self.id_sim < n_init + n_iter + 1):
n_new = n_init + n_iter - self.id_sim
x_new = x_new[0:n_new, :]
@@ -724,25 +800,27 @@ def change_permissions_recursive(path, mode):
# y_new = np.zeros((n_new, y_dim))
# for ny in range(n_new):
# y_new[ny, :],self.id_sim = run_FEM(x_new[ny, :][np.newaxis],self.id_sim, self.rv_name)
- x_new, y_new, self.id_sim = FEM_batch(x_new,self.id_sim)
-
- #print(">> {:.2f} s".format(time.time() - t_init))
- X = np.vstack([X, x_new])
- Y = np.vstack([Y, y_new])
+ x_new, y_new, self.id_sim = FEM_batch(x_new, self.id_sim)
+ # print(">> {:.2f} s".format(time.time() - t_init))
+ X = np.vstack([X, x_new]) # noqa: N806
+ Y = np.vstack([Y, y_new]) # noqa: N806
- print("======== RUNNING GP Calibration ===========")
+ print('======== RUNNING GP Calibration ===========') # noqa: T201
# not used
if break_doe:
- X_tmp = np.zeros((n_left, x_dim))
- Y_tmp = np.zeros((n_left, y_dim))
- U = lhs(x_dim, samples=n_left)
+ X_tmp = np.zeros((n_left, x_dim)) # noqa: N806
+ Y_tmp = np.zeros((n_left, y_dim)) # noqa: N806
+ U = lhs(x_dim, samples=n_left) # noqa: N806
for nx in range(x_dim):
# X[:,nx] = np.random.uniform(xrange[nx,0], xrange[nx,1], (1, n_init))
- X_tmp[:, nx] = U[:, nx] * (self.xrange[nx, 1] - self.xrange[nx, 0]) + self.xrange[nx, 0]
+ X_tmp[:, nx] = (
+ U[:, nx] * (self.xrange[nx, 1] - self.xrange[nx, 0])
+ + self.xrange[nx, 0]
+ )
- X_tmp, Y_tmp, self.id_sim = FEM_batch(X_tmp,self.id_sim)
+ X_tmp, Y_tmp, self.id_sim = FEM_batch(X_tmp, self.id_sim) # noqa: N806
# for ns in np.arange(n_left):
# Y_tmp[ns, :],self.id_sim = run_FEM(X_tmp[ns, :][np.newaxis],self.id_sim, self.rv_name)
@@ -752,8 +830,8 @@ def change_permissions_recursive(path, mode):
# Y_tmp = Y_tmp[:ns, :]
# break
- X = np.vstack((X, X_tmp))
- Y = np.vstack((Y, Y_tmp))
+ X = np.vstack((X, X_tmp)) # noqa: N806
+ Y = np.vstack((Y, Y_tmp)) # noqa: N806
do_doe = False
# if not do_doe:
@@ -768,8 +846,6 @@ def change_permissions_recursive(path, mode):
# else:
# NRMSE_val = self.__normalized_mean_sq_error(Y_cv, self.Y_hf)
-
-
sim_time = time.time() - t_init
n_samp = Y.shape[0]
@@ -810,18 +886,21 @@ def change_permissions_recursive(path, mode):
# plt.show()
# plt.plot(Y_cv[:,1], Y[:,1], 'x')
# plt.show()
- print('my exit code = {}'.format(exit_code))
- print('1. count = {}'.format(self.id_sim))
- print('2. max(NRMSE) = {}'.format(np.max(NRMSE_val)))
- print('3. time = {:.2f} s'.format(sim_time))
+ print(f'my exit code = {exit_code}') # noqa: T201
+ print(f'1. count = {self.id_sim}') # noqa: T201
+ print(f'2. max(NRMSE) = {np.max(NRMSE_val)}') # noqa: T201
+ print(f'3. time = {sim_time:.2f} s') # noqa: T201
# for user information
if do_simulation:
n_err = 1000
- Xerr = np.zeros((n_err, x_dim))
- U = lhs(x_dim, samples=n_err)
+ Xerr = np.zeros((n_err, x_dim)) # noqa: N806
+ U = lhs(x_dim, samples=n_err) # noqa: N806
for nx in range(x_dim):
- Xerr[:, nx] = U[:, nx] * (self.xrange[nx, 1] - self.xrange[nx, 0]) + self.xrange[nx, 0]
+ Xerr[:, nx] = (
+ U[:, nx] * (self.xrange[nx, 1] - self.xrange[nx, 0])
+ + self.xrange[nx, 0]
+ )
y_pred_var = np.zeros((n_err, y_dim))
y_data_var = np.zeros((n_err, y_dim))
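The sampling block above follows the same Latin-hypercube pattern used throughout this file: draw a unit-hypercube design and stretch each column onto its [lower, upper] bound. A self-contained sketch, assuming the lhs function comes from pyDOE as the module's calls suggest:

import numpy as np
from pyDOE import lhs  # assumed source of lhs(); the module calls lhs(x_dim, samples=n)

x_dim, n_samp = 3, 100
xrange = np.array([[0.0, 1.0], [10.0, 20.0], [-1.0, 1.0]])  # placeholder [lower, upper] bounds

U = lhs(x_dim, samples=n_samp)                        # unit-hypercube design, shape (n_samp, x_dim)
X = U * (xrange[:, 1] - xrange[:, 0]) + xrange[:, 0]  # broadcast each column onto its bounds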
@@ -830,50 +909,59 @@ def change_permissions_recursive(path, mode):
# m_tmp = self.m_list[ny].copy()
m_tmp = self.m_list[ny]
if self.do_logtransform:
- #y_var_val = np.var(np.log(Y[:, ny]))
+ # y_var_val = np.var(np.log(Y[:, ny]))
log_mean = np.mean(np.log(Y[:, ny]))
log_var = np.var(np.log(Y[:, ny]))
- y_var_val = np.exp(2*log_mean+log_var)*(np.exp(log_var)-1) # in linear space
+ y_var_val = np.exp(2 * log_mean + log_var) * (
+ np.exp(log_var) - 1
+ ) # in linear space
else:
y_var_val = np.var(Y[:, ny])
for ns in range(n_err):
- y_pred_tmp, y_pred_var_tmp = self.__predict(m_tmp,Xerr[ns, :][np.newaxis])
+ y_pred_tmp, y_pred_var_tmp = self.__predict(
+ m_tmp, Xerr[ns, :][np.newaxis]
+ )
if self.do_logtransform:
- y_pred_var[ns, ny] = np.exp(2 * y_pred_tmp + y_pred_var_tmp) * (np.exp(y_pred_var_tmp) - 1)
+ y_pred_var[ns, ny] = np.exp(
+ 2 * y_pred_tmp + y_pred_var_tmp
+ ) * (np.exp(y_pred_var_tmp) - 1)
else:
y_pred_var[ns, ny] = y_pred_var_tmp
y_data_var[ns, ny] = y_var_val
- #for parname in m_tmp.parameter_names():
+ # for parname in m_tmp.parameter_names():
# if ('Mat52' in parname) and parname.endswith('variance'):
# exec('y_pred_prior_var[ns,ny]=m_tmp.' + parname)
- #error_ratio1_Pr = (y_pred_var / y_pred_prior_var)
- error_ratio2_Pr = (y_pred_var / y_data_var)
- #np.max(error_ratio1_Pr, axis=0)
+ # error_ratio1_Pr = (y_pred_var / y_pred_prior_var)
+ error_ratio2_Pr = y_pred_var / y_data_var # noqa: N806
+ # np.max(error_ratio1_Pr, axis=0)
np.max(error_ratio2_Pr, axis=0)
- self.perc_thr = np.hstack([np.array([1]), np.arange(10, 1000, 50), np.array([999])])
+ self.perc_thr = np.hstack(
+ [np.array([1]), np.arange(10, 1000, 50), np.array([999])]
+ )
error_sorted = np.sort(np.max(error_ratio2_Pr, axis=1), axis=0)
self.perc_val = error_sorted[self.perc_thr] # criteria
self.perc_thr = 1 - (self.perc_thr) * 0.001 # ratio=simulation/sampling
corr_val = np.zeros((y_dim,))
- R2_val = np.zeros((y_dim,))
+ R2_val = np.zeros((y_dim,)) # noqa: N806
for ny in range(y_dim):
if not self.do_mf:
- Y_ex = Y[:, ny]
- else:
- if self.mf_case == 'data-model' or self.mf_case == 'data-data':
- Y_ex = self.Y_hf[:, ny]
- elif self.mf_case == 'model-data':
- Y_ex = Y[:, ny]
+ Y_ex = Y[:, ny] # noqa: N806
+ elif self.mf_case == 'data-model' or self.mf_case == 'data-data': # noqa: PLR1714
+ Y_ex = self.Y_hf[:, ny] # noqa: N806
+ elif self.mf_case == 'model-data':
+ Y_ex = Y[:, ny] # noqa: N806
corr_val[ny] = np.corrcoef(Y_ex, Y_cv[:, ny])[0, 1]
- R2_val[ny] = 1 - np.sum(pow(Y_cv[:, ny] - Y_ex, 2)) / np.sum(pow(Y_cv[:, ny] - np.mean(Y_cv[:, ny]), 2))
- if np.var(Y_ex)==0:
+ R2_val[ny] = 1 - np.sum(pow(Y_cv[:, ny] - Y_ex, 2)) / np.sum(
+ pow(Y_cv[:, ny] - np.mean(Y_cv[:, ny]), 2)
+ )
+ if np.var(Y_ex) == 0:
corr_val[ny] = 1
R2_val[ny] = 0
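The log-transform branch above maps the moments of log(Y) back to linear space with Var[Y] = exp(2*mu + sigma^2) * (exp(sigma^2) - 1), which holds when log(Y) is normal with mean mu and variance sigma^2. A quick numerical check of that identity with arbitrary illustrative values:

import numpy as np

rng = np.random.default_rng(0)
mu, sig2 = 1.2, 0.3                                   # illustrative mean/variance of log(Y)
y = np.exp(rng.normal(mu, np.sqrt(sig2), 1_000_000))  # lognormal samples

var_closed_form = np.exp(2 * mu + sig2) * (np.exp(sig2) - 1)
print(var_closed_form, y.var())                       # the two values should agree closely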
@@ -908,24 +996,24 @@ def change_permissions_recursive(path, mode):
self.rvDist = []
self.rvVal = []
for nx in range(x_dim):
- rvInfo = inp["randomVariables"][nx]
- self.rvName = self.rvName + [rvInfo["name"]]
- self.rvDist = self.rvDist + [rvInfo["distribution"]]
+ rvInfo = inp['randomVariables'][nx] # noqa: N806
+ self.rvName = self.rvName + [rvInfo['name']] # noqa: PLR6104, RUF005
+ self.rvDist = self.rvDist + [rvInfo['distribution']] # noqa: PLR6104, RUF005
if do_sampling:
- self.rvVal = self.rvVal + [(rvInfo["upperbound"] + rvInfo["lowerbound"]) / 2]
+ self.rvVal = self.rvVal + [ # noqa: PLR6104, RUF005
+ (rvInfo['upperbound'] + rvInfo['lowerbound']) / 2
+ ]
else:
- self.rvVal = self.rvVal + [np.mean(X[:, nx])]
-
- def __parameter_calibration(self, m_tmp_list, x_dim, nugget_opt):
+ self.rvVal = self.rvVal + [np.mean(X[:, nx])] # noqa: PLR6104, RUF005
- warnings.filterwarnings("ignore")
+ def __parameter_calibration(self, m_tmp_list, x_dim, nugget_opt): # noqa: ARG002, C901
+ warnings.filterwarnings('ignore')
t_opt = time.time()
- m_list = list()
-
- for ny in range(self.y_dim):
+ m_list = list() # noqa: C408
- print("y dimension {}:".format(ny))
+ for ny in range(self.y_dim): # noqa: PLR1702
+ print(f'y dimension {ny}:') # noqa: T201
nopt = 10
#
@@ -935,7 +1023,7 @@ def __parameter_calibration(self, m_tmp_list, x_dim, nugget_opt):
if not self.do_mf:
if np.var(m_tmp_list[ny].Y) == 0:
- nugget_opt_tmp = "Zero"
+ nugget_opt_tmp = 'Zero'
for parname in m_tmp_list[ny].parameter_names():
if parname.endswith('variance'):
m_tmp_list[ny][parname].constrain_fixed(0)
@@ -943,13 +1031,17 @@ def __parameter_calibration(self, m_tmp_list, x_dim, nugget_opt):
m_init = m_tmp_list[ny]
m_tmp = m_init
- if nugget_opt_tmp == "Optimize":
+ if nugget_opt_tmp == 'Optimize':
m_tmp['Gaussian_noise.variance'].unfix()
- elif nugget_opt_tmp == "Fixed Values":
- m_tmp['Gaussian_noise.variance'].constrain_fixed(self.nuggetVal[ny])
- elif nugget_opt_tmp == "Fixed Bounds":
- m_tmp['Gaussian_noise.variance'].constrain_bounded(self.nuggetVal[ny][0], self.nuggetVal[ny][1])
- elif nugget_opt_tmp == "Zero":
+ elif nugget_opt_tmp == 'Fixed Values':
+ m_tmp['Gaussian_noise.variance'].constrain_fixed(
+ self.nuggetVal[ny]
+ )
+ elif nugget_opt_tmp == 'Fixed Bounds':
+ m_tmp['Gaussian_noise.variance'].constrain_bounded(
+ self.nuggetVal[ny][0], self.nuggetVal[ny][1]
+ )
+ elif nugget_opt_tmp == 'Zero':
m_tmp['Gaussian_noise.variance'].constrain_fixed(0)
m_tmp.optimize(clear_after_finish=True)
@@ -960,8 +1052,8 @@ def __parameter_calibration(self, m_tmp_list, x_dim, nugget_opt):
m = m_tmp.copy()
id_opt = 1
- print('{} among {} Log-Likelihood: {}'.format(1, nopt, m_tmp.log_likelihood()))
- #print(' Calibration time for each: {:.2f} s'.format(time.time() - t_unfix))
+ print(f'{1} among {nopt} Log-Likelihood: {m_tmp.log_likelihood()}') # noqa: T201
+ # print(' Calibration time for each: {:.2f} s'.format(time.time() - t_unfix))
if time.time() - t_unfix > self.t_sim_each:
nopt = 1
@@ -972,15 +1064,19 @@ def __parameter_calibration(self, m_tmp_list, x_dim, nugget_opt):
for parname in m_tmp.parameter_names():
if parname.endswith('lengthscale'):
- exec('m_tmp.' + parname + '=self.len')
+ exec('m_tmp.' + parname + '=self.len') # noqa: S102
- if nugget_opt_tmp == "Optimize":
+ if nugget_opt_tmp == 'Optimize':
m_tmp['Gaussian_noise.variance'].unfix()
- elif nugget_opt_tmp == "Fixed Values":
- m_tmp['Gaussian_noise.variance'].constrain_fixed(self.nuggetVal[ny])
- elif nugget_opt_tmp == "Fixed Bounds":
- m_tmp['Gaussian_noise.variance'].constrain_bounded(self.nuggetVal[ny][0], self.nuggetVal[ny][1])
- elif nugget_opt_tmp == "Zero":
+ elif nugget_opt_tmp == 'Fixed Values':
+ m_tmp['Gaussian_noise.variance'].constrain_fixed(
+ self.nuggetVal[ny]
+ )
+ elif nugget_opt_tmp == 'Fixed Bounds':
+ m_tmp['Gaussian_noise.variance'].constrain_bounded(
+ self.nuggetVal[ny][0], self.nuggetVal[ny][1]
+ )
+ elif nugget_opt_tmp == 'Zero':
m_tmp['Gaussian_noise.variance'].constrain_fixed(0)
m_tmp.optimize(clear_after_finish=True)
@@ -992,8 +1088,8 @@ def __parameter_calibration(self, m_tmp_list, x_dim, nugget_opt):
m = m_tmp.copy()
id_opt = 1
- print('{} among {} Log-Likelihood: {}'.format(2, nopt, m_tmp.log_likelihood()))
- #print(' Calibration time for each: {:.2f} s'.format(time.time() - t_unfix))
+ print(f'{2} among {nopt} Log-Likelihood: {m_tmp.log_likelihood()}') # noqa: T201
+ # print(' Calibration time for each: {:.2f} s'.format(time.time() - t_unfix))
if time.time() - t_unfix > self.t_sim_each:
nopt = 1
@@ -1004,29 +1100,45 @@ def __parameter_calibration(self, m_tmp_list, x_dim, nugget_opt):
for parname in m_tmp.parameter_names():
if parname.endswith('lengthscale'):
if math.isnan(m.log_likelihood()):
- exec('m_tmp.' + parname + '=np.random.exponential(1, (1, x_dim)) * m_init.' + parname)
+ exec( # noqa: S102
+ 'm_tmp.'
+ + parname
+ + '=np.random.exponential(1, (1, x_dim)) * m_init.'
+ + parname
+ )
else:
- exec('m_tmp.' + parname + '=np.random.exponential(1, (1, x_dim)) * m.' + parname)
-
- if nugget_opt_tmp == "Optimize":
+ exec( # noqa: S102
+ 'm_tmp.'
+ + parname
+ + '=np.random.exponential(1, (1, x_dim)) * m.'
+ + parname
+ )
+
+ if nugget_opt_tmp == 'Optimize':
m_tmp['Gaussian_noise.variance'].unfix()
- elif nugget_opt_tmp == "Fixed Values":
- m_tmp['Gaussian_noise.variance'].constrain_fixed(self.nuggetVal[ny])
- elif nugget_opt_tmp == "Fixed Bounds":
- m_tmp['Gaussian_noise.variance'].constrain_bounded(self.nuggetVal[ny][0], self.nuggetVal[ny][1])
- elif nugget_opt_tmp == "Zero":
+ elif nugget_opt_tmp == 'Fixed Values':
+ m_tmp['Gaussian_noise.variance'].constrain_fixed(
+ self.nuggetVal[ny]
+ )
+ elif nugget_opt_tmp == 'Fixed Bounds':
+ m_tmp['Gaussian_noise.variance'].constrain_bounded(
+ self.nuggetVal[ny][0], self.nuggetVal[ny][1]
+ )
+ elif nugget_opt_tmp == 'Zero':
m_tmp['Gaussian_noise.variance'].constrain_fixed(0)
- t_fix = time.time()
+ t_fix = time.time() # noqa: F841
try:
m_tmp.optimize()
# m_tmp.optimize_restarts(5)
- except Exception as ex:
- print("OS error: {0}".format(ex))
+ except Exception as ex: # noqa: BLE001
+ print(f'OS error: {ex}') # noqa: T201
- print('{} among {} Log-Likelihood: {}'.format(no + 3, nopt, m_tmp.log_likelihood()))
- #print(' Calibration time for each: {:.2f} s'.format(time.time() - t_fix))
+ print( # noqa: T201
+ f'{no + 3} among {nopt} Log-Likelihood: {m_tmp.log_likelihood()}'
+ )
+ # print(' Calibration time for each: {:.2f} s'.format(time.time() - t_fix))
if m_tmp.log_likelihood() > max_log_likli:
max_log_likli = m_tmp.log_likelihood()
@@ -1038,31 +1150,49 @@ def __parameter_calibration(self, m_tmp_list, x_dim, nugget_opt):
break
if math.isinf(-max_log_likli) or math.isnan(-max_log_likli):
- #msg = "Error GP optimization failed, perhaps QoI values are zero."
+ # msg = "Error GP optimization failed, perhaps QoI values are zero."
if np.var(m_tmp.Y) != 0:
- msg = "Error GP optimization failed for QoI #{}".format(ny+1)
+ msg = f'Error GP optimization failed for QoI #{ny + 1}'
self.errlog.exit(msg)
- m_list = m_list + [m]
- print(m)
+ m_list = m_list + [m] # noqa: PLR6104, RUF005
+ print(m) # noqa: T201
else:
-
-
- if nugget_opt_tmp == "Optimize":
+ if nugget_opt_tmp == 'Optimize':
m_tmp_list[ny].gpy_model.mixed_noise.Gaussian_noise.unfix()
m_tmp_list[ny].gpy_model.mixed_noise.Gaussian_noise_1.unfix()
- elif nugget_opt_tmp == "Fixed Values":
- m_tmp_list[ny].gpy_model.mixed_noise.Gaussian_noise.constrain_fixed(self.nuggetVal[ny])
- m_tmp_list[ny].gpy_model.mixed_noise.Gaussian_noise_1.constrain_fixed(self.nuggetVal[ny])
-
- elif nugget_opt_tmp == "Fixed Bounds":
- m_tmp_list[ny].gpy_model.mixed_noise.Gaussian_noise.constrain_bounded(self.nuggetVal[ny][0], self.nuggetVal[ny][1])
- m_tmp_list[ny].gpy_model.mixed_noise.Gaussian_noise_1.constrain_bounded(self.nuggetVal[ny][0], self.nuggetVal[ny][1])
-
- elif nugget_opt_tmp == "Zero":
- m_tmp_list[ny].gpy_model.mixed_noise.Gaussian_noise.constrain_fixed(0)
- m_tmp_list[ny].gpy_model.mixed_noise.Gaussian_noise_1.constrain_fixed(0)
+ elif nugget_opt_tmp == 'Fixed Values':
+ m_tmp_list[
+ ny
+ ].gpy_model.mixed_noise.Gaussian_noise.constrain_fixed(
+ self.nuggetVal[ny]
+ )
+ m_tmp_list[
+ ny
+ ].gpy_model.mixed_noise.Gaussian_noise_1.constrain_fixed(
+ self.nuggetVal[ny]
+ )
+
+ elif nugget_opt_tmp == 'Fixed Bounds':
+ m_tmp_list[
+ ny
+ ].gpy_model.mixed_noise.Gaussian_noise.constrain_bounded(
+ self.nuggetVal[ny][0], self.nuggetVal[ny][1]
+ )
+ m_tmp_list[
+ ny
+ ].gpy_model.mixed_noise.Gaussian_noise_1.constrain_bounded(
+ self.nuggetVal[ny][0], self.nuggetVal[ny][1]
+ )
+
+ elif nugget_opt_tmp == 'Zero':
+ m_tmp_list[
+ ny
+ ].gpy_model.mixed_noise.Gaussian_noise.constrain_fixed(0)
+ m_tmp_list[
+ ny
+ ].gpy_model.mixed_noise.Gaussian_noise_1.constrain_fixed(0)
#
# if not do_nugget:
# m_tmp_list[ny].gpy_model.mixed_noise.Gaussian_noise.fix(0)
@@ -1073,24 +1203,33 @@ def __parameter_calibration(self, m_tmp_list, x_dim, nugget_opt):
id_opt = 0
self.calib_time = (time.time() - t_opt) * round(10 / nopt)
- print(' Calibration time: {:.2f} s, id_opt={}'.format(self.calib_time, id_opt))
+ print(f' Calibration time: {self.calib_time:.2f} s, id_opt={id_opt}') # noqa: T201
return m_tmp_list
- def __design_of_experiments(self, X, Y, ac, ar, n_candi, n_integ, pre_m_list, do_cal, nugget_opt, do_doe):
-
+ def __design_of_experiments( # noqa: C901, PLR0914, PLR0915
+ self,
+ X, # noqa: N803
+ Y, # noqa: N803
+ ac,
+ ar, # noqa: ARG002
+ n_candi,
+ n_integ,
+ pre_m_list,
+ do_cal,
+ nugget_opt,
+ do_doe,
+ ):
# do log transform
if self.do_logtransform:
-
- if np.min(Y)<0:
+ if np.min(Y) < 0:
msg = 'Error running SimCenterUQ. Response contains negative values. Please uncheck the log-transform option in the UQ tab'
errlog.exit(msg)
- Y = np.log(Y)
+ Y = np.log(Y) # noqa: N806
if self.do_mf:
-
- if self.mf_case == 'data-model' or self.mf_case == 'data-data':
- if np.min(self.Y_hf)<0:
+ if self.mf_case == 'data-model' or self.mf_case == 'data-data': # noqa: PLR1714
+ if np.min(self.Y_hf) < 0:
msg = 'Error running SimCenterUQ. Response contains negative values. Please uncheck the log-transform option in the UQ tab'
errlog.exit(msg)
self.Y_hf = np.log(self.Y_hf)
@@ -1111,15 +1250,28 @@ def __design_of_experiments(self, X, Y, ac, ar, n_candi, n_integ, pre_m_list, do
if not self.do_mf:
m_tmp_list[i].set_XY(X, Y[:, i][np.newaxis].transpose())
else:
-
- if self.mf_case == 'data-model' or self.mf_case == 'data-data':
- X_list_tmp, Y_list_tmp = emf.convert_lists_to_array.convert_xy_lists_to_arrays([X, self.X_hf],
- [Y[:, i][np.newaxis].transpose(), self.Y_hf[:, i][np.newaxis].transpose()])
+ if self.mf_case == 'data-model' or self.mf_case == 'data-data': # noqa: PLR1714
+ X_list_tmp, Y_list_tmp = ( # noqa: N806
+ emf.convert_lists_to_array.convert_xy_lists_to_arrays(
+ [X, self.X_hf],
+ [
+ Y[:, i][np.newaxis].transpose(),
+ self.Y_hf[:, i][np.newaxis].transpose(),
+ ],
+ )
+ )
elif self.mf_case == 'model-data':
- X_list_tmp, Y_list_tmp = emf.convert_lists_to_array.convert_xy_lists_to_arrays([self.X_lf, X],
- [self.Y_lf[:, i][np.newaxis].transpose(),Y[:, i][np.newaxis].transpose()])
-
- m_tmp_list[i].set_data(X=X_list_tmp,Y=Y_list_tmp)
+ X_list_tmp, Y_list_tmp = ( # noqa: N806
+ emf.convert_lists_to_array.convert_xy_lists_to_arrays(
+ [self.X_lf, X],
+ [
+ self.Y_lf[:, i][np.newaxis].transpose(),
+ Y[:, i][np.newaxis].transpose(),
+ ],
+ )
+ )
+
+ m_tmp_list[i].set_data(X=X_list_tmp, Y=Y_list_tmp)
if do_cal:
m_list = self.__parameter_calibration(m_tmp_list, x_dim, nugget_opt)
@@ -1129,23 +1281,21 @@ def __design_of_experiments(self, X, Y, ac, ar, n_candi, n_integ, pre_m_list, do
#
# cross validation errors
#
- Y_pred, Y_pred_var, e2 = self.__get_cross_validation(X,Y,m_list)
-
+ Y_pred, Y_pred_var, e2 = self.__get_cross_validation(X, Y, m_list) # noqa: N806
if self.do_logtransform:
-
mu = Y_pred
sig2 = Y_pred_var
median = np.exp(mu)
- mean = np.exp(mu + sig2/2)
- var = np.exp(2*mu + sig2)*(np.exp(sig2)-1)
+ mean = np.exp(mu + sig2 / 2) # noqa: F841
+ var = np.exp(2 * mu + sig2) * (np.exp(sig2) - 1)
- Y_pred = median
- Y_pred_var = var
+ Y_pred = median # noqa: N806
+ Y_pred_var = var # noqa: N806
if self.do_mf:
- if self.mf_case == 'data-model' or self.mf_case == 'data-data':
+ if self.mf_case == 'data-model' or self.mf_case == 'data-data': # noqa: PLR1714
self.Y_hf = np.exp(self.Y_hf)
elif self.mf_case == 'model-data':
self.Y_lf = np.exp(self.Y_lf)
@@ -1168,91 +1318,101 @@ def __design_of_experiments(self, X, Y, ac, ar, n_candi, n_integ, pre_m_list, do
nc1 = round(n_candi)
self.doe_method = self.doe_method.lower()
- if self.doe_method == "pareto":
-
+ if self.doe_method == 'pareto':
#
# Initial candidates
#
xc1 = np.zeros((nc1, x_dim))
for nx in range(x_dim):
- xc1[:, nx] = np.random.uniform(self.xrange[nx, 0], self.xrange[nx, 1], (1, nc1)) # LHS
+ xc1[:, nx] = np.random.uniform(
+ self.xrange[nx, 0], self.xrange[nx, 1], (1, nc1)
+ ) # LHS
nq = round(n_integ)
xq = np.zeros((nq, x_dim))
for nx in range(x_dim):
- xq[:, nx] = np.random.uniform(self.xrange[nx, 0], self.xrange[nx, 1], (1, nq))
+ xq[:, nx] = np.random.uniform(
+ self.xrange[nx, 0], self.xrange[nx, 1], (1, nq)
+ )
#
# Lets Do Pareto
#
yc1_pred, yc1_var = self.__predict(m_idx, xc1) # use only variance
- score1 = np.zeros(yc1_pred.shape)
+ score1 = np.zeros(yc1_pred.shape) # noqa: F841
cri1 = np.zeros(yc1_pred.shape)
cri2 = np.zeros(yc1_pred.shape)
- # TODO: is this the best?
+ # TODO: is this the best? # noqa: TD002
ll = self.xrange[:, 1] - self.xrange[:, 0]
for i in range(nc1):
if not self.do_mf:
wei = self.weights_node2(xc1[i, :], X, ll)
- #phi = e2[closest_node(xc1[i, :], X, ll)]
- #phi = e2[self.__closest_node(xc1[i, :], X)]
- else:
- if self.mf_case == 'data-model' or self.mf_case == 'data-data':
- wei = self.weights_node2(xc1[i, :], self.X_hf, ll)
- #phi = e2[closest_node(xc1[i, :], self.X_hf, ll)]
- #phi = e2[self.__closest_node(xc1[i, :], self.X_hf)]
- elif self.mf_case == 'model-data':
- wei = self.weights_node2(xc1[i, :], X, ll)
- #phi = e2[closest_node(xc1[i, :], X, ll)]
- #phi = e2[self.__closest_node(xc1[i, :], X)]
-
- #cri1[i] = yc1_var[i]
+ # phi = e2[closest_node(xc1[i, :], X, ll)]
+ # phi = e2[self.__closest_node(xc1[i, :], X)]
+ elif self.mf_case == 'data-model' or self.mf_case == 'data-data': # noqa: PLR1714
+ wei = self.weights_node2(xc1[i, :], self.X_hf, ll)
+ # phi = e2[closest_node(xc1[i, :], self.X_hf, ll)]
+ # phi = e2[self.__closest_node(xc1[i, :], self.X_hf)]
+ elif self.mf_case == 'model-data':
+ wei = self.weights_node2(xc1[i, :], X, ll)
+ # phi = e2[closest_node(xc1[i, :], X, ll)]
+ # phi = e2[self.__closest_node(xc1[i, :], X)]
+
+ # cri1[i] = yc1_var[i]
cri2[i] = sum(e2[:, y_idx] / Y_pred_var[:, y_idx] * wei.T)
- #cri2[i] = pow(phi[y_idx],r)
+ # cri2[i] = pow(phi[y_idx],r)
- VOI = np.zeros(yc1_pred.shape)
+ VOI = np.zeros(yc1_pred.shape) # noqa: N806
for i in range(nc1):
- pdfvals = m_idx.kern.K(np.array([xq[i]]), xq)**2/m_idx.kern.K(np.array([xq[0]]))**2
- VOI[i] = np.mean(pdfvals)*np.prod(np.diff(self.xrange,axis=1)) # * np.prod(np.diff(self.xrange))
+ pdfvals = (
+ m_idx.kern.K(np.array([xq[i]]), xq) ** 2
+ / m_idx.kern.K(np.array([xq[0]])) ** 2
+ )
+ VOI[i] = np.mean(pdfvals) * np.prod(
+ np.diff(self.xrange, axis=1)
+ ) # * np.prod(np.diff(self.xrange))
cri1[i] = yc1_var[i] * VOI[i]
- cri1 = (cri1-np.min(cri1))/(np.max(cri1)-np.min(cri1))
- cri2 = (cri2-np.min(cri2))/(np.max(cri2)-np.min(cri2))
+ cri1 = (cri1 - np.min(cri1)) / (np.max(cri1) - np.min(cri1))
+ cri2 = (cri2 - np.min(cri2)) / (np.max(cri2) - np.min(cri2))
logcrimi1 = np.log(cri1[:, 0])
logcrimi2 = np.log(cri2[:, 0])
-
- idx_pareto_front = list()
+ idx_pareto_front = list() # noqa: C408, F841
rankid = np.zeros(nc1)
- varRank = np.zeros(nc1)
- biasRank = np.zeros(nc1)
- for id in range(nc1):
- idx_tmp = np.argwhere((logcrimi1 >= logcrimi1[id]) * (logcrimi2 >= logcrimi2[id]))
- varRank[id] = np.sum((logcrimi1 >= logcrimi1[id]))
- biasRank[id] = np.sum((logcrimi2 >= logcrimi2[id]))
+ varRank = np.zeros(nc1) # noqa: N806
+ biasRank = np.zeros(nc1) # noqa: N806
+ for id in range(nc1): # noqa: A001
+ idx_tmp = np.argwhere(
+ (logcrimi1 >= logcrimi1[id]) * (logcrimi2 >= logcrimi2[id])
+ )
+ varRank[id] = np.sum(logcrimi1 >= logcrimi1[id])
+ biasRank[id] = np.sum(logcrimi2 >= logcrimi2[id])
rankid[id] = idx_tmp.size
- idx_rank = np.argsort(rankid)
- sort_rank = np.sort(rankid)
- num_1rank = np.sum(rankid==1)
- idx_1rank = list((np.argwhere(rankid==1)).flatten())
- npareto = 4
+ idx_rank = np.argsort(rankid) # noqa: F841
+ sort_rank = np.sort(rankid) # noqa: F841
+ num_1rank = np.sum(rankid == 1)
+ idx_1rank = list((np.argwhere(rankid == 1)).flatten())
+ npareto = 4 # noqa: F841
if num_1rank < self.cal_interval:
prob = np.ones((nc1,))
- prob[list(rankid==1)]=0
- prob=prob/sum(prob)
- idx_pareto = idx_1rank + list(np.random.choice(nc1, self.cal_interval-num_1rank, p=prob))
+ prob[list(rankid == 1)] = 0
+ prob = prob / sum(prob) # noqa: PLR6104
+ idx_pareto = idx_1rank + list(
+ np.random.choice(nc1, self.cal_interval - num_1rank, p=prob)
+ )
else:
idx_pareto_candi = idx_1rank.copy()
- X_tmp = X
- Y_tmp = Y[:,y_idx][np.newaxis].T
+ X_tmp = X # noqa: N806
+ Y_tmp = Y[:, y_idx][np.newaxis].T # noqa: N806
m_tmp = m_idx.copy()
# get MMSEw
- score = np.squeeze(cri1*cri2)
+ score = np.squeeze(cri1 * cri2)
score_candi = score[idx_pareto_candi]
best_local = np.argsort(-score_candi)[0]
best_global = idx_1rank[best_local]
@@ -1260,37 +1420,40 @@ def __design_of_experiments(self, X, Y, ac, ar, n_candi, n_integ, pre_m_list, do
idx_pareto_new = [best_global]
del idx_pareto_candi[best_local]
- for i in range(self.cal_interval-1):
- X_tmp = np.vstack([X_tmp, xc1[best_global, :][np.newaxis]])
- Y_tmp = np.vstack([Y_tmp, np.array([[0]]) ]) # any variables
+ for i in range(self.cal_interval - 1): # noqa: B007
+ X_tmp = np.vstack([X_tmp, xc1[best_global, :][np.newaxis]]) # noqa: N806
+ # any variables
+ Y_tmp = np.vstack([Y_tmp, np.array([[0]])]) # noqa: N806
m_tmp.set_XY(X=X_tmp, Y=Y_tmp)
- dummy, Yq_var = m_tmp.predict(xc1[idx_pareto_candi, :])
+ dummy, Yq_var = m_tmp.predict(xc1[idx_pareto_candi, :]) # noqa: N806
cri1 = Yq_var * VOI[idx_pareto_candi]
cri1 = (cri1 - np.min(cri1)) / (np.max(cri1) - np.min(cri1))
- score_tmp = cri1 * cri2[idx_pareto_candi] # only update the variance
+ score_tmp = (
+ cri1 * cri2[idx_pareto_candi]
+ ) # only update the variance
best_local = np.argsort(-np.squeeze(score_tmp))[0]
best_global = idx_pareto_candi[best_local]
- idx_pareto_new = idx_pareto_new + [best_global]
+ idx_pareto_new = idx_pareto_new + [best_global] # noqa: PLR6104, RUF005
del idx_pareto_candi[best_local]
- #score_tmp = Yq_var * cri2[idx_pareto_left]/Y_pred_var[closest_node(xc1[i, :], X, self.m_list, self.xrange)]
+ # score_tmp = Yq_var * cri2[idx_pareto_left]/Y_pred_var[closest_node(xc1[i, :], X, self.m_list, self.xrange)]
- #idx_pareto = list(idx_rank[0:self.cal_interval])
+ # idx_pareto = list(idx_rank[0:self.cal_interval])
idx_pareto = idx_pareto_new
update_point = xc1[idx_pareto, :]
- update_IMSE = 0
-
+ update_IMSE = 0 # noqa: N806
+
# import matplotlib.pyplot as plt
# plt.plot(logcrimi1, logcrimi2, 'x');plt.plot(logcrimi1[idx_pareto], logcrimi2[idx_pareto], 'x'); plt.show()
# plt.plot(m_idx.X[:,0], m_idx.X[:,1], 'x'); plt.show()
- # plt.plot(X[:, 0],X[:, 1], 'ro');
+ # plt.plot(X[:, 0],X[:, 1], 'ro');
# plt.scatter(xc1[:,0], xc1[:,1], c=cri2); plt.plot(xc1[rankid==0,0], xc1[rankid==0,1], 'rx'); plt.show()
- # plt.scatter(xc1[:,0], xc1[:,1], c=cri2); plt.plot(update_point[:,0], update_point[:,1], 'rx'); plt.show()
+ # plt.scatter(xc1[:,0], xc1[:,1], c=cri2); plt.plot(update_point[:,0], update_point[:,1], 'rx'); plt.show()
# plt.scatter(xc1[:, 0], xc1[:, 1], c=cri2); plt.show()
#
- '''
+ """
idx_pareto = list()
for id in range(nc1):
idx_tmp = np.argwhere(logcrimi2 >= logcrimi2[id])
@@ -1305,73 +1468,85 @@ def __design_of_experiments(self, X, Y, ac, ar, n_candi, n_integ, pre_m_list, do
idx_pareto2 = np.asarray(random_indices)
idx_pareto = np.asarray(idx_pareto)
idx_pareto = list(idx_pareto[idx_pareto2[0:self.cal_interval]])
- '''
-
- elif self.doe_method == "imsew":
-
+ """ # noqa: W293
+ elif self.doe_method == 'imsew':
nq = round(n_integ)
m_stack = m_idx.copy()
- X_stack = X
- Y_stack = Y
+ X_stack = X # noqa: N806
+ Y_stack = Y # noqa: N806
- update_point = np.zeros((self.cal_interval,self.x_dim))
- update_IMSE = np.zeros((self.cal_interval,1))
+ update_point = np.zeros((self.cal_interval, self.x_dim))
+ update_IMSE = np.zeros((self.cal_interval, 1)) # noqa: N806
#
# Initial candidates
#
for ni in range(self.cal_interval):
-
#
# Initial candidates
#
xc1 = np.zeros((nc1, x_dim))
for nx in range(x_dim):
- xc1[:, nx] = np.random.uniform(self.xrange[nx, 0], self.xrange[nx, 1], (1, nc1)) # LHS
+ xc1[:, nx] = np.random.uniform(
+ self.xrange[nx, 0], self.xrange[nx, 1], (1, nc1)
+ ) # LHS
xq = np.zeros((nq, x_dim))
for nx in range(x_dim):
- xq[:, nx] = np.random.uniform(self.xrange[nx, 0], self.xrange[nx, 1], (1, nq))
+ xq[:, nx] = np.random.uniform(
+ self.xrange[nx, 0], self.xrange[nx, 1], (1, nq)
+ )
- #TODO: is diff(xrange) the best?
+ # TODO: is diff(xrange) the best? # noqa: TD002
ll = self.xrange[:, 1] - self.xrange[:, 0]
phiq = np.zeros((nq, y_dim))
for i in range(nq):
- phiq[i,:] = e2[closest_node(xq[i, :], X, ll)]
+ phiq[i, :] = e2[closest_node(xq[i, :], X, ll)]
phiqr = pow(phiq[:, y_idx], r)
if self.do_parallel:
tmp = time.time()
- iterables = ((m_stack.copy(), xc1[i,:][np.newaxis], xq, phiqr, i) for i in range(nc1))
+ iterables = (
+ (m_stack.copy(), xc1[i, :][np.newaxis], xq, phiqr, i)
+ for i in range(nc1)
+ )
result_objs = list(self.pool.starmap(imse, iterables))
- IMSEc1 = np.zeros(nc1)
- for IMSE_val, idx in result_objs:
+ IMSEc1 = np.zeros(nc1) # noqa: N806
+ for IMSE_val, idx in result_objs: # noqa: N806
IMSEc1[idx] = IMSE_val
- print("IMSE: finding the next DOE {} in a parallel way.. time = {}".format(ni,time.time() -tmp)) # 7s # 3-4s
+ print( # noqa: T201
+                    f'IMSE: finding the next DOE {ni} in a parallel way; time = {time.time() - tmp}'
+ ) # 7s # 3-4s
else:
tmp = time.time()
phiqr = pow(phiq[:, y_idx], r)
- IMSEc1 = np.zeros(nc1)
+ IMSEc1 = np.zeros(nc1) # noqa: N806
for i in range(nc1):
- IMSEc1[i], dummy = imse(m_stack.copy(), xc1[i,:][np.newaxis], xq, phiqr, i)
- print("IMSE: finding the next DOE {} in a serial way.. time = {}".format(ni,time.time() -tmp)) # 4s
+ IMSEc1[i], dummy = imse( # noqa: F841
+ m_stack.copy(), xc1[i, :][np.newaxis], xq, phiqr, i
+ )
+ print( # noqa: T201
+                    f'IMSE: finding the next DOE {ni} in a serial way; time = {time.time() - tmp}'
+ ) # 4s
new_idx = np.argmin(IMSEc1, axis=0)
x_point = xc1[new_idx, :][np.newaxis]
- X_stack = np.vstack([X_stack, x_point])
- Y_stack = np.zeros((Y_stack.shape[0] + 1, Y.shape[1])) # any variables
+ X_stack = np.vstack([X_stack, x_point]) # noqa: N806
+ Y_stack = np.zeros( # noqa: N806
+ (Y_stack.shape[0] + 1, Y.shape[1])
+ ) # any variables
m_stack.set_XY(X=X_stack, Y=Y_stack)
update_point[ni, :] = x_point
update_IMSE[ni, :] = IMSEc1[new_idx]
# import matplotlib.pyplot as plt; plt.scatter(xc1[:,0],xc1[:,1],c = IMSEc1); plt.show()
- # import matplotlib.pyplot as plt; plt.scatter(xc1[:,0],xc1[:,1],c = IMSEc1); plt.plot(update_point[:,0],update_point[:,1],'x'); plt.show()
+ # import matplotlib.pyplot as plt; plt.scatter(xc1[:,0],xc1[:,1],c = IMSEc1); plt.plot(update_point[:,0],update_point[:,1],'x'); plt.show()
# import matplotlib.pyplot as plt; plt.scatter(X_stack[:,0],X_stack[:,1]); plt.show()
- '''
+ """
nc1 = round(n_candi)
xc1 = np.zeros((nc1, x_dim))
@@ -1452,29 +1627,31 @@ def __design_of_experiments(self, X, Y, ac, ar, n_candi, n_integ, pre_m_list, do
update_point = xc3[new_idx, :][np.newaxis]
update_IMSE = IMSE[new_idx]
- '''
-
- elif self.doe_method == "random":
+ """ # noqa: W293
- update_point = xc1[0:self.cal_interval, :]
- update_IMSE = 0
+ elif self.doe_method == 'random':
+ update_point = xc1[0 : self.cal_interval, :]
+ update_IMSE = 0 # noqa: N806
- elif self.doe_method == "mmse":
- sort_idx_score1 = np.argsort(-cri1.T) # (-) sign to make it descending order
+ elif self.doe_method == 'mmse':
+ sort_idx_score1 = np.argsort(
+ -cri1.T
+ ) # (-) sign to make it descending order
nc2 = round(nc1 * ac)
xc2 = xc1[sort_idx_score1[0, 0:nc2], :]
update_point = xc2[0:1, :]
- update_IMSE = 0
-
- elif self.doe_method == "mmsew":
+ update_IMSE = 0 # noqa: N806
+ elif self.doe_method == 'mmsew':
#
# Initial candidates
#
xc1 = np.zeros((nc1, x_dim))
for nx in range(x_dim):
- xc1[:, nx] = np.random.uniform(self.xrange[nx, 0], self.xrange[nx, 1], (1, nc1)) # LHS
+ xc1[:, nx] = np.random.uniform(
+ self.xrange[nx, 0], self.xrange[nx, 1], (1, nc1)
+ ) # LHS
m_stack = m_idx.copy()
ll = self.xrange[:, 1] - self.xrange[:, 0]
@@ -1484,38 +1661,43 @@ def __design_of_experiments(self, X, Y, ac, ar, n_candi, n_integ, pre_m_list, do
phicr = pow(phic[:, y_idx], r)
- X_stack = X
- Y_stack = Y
+ X_stack = X # noqa: N806
+ Y_stack = Y # noqa: N806
- update_point = np.zeros((self.cal_interval,self.x_dim))
- update_IMSE = np.zeros((self.cal_interval,1))
+ update_point = np.zeros((self.cal_interval, self.x_dim))
+ update_IMSE = np.zeros((self.cal_interval, 1)) # noqa: N806
for ni in range(self.cal_interval):
yc1_pred, yc1_var = m_stack.predict(xc1) # use only variance
- MMSEc1 = yc1_var.flatten() * phicr.flatten()
-
+ MMSEc1 = yc1_var.flatten() * phicr.flatten() # noqa: N806
new_idx = np.argmax(MMSEc1, axis=0)
x_point = xc1[new_idx, :][np.newaxis]
- X_stack = np.vstack([X_stack, x_point])
- Y_stack = np.zeros((Y_stack.shape[0] + 1, Y.shape[1])) # any variables
+ X_stack = np.vstack([X_stack, x_point]) # noqa: N806
+ Y_stack = np.zeros( # noqa: N806
+ (Y_stack.shape[0] + 1, Y.shape[1])
+ ) # any variables
m_stack.set_XY(X=X_stack, Y=Y_stack)
update_point[ni, :] = x_point
update_IMSE[ni, :] = MMSEc1[new_idx]
else:
- msg = 'Error running SimCenterUQ: cannot identify the doe method <' + self.doe_method + '>'
+ msg = (
+ 'Error running SimCenterUQ: cannot identify the doe method <'
+ + self.doe_method
+ + '>'
+ )
errlog.exit(msg)
return update_point, m_list, update_IMSE, y_idx, Y_pred, Y_pred_var
- def __normalized_mean_sq_error(self, yp, ye):
+ def __normalized_mean_sq_error(self, yp, ye): # noqa: PLR6301
nt = yp.shape[0]
- data_bound = (np.max(ye, axis=0) - np.min(ye, axis=0))
- RMSE = np.sqrt(1 / nt * np.sum(pow(yp - ye, 2), axis=0))
- NRMSE =RMSE/data_bound
- NRMSE[np.argwhere((data_bound ==0))]=0
+ data_bound = np.max(ye, axis=0) - np.min(ye, axis=0)
+ RMSE = np.sqrt(1 / nt * np.sum(pow(yp - ye, 2), axis=0)) # noqa: N806
+ NRMSE = RMSE / data_bound # noqa: N806
+ NRMSE[np.argwhere(data_bound == 0)] = 0
return NRMSE
def __closest_node(self, node, nodes):
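__normalized_mean_sq_error above reports, per output, the root-mean-square leave-one-out error divided by the data range, and forces the score to zero for outputs with zero range. A short sketch of the same computation with placeholder arrays:

import numpy as np

yp = np.array([[1.0, 2.0], [2.0, 2.0], [3.0, 2.0]])  # placeholder predictions
ye = np.array([[1.1, 2.0], [1.9, 2.0], [3.2, 2.0]])  # placeholder exact values (2nd output constant)

data_bound = np.max(ye, axis=0) - np.min(ye, axis=0)
rmse = np.sqrt(np.mean((yp - ye) ** 2, axis=0))
with np.errstate(divide='ignore', invalid='ignore'):
    nrmse = rmse / data_bound
nrmse[data_bound == 0] = 0  # constant outputs get NRMSE = 0, matching the method above
print(nrmse)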
@@ -1524,216 +1706,294 @@ def __closest_node(self, node, nodes):
deltas_norm = np.zeros(deltas.shape)
for nx in range(self.x_dim):
- deltas_norm[:, nx] = (deltas[:, nx]) / (self.xrange[nx, 1] - self.xrange[nx, 0]) # additional weights?
+ deltas_norm[:, nx] = (deltas[:, nx]) / (
+ self.xrange[nx, 1] - self.xrange[nx, 0]
+ ) # additional weights?
# np.argmin(np.sum(pow(deltas_norm,2),axis=1))
dist_2 = np.einsum('ij,ij->i', deltas_norm, deltas_norm)
return np.argmin(dist_2)
- def __from_XY_into_list(self, X, Y):
- x_list = list()
- y_list = list()
+ def __from_XY_into_list(self, X, Y): # noqa: N802, N803, PLR6301
+ x_list = list() # noqa: C408
+ y_list = list() # noqa: C408
for i in range(Y.shape[1]):
- x_list = x_list + [X, ]
- y_list = y_list + [Y[:, [i, ]], ]
+ x_list = x_list + [ # noqa: PLR6104, RUF005
+ X,
+ ]
+ y_list = y_list + [ # noqa: PLR6104, RUF005
+ Y[
+ :,
+ [
+ i,
+ ],
+ ],
+ ]
return x_list, y_list
- def __predict(self, m, X):
-
- if not self.do_mf:
+ def __predict(self, m, X): # noqa: N803
+ if not self.do_mf: # noqa: RET503
return m.predict(X)
- else:
- if self.mf_case == 'data-model' or self.mf_case == 'data-data':
- X_list = convert_x_list_to_array([X, X])
- X_list_l = X_list[:X.shape[0]]
- X_list_h = X_list[X.shape[0]:]
- return m.predict(X_list_h)
- elif self.mf_case == 'model-data':
- #return m.predict(X)
- X_list = convert_x_list_to_array([X, X])
- X_list_l = X_list[:X.shape[0]]
- X_list_h = X_list[X.shape[0]:]
- return m.predict(X_list_h)
-
-
-
- def __get_cross_validation(self,X,Y,m_list):
-
+ elif self.mf_case == 'data-model' or self.mf_case == 'data-data': # noqa: RET505, PLR1714
+ X_list = convert_x_list_to_array([X, X]) # noqa: N806
+ X_list_l = X_list[: X.shape[0]] # noqa: N806
+ X_list_h = X_list[X.shape[0] :] # noqa: N806
+ return m.predict(X_list_h)
+ elif self.mf_case == 'model-data':
+ # return m.predict(X)
+ X_list = convert_x_list_to_array([X, X]) # noqa: N806
+ X_list_l = X_list[: X.shape[0]] # noqa: N806, F841
+ X_list_h = X_list[X.shape[0] :] # noqa: N806
+ return m.predict(X_list_h)
+
+ def __get_cross_validation(self, X, Y, m_list): # noqa: N803
if not self.do_mf:
e2 = np.zeros(Y.shape)
- Y_pred = np.zeros(Y.shape)
- Y_pred_var = np.zeros(Y.shape)
+ Y_pred = np.zeros(Y.shape) # noqa: N806
+ Y_pred_var = np.zeros(Y.shape) # noqa: N806
for ny in range(Y.shape[1]):
m_tmp = m_list[ny].copy()
for ns in range(X.shape[0]):
- X_tmp = np.delete(X, ns, axis=0)
- Y_tmp = np.delete(Y, ns, axis=0)
+ X_tmp = np.delete(X, ns, axis=0) # noqa: N806
+ Y_tmp = np.delete(Y, ns, axis=0) # noqa: N806
m_tmp.set_XY(X=X_tmp, Y=Y_tmp[:, ny][np.newaxis].transpose())
x_loo = X[ns, :][np.newaxis]
# Y_pred_tmp, Y_err_tmp = m_tmp.predict(x_loo)
- Y_pred_tmp, Y_err_tmp = self.__predict(m_tmp,x_loo)
+ Y_pred_tmp, Y_err_tmp = self.__predict(m_tmp, x_loo) # noqa: N806
Y_pred[ns, ny] = Y_pred_tmp
Y_pred_var[ns, ny] = Y_err_tmp
- e2[ns, ny] = pow((Y_pred[ns, ny] - Y[ns, ny]), 2) # for nD outputs
+ e2[ns, ny] = pow(
+ (Y_pred[ns, ny] - Y[ns, ny]), 2
+ ) # for nD outputs
+ elif self.mf_case == 'data-model' or self.mf_case == 'data-data': # noqa: PLR1714
+ e2 = np.zeros(self.Y_hf.shape)
+ Y_pred = np.zeros(self.Y_hf.shape) # noqa: N806
+ Y_pred_var = np.zeros(self.Y_hf.shape) # noqa: N806
- else:
- if self.mf_case == 'data-model' or self.mf_case == 'data-data':
- e2 = np.zeros(self.Y_hf.shape)
- Y_pred = np.zeros(self.Y_hf.shape)
- Y_pred_var = np.zeros(self.Y_hf.shape)
-
- for ny in range(Y.shape[1]):
- m_tmp = deepcopy(m_list[ny])
- for ns in range(self.X_hf.shape[0]):
- X_hf_tmp = np.delete(self.X_hf, ns, axis=0)
- Y_hf_tmp = np.delete(self.Y_hf, ns, axis=0)
- X_list_tmp, Y_list_tmp = emf.convert_lists_to_array.convert_xy_lists_to_arrays([X, X_hf_tmp],
- [Y[:, ny][np.newaxis].transpose(), Y_hf_tmp[:, ny][np.newaxis].transpose()])
- m_tmp.set_data(X=X_list_tmp, Y=Y_list_tmp)
- x_loo = self.X_hf[ns][np.newaxis]
- Y_pred_tmp, Y_err_tmp = self.__predict(m_tmp,x_loo)
- Y_pred[ns,ny] = Y_pred_tmp
- Y_pred_var[ns,ny] = Y_err_tmp
- e2[ns,ny] = pow((Y_pred[ns,ny] - self.Y_hf[ns,ny]), 2) # for nD outputs
+ for ny in range(Y.shape[1]):
+ m_tmp = deepcopy(m_list[ny])
+ for ns in range(self.X_hf.shape[0]):
+ X_hf_tmp = np.delete(self.X_hf, ns, axis=0) # noqa: N806
+ Y_hf_tmp = np.delete(self.Y_hf, ns, axis=0) # noqa: N806
+ X_list_tmp, Y_list_tmp = ( # noqa: N806
+ emf.convert_lists_to_array.convert_xy_lists_to_arrays(
+ [X, X_hf_tmp],
+ [
+ Y[:, ny][np.newaxis].transpose(),
+ Y_hf_tmp[:, ny][np.newaxis].transpose(),
+ ],
+ )
+ )
+ m_tmp.set_data(X=X_list_tmp, Y=Y_list_tmp)
+ x_loo = self.X_hf[ns][np.newaxis]
+ Y_pred_tmp, Y_err_tmp = self.__predict(m_tmp, x_loo) # noqa: N806
+ Y_pred[ns, ny] = Y_pred_tmp
+ Y_pred_var[ns, ny] = Y_err_tmp
+ e2[ns, ny] = pow(
+ (Y_pred[ns, ny] - self.Y_hf[ns, ny]), 2
+ ) # for nD outputs
- elif self.mf_case == 'model-data':
- e2 = np.zeros(Y.shape)
- Y_pred = np.zeros(Y.shape)
- Y_pred_var = np.zeros(Y.shape)
-
- for ny in range(Y.shape[1]):
- m_tmp = deepcopy(m_list[ny])
- for ns in range(X.shape[0]):
- X_tmp = np.delete(X, ns, axis=0)
- Y_tmp = np.delete(Y, ns, axis=0)
- X_list_tmp, Y_list_tmp = emf.convert_lists_to_array.convert_xy_lists_to_arrays([self.X_lf, X_tmp],
- [self.Y_lf[:, ny][np.newaxis].transpose(), Y_tmp[:, ny][np.newaxis].transpose()])
- m_tmp.set_data(X=X_list_tmp, Y=Y_list_tmp)
- #x_loo = np.hstack((X[ns], 1))[np.newaxis]
- x_loo = self.X_hf[ns][np.newaxis]
- Y_pred_tmp, Y_err_tmp = self.__predict(m_tmp,x_loo)
- Y_pred[ns,ny] = Y_pred_tmp
- Y_pred_var[ns,ny] = Y_err_tmp
- e2[ns,ny] = pow((Y_pred[ns,ny] - Y[ns,ny]), 2) # for nD outputs
+ elif self.mf_case == 'model-data':
+ e2 = np.zeros(Y.shape)
+ Y_pred = np.zeros(Y.shape) # noqa: N806
+ Y_pred_var = np.zeros(Y.shape) # noqa: N806
+
+ for ny in range(Y.shape[1]):
+ m_tmp = deepcopy(m_list[ny])
+ for ns in range(X.shape[0]):
+ X_tmp = np.delete(X, ns, axis=0) # noqa: N806
+ Y_tmp = np.delete(Y, ns, axis=0) # noqa: N806
+ X_list_tmp, Y_list_tmp = ( # noqa: N806
+ emf.convert_lists_to_array.convert_xy_lists_to_arrays(
+ [self.X_lf, X_tmp],
+ [
+ self.Y_lf[:, ny][np.newaxis].transpose(),
+ Y_tmp[:, ny][np.newaxis].transpose(),
+ ],
+ )
+ )
+ m_tmp.set_data(X=X_list_tmp, Y=Y_list_tmp)
+ # x_loo = np.hstack((X[ns], 1))[np.newaxis]
+ x_loo = self.X_hf[ns][np.newaxis]
+ Y_pred_tmp, Y_err_tmp = self.__predict(m_tmp, x_loo) # noqa: N806
+ Y_pred[ns, ny] = Y_pred_tmp
+ Y_pred_var[ns, ny] = Y_err_tmp
+ e2[ns, ny] = pow(
+ (Y_pred[ns, ny] - Y[ns, ny]), 2
+ ) # for nD outputs
return Y_pred, Y_pred_var, e2
- def term(self):
+ def term(self): # noqa: D102
if self.do_parallel:
- if self.run_type != "runningLocal":
- print("RUNNING SUCCESSFUL")
- self.world.Abort(0) # to prevent deadlock
+ if self.run_type != 'runningLocal':
+ print('RUNNING SUCCESSFUL') # noqa: T201
+ self.world.Abort(0) # to prevent deadlock
+ def save_model(self, filename): # noqa: C901, D102, PLR0915
+ import json # noqa: PLC0415
- def save_model(self, filename):
- import json
-
- with open(self.work_dir + '/' + filename + '.pkl', 'wb') as file:
+ with open(self.work_dir + '/' + filename + '.pkl', 'wb') as file: # noqa: PTH123
pickle.dump(self.m_list, file)
# json.dump(self.m_list, file)
-
header_string_x = ' ' + ' '.join([str(elem) for elem in self.rv_name]) + ' '
header_string_y = ' ' + ' '.join([str(elem) for elem in self.g_name])
header_string = header_string_x + header_string_y
if not self.do_mf:
- xy_data = np.concatenate((np.asmatrix(np.arange(1, self.X.shape[0] + 1)).T, self.X, self.Y), axis=1)
+ xy_data = np.concatenate(
+ (np.asmatrix(np.arange(1, self.X.shape[0] + 1)).T, self.X, self.Y),
+ axis=1,
+ )
+ elif not self.hf_is_model:
+ xy_data = np.concatenate(
+ (
+ np.asmatrix(np.arange(1, self.X_hf.shape[0] + 1)).T,
+ self.X_hf,
+ self.Y_hf,
+ ),
+ axis=1,
+ )
else:
- if not self.hf_is_model:
- xy_data = np.concatenate((np.asmatrix(np.arange(1, self.X_hf.shape[0] + 1)).T, self.X_hf, self.Y_hf), axis=1)
- else:
- xy_data = np.concatenate((np.asmatrix(np.arange(1, self.X.shape[0] + 1)).T, self.X, self.Y), axis=1)
- np.savetxt(self.work_dir + '/dakotaTab.out', xy_data, header=header_string, fmt='%1.4e', comments='%')
- np.savetxt(self.work_dir + '/inputTab.out', self.X, header=header_string_x, fmt='%1.4e', comments='%')
- np.savetxt(self.work_dir + '/outputTab.out', self.Y, header=header_string_y, fmt='%1.4e', comments='%')
+ xy_data = np.concatenate(
+ (
+ np.asmatrix(np.arange(1, self.X.shape[0] + 1)).T,
+ self.X,
+ self.Y,
+ ),
+ axis=1,
+ )
+ np.savetxt(
+ self.work_dir + '/dakotaTab.out',
+ xy_data,
+ header=header_string,
+ fmt='%1.4e',
+ comments='%',
+ )
+ np.savetxt(
+ self.work_dir + '/inputTab.out',
+ self.X,
+ header=header_string_x,
+ fmt='%1.4e',
+ comments='%',
+ )
+ np.savetxt(
+ self.work_dir + '/outputTab.out',
+ self.Y,
+ header=header_string_y,
+ fmt='%1.4e',
+ comments='%',
+ )
y_ub = np.zeros(self.Y_loo.shape)
y_lb = np.zeros(self.Y_loo.shape)
-
if not self.do_logtransform:
for ny in range(self.y_dim):
- y_lb[:,ny] = norm.ppf(0.05, loc=self.Y_loo[:, ny],
- scale=np.sqrt(self.Y_loo_var[:, ny])).tolist()
- y_ub[:, ny] = norm.ppf(0.95, loc=self.Y_loo[:, ny],
- scale=np.sqrt(self.Y_loo_var[:, ny])).tolist()
+ y_lb[:, ny] = norm.ppf(
+ 0.05, loc=self.Y_loo[:, ny], scale=np.sqrt(self.Y_loo_var[:, ny])
+ ).tolist()
+ y_ub[:, ny] = norm.ppf(
+ 0.95, loc=self.Y_loo[:, ny], scale=np.sqrt(self.Y_loo_var[:, ny])
+ ).tolist()
else:
for ny in range(self.y_dim):
mu = np.log(self.Y_loo[:, ny])
- sig = np.sqrt(np.log(self.Y_loo_var[:, ny] / pow(self.Y_loo[:, ny], 2) + 1))
- y_lb[:,ny] = lognorm.ppf(0.05, s=sig, scale=np.exp(mu)).tolist()
+ sig = np.sqrt(
+ np.log(self.Y_loo_var[:, ny] / pow(self.Y_loo[:, ny], 2) + 1)
+ )
+ y_lb[:, ny] = lognorm.ppf(0.05, s=sig, scale=np.exp(mu)).tolist()
y_ub[:, ny] = lognorm.ppf(0.95, s=sig, scale=np.exp(mu)).tolist()
- xy_sur_data = np.hstack((xy_data,self.Y_loo,y_lb,y_ub,self.Y_loo_var))
- g_name_sur = self.g_name
- header_string_sur = header_string + " " + ".median ".join(
- g_name_sur) + ".median " + ".q5 ".join(g_name_sur) + ".q5 " + ".q95 ".join(
- g_name_sur) + ".q95 " + ".var ".join(g_name_sur) + ".var"
-
- np.savetxt(self.work_dir + '/surrogateTab.out', xy_sur_data, header=header_string_sur, fmt='%1.4e', comments='%')
-
-
+ xy_sur_data = np.hstack((xy_data, self.Y_loo, y_lb, y_ub, self.Y_loo_var))
+ g_name_sur = self.g_name
+ header_string_sur = (
+ header_string
+ + ' '
+ + '.median '.join(g_name_sur)
+ + '.median '
+ + '.q5 '.join(g_name_sur)
+ + '.q5 '
+ + '.q95 '.join(g_name_sur)
+ + '.q95 '
+ + '.var '.join(g_name_sur)
+ + '.var'
+ )
+
+ np.savetxt(
+ self.work_dir + '/surrogateTab.out',
+ xy_sur_data,
+ header=header_string_sur,
+ fmt='%1.4e',
+ comments='%',
+ )
results = {}
- results["doSampling"] = self.do_sampling
- results["doSimulation"] = self.do_simulation
- results["doDoE"] = self.do_doe
- results["doLogtransform"] = self.do_logtransform
- results["doLinear"] = self.do_linear
- results["doMultiFidelity"] = self.do_mf
- results["kernName"] = self.kernel
- results["terminationCode"] = self.exit_code
- results["thrNRMSE"] = self.thr_NRMSE
- results["valSamp"] = self.n_samp
- results["valSim"] = self.n_sim
- results["valTime"] = self.sim_time
- results["xdim"] = self.x_dim
- results["ydim"] = self.y_dim
- results["xlabels"] = self.rv_name
- results["ylabels"] = self.g_name
- results["yExact"] = {}
- results["yPredict"] = {}
- results["valNugget"] = {}
- results["valNRMSE"] = {}
- results["valR2"] = {}
- results["valCorrCoeff"] = {}
- results["yPredict_CI_lb"] = {}
- results["yPredict_CI_ub"] = {}
- results["xExact"] = {}
+ results['doSampling'] = self.do_sampling
+ results['doSimulation'] = self.do_simulation
+ results['doDoE'] = self.do_doe
+ results['doLogtransform'] = self.do_logtransform
+ results['doLinear'] = self.do_linear
+ results['doMultiFidelity'] = self.do_mf
+ results['kernName'] = self.kernel
+ results['terminationCode'] = self.exit_code
+ results['thrNRMSE'] = self.thr_NRMSE
+ results['valSamp'] = self.n_samp
+ results['valSim'] = self.n_sim
+ results['valTime'] = self.sim_time
+ results['xdim'] = self.x_dim
+ results['ydim'] = self.y_dim
+ results['xlabels'] = self.rv_name
+ results['ylabels'] = self.g_name
+ results['yExact'] = {}
+ results['yPredict'] = {}
+ results['valNugget'] = {}
+ results['valNRMSE'] = {}
+ results['valR2'] = {}
+ results['valCorrCoeff'] = {}
+ results['yPredict_CI_lb'] = {}
+ results['yPredict_CI_ub'] = {}
+ results['xExact'] = {}
for nx in range(self.x_dim):
- results["xExact"][self.rv_name[nx]] = self.X[:, nx].tolist()
+ results['xExact'][self.rv_name[nx]] = self.X[:, nx].tolist()
for ny in range(self.y_dim):
if not self.do_mf:
- results["yExact"][self.g_name[ny]] = self.Y[:, ny].tolist()
- else:
- if self.mf_case == 'data-model' or self.mf_case == 'data-data':
- results["yExact"][self.g_name[ny]] = self.Y_hf[:, ny].tolist()
- elif self.mf_case == 'model-data':
- results["yExact"][self.g_name[ny]] = self.Y[:, ny].tolist()
+ results['yExact'][self.g_name[ny]] = self.Y[:, ny].tolist()
+ elif self.mf_case == 'data-model' or self.mf_case == 'data-data': # noqa: PLR1714
+ results['yExact'][self.g_name[ny]] = self.Y_hf[:, ny].tolist()
+ elif self.mf_case == 'model-data':
+ results['yExact'][self.g_name[ny]] = self.Y[:, ny].tolist()
- results["yPredict"][self.g_name[ny]] = self.Y_loo[:, ny].tolist()
+ results['yPredict'][self.g_name[ny]] = self.Y_loo[:, ny].tolist()
if not self.do_logtransform:
- #results["yPredict_CI_lb"][self.g_name[ny]] = self.Y_loo[:, ny].tolist()+2*np.sqrt(self.Y_loo_var[:, ny]).tolist()
- #results["yPredict_CI_lb"][self.g_name[ny]] = self.Y_loo[:, ny].tolist()-2*np.sqrt(self.Y_loo_var[:, ny]).tolist()
+ # results["yPredict_CI_lb"][self.g_name[ny]] = self.Y_loo[:, ny].tolist()+2*np.sqrt(self.Y_loo_var[:, ny]).tolist()
+ # results["yPredict_CI_lb"][self.g_name[ny]] = self.Y_loo[:, ny].tolist()-2*np.sqrt(self.Y_loo_var[:, ny]).tolist()
- results["yPredict_CI_lb"][self.g_name[ny]] = norm.ppf(0.25, loc = self.Y_loo[:, ny] , scale = np.sqrt(self.Y_loo_var[:, ny])).tolist()
- results["yPredict_CI_ub"][self.g_name[ny]] = norm.ppf(0.75, loc = self.Y_loo[:, ny] , scale = np.sqrt(self.Y_loo_var[:, ny])).tolist()
+ results['yPredict_CI_lb'][self.g_name[ny]] = norm.ppf(
+ 0.25, loc=self.Y_loo[:, ny], scale=np.sqrt(self.Y_loo_var[:, ny])
+ ).tolist()
+ results['yPredict_CI_ub'][self.g_name[ny]] = norm.ppf(
+ 0.75, loc=self.Y_loo[:, ny], scale=np.sqrt(self.Y_loo_var[:, ny])
+ ).tolist()
else:
+ mu = np.log(self.Y_loo[:, ny])
+ sig = np.sqrt(
+ np.log(self.Y_loo_var[:, ny] / pow(self.Y_loo[:, ny], 2) + 1)
+ )
- mu = np.log(self.Y_loo[:, ny] )
- sig = np.sqrt(np.log(self.Y_loo_var[:, ny]/pow(self.Y_loo[:, ny] ,2)+1))
-
- results["yPredict_CI_lb"][self.g_name[ny]] = lognorm.ppf(0.25, s = sig, scale = np.exp(mu)).tolist()
- results["yPredict_CI_ub"][self.g_name[ny]] = lognorm.ppf(0.75, s = sig, scale = np.exp(mu)).tolist()
-
-
+ results['yPredict_CI_lb'][self.g_name[ny]] = lognorm.ppf(
+ 0.25, s=sig, scale=np.exp(mu)
+ ).tolist()
+ results['yPredict_CI_ub'][self.g_name[ny]] = lognorm.ppf(
+ 0.75, s=sig, scale=np.exp(mu)
+ ).tolist()
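When the log-transform option is on, the interval bounds above come from a lognormal whose dispersion is matched to the leave-one-out predictive variance (sig^2 = ln(var/m^2 + 1), scale = m). A minimal standalone sketch of that quartile computation, assuming NumPy and SciPy are available:

import numpy as np
from scipy.stats import lognorm

def loo_quartile_bounds(y_loo, y_loo_var):
    # mirrors the log-transform branch above: variance-matched lognormal quartiles
    mu = np.log(y_loo)
    sig = np.sqrt(np.log(y_loo_var / y_loo**2 + 1.0))
    lb = lognorm.ppf(0.25, s=sig, scale=np.exp(mu))
    ub = lognorm.ppf(0.75, s=sig, scale=np.exp(mu))
    return lb, ub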
# if self.do_logtransform:
# log_mean = 0
@@ -1741,13 +2001,15 @@ def save_model(self, filename):
# nuggetVal_linear = np.exp(2*log_mean+log_var)*(np.exp(log_var)-1) # in linear space
if self.do_mf:
- #results["valNugget"][self.g_name[ny]] = float(self.m_list[ny].gpy_model['Gaussian_noise.variance'])
+ # results["valNugget"][self.g_name[ny]] = float(self.m_list[ny].gpy_model['Gaussian_noise.variance'])
pass
else:
- results["valNugget"][self.g_name[ny]] = float(self.m_list[ny]['Gaussian_noise.variance'])
- results["valNRMSE"][self.g_name[ny]] = self.NRMSE_val[ny]
- results["valR2"][self.g_name[ny]] = self.R2_val[ny]
- results["valCorrCoeff"][self.g_name[ny]] = self.corr_val[ny]
+ results['valNugget'][self.g_name[ny]] = float(
+ self.m_list[ny]['Gaussian_noise.variance']
+ )
+ results['valNRMSE'][self.g_name[ny]] = self.NRMSE_val[ny]
+ results['valR2'][self.g_name[ny]] = self.R2_val[ny]
+ results['valCorrCoeff'][self.g_name[ny]] = self.corr_val[ny]
# if np.isnan(self.NRMSE_val[ny]):
# results["valNRMSE"][self.g_name[ny]] = 0
@@ -1757,118 +2019,124 @@ def save_model(self, filename):
# results["valCorrCoeff"][self.g_name[ny]] = 0
if self.do_simulation:
- results["predError"] = {}
- results["predError"]["percent"] = self.perc_thr.tolist()
- results["predError"]["value"] = self.perc_val.tolist()
- results["fem"] = {}
- results["fem"]["workflow_driver"] = self.workflowDriver
- #results["fem"]["postprocessScript"] = self.postFile
- #results["fem"]["program"] = self.appName
+ results['predError'] = {}
+ results['predError']['percent'] = self.perc_thr.tolist()
+ results['predError']['value'] = self.perc_val.tolist()
+ results['fem'] = {}
+ results['fem']['workflow_driver'] = self.workflowDriver
+ # results["fem"]["postprocessScript"] = self.postFile
+ # results["fem"]["program"] = self.appName
if self.do_sampling:
if self.use_existing:
- results["inpData"] = self.inpData
- results["outData"] = self.outData
+ results['inpData'] = self.inpData
+ results['outData'] = self.outData
else:
- results["inpData"] = self.inpData
+ results['inpData'] = self.inpData
if not self.do_simulation:
- results["outData"] = self.outData
+ results['outData'] = self.outData
if self.do_mf:
- if self.mf_case == 'data-model' or self.mf_case == 'data-data':
- results["inpData_HF"] = self.inpData_hf
- results["outData_HF"] = self.outData_hf
- results["valSamp_HF"] = self.X_hf.shape[0]
+ if self.mf_case == 'data-model' or self.mf_case == 'data-data': # noqa: PLR1714
+ results['inpData_HF'] = self.inpData_hf
+ results['outData_HF'] = self.outData_hf
+ results['valSamp_HF'] = self.X_hf.shape[0]
elif self.mf_case == 'model-data':
- results["inpData_LF"] = self.inpData_lf
- results["outData_LF"] = self.outData_lf
- results["valSamp_LF"] = self.X_lf.shape[0]
+ results['inpData_LF'] = self.inpData_lf
+ results['outData_LF'] = self.outData_lf
+ results['valSamp_LF'] = self.X_lf.shape[0]
rv_list = []
for nx in range(self.x_dim):
rvs = {}
- rvs["name"] = self.rvName[nx]
- rvs["distribution"] = self.rvDist[nx]
- rvs["value"] = self.rvVal[nx]
- rv_list = rv_list + [rvs]
- results["randomVariables"] = rv_list
-
+ rvs['name'] = self.rvName[nx]
+ rvs['distribution'] = self.rvDist[nx]
+ rvs['value'] = self.rvVal[nx]
+ rv_list = rv_list + [rvs] # noqa: PLR6104, RUF005
+ results['randomVariables'] = rv_list
-
- ### Used for surrogate
- results["modelInfo"] = {}
+ # Used for surrogate
+ results['modelInfo'] = {}
if not self.do_mf:
for ny in range(self.y_dim):
- results["modelInfo"][self.g_name[ny]] = {}
+ results['modelInfo'][self.g_name[ny]] = {}
for parname in self.m_list[ny].parameter_names():
- results["modelInfo"][self.g_name[ny]][parname] = list(eval('self.m_list[ny].' + parname))
-
+ results['modelInfo'][self.g_name[ny]][parname] = list(
+ eval('self.m_list[ny].' + parname) # noqa: S307
+ )
- with open(self.work_dir + '/dakota.out', 'w') as fp:
+ with open(self.work_dir + '/dakota.out', 'w') as fp: # noqa: PLW1514, PTH123
json.dump(results, fp, indent=1)
- with open(self.work_dir + '/GPresults.out', 'w') as file:
-
+ with open(self.work_dir + '/GPresults.out', 'w') as file: # noqa: PLR1702, PLW1514, PTH123
file.write('* Problem setting\n')
- file.write(' - dimension of x : {}\n'.format(self.x_dim))
- file.write(' - dimension of y : {}\n'.format(self.y_dim))
- file.write(" - sampling : {}\n".format(self.do_sampling))
- file.write(" - simulation : {}\n".format(self.do_simulation))
+ file.write(f' - dimension of x : {self.x_dim}\n')
+ file.write(f' - dimension of y : {self.y_dim}\n')
+ file.write(f' - sampling : {self.do_sampling}\n')
+ file.write(f' - simulation : {self.do_simulation}\n')
if self.do_doe:
- file.write(" - design of experiments : {} \n".format(self.do_doe))
+ file.write(f' - design of experiments : {self.do_doe} \n')
if not self.do_doe:
if self.do_simulation and self.do_sampling:
file.write(
- " - design of experiments (DoE) turned off - DoE evaluation time exceeds the model simulation time \n")
+ ' - design of experiments (DoE) turned off - DoE evaluation time exceeds the model simulation time \n'
+ )
file.write('\n')
file.write('* Convergence\n')
- file.write(' - exit code : "{}"\n'.format(self.exit_code))
+ file.write(f' - exit code : "{self.exit_code}"\n')
file.write(' simulation terminated as ')
if self.exit_code == 'count':
- file.write('number of counts reached the maximum (max={})"\n'.format(self.thr_count))
+ file.write(
+ f'number of counts reached the maximum (max={self.thr_count})"\n'
+ )
elif self.exit_code == 'accuracy':
- file.write('minimum accuracy level (NRMSE={:.2f}) is achieved"\n'.format(self.thr_NRMSE))
+ file.write(
+ f'minimum accuracy level (NRMSE={self.thr_NRMSE:.2f}) is achieved"\n'
+ )
elif self.exit_code == 'time':
- file.write('maximum running time (t={:.1f}s) reached"\n'.format(self.thr_t))
+ file.write(f'maximum running time (t={self.thr_t:.1f}s) reached"\n')
else:
file.write('cannot identify the exit code\n')
- file.write(' - number of simulations (count) : {}\n'.format(self.n_samp))
+ file.write(f' - number of simulations (count) : {self.n_samp}\n')
file.write(
- ' - maximum normalized root-mean-squared error (NRMSE): {:.5f}\n'.format(np.max(self.NRMSE_val)))
+ f' - maximum normalized root-mean-squared error (NRMSE): {np.max(self.NRMSE_val):.5f}\n'
+ )
for ny in range(self.y_dim):
- file.write(' {} : {:.2f}\n'.format(self.g_name[ny], self.NRMSE_val[ny]))
- file.write(' - analysis time : {:.1f} sec\n'.format(self.sim_time))
- file.write(' - calibration interval : {}\n'.format(self.cal_interval))
+ file.write(f' {self.g_name[ny]} : {self.NRMSE_val[ny]:.2f}\n')
+ file.write(f' - analysis time : {self.sim_time:.1f} sec\n')
+ file.write(f' - calibration interval : {self.cal_interval}\n')
file.write('\n')
- file.write('* GP parameters\n'.format(self.y_dim))
- file.write(' - Kernel : {}\n'.format(self.kernel))
- file.write(' - Linear : {}\n\n'.format(self.do_linear))
+        file.write('* GP parameters\n')
+ file.write(f' - Kernel : {self.kernel}\n')
+ file.write(f' - Linear : {self.do_linear}\n\n')
if not self.do_mf:
for ny in range(self.y_dim):
- file.write(' [{}]\n'.format(self.g_name[ny]))
+ file.write(f' [{self.g_name[ny]}]\n')
m_tmp = self.m_list[ny]
for parname in m_tmp.parameter_names():
- file.write(' - {} '.format(parname))
- parvals = eval('m_tmp.' + parname)
+ file.write(f' - {parname} ')
+ parvals = eval('m_tmp.' + parname) # noqa: S307
if len(parvals) == self.x_dim:
file.write('\n')
for nx in range(self.x_dim):
- file.write(' {} : {:.2e}\n'.format(self.rv_name[nx], parvals[nx]))
+ file.write(
+ f' {self.rv_name[nx]} : {parvals[nx]:.2e}\n'
+ )
else:
- file.write(' : {:.2e}\n'.format(parvals[0]))
- file.write('\n'.format(self.g_name[ny]))
+ file.write(f' : {parvals[0]:.2e}\n')
+                file.write('\n')
file.close()
- print("Results Saved")
+ print('Results Saved') # noqa: T201
return 0
- def weights_node2(self, node, nodes, ls):
+ def weights_node2(self, node, nodes, ls): # noqa: D102, PLR6301
nodes = np.asarray(nodes)
deltas = nodes - node
@@ -1878,14 +2146,14 @@ def weights_node2(self, node, nodes, ls):
dist_ls = np.sqrt(np.sum(pow(deltas_norm, 2), axis=1))
- weig = np.exp(-pow(dist_ls,2))
- if (sum(weig)==0):
+ weig = np.exp(-pow(dist_ls, 2))
+ if sum(weig) == 0:
weig = np.ones(nodes.shape[0])
- return weig/sum(weig)
+ return weig / sum(weig)
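The weighting above is a Gaussian kernel of lengthscale-normalized distance, with a uniform fallback when every weight underflows to zero; a standalone sketch (NumPy only, function name hypothetical):

import numpy as np

def gaussian_weights(node, nodes, ls):
    # per-dimension normalization by the lengthscales, then a squared-exponential kernel
    nodes = np.asarray(nodes, dtype=float)
    deltas = (nodes - np.asarray(node, dtype=float)) / np.asarray(ls, dtype=float)
    dist = np.sqrt(np.sum(deltas**2, axis=1))
    w = np.exp(-dist**2)
    if w.sum() == 0:  # all nodes effectively out of range: fall back to uniform weights
        w = np.ones(nodes.shape[0])
    return w / w.sum()

# example: three 2-D nodes with unit lengthscales
print(gaussian_weights([0.0, 0.0], [[0.1, 0.0], [1.0, 1.0], [3.0, 0.0]], [1.0, 1.0]))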
-def run_FEM(X, id_sim, rv_name, work_dir, workflowDriver):
- X = np.atleast_2d(X)
+def run_FEM(X, id_sim, rv_name, work_dir, workflowDriver): # noqa: N802, N803, D103
+ X = np.atleast_2d(X) # noqa: N806
x_dim = X.shape[1]
if X.shape[0] > 1:
@@ -1893,187 +2161,204 @@ def run_FEM(X, id_sim, rv_name, work_dir, workflowDriver):
msg = 'do one simulation at a time'
errlog.exit(msg)
-
     # (1) create "workdir.idx" folder: need C++17 to use the file system namespace
current_dir_i = work_dir + '/workdir.' + str(id_sim + 1)
- print(id_sim)
+ print(id_sim) # noqa: T201
try:
shutil.copytree(work_dir + '/templatedir', current_dir_i)
- except Exception as ex:
+ except Exception as ex: # noqa: BLE001
errlog = errorLog(work_dir)
msg = 'Error running FEM: ' + str(ex)
errlog.exit(msg)
-
# (2) write param.in file
- outF = open(current_dir_i + '/params.in', 'w')
+ outF = open(current_dir_i + '/params.in', 'w') # noqa: N806, PLW1514, PTH123, SIM115
- outF.write('{}\n'.format(x_dim))
+ outF.write(f'{x_dim}\n')
for i in range(x_dim):
- outF.write('{} {}\n'.format(rv_name[i], X[0, i]))
+ outF.write(f'{rv_name[i]} {X[0, i]}\n')
outF.close()
# (3) run workflow_driver.bat
os.chdir(current_dir_i)
- workflow_run_command = '{}/{}'.format(current_dir_i, workflowDriver)
- subprocess.check_call(workflow_run_command, shell=True)
+ workflow_run_command = f'{current_dir_i}/{workflowDriver}'
+ subprocess.check_call(workflow_run_command, shell=True) # noqa: S602
# (4) reading results
- if glob.glob('results.out'):
+ if glob.glob('results.out'): # noqa: PTH207
g = np.loadtxt('results.out').flatten()
else:
errlog = errorLog(work_dir)
msg = 'Error running FEM: results.out missing at ' + current_dir_i
errlog.exit(msg)
- if g.shape[0]==0:
+ if g.shape[0] == 0:
errlog = errorLog(work_dir)
msg = 'Error running FEM: results.out is empty'
errlog.exit(msg)
- os.chdir("../")
+ os.chdir('../')
if np.isnan(np.sum(g)):
errlog = errorLog(work_dir)
- msg = 'Error running FEM: Response value at workdir.{} is NaN'.format(id_sim+1)
+ msg = f'Error running FEM: Response value at workdir.{id_sim + 1} is NaN'
errlog.exit(msg)
return g, id_sim
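run_FEM communicates with the workflow driver through two plain-text files: params.in (dimension on the first line, then one 'name value' pair per random variable) and results.out (the flattened response vector). A hedged sketch of that exchange, with hypothetical helper names:

import numpy as np
from pathlib import Path

def write_params(workdir, rv_name, x_row):
    # params.in format consumed by the driver: count first, then 'name value' lines
    lines = [f'{len(rv_name)}'] + [f'{n} {v}' for n, v in zip(rv_name, x_row)]
    Path(workdir, 'params.in').write_text('\n'.join(lines) + '\n')

def read_results(workdir):
    # results.out written by the driver: one flattened vector of responses
    return np.loadtxt(Path(workdir, 'results.out')).flatten()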
-def run_FEM_batch(X,id_sim, rv_name, do_parallel, y_dim, os_type, run_type, pool, t_init, t_thr, workflowDriver):
- X = np.atleast_2d(X)
+def run_FEM_batch( # noqa: N802, D103
+ X, # noqa: N803
+ id_sim,
+ rv_name,
+ do_parallel,
+ y_dim,
+ os_type, # noqa: ARG001
+ run_type, # noqa: ARG001
+ pool,
+ t_init,
+ t_thr,
+ workflowDriver, # noqa: N803
+):
+ X = np.atleast_2d(X) # noqa: N806
# Windows
- #if os_type.lower().startswith('win'):
+ # if os_type.lower().startswith('win'):
# workflowDriver = "workflow_driver.bat"
- #else:
+ # else:
# workflowDriver = "workflow_driver"
nsamp = X.shape[0]
if not do_parallel:
- Y = np.zeros((nsamp,y_dim))
+ Y = np.zeros((nsamp, y_dim)) # noqa: N806
for ns in range(nsamp):
- Y[ns,:], id_sim_current = run_FEM(X[ns,:],id_sim+ns,rv_name, work_dir, workflowDriver)
+ Y[ns, :], id_sim_current = run_FEM(
+ X[ns, :], id_sim + ns, rv_name, work_dir, workflowDriver
+ )
if time.time() - t_init > t_thr:
- X = X[:ns, :]
- Y = Y[:ns, :]
- break
+ X = X[:ns, :] # noqa: N806
+ Y = Y[:ns, :] # noqa: N806
+ break
- return X, Y, id_sim_current+1
+ return X, Y, id_sim_current + 1
if do_parallel:
- print("Running {} simulations in parallel".format(nsamp))
+ print(f'Running {nsamp} simulations in parallel') # noqa: T201
tmp = time.time()
- iterables = ((X[i, :][np.newaxis], id_sim + i, rv_name, work_dir, workflowDriver) for i in range(nsamp))
+ iterables = (
+ (X[i, :][np.newaxis], id_sim + i, rv_name, work_dir, workflowDriver)
+ for i in range(nsamp)
+ )
try:
result_objs = list(pool.starmap(run_FEM, iterables))
- print("Simulation time = {} s".format(time.time() - tmp)); tmp = time.time();
+ print(f'Simulation time = {time.time() - tmp} s') # noqa: T201
+ tmp = time.time()
except KeyboardInterrupt:
- print("Ctrl+c received, terminating and joining pool.")
+ print('Ctrl+c received, terminating and joining pool.') # noqa: T201
try:
pool.shutdown()
- except Exception:
+ except Exception: # noqa: BLE001
sys.exit()
- tmp = time.time();
- print("=====================================")
- Nsim = len(list((result_objs)))
- Y = np.zeros((Nsim, y_dim))
+ tmp = time.time()
+ print('=====================================') # noqa: T201
+ Nsim = len(list(result_objs)) # noqa: N806
+ Y = np.zeros((Nsim, y_dim)) # noqa: N806
- for val, id in result_objs:
+ for val, id in result_objs: # noqa: A001
if np.isnan(np.sum(val)):
- Nsim = id - id_sim
- X = X[:Nsim, :]
- Y = Y[:Nsim, :]
+ Nsim = id - id_sim # noqa: N806
+ X = X[:Nsim, :] # noqa: N806
+ Y = Y[:Nsim, :] # noqa: N806
else:
Y[id - id_sim, :] = val
return X, Y, id_sim + Nsim
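The parallel branch above fans the per-sample runs out with pool.starmap and reassembles the responses by simulation id; a self-contained sketch of the same pattern with a hypothetical stand-in worker (not the actual run_FEM):

import numpy as np
from multiprocessing import Pool

def worker(x_row, sim_id):
    # hypothetical stand-in for run_FEM: return (response, id)
    return float(np.sum(x_row**2)), sim_id

if __name__ == '__main__':
    X = np.random.rand(8, 3)
    with Pool(4) as pool:
        iterables = ((X[i, :], i) for i in range(X.shape[0]))
        results = pool.starmap(worker, iterables)
    Y = np.zeros((X.shape[0], 1))
    for val, sim_id in results:  # reassemble in id order
        Y[sim_id, 0] = val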
-def read_txt(text_dir, errlog):
- if not os.path.exists(text_dir):
- msg = "Error: file does not exist: " + text_dir
+
+def read_txt(text_dir, errlog): # noqa: D103
+ if not os.path.exists(text_dir): # noqa: PTH110
+ msg = 'Error: file does not exist: ' + text_dir
errlog.exit(msg)
- with open(text_dir) as f:
+ with open(text_dir) as f: # noqa: PLW1514, PTH123
# Iterate through the file until the table starts
header_count = 0
for line in f:
if line.startswith('%'):
- header_count = header_count + 1
- print(line)
+ header_count = header_count + 1 # noqa: PLR6104
+ print(line) # noqa: T201
-
# X = np.loadtxt(f, skiprows=header_count, delimiter=',')
try:
- with open(text_dir) as f:
- X = np.loadtxt(f, skiprows=header_count)
+ with open(text_dir) as f: # noqa: PLW1514, PLW2901, PTH123
+ X = np.loadtxt(f, skiprows=header_count) # noqa: N806
except ValueError:
- with open(text_dir) as f:
+ with open(text_dir) as f: # noqa: PLW1514, PLW2901, PTH123
try:
- X = np.genfromtxt(f, skip_header=header_count, delimiter=',')
+ X = np.genfromtxt(f, skip_header=header_count, delimiter=',') # noqa: N806
             # if there is an extra delimiter, remove the trailing NaN column
if np.isnan(X[-1, -1]):
- X = np.delete(X, -1, 1)
+ X = np.delete(X, -1, 1) # noqa: N806
# X = np.loadtxt(f, skiprows=header_count, delimiter=',')
except ValueError:
- msg = "Error: file format is not supported " + text_dir
+ msg = 'Error: file format is not supported ' + text_dir
errlog.exit(msg)
if X.ndim == 1:
- X = np.array([X]).transpose()
+ X = np.array([X]).transpose() # noqa: N806
return X
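read_txt copes with tables whose header lines start with '%' by counting them before calling the loader; np.loadtxt can also be told to treat '%' as the comment character directly, which a short sketch illustrates (toy data only):

import io
import numpy as np

sample = '% col1 col2\n1.0 2.0\n3.0 4.0\n'
X = np.loadtxt(io.StringIO(sample), comments='%')  # '%'-prefixed lines skipped
X = np.atleast_2d(X)  # keep a 2-D shape even for a single row or column
print(X.shape)  # (2, 2)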
-def closest_node(node, nodes, ll):
+def closest_node(node, nodes, ll): # noqa: D103
nodes = np.asarray(nodes)
deltas = nodes - node
deltas_norm = np.zeros(deltas.shape)
for nx in range(nodes.shape[1]):
- deltas_norm[:, nx] = deltas[:, nx] / ll[nx]
+ deltas_norm[:, nx] = deltas[:, nx] / ll[nx]
- dist_2 = np.einsum('ij,ij->i', deltas_norm, deltas_norm) # square sum
+ dist_2 = np.einsum('ij,ij->i', deltas_norm, deltas_norm) # square sum
return np.argmin(dist_2)
-def imse(m_tmp, xcandi, xq, phiqr, i):
- X = m_tmp.X
- Y = m_tmp.Y
- X_tmp = np.vstack([X, xcandi])
- Y_tmp = np.zeros((Y.shape[0] + 1, Y.shape[1])) # any variables
+
+def imse(m_tmp, xcandi, xq, phiqr, i): # noqa: D103
+ X = m_tmp.X # noqa: N806
+ Y = m_tmp.Y # noqa: N806
+ X_tmp = np.vstack([X, xcandi]) # noqa: N806
+ Y_tmp = np.zeros((Y.shape[0] + 1, Y.shape[1])) # any variables # noqa: N806
m_tmp.set_XY(X=X_tmp, Y=Y_tmp)
- dummy, Yq_var = m_tmp.predict(xq)
- IMSEc1 = 1 / xq.shape[0] * sum(phiqr.flatten() *Yq_var.flatten())
-
+ dummy, Yq_var = m_tmp.predict(xq) # noqa: F841, N806
+ IMSEc1 = 1 / xq.shape[0] * sum(phiqr.flatten() * Yq_var.flatten()) # noqa: N806
+
return IMSEc1, i
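imse scores a design-of-experiments candidate by temporarily appending it to the GP design and averaging the resulting predictive variances over the quadrature points, weighted by phiqr. The reduction step on its own, as a small sketch:

import numpy as np

def weighted_imse(phiqr, yq_var):
    # weighted mean of predictive variances over the quadrature points
    phiqr = np.asarray(phiqr).flatten()
    yq_var = np.asarray(yq_var).flatten()
    return float(np.sum(phiqr * yq_var) / yq_var.shape[0])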
# ==========================================================================================
-class errorLog(object):
+class errorLog: # noqa: D101
def __init__(self, work_dir):
- self.file = open('{}/dakota.err'.format(work_dir), "w")
+ self.file = open(f'{work_dir}/dakota.err', 'w') # noqa: PLW1514, PTH123, SIM115
- def exit(self, msg):
- print(msg)
+ def exit(self, msg): # noqa: D102
+ print(msg) # noqa: T201
self.file.write(msg)
self.file.close()
- exit(-1)
+ exit(-1) # noqa: PLR1722
- def terminate(self):
+ def terminate(self): # noqa: D102
self.file.close()
-def build_surrogate(work_dir, inputFile, workflowDriver, os_type, run_type):
+def build_surrogate(work_dir, inputFile, workflowDriver, os_type, run_type): # noqa: N803, D103
# t_total = time.process_time()
filename = 'SimGpModel'
- print('FILE: ' + work_dir + '/templatedir/' + inputFile)
- f = open(work_dir + '/templatedir/' + inputFile)
+ print('FILE: ' + work_dir + '/templatedir/' + inputFile) # noqa: T201
+ f = open(work_dir + '/templatedir/' + inputFile) # noqa: PLW1514, PTH123, SIM115
try:
inp = json.load(f)
except ValueError:
@@ -2083,33 +2368,38 @@ def build_surrogate(work_dir, inputFile, workflowDriver, os_type, run_type):
f.close()
if inp['UQ_Method']['uqType'] != 'Train GP Surrogate Model':
- msg = 'UQ type inconsistency : user wanted <' + inp['UQ_Method'][
- 'uqType'] + '> but called program'
+ msg = (
+ 'UQ type inconsistency : user wanted <'
+ + inp['UQ_Method']['uqType']
+ + '> but called program'
+ )
errlog.exit(msg)
-
- gp = GpFromModel(work_dir, inputFile, workflowDriver, run_type, os_type, inp, errlog)
+ gp = GpFromModel(
+ work_dir, inputFile, workflowDriver, run_type, os_type, inp, errlog
+ )
gp.save_model(filename)
gp.term()
+
# the actual execution
# ==========================================================================================
# the actual execution
-if __name__ == "__main__":
- inputArgs = sys.argv
+if __name__ == '__main__':
+ inputArgs = sys.argv # noqa: N816
work_dir = inputArgs[1].replace(os.sep, '/')
errlog = errorLog(work_dir)
- inputFile = inputArgs[2]
- workflowDriver = inputArgs[3]
+ inputFile = inputArgs[2] # noqa: N816
+ workflowDriver = inputArgs[3] # noqa: N816
os_type = inputArgs[4]
run_type = inputArgs[5]
- result_file = "results.out"
-
- #sys.exit(build_surrogate(work_dir, os_type, run_type))
+ result_file = 'results.out'
+
+ # sys.exit(build_surrogate(work_dir, os_type, run_type))
build_surrogate(work_dir, inputFile, workflowDriver, os_type, run_type)
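For reference, the entry point above takes five positional arguments in the order work_dir, input file, workflow driver, OS type, and run type; a hedged invocation example (script name and paths hypothetical):

import subprocess
import sys

subprocess.run(
    [sys.executable, 'surrogateBuild.py',
     '/path/to/workdir', 'scInput.json', 'workflow_driver', 'Linux', 'runningLocal'],
    check=True,
)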
diff --git a/modules/performUQ/SimCenterUQ/runPLoM.py b/modules/performUQ/SimCenterUQ/runPLoM.py
index a375e06ae..9bc4130fe 100644
--- a/modules/performUQ/SimCenterUQ/runPLoM.py
+++ b/modules/performUQ/SimCenterUQ/runPLoM.py
@@ -1,5 +1,4 @@
-# -*- coding: utf-8 -*-
-#
+# # noqa: INP001, D100
# Copyright (c) 2021 Leland Stanford Junior University
# Copyright (c) 2021 The Regents of the University of California
#
@@ -35,30 +34,28 @@
# this file. If not, see .
#
# This module is modified from the surrogateBuild.py to use PLoM for surrogate
-# modeling while maintaining similar input/output formats compatable with the current workflow
+# modeling while maintaining similar input/output formats compatible with the current workflow
#
# Contributors:
# Kuanshi Zhong
# Sang-ri Yi
# Frank Mckenna
#
-import shutil
+import json
import os
+import shutil
+import subprocess # noqa: S404
import sys
-import json
-from scipy.stats import lognorm, norm
+
import numpy as np
-from multiprocessing import Pool
-from PLoM.PLoM import *
import pandas as pd
-import subprocess
+from PLoM.PLoM import * # noqa: F403
# ==========================================================================================
-class runPLoM:
- """
- runPLoM: class for run a PLoM job
+class runPLoM:
+    """runPLoM: class for running a PLoM job
methods:
__init__: initialization
_create_variables: create variable name lists
@@ -67,20 +64,26 @@ class runPLoM:
_load_variables: load training data
train_model: model training
save_model: model saving
- """
-
- def __init__(self, work_dir, run_type, os_type, job_config, errlog, input_file, workflow_driver):
-
- """
- __init__
- input:
+ """ # noqa: D205, D400
+
+ def __init__(
+ self,
+ work_dir,
+ run_type,
+ os_type,
+ job_config,
+ errlog,
+ input_file,
+ workflow_driver,
+ ):
+ """__init__
+ input:
work_dir: working directory
run_type: job type
os_type: operating system type
job_config: configuration (dtype = dict)
errlog: error log object
- """
-
+ """ # noqa: D205, D400
# read inputs
self.work_dir = work_dir
self.run_type = run_type
@@ -91,8 +94,8 @@ def __init__(self, work_dir, run_type, os_type, job_config, errlog, input_file,
self.workflow_driver = workflow_driver
# initialization
- self.rv_name = list()
- self.g_name = list()
+ self.rv_name = list() # noqa: C408
+ self.g_name = list() # noqa: C408
self.x_dim = 0
self.y_dim = 0
@@ -100,13 +103,13 @@ def __init__(self, work_dir, run_type, os_type, job_config, errlog, input_file,
# self.x_dim, self.y_dim, self.rv_name, self.g_name = self._create_variables(job_config)
# read PLoM parameters
- surrogateInfo = job_config["UQ"]["surrogateMethodInfo"]
+ surrogateInfo = job_config['UQ']['surrogateMethodInfo'] # noqa: N806
if self._parse_plom_parameters(surrogateInfo):
msg = 'runPLoM.__init__: Error in reading PLoM parameters.'
self.errlog.exit(msg)
# parallel setup
- self.do_parallel = surrogateInfo.get("parallelExecution", False)
+ self.do_parallel = surrogateInfo.get('parallelExecution', False)
if self.do_parallel:
if self._set_up_parallel():
msg = 'runPLoM.__init__: Error in setting up parallel.'
@@ -116,16 +119,16 @@ def __init__(self, work_dir, run_type, os_type, job_config, errlog, input_file,
self.cal_interval = 5
# prepare training data
- if surrogateInfo["method"] == "Import Data File":
+ if surrogateInfo['method'] == 'Import Data File':
do_sampling = False
- do_simulation = not surrogateInfo["outputData"]
- self.doe_method = "None" # default
- do_doe = False
- self.inpData = os.path.join(work_dir, "templatedir/inpFile.in")
+ do_simulation = not surrogateInfo['outputData']
+ self.doe_method = 'None' # default
+ do_doe = False # noqa: F841
+ self.inpData = os.path.join(work_dir, 'templatedir/inpFile.in') # noqa: PTH118
if not do_simulation:
- self.outData = os.path.join(work_dir, "templatedir/outFile.in")
+ self.outData = os.path.join(work_dir, 'templatedir/outFile.in') # noqa: PTH118
self._create_variables_from_input()
- elif surrogateInfo["method"] == "Sampling and Simulation":
+ elif surrogateInfo['method'] == 'Sampling and Simulation':
# run simulation first to generate training data
do_sampling = False
do_simulation = False
@@ -135,47 +138,48 @@ def __init__(self, work_dir, run_type, os_type, job_config, errlog, input_file,
errlog.exit(msg)
# read variable names
- #self.x_dim, self.y_dim, self.rv_name, self.g_name = self._create_variables(surrogateInfo["method"])
+ # self.x_dim, self.y_dim, self.rv_name, self.g_name = self._create_variables(surrogateInfo["method"])
# load variables
if self._load_variables(do_sampling, do_simulation):
msg = 'runPLoM.__init__: Error in loading variables.'
self.errlog.exit(msg)
-
- def _run_simulation(self):
-
- """
- _run_simulation: running simulation to get training data
+ def _run_simulation(self): # noqa: C901
+        """_run_simulation: run the simulation to generate training data
input:
job_config: job configuration dictionary
output:
None
- """
- import platform
+ """ # noqa: D205, D400
+ import platform # noqa: PLC0415
job_config = self.job_config
# get python instance
- runType = job_config.get('runType','runningLocal')
- if (sys.platform == 'darwin' or sys.platform == "linux" or sys.platform == "linux2"):
- pythonEXE = 'python3'
+ runType = job_config.get('runType', 'runningLocal') # noqa: N806
+ if (
+ sys.platform == 'darwin'
+ or sys.platform == 'linux'
+ or sys.platform == 'linux2'
+ ):
+ pythonEXE = 'python3' # noqa: N806
else:
- pythonEXE = 'python'
+ pythonEXE = 'python' # noqa: N806
if runType == 'runningLocal' and platform.system() == 'Windows':
- localAppDir = job_config.get('localAppDir',None)
+ localAppDir = job_config.get('localAppDir', None) # noqa: N806
if localAppDir is None:
# no local app directory is found, let's try to use system python
pass
else:
- #pythonEXE = os.path.join(localAppDir,'applications','python','python.exe')
- pythonEXE = '\"' + sys.executable + '\"'
+ # pythonEXE = os.path.join(localAppDir,'applications','python','python.exe')
+ pythonEXE = '"' + sys.executable + '"' # noqa: N806
else:
# for remote run and macOS, let's use system python
pass
# move into the templatedir
- run_dir = job_config.get('runDir',os.getcwd())
+ run_dir = job_config.get('runDir', os.getcwd()) # noqa: PTH109
os.chdir(run_dir)
# training is done for single building (for now)
bldg_id = None
@@ -184,60 +188,74 @@ def _run_simulation(self):
os.chdir('templatedir')
# dakota script path
- dakotaScript = os.path.join(os.path.dirname(os.path.dirname(os.path.abspath(__file__))),'dakota','DakotaUQ.py')
+ dakotaScript = os.path.join( # noqa: PTH118, N806
+ os.path.dirname(os.path.dirname(os.path.abspath(__file__))), # noqa: PTH100, PTH120
+ 'dakota',
+ 'DakotaUQ.py',
+ )
- print('dakotaScript = ',dakotaScript)
+ print('dakotaScript = ', dakotaScript) # noqa: T201
- # write a new dakota.json for forward propogation
- ## KZ modified 0331
- with open(self.input_file, 'r', encoding='utf-8') as f:
+ # write a new dakota.json for forward propagation
+ # KZ modified 0331
+ with open(self.input_file, encoding='utf-8') as f: # noqa: PTH123
tmp = json.load(f)
tmp['UQ']['uqType'] = 'Forward Propagation'
tmp['UQ']['parallelExecution'] = True
- samplingObj = tmp['UQ']['surrogateMethodInfo']['samplingMethod']
- tmp['UQ']['samplingMethodData']=dict()
- ## KZ modified 0331
+ samplingObj = tmp['UQ']['surrogateMethodInfo']['samplingMethod'] # noqa: N806
+ tmp['UQ']['samplingMethodData'] = dict() # noqa: C408
+ # KZ modified 0331
tmp['UQ']['uqEngine'] = 'Dakota'
tmp['Applications']['UQ']['Application'] = 'Dakota-UQ'
for key, item in samplingObj.items():
tmp['UQ']['samplingMethodData'][key] = item
- with open('sc_dakota_plom.json','w', encoding='utf-8') as f:
+ with open('sc_dakota_plom.json', 'w', encoding='utf-8') as f: # noqa: PTH123
json.dump(tmp, f, indent=2)
# command line
- ## KZ modified 0331
- command_line = f"{pythonEXE} {dakotaScript} --workflowInput sc_dakota_plom.json --driverFile {os.path.splitext(self.workflow_driver)[0]} --workflowOutput EDP.json --runType {runType}"
- print(command_line)
+ # KZ modified 0331
+ command_line = f'{pythonEXE} {dakotaScript} --workflowInput sc_dakota_plom.json --driverFile {os.path.splitext(self.workflow_driver)[0]} --workflowOutput EDP.json --runType {runType}' # noqa: PTH122
+ print(command_line) # noqa: T201
# run command
- dakotaTabPath = os.path.join(self.work_dir,"dakotaTab.out")
- print(dakotaTabPath)
+ dakotaTabPath = os.path.join(self.work_dir, 'dakotaTab.out') # noqa: PTH118, N806
+ print(dakotaTabPath) # noqa: T201
try:
- os.system(command_line)
- except:
- print('runPLoM._run_simulation: error in running dakota to generate the initial sample.')
- print('runPLoM._run_simulation: please check if the dakota is installed correctly on the system.')
-
- if not os.path.exists(dakotaTabPath):
- try:
- subprocess.call(command_line)
- except:
- print('runPLoM._run_simulation: error in running dakota to generate the initial sample.')
- print('runPLoM._run_simulation: please check if the dakota is installed correctly on the system.')
-
- if not os.path.exists(dakotaTabPath):
+ os.system(command_line) # noqa: S605
+ except: # noqa: E722
+ print( # noqa: T201
+ 'runPLoM._run_simulation: error in running dakota to generate the initial sample.'
+ )
+ print( # noqa: T201
+ 'runPLoM._run_simulation: please check if the dakota is installed correctly on the system.'
+ )
+
+ if not os.path.exists(dakotaTabPath): # noqa: PTH110
+ try:
+ subprocess.call(command_line) # noqa: S603
+ except: # noqa: E722
+ print( # noqa: T201
+ 'runPLoM._run_simulation: error in running dakota to generate the initial sample.'
+ )
+ print( # noqa: T201
+ 'runPLoM._run_simulation: please check if the dakota is installed correctly on the system.'
+ )
+
+ if not os.path.exists(dakotaTabPath): # noqa: PTH110
msg = 'Dakota preprocessor did not run successfully'
self.errlog.exit(msg)
# remove the new dakota.json
- #os.remove('sc_dakota_plom.json')
+ # os.remove('sc_dakota_plom.json')
- if runType in ['run', 'runningLocal']:
+ if runType in ['run', 'runningLocal']: # noqa: PLR6201
# create the response.csv file from the dakotaTab.out file
os.chdir(run_dir)
if bldg_id is not None:
os.chdir(bldg_id)
- dakota_out = pd.read_csv('dakotaTab.out', sep=r'\s+', header=0, index_col=0)
+ dakota_out = pd.read_csv(
+ 'dakotaTab.out', sep=r'\s+', header=0, index_col=0
+ )
# save to csv
dakota_out.to_csv('response.csv')
         # create an IM.csv file
@@ -246,47 +264,53 @@ def _run_simulation(self):
self.inpData, self.outData = self._prepare_training_data(run_dir)
# update job_config['randomVariables']
cur_rv_list = [x.get('name') for x in job_config['randomVariables']]
- for curRV in self.rv_name:
+ for curRV in self.rv_name: # noqa: N806
if curRV not in cur_rv_list:
- job_config['randomVariables'].append({'distribution': 'Normal', 'name': curRV})
+ job_config['randomVariables'].append(
+ {'distribution': 'Normal', 'name': curRV}
+ )
self.job_config = job_config
- elif self.run_type in ['set_up', 'runningRemote']:
+ elif self.run_type in ['set_up', 'runningRemote']: # noqa: PLR6201
pass
-
- def _prepare_training_data(self, run_dir):
-
+ def _prepare_training_data(self, run_dir): # noqa: C901
         # load IM.csv if it exists
- df_IM = pd.DataFrame()
- if os.path.exists(os.path.join(run_dir,'IM.csv')):
- df_IM = pd.read_csv(os.path.join(run_dir,'IM.csv'),index_col=None)
+ df_IM = pd.DataFrame() # noqa: N806
+ if os.path.exists(os.path.join(run_dir, 'IM.csv')): # noqa: PTH110, PTH118
+ df_IM = pd.read_csv(os.path.join(run_dir, 'IM.csv'), index_col=None) # noqa: PTH118, N806
else:
- msg = 'runPLoM._prepare_training_data: no IM.csv in {}.'.format(run_dir)
- print(msg)
+ msg = f'runPLoM._prepare_training_data: no IM.csv in {run_dir}.'
+ print(msg) # noqa: T201
         # load response.csv if it exists
- df_SIMU = pd.DataFrame()
- if os.path.exists(os.path.join(run_dir,'response.csv')):
- df_SIMU = pd.read_csv(os.path.join(run_dir,'response.csv'),index_col=None)
+ df_SIMU = pd.DataFrame() # noqa: N806
+ if os.path.exists(os.path.join(run_dir, 'response.csv')): # noqa: PTH110, PTH118
+ df_SIMU = pd.read_csv( # noqa: N806
+ os.path.join(run_dir, 'response.csv'), # noqa: PTH118
+ index_col=None,
+ )
else:
- msg = 'runPLoM._prepare_training_data: response.csv not found in {}.'.format(run_dir)
+ msg = f'runPLoM._prepare_training_data: response.csv not found in {run_dir}.'
self.errlog.exit(msg)
# read BIM to get RV names
# KZ modified 0331
- with open(os.path.join(run_dir, 'templatedir', self.input_file), 'r', encoding='utf-8') as f:
+ with open( # noqa: PTH123
+ os.path.join(run_dir, 'templatedir', self.input_file), # noqa: PTH118
+ encoding='utf-8',
+ ) as f:
tmp = json.load(f)
- rVs = tmp.get('randomVariables', None)
+ rVs = tmp.get('randomVariables', None) # noqa: N806
if rVs is None:
rv_names = []
else:
rv_names = [x.get('name') for x in rVs]
-
+
# collect rv columns from df_SIMU
- df_RV = pd.DataFrame()
+ df_RV = pd.DataFrame() # noqa: N806
if len(rv_names) > 0:
- df_RV = df_SIMU[rv_names]
+ df_RV = df_SIMU[rv_names] # noqa: N806
for cur_rv in rv_names:
df_SIMU.pop(cur_rv)
if '%eval_id' in list(df_SIMU.columns):
@@ -297,12 +321,12 @@ def _prepare_training_data(self, run_dir):
self.multipleEvent = df_SIMU.pop('MultipleEvent')
else:
self.multipleEvent = None
-
+
# concat df_RV and df_IM
if not df_IM.empty:
- df_X = pd.concat([df_IM, df_RV], axis=1)
+ df_X = pd.concat([df_IM, df_RV], axis=1) # noqa: N806
else:
- df_X = df_RV
+ df_X = df_RV # noqa: N806
if not df_X.empty and '%eval_id' in list(df_X.columns):
df_X.pop('%eval_id')
if not df_X.empty and '%MultipleEvent' in list(df_X.columns):
@@ -312,14 +336,20 @@ def _prepare_training_data(self, run_dir):
# make the first column name start with %
if not df_X.empty:
- df_X = df_X.rename({list(df_X.columns)[0]:'%'+list(df_X.columns)[0]}, axis='columns')
- df_SIMU = df_SIMU.rename({list(df_SIMU.columns)[0]:'%'+list(df_SIMU.columns)[0]}, axis='columns')
+ df_X = df_X.rename( # noqa: N806
+ {list(df_X.columns)[0]: '%' + list(df_X.columns)[0]}, # noqa: RUF015
+ axis='columns',
+ )
+ df_SIMU = df_SIMU.rename( # noqa: N806
+ {list(df_SIMU.columns)[0]: '%' + list(df_SIMU.columns)[0]}, # noqa: RUF015
+ axis='columns',
+ )
# save to csvs
- inpData = os.path.join(run_dir,'PLoM_variables.csv')
- outData = os.path.join(run_dir,'PLoM_responses.csv')
- df_X.to_csv(inpData,index=False)
- df_SIMU.to_csv(outData,index=False)
+ inpData = os.path.join(run_dir, 'PLoM_variables.csv') # noqa: PTH118, N806
+ outData = os.path.join(run_dir, 'PLoM_responses.csv') # noqa: PTH118, N806
+ df_X.to_csv(inpData, index=False)
+ df_SIMU.to_csv(outData, index=False)
# set rv_names, g_name, x_dim, y_dim
self.rv_name = list(df_X.columns)
@@ -329,34 +359,45 @@ def _prepare_training_data(self, run_dir):
return inpData, outData
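The preparation step above is mostly pandas bookkeeping: concatenate IM and RV columns, prefix the first column name with '%' so the header reads as a comment line downstream, and write the two CSVs. A compact sketch with toy column names:

import pandas as pd

df_IM = pd.DataFrame({'PGA': [0.2, 0.5]})    # hypothetical intensity measure
df_RV = pd.DataFrame({'fy': [300.0, 320.0]})  # hypothetical random variable
df_X = pd.concat([df_IM, df_RV], axis=1)

# make the header line start with '%' like the code above does
df_X = df_X.rename({df_X.columns[0]: '%' + df_X.columns[0]}, axis='columns')
df_X.to_csv('PLoM_variables.csv', index=False)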
-
- def _compute_IM(self, run_dir, pythonEXE):
-
+ def _compute_IM(self, run_dir, pythonEXE): # noqa: N802, N803, PLR6301
# find workdirs
workdir_list = [x for x in os.listdir(run_dir) if x.startswith('workdir')]
# intensity measure app
- computeIM = os.path.join(os.path.dirname(os.path.dirname(os.path.dirname(os.path.abspath(__file__)))),
- 'createEVENT','groundMotionIM','IntensityMeasureComputer.py')
-
+ computeIM = os.path.join( # noqa: PTH118, N806
+ os.path.dirname( # noqa: PTH120
+ os.path.dirname(os.path.dirname(os.path.abspath(__file__))) # noqa: PTH100, PTH120
+ ),
+ 'createEVENT',
+ 'groundMotionIM',
+ 'IntensityMeasureComputer.py',
+ )
+
# compute IMs
for cur_workdir in workdir_list:
os.chdir(cur_workdir)
- if os.path.exists('EVENT.json') and os.path.exists('AIM.json'):
- os.system(f"{pythonEXE} {computeIM} --filenameAIM AIM.json --filenameEVENT EVENT.json --filenameIM IM.json")
+ if os.path.exists('EVENT.json') and os.path.exists('AIM.json'): # noqa: PTH110
+ os.system( # noqa: S605
+ f'{pythonEXE} {computeIM} --filenameAIM AIM.json --filenameEVENT EVENT.json --filenameIM IM.json'
+ )
os.chdir(run_dir)
# collect IMs from different workdirs
for i, cur_workdir in enumerate(workdir_list):
cur_id = int(cur_workdir.split('.')[-1])
- if os.path.exists(os.path.join(cur_workdir,'IM.csv')):
+ if os.path.exists(os.path.join(cur_workdir, 'IM.csv')): # noqa: PTH110, PTH118
try:
- tmp1 = pd.read_csv(os.path.join(cur_workdir,'IM.csv'),index_col=None)
- except:
+ tmp1 = pd.read_csv(
+ os.path.join(cur_workdir, 'IM.csv'), # noqa: PTH118
+ index_col=None,
+ )
+ except: # noqa: E722
return
if tmp1.empty:
return
- tmp2 = pd.DataFrame({'%eval_id': [cur_id for x in range(len(tmp1.index))]})
+ tmp2 = pd.DataFrame(
+ {'%eval_id': [cur_id for x in range(len(tmp1.index))]}
+ )
if i == 0:
im_collector = pd.concat([tmp2, tmp1], axis=1)
else:
@@ -365,13 +406,10 @@ def _compute_IM(self, run_dir, pythonEXE):
else:
return
im_collector = im_collector.sort_values(by=['%eval_id'])
- im_collector.to_csv('IM.csv',index=False)
-
+ im_collector.to_csv('IM.csv', index=False)
def _create_variables(self, training_data):
-
- """
- create_variables: creating X and Y variables
+ """create_variables: creating X and Y variables
input:
training_data: training data source
output:
@@ -379,8 +417,7 @@ def _create_variables(self, training_data):
y_dim: dimension of Y data
rv_name: random variable name (X data)
g_name: variable name (Y data)
- """
-
+ """ # noqa: D205, D400
job_config = self.job_config
# initialization
@@ -395,18 +432,18 @@ def _create_variables(self, training_data):
# read X and Y variable names
for rv in job_config['randomVariables']:
- rv_name = rv_name + [rv['name']]
+ rv_name = rv_name + [rv['name']] # noqa: PLR6104, RUF005
x_dim += 1
if x_dim == 0:
msg = 'Error reading json: RV is empty'
self.errlog.exit(msg)
for g in job_config['EDP']:
- if g['length']==1: # scalar
- g_name = g_name + [g['name']]
+ if g['length'] == 1: # scalar
+ g_name = g_name + [g['name']] # noqa: PLR6104, RUF005
y_dim += 1
- else: # vector
+ else: # vector
for nl in range(g['length']):
- g_name = g_name + ["{}_{}".format(g['name'],nl+1)]
+ g_name = g_name + ['{}_{}'.format(g['name'], nl + 1)] # noqa: PLR6104, RUF005
y_dim += 1
if y_dim == 0:
msg = 'Error reading json: EDP(QoI) is empty'
@@ -415,9 +452,7 @@ def _create_variables(self, training_data):
# return
return x_dim, y_dim, rv_name, g_name
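_create_variables walks the configuration dictionary: one name per entry in randomVariables, and one name per EDP component, with vector EDPs expanded to name_1 … name_length. A condensed sketch of the same traversal (function name hypothetical):

def names_from_config(job_config):
    # RV names straight from the config; EDP names expanded per component
    rv_name = [rv['name'] for rv in job_config['randomVariables']]
    g_name = []
    for g in job_config['EDP']:
        if g['length'] == 1:
            g_name.append(g['name'])
        else:
            g_name.extend(f"{g['name']}_{nl + 1}" for nl in range(g['length']))
    return len(rv_name), len(g_name), rv_name, g_name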
-
def _create_variables_from_input(self):
-
df_variables = pd.read_csv(self.inpData, header=0)
df_responses = pd.read_csv(self.outData, header=0)
@@ -436,34 +471,36 @@ def _create_variables_from_input(self):
else:
self.multipleEvent = None
-
- def _parse_plom_parameters(self, surrogateInfo):
-
- """
- _parse_plom_parameters: parse PLoM parameters from surrogateInfo
+ def _parse_plom_parameters(self, surrogateInfo): # noqa: C901, N803
+ """_parse_plom_parameters: parse PLoM parameters from surrogateInfo
input:
surrogateInfo: surrogate information dictionary
output:
- run_flag: 0 - sucess, 1: failure
- """
-
+            run_flag: 0 - success, 1 - failure
+ """ # noqa: D205, D400
run_flag = 0
try:
self.n_mc = int(surrogateInfo['newSampleRatio'])
- self.epsilonPCA = surrogateInfo.get("epsilonPCA",1e-6)
+ self.epsilonPCA = surrogateInfo.get('epsilonPCA', 1e-6)
# KZ, 07/24: adding customized option for smootherKDE factor
- self.smootherKDE_Customize = surrogateInfo.get("smootherKDE_Customize",False)
+ self.smootherKDE_Customize = surrogateInfo.get(
+ 'smootherKDE_Customize', False
+ )
if self.smootherKDE_Customize:
# KZ, 07/24: taking in user-defined function filepath and directory
- self.smootherKDE_file = surrogateInfo.get("smootherKDE_path",False)
- self.smootherKDE_dir = surrogateInfo.get("smootherKDE_pathPath",False)
+ self.smootherKDE_file = surrogateInfo.get('smootherKDE_path', False)
+ self.smootherKDE_dir = surrogateInfo.get(
+ 'smootherKDE_pathPath', False
+ )
if self.smootherKDE_file and self.smootherKDE_dir:
# KZ, 07/24: both file and file path received
# Note that the file is saved by the frontend to the work_dir -> overwrite self.smootherKDE_file
- self.smootherKDE_file = os.path.join(work_dir, "templatedir", self.smootherKDE_file)
- if not os.path.isfile(self.smootherKDE_file):
+ self.smootherKDE_file = os.path.join( # noqa: PTH118
+ work_dir, 'templatedir', self.smootherKDE_file
+ )
+ if not os.path.isfile(self.smootherKDE_file): # noqa: PTH113
# not found the file
- msg = 'Error finding user-defined function file for KDE: {}.'.format(self.smootherKDE_file)
+ msg = f'Error finding user-defined function file for KDE: {self.smootherKDE_file}.'
errlog.exit(msg)
else:
# KZ, 07/24: missing user-defined file
@@ -471,134 +508,137 @@ def _parse_plom_parameters(self, surrogateInfo):
errlog.exit(msg)
else:
# KZ, 07/24: get user defined smootherKDE
- self.smootherKDE = surrogateInfo.get("smootherKDE",25)
- #self.smootherKDE = surrogateInfo.get("smootherKDE",25)
- self.randomSeed = surrogateInfo.get("randomSeed",None)
- self.diffMap = surrogateInfo.get("diffusionMaps",True)
- self.logTransform = surrogateInfo.get("logTransform",False)
- self.constraintsFlag = surrogateInfo.get("constraints",False)
+ self.smootherKDE = surrogateInfo.get('smootherKDE', 25)
+ # self.smootherKDE = surrogateInfo.get("smootherKDE",25)
+ self.randomSeed = surrogateInfo.get('randomSeed', None)
+ self.diffMap = surrogateInfo.get('diffusionMaps', True)
+ self.logTransform = surrogateInfo.get('logTransform', False)
+ self.constraintsFlag = surrogateInfo.get('constraints', False)
# KZ: 07/24: adding customized option for kdeTolerance
- self.kdeTolerance_Customize = surrogateInfo.get("tolKDE_Customize",False)
+ self.kdeTolerance_Customize = surrogateInfo.get(
+ 'tolKDE_Customize', False
+ )
if self.kdeTolerance_Customize:
# KZ, 07/24: taking in user-defined function filepath and directory
- self.kdeTolerance_file = surrogateInfo.get("tolKDE_path",False)
- self.kdeTolerance_dir = surrogateInfo.get("tolKDE_pathPath",False)
+ self.kdeTolerance_file = surrogateInfo.get('tolKDE_path', False)
+ self.kdeTolerance_dir = surrogateInfo.get('tolKDE_pathPath', False)
if self.kdeTolerance_file and self.kdeTolerance_dir:
# KZ, 07/24: both file and file path received
# Note that the file is saved by the frontend to the work_dir -> overwrite self.kdeTolerance_file
- self.kdeTolerance_file = os.path.join(work_dir, "templatedir", self.kdeTolerance_file)
- if not os.path.isfile(self.kdeTolerance_file):
+ self.kdeTolerance_file = os.path.join( # noqa: PTH118
+ work_dir, 'templatedir', self.kdeTolerance_file
+ )
+ if not os.path.isfile(self.kdeTolerance_file): # noqa: PTH113
# not found the file
- msg = 'Error finding user-defined function file for KDE: {}.'.format(self.kdeTolerance_file)
+ msg = f'Error finding user-defined function file for KDE: {self.kdeTolerance_file}.'
errlog.exit(msg)
else:
# KZ, 07/24: missing user-defined file
- msg = 'Error loading user-defined function file for KDE tolerance.'
+ msg = (
+ 'Error loading user-defined function file for KDE tolerance.'
+ )
errlog.exit(msg)
else:
- self.kdeTolerance = surrogateInfo.get("kdeTolerance",0.1)
- #self.kdeTolerance = surrogateInfo.get("kdeTolerance",0.1)
+ self.kdeTolerance = surrogateInfo.get('kdeTolerance', 0.1)
+ # self.kdeTolerance = surrogateInfo.get("kdeTolerance",0.1)
if self.constraintsFlag:
- self.constraintsFile = os.path.join(work_dir, "templatedir/plomConstraints.py")
- self.numIter = surrogateInfo.get("numIter",50)
- self.tolIter = surrogateInfo.get("tolIter",0.02)
- self.preTrained = surrogateInfo.get("preTrained",False)
+ self.constraintsFile = os.path.join( # noqa: PTH118
+ work_dir, 'templatedir/plomConstraints.py'
+ )
+ self.numIter = surrogateInfo.get('numIter', 50)
+ self.tolIter = surrogateInfo.get('tolIter', 0.02)
+ self.preTrained = surrogateInfo.get('preTrained', False)
if self.preTrained:
- self.preTrainedModel = os.path.join(work_dir, "templatedir/surrogatePLoM.h5")
+ self.preTrainedModel = os.path.join( # noqa: PTH118
+ work_dir, 'templatedir/surrogatePLoM.h5'
+ )
# KZ, 07/24: loading hyperparameter functions (evaluating self.kdeTolerance and self.smootherKDE from user-defined case)
if self._load_hyperparameter():
msg = 'runPLoM._parse_plom_parameters: Error in loading hyperparameter functions.'
self.errlog.exit(msg)
- except:
+ except: # noqa: E722
run_flag = 1
# return
return run_flag
-
def _set_up_parallel(self):
-
- """
- _set_up_parallel: set up modules and variables for parallel jobs
+ """_set_up_parallel: set up modules and variables for parallel jobs
input:
none
output:
- run_flag: 0 - sucess, 1 - failure
- """
-
+ run_flag: 0 - success, 1 - failure
+ """ # noqa: D205, D400
run_flag = 0
try:
if self.run_type.lower() == 'runninglocal':
self.n_processor = os.cpu_count()
- from multiprocessing import Pool
+ from multiprocessing import Pool # noqa: PLC0415
+
self.pool = Pool(self.n_processor)
else:
- from mpi4py import MPI
- from mpi4py.futures import MPIPoolExecutor
+ from mpi4py import MPI # noqa: PLC0415
+ from mpi4py.futures import MPIPoolExecutor # noqa: PLC0415
+
self.world = MPI.COMM_WORLD
self.pool = MPIPoolExecutor()
self.n_processor = self.world.Get_size()
- print("nprocessor :")
- print(self.n_processor)
+ print('nprocessor :') # noqa: T201
+ print(self.n_processor) # noqa: T201
self.cal_interval = self.n_processor
- except:
+ except: # noqa: E722
run_flag = 1
# return
return run_flag
-
- def _load_variables(self, do_sampling, do_simulation):
-
- """
- _load_variables: load variables
+ def _load_variables(self, do_sampling, do_simulation): # noqa: C901
+ """_load_variables: load variables
input:
do_sampling: sampling flag
do_simulation: simulation flag
job_config: job configuration dictionary
output:
- run_flag: 0 - sucess, 1 - failure
- """
+ run_flag: 0 - success, 1 - failure
+ """ # noqa: D205, D400
job_config = self.job_config
run_flag = 0
- #try:
+ # try:
if do_sampling:
pass
else:
- X = read_txt(self.inpData, self.errlog)
- print('X = ', X)
- print(X.columns)
+ X = read_txt(self.inpData, self.errlog) # noqa: N806
+ print('X = ', X) # noqa: T201
+ print(X.columns) # noqa: T201
if len(X.columns) != self.x_dim:
- msg = 'Error importing input data: Number of dimension inconsistent: have {} RV(s) but {} column(s).' \
- .format(self.x_dim, len(X.columns))
+                msg = f'Error importing input data: Number of dimensions inconsistent: have {self.x_dim} RV(s) but {len(X.columns)} column(s).'
errlog.exit(msg)
if self.logTransform:
- X = np.log(X)
+ X = np.log(X) # noqa: N806
if do_simulation:
pass
else:
- Y = read_txt(self.outData, self.errlog)
+ Y = read_txt(self.outData, self.errlog) # noqa: N806
if Y.shape[1] != self.y_dim:
- msg = 'Error importing input data: Number of dimension inconsistent: have {} QoI(s) but {} column(s).' \
- .format(self.y_dim, len(Y.columns))
+                msg = f'Error importing input data: Number of dimensions inconsistent: have {self.y_dim} QoI(s) but {len(Y.columns)} column(s).'
errlog.exit(msg)
if self.logTransform:
- Y = np.log(Y)
+ Y = np.log(Y) # noqa: N806
if X.shape[0] != Y.shape[0]:
- msg = 'Warning importing input data: numbers of samples of inputs ({}) and outputs ({}) are inconsistent'.format(len(X.columns), len(Y.columns))
- print(msg)
+            msg = f'Warning importing input data: numbers of samples of inputs ({X.shape[0]}) and outputs ({Y.shape[0]}) are inconsistent'
+ print(msg) # noqa: T201
n_samp = Y.shape[0]
# writing a data file for PLoM input
self.X = X.to_numpy()
self.Y = Y.to_numpy()
- inputXY = os.path.join(work_dir, "templatedir/inputXY.csv")
- X_Y = pd.concat([X,Y], axis=1)
+ inputXY = os.path.join(work_dir, 'templatedir/inputXY.csv') # noqa: PTH118, N806
+ X_Y = pd.concat([X, Y], axis=1) # noqa: N806
X_Y.to_csv(inputXY, sep=',', header=True, index=False)
self.inputXY = inputXY
self.n_samp = n_samp
@@ -610,74 +650,101 @@ def _load_variables(self, do_sampling, do_simulation):
self.rvVal = []
try:
for nx in range(self.x_dim):
- rvInfo = job_config["randomVariables"][nx]
- self.rvName = self.rvName + [rvInfo["name"]]
- self.rvDist = self.rvDist + [rvInfo["distribution"]]
+ rvInfo = job_config['randomVariables'][nx] # noqa: N806
+ self.rvName = self.rvName + [rvInfo['name']] # noqa: PLR6104, RUF005
+ self.rvDist = self.rvDist + [rvInfo['distribution']] # noqa: PLR6104, RUF005
if do_sampling:
- self.rvVal = self.rvVal + [(rvInfo["upperbound"] + rvInfo["lowerbound"]) / 2]
+ self.rvVal = self.rvVal + [ # noqa: PLR6104, RUF005
+ (rvInfo['upperbound'] + rvInfo['lowerbound']) / 2
+ ]
else:
- self.rvVal = self.rvVal + [np.mean(self.X[:, nx])]
- except:
+ self.rvVal = self.rvVal + [np.mean(self.X[:, nx])] # noqa: PLR6104, RUF005
+ except: # noqa: E722
msg = 'Warning: randomVariables attributes in configuration file are not consistent with x_dim'
- print(msg)
- #except:
+ print(msg) # noqa: T201
+ # except:
# run_flag = 1
# return
return run_flag
-
# KZ, 07/24: loading user-defined hyper-parameter files
def _load_hyperparameter(self):
run_flag = 0
try:
# load constraints first
- constr_file = Path(self.constraintsFile).resolve()
+ constr_file = Path(self.constraintsFile).resolve() # noqa: F405
sys.path.insert(0, str(constr_file.parent) + '/')
- constr_script = importlib.__import__(constr_file.name[:-3], globals(), locals(), [], 0)
+ constr_script = importlib.__import__( # noqa: F405
+ constr_file.name[:-3], globals(), locals(), [], 0
+ )
self.beta_c = constr_script.beta_c()
- print("beta_c = ", self.beta_c)
+ print('beta_c = ', self.beta_c) # noqa: T201
# if smootherKDE
if self.smootherKDE_Customize:
- kde_file = Path(self.smootherKDE_file).resolve()
+ kde_file = Path(self.smootherKDE_file).resolve() # noqa: F405
sys.path.insert(0, str(kde_file.parent) + '/')
- kde_script = importlib.__import__(kde_file.name[:-3], globals(), locals(), [], 0)
+ kde_script = importlib.__import__( # noqa: F405
+ kde_file.name[:-3], globals(), locals(), [], 0
+ )
self.get_epsilon_k = kde_script.get_epsilon_k
# evaluating the function
self.smootherKDE = self.get_epsilon_k(self.beta_c)
- print('epsilon_k = ',self.smootherKDE)
+ print('epsilon_k = ', self.smootherKDE) # noqa: T201
# if tolKDE
if self.kdeTolerance_Customize:
- beta_file = Path(self.kdeTolerance_file).resolve()
+ beta_file = Path(self.kdeTolerance_file).resolve() # noqa: F405
sys.path.insert(0, str(beta_file.parent) + '/')
- beta_script = importlib.__import__(beta_file.name[:-3], globals(), locals(), [], 0)
+ beta_script = importlib.__import__( # noqa: F405
+ beta_file.name[:-3], globals(), locals(), [], 0
+ )
self.get_epsilon_db = beta_script.get_epsilon_db
# evaluating the function
self.kdeTolerance = self.get_epsilon_db(self.beta_c)
- print('epsilon_db = ',self.kdeTolerance)
- except:
+ print('epsilon_db = ', self.kdeTolerance) # noqa: T201
+ except: # noqa: E722
run_flag = 1
return run_flag
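The hyper-parameter hooks above are ordinary Python files supplied by the user and imported at run time; a minimal sketch of that dynamic-import pattern using importlib (file and function names hypothetical):

import importlib
import sys
from pathlib import Path

def load_user_function(py_file, func_name):
    # make the file's directory importable, import it as a module, return the callable
    path = Path(py_file).resolve()
    sys.path.insert(0, str(path.parent))
    module = importlib.import_module(path.stem)
    return getattr(module, func_name)

# e.g. get_epsilon_k = load_user_function('smootherKDE.py', 'get_epsilon_k')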
-
- def train_model(self, model_name='SurrogatePLoM'):
- db_path = os.path.join(self.work_dir, 'templatedir')
+ def train_model(self, model_name='SurrogatePLoM'): # noqa: D102
+ db_path = os.path.join(self.work_dir, 'templatedir') # noqa: PTH118
if not self.preTrained:
- self.modelPLoM = PLoM(model_name=model_name, data=self.inputXY, separator=',', col_header=True, db_path=db_path,
- tol_pca = self.epsilonPCA, epsilon_kde = self.smootherKDE, runDiffMaps = self.diffMap, plot_tag = True)
+ self.modelPLoM = PLoM( # noqa: F405
+ model_name=model_name,
+ data=self.inputXY,
+ separator=',',
+ col_header=True,
+ db_path=db_path,
+ tol_pca=self.epsilonPCA,
+ epsilon_kde=self.smootherKDE,
+ runDiffMaps=self.diffMap,
+ plot_tag=True,
+ )
else:
- self.modelPLoM = PLoM(model_name=model_name, data=self.preTrainedModel, db_path=db_path,
- tol_pca = self.epsilonPCA, epsilon_kde = self.smootherKDE, runDiffMaps = self.diffMap)
+ self.modelPLoM = PLoM( # noqa: F405
+ model_name=model_name,
+ data=self.preTrainedModel,
+ db_path=db_path,
+ tol_pca=self.epsilonPCA,
+ epsilon_kde=self.smootherKDE,
+ runDiffMaps=self.diffMap,
+ )
if self.constraintsFlag:
self.modelPLoM.add_constraints(self.constraintsFile)
if self.n_mc > 0:
- tasks = ['DataNormalization','RunPCA','RunKDE','ISDEGeneration']
+ tasks = ['DataNormalization', 'RunPCA', 'RunKDE', 'ISDEGeneration']
else:
- tasks = ['DataNormalization','RunPCA','RunKDE']
+ tasks = ['DataNormalization', 'RunPCA', 'RunKDE']
self.modelPLoM.ConfigTasks(task_list=tasks)
- self.modelPLoM.RunAlgorithm(n_mc=self.n_mc, tol = self.tolIter, max_iter = self.numIter, seed_num=self.randomSeed, tolKDE=self.kdeTolerance)
+ self.modelPLoM.RunAlgorithm(
+ n_mc=self.n_mc,
+ tol=self.tolIter,
+ max_iter=self.numIter,
+ seed_num=self.randomSeed,
+ tolKDE=self.kdeTolerance,
+ )
if self.n_mc > 0:
- self.modelPLoM.export_results(data_list=['/X0','/X_new'])
+ self.modelPLoM.export_results(data_list=['/X0', '/X_new'])
else:
self.modelPLoM.export_results(data_list=['/X0'])
self.pcaEigen = self.modelPLoM.mu
@@ -689,189 +756,229 @@ def train_model(self, model_name='SurrogatePLoM'):
if self.constraintsFlag:
self.Errors = self.modelPLoM.errors
-
- def save_model(self):
-
+ def save_model(self): # noqa: C901, D102
# copy the h5 model file to the main work dir
- shutil.copy2(os.path.join(self.work_dir,'templatedir','SurrogatePLoM','SurrogatePLoM.h5'),self.work_dir)
+ shutil.copy2(
+ os.path.join( # noqa: PTH118
+ self.work_dir, 'templatedir', 'SurrogatePLoM', 'SurrogatePLoM.h5'
+ ),
+ self.work_dir,
+ )
if self.n_mc > 0:
- shutil.copy2(os.path.join(self.work_dir,'templatedir','SurrogatePLoM','DataOut','X_new.csv'),self.work_dir)
+ shutil.copy2(
+ os.path.join( # noqa: PTH118
+ self.work_dir,
+ 'templatedir',
+ 'SurrogatePLoM',
+ 'DataOut',
+ 'X_new.csv',
+ ),
+ self.work_dir,
+ )
if self.X.shape[0] > 0:
- header_string_x = ' ' + ' '.join([str(elem).replace('%','') for elem in self.rv_name]) + ' '
+ header_string_x = (
+ ' '
+ + ' '.join([str(elem).replace('%', '') for elem in self.rv_name])
+ + ' '
+ )
else:
header_string_x = ' '
- header_string_y = ' ' + ' '.join([str(elem).replace('%','') for elem in self.g_name])
+ header_string_y = ' ' + ' '.join(
+ [str(elem).replace('%', '') for elem in self.g_name]
+ )
header_string = header_string_x[:-1] + header_string_y
- #xy_data = np.concatenate((np.asmatrix(np.arange(1, self.n_samp + 1)).T, self.X, self.Y), axis=1)
- #np.savetxt(self.work_dir + '/dakotaTab.out', xy_data, header=header_string, fmt='%1.4e', comments='%')
- #np.savetxt(self.work_dir + '/inputTab.out', self.X, header=header_string_x[1:-1], fmt='%1.4e', comments='%')
- #np.savetxt(self.work_dir + '/outputTab.out', self.Y, header=header_string_y[1:], fmt='%1.4e', comments='%')
- df_inputTab = pd.DataFrame(data=self.X, columns=self.rv_name)
- df_outputTab = pd.DataFrame(data=self.Y, columns=self.g_name)
- df_inputTab.to_csv(os.path.join(self.work_dir,'inputTab.out'),index=False)
- df_outputTab.to_csv(os.path.join(self.work_dir,'outputTab.out'),index=False)
+ # xy_data = np.concatenate((np.asmatrix(np.arange(1, self.n_samp + 1)).T, self.X, self.Y), axis=1)
+ # np.savetxt(self.work_dir + '/dakotaTab.out', xy_data, header=header_string, fmt='%1.4e', comments='%')
+ # np.savetxt(self.work_dir + '/inputTab.out', self.X, header=header_string_x[1:-1], fmt='%1.4e', comments='%')
+ # np.savetxt(self.work_dir + '/outputTab.out', self.Y, header=header_string_y[1:], fmt='%1.4e', comments='%')
+ df_inputTab = pd.DataFrame(data=self.X, columns=self.rv_name) # noqa: N806
+ df_outputTab = pd.DataFrame(data=self.Y, columns=self.g_name) # noqa: N806
+ df_inputTab.to_csv(os.path.join(self.work_dir, 'inputTab.out'), index=False) # noqa: PTH118
+ df_outputTab.to_csv(
+ os.path.join(self.work_dir, 'outputTab.out'), # noqa: PTH118
+ index=False,
+ )
results = {}
- results["valSamp"] = self.n_samp
- results["xdim"] = self.x_dim
- results["ydim"] = self.y_dim
- results["xlabels"] = self.rv_name
- results["ylabels"] = self.g_name
- results["yExact"] = {}
- results["xPredict"] = {}
- results["yPredict"] = {}
- results["valNRMSE"] = {}
- results["valR2"] = {}
- results["valCorrCoeff"] = {}
+ results['valSamp'] = self.n_samp
+ results['xdim'] = self.x_dim
+ results['ydim'] = self.y_dim
+ results['xlabels'] = self.rv_name
+ results['ylabels'] = self.g_name
+ results['yExact'] = {}
+ results['xPredict'] = {}
+ results['yPredict'] = {}
+ results['valNRMSE'] = {}
+ results['valR2'] = {}
+ results['valCorrCoeff'] = {}
for ny in range(self.y_dim):
- results["yExact"][self.g_name[ny]] = self.Y[:, ny].tolist()
+ results['yExact'][self.g_name[ny]] = self.Y[:, ny].tolist()
- results["inpData"] = self.inpData
+ results['inpData'] = self.inpData
if not self.do_simulation:
- results["outData"] = self.outData
+ results['outData'] = self.outData
- results["logTransform"] = self.logTransform
+ results['logTransform'] = self.logTransform
rv_list = []
try:
for nx in range(self.x_dim):
rvs = {}
- rvs["name"] = self.rvName[nx]
- rvs["distribution"] = self.rvDist[nx]
- rvs["value"] = self.rvVal[nx]
- rv_list = rv_list + [rvs]
- results["randomVariables"] = rv_list
- except:
+ rvs['name'] = self.rvName[nx]
+ rvs['distribution'] = self.rvDist[nx]
+ rvs['value'] = self.rvVal[nx]
+ rv_list = rv_list + [rvs] # noqa: PLR6104, RUF005
+ results['randomVariables'] = rv_list
+ except: # noqa: E722
msg = 'Warning: randomVariables attributes in configuration file are not consistent with x_dim'
- print(msg)
- results["dirPLoM"] = os.path.join(os.path.dirname(os.path.abspath(__file__)),'PLoM')
-
- results["pcaEigen"] = self.pcaEigen.tolist()
- results["pcaError"] = self.pcaError
- results["pcaComp"] = self.pcaComp
- results["kdeEigen"] = self.kdeEigen.tolist()
- results["kdeComp"] = self.kdeComp
- results["Errors"] = self.Errors
-
+ print(msg) # noqa: T201
+ results['dirPLoM'] = os.path.join( # noqa: PTH118
+ os.path.dirname(os.path.abspath(__file__)), # noqa: PTH100, PTH120
+ 'PLoM',
+ )
+
+ results['pcaEigen'] = self.pcaEigen.tolist()
+ results['pcaError'] = self.pcaError
+ results['pcaComp'] = self.pcaComp
+ results['kdeEigen'] = self.kdeEigen.tolist()
+ results['kdeComp'] = self.kdeComp
+ results['Errors'] = self.Errors
+
if self.n_mc > 0:
- Xnew = pd.read_csv(self.work_dir + '/X_new.csv', header=0, index_col=0)
+ Xnew = pd.read_csv(self.work_dir + '/X_new.csv', header=0, index_col=0) # noqa: N806
if self.logTransform:
- Xnew = np.exp(Xnew)
+ Xnew = np.exp(Xnew) # noqa: N806
for nx in range(self.x_dim):
- results["xPredict"][self.rv_name[nx]] = Xnew.iloc[:, nx].tolist()
+ results['xPredict'][self.rv_name[nx]] = Xnew.iloc[:, nx].tolist()
for ny in range(self.y_dim):
- results["yPredict"][self.g_name[ny]] = Xnew.iloc[:, self.x_dim+ny].tolist()
+ results['yPredict'][self.g_name[ny]] = Xnew.iloc[
+ :, self.x_dim + ny
+ ].tolist()
- if self.X.shape[0]>0:
- xy_data = np.concatenate((np.asmatrix(np.arange(1, self.Y.shape[0] + 1)).T, self.X, self.Y), axis=1)
+ if self.X.shape[0] > 0:
+ xy_data = np.concatenate(
+ (np.asmatrix(np.arange(1, self.Y.shape[0] + 1)).T, self.X, self.Y),
+ axis=1,
+ )
else:
- xy_data = np.concatenate((np.asmatrix(np.arange(1, self.Y.shape[0] + 1)).T, self.Y), axis=1)
- np.savetxt(self.work_dir + '/dakotaTab.out', xy_data, header=header_string, fmt='%1.4e', comments='%')
-
+ xy_data = np.concatenate(
+ (np.asmatrix(np.arange(1, self.Y.shape[0] + 1)).T, self.Y), axis=1
+ )
+ np.savetxt(
+ self.work_dir + '/dakotaTab.out',
+ xy_data,
+ header=header_string,
+ fmt='%1.4e',
+ comments='%',
+ )
+
# KZ: adding MultipleEvent if any
if self.multipleEvent is not None:
- tmp = pd.read_csv(os.path.join(self.work_dir,'dakotaTab.out'),index_col=None,sep=' ')
- tmp = pd.concat([tmp,self.multipleEvent],axis=1)
- tmp.to_csv(os.path.join(self.work_dir,'dakotaTab.out'),index=False,sep=' ')
-
- #if not self.do_logtransform:
- #results["yPredict_CI_lb"][self.g_name[ny]] = norm.ppf(0.25, loc = results["yPredict"][self.g_name[ny]] , scale = np.sqrt(self.Y_loo_var[:, ny])).tolist()
- #results["yPredict_CI_ub"][self.g_name[ny]] = norm.ppf(0.75, loc = results["yPredict"][self.g_name[ny]] , scale = np.sqrt(self.Y_loo_var[:, ny])).tolist()
- #else:
+ tmp = pd.read_csv(
+ os.path.join(self.work_dir, 'dakotaTab.out'), # noqa: PTH118
+ index_col=None,
+ sep=' ',
+ )
+ tmp = pd.concat([tmp, self.multipleEvent], axis=1)
+ tmp.to_csv(
+ os.path.join(self.work_dir, 'dakotaTab.out'), # noqa: PTH118
+ index=False,
+ sep=' ',
+ )
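# A minimal sketch of the column-append pattern used just above for dakotaTab.out:
# read the space-delimited table back, concatenate the extra event columns, and
# rewrite it with the same delimiter.  The frames and the file name below are made-up stand-ins.
import pandas as pd

tab = pd.DataFrame({'%eval_id': [1, 2], 'RV_1': [0.1, 0.2], 'QoI_1': [3.0, 4.0]})
multiple_event = pd.DataFrame({'MultipleEvent': ['record_A', 'record_B']})

tab = pd.concat([tab, multiple_event], axis=1)             # append the event columns
tab.to_csv('dakotaTab_example.out', index=False, sep=' ')  # hypothetical output file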
+
+ # if not self.do_logtransform:
+ # results["yPredict_CI_lb"][self.g_name[ny]] = norm.ppf(0.25, loc = results["yPredict"][self.g_name[ny]] , scale = np.sqrt(self.Y_loo_var[:, ny])).tolist()
+ # results["yPredict_CI_ub"][self.g_name[ny]] = norm.ppf(0.75, loc = results["yPredict"][self.g_name[ny]] , scale = np.sqrt(self.Y_loo_var[:, ny])).tolist()
+ # else:
# mu = np.log(self.Y_loo[:, ny] )
# sig = np.sqrt(np.log(self.Y_loo_var[:, ny]/pow(self.Y_loo[:, ny] ,2)+1))
# results["yPredict_CI_lb"][self.g_name[ny]] = lognorm.ppf(0.25, s = sig, scale = np.exp(mu)).tolist()
# results["yPredict_CI_ub"][self.g_name[ny]] = lognorm.ppf(0.75, s = sig, scale = np.exp(mu)).tolist()
-
+
# over-write the data with Xnew if any
if self.n_mc > 0:
- Xnew.insert(0,'%',[x+1 for x in list(Xnew.index)])
+ Xnew.insert(0, '%', [x + 1 for x in list(Xnew.index)])
Xnew.to_csv(self.work_dir + '/dakotaTab.out', index=False, sep=' ')
- if os.path.exists('dakota.out'):
- os.remove('dakota.out')
+ if os.path.exists('dakota.out'): # noqa: PTH110
+ os.remove('dakota.out') # noqa: PTH107
- with open('dakota.out', 'w', encoding='utf-8') as fp:
+ with open('dakota.out', 'w', encoding='utf-8') as fp: # noqa: PTH123
json.dump(results, fp, indent=2)
- print("Results Saved")
-
+ print('Results Saved') # noqa: T201
-def read_txt(text_dir, errlog):
- if not os.path.exists(text_dir):
- msg = "Error: file does not exist: " + text_dir
+def read_txt(text_dir, errlog): # noqa: D103
+ if not os.path.exists(text_dir): # noqa: PTH110
+ msg = 'Error: file does not exist: ' + text_dir
errlog.exit(msg)
header_line = []
- with open(text_dir) as f:
+ with open(text_dir) as f: # noqa: PLW1514, PTH123
# Iterate through the file until the table starts
header_count = 0
for line in f:
if line.startswith('%'):
- header_count = header_count + 1
- header_line = line[1:] # remove '%'
+ header_count = header_count + 1 # noqa: PLR6104
+ header_line = line[1:] # remove '%'
try:
- with open(text_dir) as f:
- X = np.loadtxt(f, skiprows=header_count)
+ with open(text_dir) as f: # noqa: PLW1514, PLW2901, PTH123
+ X = np.loadtxt(f, skiprows=header_count) # noqa: N806
except ValueError:
try:
- with open(text_dir) as f:
- X = np.genfromtxt(f, skip_header=header_count, delimiter=',')
+ with open(text_dir) as f: # noqa: PLW1514, PLW2901, PTH123
+ X = np.genfromtxt(f, skip_header=header_count, delimiter=',') # noqa: N806
# if there are extra delimiter, remove nan
if np.isnan(X[-1, -1]):
- X = np.delete(X, -1, 1)
+ X = np.delete(X, -1, 1) # noqa: N806
except ValueError:
- msg = "Error: file format is not supported " + text_dir
+ msg = 'Error: file format is not supported ' + text_dir
errlog.exit(msg)
if X.ndim == 1:
- X = np.array([X]).transpose()
+ X = np.array([X]).transpose() # noqa: N806
- print('X = ', X)
+ print('X = ', X) # noqa: T201
- #df_X = pd.DataFrame(data=X, columns=["V"+str(x) for x in range(X.shape[1])])
+ # df_X = pd.DataFrame(data=X, columns=["V"+str(x) for x in range(X.shape[1])])
if len(header_line) > 0:
- df_X = pd.DataFrame(data=X, columns=header_line.replace('\n','').split(','))
+ df_X = pd.DataFrame(data=X, columns=header_line.replace('\n', '').split(',')) # noqa: N806
else:
- df_X = pd.DataFrame()
+ df_X = pd.DataFrame() # noqa: N806
- print('df_X = ',df_X)
+ print('df_X = ', df_X) # noqa: T201
return df_X
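# A minimal sketch of the table format read_txt() above expects: any number of
# '%'-prefixed header lines followed by whitespace- or comma-delimited numeric rows.
# The sample text and column names below are made up for illustration.
import io

import numpy as np
import pandas as pd

sample = '%RV_1,RV_2\n1.0 2.0\n3.0 4.0\n'
header_count, header_line = 0, ''
for line in io.StringIO(sample):
    if line.startswith('%'):
        header_count += 1
        header_line = line[1:]                 # keep the last header line, '%' stripped
X = np.loadtxt(io.StringIO(sample), skiprows=header_count)
df_X = pd.DataFrame(X, columns=header_line.replace('\n', '').split(','))
print(df_X)                                    # two rows with columns RV_1 and RV_2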
-
-class errorLog(object):
+class errorLog: # noqa: D101
def __init__(self, work_dir):
- self.file = open('{}/dakota.err'.format(work_dir), "w")
+ self.file = open(f'{work_dir}/dakota.err', 'w') # noqa: PLW1514, PTH123, SIM115
- def exit(self, msg):
- print(msg)
+ def exit(self, msg): # noqa: D102
+ print(msg) # noqa: T201
self.file.write(msg)
self.file.close()
- exit(-1)
+ exit(-1) # noqa: PLR1722
def build_surrogate(work_dir, os_type, run_type, input_file, workflow_driver):
-
- """
- build_surrogate: built surrogate model
+ """build_surrogate: built surrogate model
input:
work_dir: working directory
run_type: job type
os_type: operating system type
- """
-
+ """ # noqa: D205, D400
# t_total = time.process_time()
# default filename
- filename = 'PLoM_Model'
+ filename = 'PLoM_Model' # noqa: F841
# read the configuration file
- f = open(work_dir + '/templatedir/' + input_file)
+ f = open(work_dir + '/templatedir/' + input_file) # noqa: PLW1514, PTH123, SIM115
try:
job_config = json.load(f)
except ValueError:
@@ -881,20 +988,24 @@ def build_surrogate(work_dir, os_type, run_type, input_file, workflow_driver):
# check the uq type
if job_config['UQ']['uqType'] != 'PLoM Model':
- msg = 'UQ type inconsistency : user wanted <' + job_config['UQ']['uqType'] + \
- '> but called program'
+ msg = (
+ 'UQ type inconsistency : user wanted <'
+ + job_config['UQ']['uqType']
+ + '> but called program'
+ )
errlog.exit(msg)
# initializing runPLoM
- model = runPLoM(work_dir, run_type, os_type, job_config, errlog, input_file, workflow_driver)
+ model = runPLoM(
+ work_dir, run_type, os_type, job_config, errlog, input_file, workflow_driver
+ )
# training the model
model.train_model()
# save the model
model.save_model()
-if __name__ == "__main__":
-
+if __name__ == '__main__':
"""
shell command: PYTHON runPLoM.py work_dir run_type os_type
work_dir: working directory
@@ -903,10 +1014,10 @@ def build_surrogate(work_dir, os_type, run_type, input_file, workflow_driver):
"""
# collect arguments
- inputArgs = sys.argv
- # working diretory
+ inputArgs = sys.argv # noqa: N816
+ # working directory
work_dir = inputArgs[1].replace(os.sep, '/')
- print('work_dir = {}'.format(work_dir))
+ print(f'work_dir = {work_dir}') # noqa: T201
# print the work_dir
errlog = errorLog(work_dir)
# job type
@@ -914,11 +1025,11 @@ def build_surrogate(work_dir, os_type, run_type, input_file, workflow_driver):
# operating system type
os_type = inputArgs[4]
# default output file: results.out
- result_file = "results.out"
+ result_file = 'results.out'
# input file name
- input_file = os.path.basename(inputArgs[2])
- print('input_file = {}'.format(input_file))
+ input_file = os.path.basename(inputArgs[2]) # noqa: PTH119
+ print(f'input_file = {input_file}') # noqa: T201
# workflowDriver
workflow_driver = inputArgs[3]
# start build the surrogate
- build_surrogate(work_dir, os_type, run_type, input_file, workflow_driver)
+ build_surrogate(work_dir, os_type, run_type, input_file, workflow_driver)
diff --git a/modules/performUQ/SimCenterUQ/surrogateBuild.py b/modules/performUQ/SimCenterUQ/surrogateBuild.py
index b4ce65d53..42da6e547 100644
--- a/modules/performUQ/SimCenterUQ/surrogateBuild.py
+++ b/modules/performUQ/SimCenterUQ/surrogateBuild.py
@@ -1,5 +1,4 @@
-# -*- coding: utf-8 -*-
-#
+# # noqa: INP001, D100
# Copyright (c) 2021 Leland Stanford Junior University
# Copyright (c) 2021 The Regents of the University of California
#
@@ -43,95 +42,92 @@
# Jan 31, 2023: let's not use GPy calibration parallel for now, because it seems to give local maxima
import copy
-import glob
import json
-import math
import os
-import pickle
+import pickle # noqa: S403
import random
-import shutil
-import subprocess
import sys
import time
-from copy import deepcopy
-import random
import warnings
-import traceback
-warnings.filterwarnings("ignore")
+warnings.filterwarnings('ignore')
-file_dir = os.path.dirname(__file__)
+
+file_dir = os.path.dirname(__file__) # noqa: PTH120
sys.path.append(file_dir)
-from UQengine import UQengine
+from UQengine import UQengine # noqa: E402
# import pip installed modules
-
try:
- moduleName = "numpy"
+ moduleName = 'numpy' # noqa: N816
import numpy as np
- moduleName = "GPy"
- import GPy as GPy
+ moduleName = 'GPy' # noqa: N816
+ import GPy as GPy # noqa: PLC0414
- moduleName = "scipy"
- from scipy.stats import lognorm, norm, cramervonmises, qmc
+ moduleName = 'scipy' # noqa: N816
+ from scipy.stats import cramervonmises, lognorm, norm, qmc
- moduleName = "UQengine"
+ moduleName = 'UQengine' # noqa: N816
# from utilities import run_FEM_batch, errorLog
error_tag = False # global variable
-except:
+except: # noqa: E722
error_tag = True
- print("Failed to import module:" + moduleName)
+ print('Failed to import module:' + moduleName) # noqa: T201
-errFileName = 'dakota.err'
-sys.stderr = open(errFileName, 'w')
+errFileName = 'dakota.err' # noqa: N816
+sys.stderr = open(errFileName, 'w') # noqa: PLW1514, PTH123, SIM115
#
# Modify GPy package
#
-if error_tag == False:
+if error_tag == False: # noqa: E712
- def monkeypatch_method(cls):
+ def monkeypatch_method(cls): # noqa: D103
def decorator(func):
setattr(cls, func.__name__, func)
return func
return decorator
-
@monkeypatch_method(GPy.models.gp_regression.GPRegression)
- def randomize(self, rand_gen=None, *args, **kwargs):
+ def randomize(self, rand_gen=None, *args, **kwargs): # noqa: D103
if rand_gen is None:
rand_gen = np.random.normal
# first take care of all parameters (from N(0,1))
- x = rand_gen(size=self._size_transformed(), *args, **kwargs)
+ x = rand_gen(size=self._size_transformed(), *args, **kwargs) # noqa: B026
updates = self.update_model()
- self.update_model(False) # Switch off the updates
+ self.update_model(False) # Switch off the updates # noqa: FBT003
self.optimizer_array = x # makes sure all of the tied parameters get the same init (since there's only one prior object...)
# now draw from prior where possible
x = self.param_array.copy()
- [np.put(x, ind, p.rvs(ind.size)) for p, ind in self.priors.items() if not p is None]
+ [
+ np.put(x, ind, p.rvs(ind.size))
+ for p, ind in self.priors.items()
+ if p is not None
+ ]
unfixlist = np.ones((self.size,), dtype=bool)
- from paramz.transformations import __fixed__
+ from paramz.transformations import __fixed__ # noqa: PLC0415
+
unfixlist[self.constraints[__fixed__]] = False
self.param_array.flat[unfixlist] = x.view(np.ndarray).ravel()[unfixlist]
self.update_model(updates)
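# A minimal, self-contained illustration of the monkeypatch_method decorator defined
# above: it simply attaches the decorated function to an existing class.  A toy class
# is used here instead of the GPy model classes that the real code patches.
def monkeypatch_method(cls):          # repeated here so the sketch runs on its own
    def decorator(func):
        setattr(cls, func.__name__, func)
        return func
    return decorator


class Counter:
    def __init__(self):
        self.n = 0


@monkeypatch_method(Counter)
def bump(self, step=1):
    self.n += step
    return self.n


c = Counter()
print(c.bump(), c.bump(3))            # 1 4 -- bump() now behaves like a regular method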
-## Main function
+# Main function
-def main(inputArgs):
- gp = surrogate(inputArgs)
+def main(inputArgs): # noqa: N803, D103
+ gp = surrogate(inputArgs) # noqa: F841
-class surrogate(UQengine):
- def __init__(self, inputArgs):
- super(surrogate, self).__init__(inputArgs)
+class surrogate(UQengine): # noqa: D101
+ def __init__(self, inputArgs): # noqa: N803
+ super(surrogate, self).__init__(inputArgs) # noqa: UP008
t_init = time.time()
#
@@ -162,81 +158,80 @@ def __init__(self, inputArgs):
# save model as
#
- self.save_model("SimGpModel")
+ self.save_model('SimGpModel')
- def check_packages(self,error_tag, moduleName):
-
- if error_tag == True and moduleName =="GPy":
- if self.os_type.lower().startswith("darwin"):
- msg = ("Surrogate modeling module uses GPy python package which is facing a version compatibility issue at this moment (01.05.2024). To use the surrogate module, one needs to update manually the GPy version to 1.13. The instruction can be found in the the documentation: https://nheri-simcenter.github.io/quoFEM-Documentation/common/user_manual/usage/desktop/SimCenterUQSurrogate.html#lblsimsurrogate")
+ def check_packages(self, error_tag, moduleName): # noqa: N803, D102
+ if error_tag == True and moduleName == 'GPy': # noqa: E712
+ if self.os_type.lower().startswith('darwin'):
+                msg = 'Surrogate modeling module uses the GPy python package, which is facing a version compatibility issue at this moment (01.05.2024). To use the surrogate module, one needs to manually update the GPy version to 1.13. Instructions can be found in the documentation: https://nheri-simcenter.github.io/quoFEM-Documentation/common/user_manual/usage/desktop/SimCenterUQSurrogate.html#lblsimsurrogate'
self.exit(msg)
- if error_tag == True:
- if self.os_type.lower().startswith("win"):
+ if error_tag == True: # noqa: E712
+ if self.os_type.lower().startswith('win'):
msg = (
- "Failed to load python module ["
- + moduleName
- + "]. Go to File-Preference-Python and reset the path."
+ 'Failed to load python module ['
+ + moduleName
+ + ']. Go to File-Preference-Python and reset the path.'
)
else:
msg = (
- "Failed to load python module ["
- + moduleName
- + "]. Did you forget ?"
+ 'Failed to load python module ['
+ + moduleName
+                    + ']. Did you forget?'
)
self.exit(msg)
- def readJson(self):
- #self.nopt = max([20, self.n_processor])
+ def readJson(self): # noqa: C901, N802, D102, PLR0912, PLR0915
+ # self.nopt = max([20, self.n_processor])
self.nopt = 1
try:
- jsonPath = self.inputFile # for EEUQ
- if not os.path.isabs(jsonPath):
- jsonPath = self.work_dir + "/templatedir/" + self.inputFile # for quoFEM
+ jsonPath = self.inputFile # for EEUQ # noqa: N806
+ if not os.path.isabs(jsonPath): # noqa: PTH117
+ jsonPath = ( # noqa: N806
+ self.work_dir + '/templatedir/' + self.inputFile
+ ) # for quoFEM
- with open(jsonPath, 'r', encoding='utf-8') as f:
- dakotaJson = json.load(f)
+ with open(jsonPath, encoding='utf-8') as f: # noqa: PTH123
+ dakotaJson = json.load(f) # noqa: N806
except ValueError:
- msg = "invalid json format - dakota.json"
+ msg = 'invalid json format - dakota.json'
self.exit(msg)
- if dakotaJson["UQ"]["uqType"] != "Train GP Surrogate Model":
+ if dakotaJson['UQ']['uqType'] != 'Train GP Surrogate Model':
msg = (
- "UQ type inconsistency : user wanted <"
- + dakotaJson["UQ"]["uqType"]
- + "> but we called program"
+ 'UQ type inconsistency : user wanted <'
+ + dakotaJson['UQ']['uqType']
+ + '> but we called program'
)
self.exit(msg)
- surrogateJson = dakotaJson["UQ"]["surrogateMethodInfo"]
+ surrogateJson = dakotaJson['UQ']['surrogateMethodInfo'] # noqa: N806
- if surrogateJson["method"] == "Sampling and Simulation":
- random.seed(surrogateJson["seed"])
- np.random.seed(surrogateJson["seed"])
+ if surrogateJson['method'] == 'Sampling and Simulation':
+ random.seed(surrogateJson['seed'])
+ np.random.seed(surrogateJson['seed'])
else:
random.seed(1)
np.random.seed(1)
-
-
#
# EE-UQ
#
- #TODO: multihazards?
+ # TODO: multihazards? # noqa: TD002
self.isEEUQ = False
- if dakotaJson["Applications"].get("Events") != None:
- Evt = dakotaJson["Applications"]["Events"]
- if Evt[0].get("EventClassification") != None:
- if Evt[0]["EventClassification"] == "Earthquake":
+ if dakotaJson['Applications'].get('Events') != None: # noqa: E711
+ Evt = dakotaJson['Applications']['Events'] # noqa: N806
+ if Evt[0].get('EventClassification') != None: # noqa: E711
+ if Evt[0]['EventClassification'] == 'Earthquake':
self.isEEUQ = True
- self.rv_name_ee=[]
- if surrogateJson.get("IntensityMeasure") != None and self.isEEUQ:
- self.intensityMeasure = surrogateJson["IntensityMeasure"]
- self.intensityMeasure["useGeoMean"] = surrogateJson["useGeoMean"]
- self.unitInfo = dakotaJson["GeneralInformation"]["units"]
- for imName, imChar in surrogateJson["IntensityMeasure"].items():
+ self.rv_name_ee = []
+ if surrogateJson.get('IntensityMeasure') != None and self.isEEUQ: # noqa: E711
+ self.intensityMeasure = surrogateJson['IntensityMeasure']
+ self.intensityMeasure['useGeoMean'] = surrogateJson['useGeoMean']
+ self.unitInfo = dakotaJson['GeneralInformation']['units']
+ for imName, imChar in surrogateJson['IntensityMeasure'].items(): # noqa: B007, N806, PERF102
# if imChar.get("Periods") != None:
# for pers in imChar["Periods"]:
# self.rv_name_ee += [imName+str(pers)]
@@ -247,56 +242,55 @@ def readJson(self):
self.IntensityMeasure = {}
self.unitInfo = {}
-
if self.isEEUQ:
self.checkWorkflow(dakotaJson)
#
# common for all surrogate options
#
- self.rv_name = list()
+ self.rv_name = list() # noqa: C408
x_dim = 0
- for rv in dakotaJson["randomVariables"]:
- self.rv_name += [rv["name"]]
+ for rv in dakotaJson['randomVariables']:
+ self.rv_name += [rv['name']]
x_dim += 1
- self.g_name = list()
+ self.g_name = list() # noqa: C408
y_dim = 0
-
- for g in dakotaJson["EDP"]:
+
+ for g in dakotaJson['EDP']:
# scalar
- if (not g["name"]):
- msg = "QoI name cannot be an empty string"
+ if not g['name']:
+ msg = 'QoI name cannot be an empty string'
self.exit(msg)
- if g["length"] == 1:
- self.g_name += [g["name"]]
+ if g['length'] == 1:
+ self.g_name += [g['name']]
y_dim += 1
# vector
else:
- for nl in range(g["length"]):
- self.g_name += ["{}_{}".format(g["name"], nl + 1)]
+ for nl in range(g['length']):
+ self.g_name += ['{}_{}'.format(g['name'], nl + 1)]
y_dim += 1
if x_dim == 0:
- msg = "Error reading json: RV is empty"
+ msg = 'Error reading json: RV is empty'
self.exit(msg)
if y_dim == 0:
- msg = "Error reading json: EDP(QoI) is empty"
+ msg = 'Error reading json: EDP(QoI) is empty'
self.exit(msg)
do_predictive = False
- automate_doe = False
+ automate_doe = False # noqa: F841
self.x_dim = x_dim
self.y_dim = y_dim
self.do_predictive = do_predictive
try:
- self.do_parallel = surrogateJson["parallelExecution"]
- except:
+ self.do_parallel = surrogateJson['parallelExecution']
+ except: # noqa: E722
self.do_parallel = True
if self.do_parallel:
@@ -306,7 +300,7 @@ def readJson(self):
self.n_processor = 1
self.pool = 0
self.cal_interval = 5
- print("self.cal_interval : {}".format(self.cal_interval))
+ print(f'self.cal_interval : {self.cal_interval}') # noqa: T201
#
# Advanced
@@ -315,56 +309,56 @@ def readJson(self):
self.heteroscedastic = False
# if surrogateJson["advancedOpt"]:
- self.do_logtransform = surrogateJson["logTransform"]
- self.kernel = surrogateJson["kernel"]
- self.do_linear = surrogateJson["linear"]
- self.nugget_opt = surrogateJson["nuggetOpt"]
+ self.do_logtransform = surrogateJson['logTransform']
+ self.kernel = surrogateJson['kernel']
+ self.do_linear = surrogateJson['linear']
+ self.nugget_opt = surrogateJson['nuggetOpt']
# self.heteroscedastic = surrogateJson["Heteroscedastic"]
-
- if (self.nugget_opt== "Fixed Values") or (self.nugget_opt == "Fixed Bounds"):
+ if (self.nugget_opt == 'Fixed Values') or ( # noqa: PLR1714
+ self.nugget_opt == 'Fixed Bounds'
+ ):
try:
self.nuggetVal = np.array(
- json.loads("[{}]".format(surrogateJson["nuggetString"]))
+ json.loads('[{}]'.format(surrogateJson['nuggetString']))
)
except json.decoder.JSONDecodeError:
- msg = "Error reading json: improper format of nugget values/bounds. Provide nugget values/bounds of each QoI with comma delimiter"
+ msg = 'Error reading json: improper format of nugget values/bounds. Provide nugget values/bounds of each QoI with comma delimiter'
self.exit(msg)
- if self.nuggetVal.shape[0] != self.y_dim and self.nuggetVal.shape[0] != 0:
- msg = "Error reading json: Number of nugget quantities ({}) does not match # QoIs ({})".format(
- self.nuggetVal.shape[0], self.y_dim
- )
+ if (
+ self.nuggetVal.shape[0] != self.y_dim
+ and self.nuggetVal.shape[0] != 0
+ ):
+ msg = f'Error reading json: Number of nugget quantities ({self.nuggetVal.shape[0]}) does not match # QoIs ({self.y_dim})'
self.exit(msg)
else:
self.nuggetVal = 1
- if self.nugget_opt == "Heteroscedastic":
- self.stochastic =[True] * y_dim
+ if self.nugget_opt == 'Heteroscedastic':
+ self.stochastic = [True] * y_dim
else:
- self.stochastic =[False] * y_dim
-
+ self.stochastic = [False] * y_dim
-
- if self.nugget_opt == "Fixed Values":
- for Vals in self.nuggetVal:
+ if self.nugget_opt == 'Fixed Values':
+ for Vals in self.nuggetVal: # noqa: N806
if not np.isscalar(Vals):
- msg = "Error reading json: provide nugget values of each QoI with comma delimiter"
+ msg = 'Error reading json: provide nugget values of each QoI with comma delimiter'
self.exit(msg)
- elif self.nugget_opt == "Fixed Bounds":
- for Bous in self.nuggetVal:
+ elif self.nugget_opt == 'Fixed Bounds':
+ for Bous in self.nuggetVal: # noqa: N806
if np.isscalar(Bous):
- msg = "Error reading json: provide nugget bounds of each QoI in brackets with comma delimiter, e.g. [0.0,1.0],[0.0,2.0],..."
+ msg = 'Error reading json: provide nugget bounds of each QoI in brackets with comma delimiter, e.g. [0.0,1.0],[0.0,2.0],...'
self.exit(msg)
elif isinstance(Bous, list):
- msg = "Error reading json: provide both lower and upper bounds of nugget"
+ msg = 'Error reading json: provide both lower and upper bounds of nugget'
self.exit(msg)
- elif Bous.shape[0] != 2:
- msg = "Error reading json: provide nugget bounds of each QoI in brackets with comma delimiter, e.g. [0.0,1.0],[0.0,2.0],..."
+ elif Bous.shape[0] != 2: # noqa: PLR2004
+ msg = 'Error reading json: provide nugget bounds of each QoI in brackets with comma delimiter, e.g. [0.0,1.0],[0.0,2.0],...'
self.exit(msg)
elif Bous[0] > Bous[1]:
- msg = "Error reading json: the lower bound of a nugget value should be smaller than its upper bound"
+ msg = 'Error reading json: the lower bound of a nugget value should be smaller than its upper bound'
self.exit(msg)
# else:
# # use default
@@ -375,38 +369,33 @@ def readJson(self):
# self.nuggetVal= 1
# self.stochastic =[False] * y_dim
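# A minimal sketch of how the nuggetString field above is interpreted, with made-up
# values for two QoIs.  'Fixed Values' expects one scalar per QoI; 'Fixed Bounds'
# expects one [lower, upper] pair per QoI; both are wrapped in brackets before json.loads.
import json

import numpy as np

values_string = '0.01, 0.02'               # hypothetical 'Fixed Values' input
bounds_string = '[0.0, 1.0], [0.0, 2.0]'   # hypothetical 'Fixed Bounds' input

fixed_values = np.array(json.loads(f'[{values_string}]'))   # shape (2,): one nugget per QoI
fixed_bounds = np.array(json.loads(f'[{bounds_string}]'))   # shape (2, 2): [lower, upper] rows

assert fixed_values.shape == (2,)
assert fixed_bounds.shape == (2, 2) and np.all(fixed_bounds[:, 0] <= fixed_bounds[:, 1])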
-
-
if self.stochastic[0]:
-
@monkeypatch_method(GPy.likelihoods.Gaussian)
- def gaussian_variance(self, Y_metadata=None):
+ def gaussian_variance(self, Y_metadata=None): # noqa: N803
if Y_metadata is None:
return self.variance
- else:
+ else: # noqa: RET505
return self.variance * Y_metadata['variance_structure']
@monkeypatch_method(GPy.core.GP)
- def set_XY2(self, X=None, Y=None, Y_metadata=None):
+ def set_XY2(self, X=None, Y=None, Y_metadata=None): # noqa: N802, N803
if Y_metadata is not None:
if self.Y_metadata is None:
self.Y_metadata = Y_metadata
else:
self.Y_metadata.update(Y_metadata)
- print("metadata_updated")
+ print('metadata_updated') # noqa: T201
self.set_XY(X, Y)
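# A plain-numpy sketch (no GPy) of what the two patches above achieve: the likelihood
# noise becomes point-dependent -- one global nugget scaled by the per-sample
# 'variance_structure' entry that set_XY2 stores with the data.  Numbers are made up.
import numpy as np

sigma2 = 0.05                                    # global Gaussian_noise.variance
variance_structure = np.array([1.0, 0.5, 4.0])   # hypothetical per-sample scaling
per_point_noise = sigma2 * variance_structure    # what the patched gaussian_variance returns
print(per_point_noise)                           # [0.05  0.025 0.2  ]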
-
-
# Save model information
- if (surrogateJson["method"] == "Sampling and Simulation") or (
- surrogateJson["method"] == "Import Data File"
+ if (surrogateJson['method'] == 'Sampling and Simulation') or (
+ surrogateJson['method'] == 'Import Data File'
):
self.do_mf = False
self.modelInfoHF = model_info(
surrogateJson,
- dakotaJson["randomVariables"],
+ dakotaJson['randomVariables'],
self.work_dir,
x_dim,
y_dim,
@@ -415,18 +404,18 @@ def set_XY2(self, X=None, Y=None, Y_metadata=None):
)
self.modelInfoLF = model_info(
surrogateJson,
- dakotaJson["randomVariables"],
+ dakotaJson['randomVariables'],
self.work_dir,
x_dim,
y_dim,
self.n_processor,
idx=-1,
) # NONE model
- elif surrogateJson["method"] == "Import Multi-fidelity Data File":
+ elif surrogateJson['method'] == 'Import Multi-fidelity Data File':
self.do_mf = True
self.modelInfoHF = model_info(
- surrogateJson["highFidelity"],
- dakotaJson["randomVariables"],
+ surrogateJson['highFidelity'],
+ dakotaJson['randomVariables'],
self.work_dir,
x_dim,
y_dim,
@@ -434,8 +423,8 @@ def set_XY2(self, X=None, Y=None, Y_metadata=None):
idx=1,
)
self.modelInfoLF = model_info(
- surrogateJson["lowFidelity"],
- dakotaJson["randomVariables"],
+ surrogateJson['lowFidelity'],
+ dakotaJson['randomVariables'],
self.work_dir,
x_dim,
y_dim,
@@ -446,20 +435,14 @@ def set_XY2(self, X=None, Y=None, Y_metadata=None):
msg = 'Error reading json: select among "Import Data File", "Sampling and Simulation" or "Import Multi-fidelity Data File"'
self.exit(msg)
-
if self.do_mf:
try:
- moduleName = "emukit"
- import emukit.multi_fidelity as emf
- from emukit.model_wrappers.gpy_model_wrappers import GPyMultiOutputWrapper
- from emukit.multi_fidelity.convert_lists_to_array import (
- convert_x_list_to_array,
- convert_xy_lists_to_arrays,
- )
+ moduleName = 'emukit' # noqa: N806
+
error_tag = False # global variable
- except:
- error_tag = True
- print("Failed to import module:" + moduleName)
+ except: # noqa: E722
+ error_tag = True # noqa: F841
+ print('Failed to import module:' + moduleName) # noqa: T201
if self.modelInfoHF.is_model:
self.ll = self.modelInfoHF.ll
@@ -485,16 +468,16 @@ def set_XY2(self, X=None, Y=None, Y_metadata=None):
self.modelInfoHF.runIdx = 0
self.modelInfoLF.runIdx = 0
if self.modelInfoHF.is_model and self.modelInfoLF.is_model:
- self.doeIdx = "HFLF" ## HFHF is for multi-fidelity GPy
+ self.doeIdx = 'HFLF' # HFHF is for multi-fidelity GPy
self.modelInfoHF.runIdx = 1
self.modelInfoLF.runIdx = 2
self.cal_interval = 1
elif not self.modelInfoHF.is_model and self.modelInfoLF.is_model:
- self.doeIdx = "LF"
+ self.doeIdx = 'LF'
elif self.modelInfoHF.is_model and not self.modelInfoLF.is_model:
- self.doeIdx = "HF"
+ self.doeIdx = 'HF'
else:
- self.doeIdx = "HF" # whatever.
+ self.doeIdx = 'HF' # whatever.
#
# For later use..
@@ -508,103 +491,118 @@ def set_XY2(self, X=None, Y=None, Y_metadata=None):
self.rvDiscStr = []
self.rvDiscIdx = []
for nx in range(x_dim):
- rvInfo = dakotaJson["randomVariables"][nx]
- self.rvName = self.rvName + [rvInfo["name"]]
- self.rvDist = self.rvDist + [rvInfo["distribution"]]
+ rvInfo = dakotaJson['randomVariables'][nx] # noqa: N806
+ self.rvName = self.rvName + [rvInfo['name']] # noqa: PLR6104, RUF005
+ self.rvDist = self.rvDist + [rvInfo['distribution']] # noqa: PLR6104, RUF005
if self.modelInfoHF.is_model:
- if rvInfo["distribution"]=="Uniform":
- self.rvVal += [(rvInfo["upperbound"] + rvInfo["lowerbound"]) / 2]
+ if rvInfo['distribution'] == 'Uniform':
+ self.rvVal += [(rvInfo['upperbound'] + rvInfo['lowerbound']) / 2]
self.rvDiscStr += [[]]
- elif rvInfo["distribution"]=="discrete_design_set_string":
+ elif rvInfo['distribution'] == 'discrete_design_set_string':
self.rvVal += [1]
- self.rvDiscStr += [rvInfo["elements"]]
+ self.rvDiscStr += [rvInfo['elements']]
self.rvDiscIdx = [nx]
elif self.modelInfoHF.is_data:
- self.rvVal = self.rvVal + [np.mean(self.modelInfoHF.X_existing[:, nx])]
+ self.rvVal = self.rvVal + [ # noqa: PLR6104, RUF005
+ np.mean(self.modelInfoHF.X_existing[:, nx])
+ ]
else:
self.rvVal = [0] * self.x_dim
-
- def checkWorkflow(self,dakotaJson):
- if dakotaJson["Applications"]["EDP"]["Application"] == "SurrogateEDP":
- msg = "Error in SurrogateGP engine: Do not select [None] in the EDP tab. [None] is used only when using pre-trained surrogate, i.e. when [Surrogate] is selected in the SIM Tab."
+ def checkWorkflow(self, dakotaJson): # noqa: N802, N803, D102
+ if dakotaJson['Applications']['EDP']['Application'] == 'SurrogateEDP':
+ msg = 'Error in SurrogateGP engine: Do not select [None] in the EDP tab. [None] is used only when using pre-trained surrogate, i.e. when [Surrogate] is selected in the SIM Tab.'
self.exit(msg)
- if dakotaJson["Applications"]["Simulation"]["Application"] == "SurrogateSimulation":
- msg = "Error in SurrogateGP engine: Do not select [None] in the FEM tab. [None] is used only when using pre-trained surrogate, i.e. when [Surrogate] is selected in the SIM Tab."
+ if (
+ dakotaJson['Applications']['Simulation']['Application']
+ == 'SurrogateSimulation'
+ ):
+ msg = 'Error in SurrogateGP engine: Do not select [None] in the FEM tab. [None] is used only when using pre-trained surrogate, i.e. when [Surrogate] is selected in the SIM Tab.'
self.exit(msg)
-
- maxSampSize=float("Inf")
- for rv in dakotaJson["randomVariables"]:
+ maxSampSize = float('Inf') # noqa: N806
+ for rv in dakotaJson['randomVariables']:
if rv['distribution'] == 'discrete_design_set_string':
- maxSampSize = len(rv['elements'] )
+ maxSampSize = len(rv['elements']) # noqa: N806
- if (maxSampSize not supported'
self.exit(msg)
if self.do_linear:
- kr = kr + GPy.kern.Linear(input_dim=x_dim, ARD=True)
+ kr = kr + GPy.kern.Linear(input_dim=x_dim, ARD=True) # noqa: PLR6104
if self.do_mf:
- kr = emf.kernels.LinearMultiFidelityKernel([kr.copy(), kr.copy()])
+ kr = emf.kernels.LinearMultiFidelityKernel([kr.copy(), kr.copy()]) # noqa: F821
return kr
- def create_gpy_model(self, X_dummy, Y_dummy, kr):
-
+ def create_gpy_model(self, X_dummy, Y_dummy, kr): # noqa: N803, D102
if not self.do_mf:
-
if not self.heteroscedastic:
- m_tmp =GPy.models.GPRegression( X_dummy, Y_dummy, kernel=kr.copy(), normalizer=self.set_normalizer)
+ m_tmp = GPy.models.GPRegression(
+ X_dummy,
+ Y_dummy,
+ kernel=kr.copy(),
+ normalizer=self.set_normalizer,
+ )
else:
self.set_normalizer = False
- m_tmp = GPy.models.GPHeteroscedasticRegression( X_dummy, Y_dummy, kernel=kr.copy())
+ m_tmp = GPy.models.GPHeteroscedasticRegression(
+ X_dummy, Y_dummy, kernel=kr.copy()
+ )
- #for parname in m_tmp.parameter_names():
+ # for parname in m_tmp.parameter_names():
# if parname.endswith("lengthscale"):
# exec("m_tmp." + parname + "=self.ll")
# for multi fidelity case
else:
- X_list, Y_list = emf.convert_lists_to_array.convert_xy_lists_to_arrays(
+ X_list, Y_list = emf.convert_lists_to_array.convert_xy_lists_to_arrays( # noqa: N806, F821
[X_dummy, X_dummy], [Y_dummy, Y_dummy]
)
- for i in range(y_dim):
- m_tmp = GPyMultiOutputWrapper(emf.models.GPyLinearMultiFidelityModel(X_list, Y_list, kernel=kr.copy(), n_fidelities=2),2,n_optimization_restarts=self.nopt,)
+ for i in range(y_dim): # noqa: B007, F821
+ m_tmp = GPyMultiOutputWrapper( # noqa: F821
+ emf.models.GPyLinearMultiFidelityModel( # noqa: F821
+ X_list, Y_list, kernel=kr.copy(), n_fidelities=2
+ ),
+ 2,
+ n_optimization_restarts=self.nopt,
+ )
return m_tmp
- def create_gp_model(self):
+ def create_gp_model(self): # noqa: D102
x_dim = self.x_dim
y_dim = self.y_dim
# choose kernel
kr = self.create_kernel(x_dim)
- X_dummy = np.zeros((1, x_dim))
- Y_dummy = np.zeros((1, y_dim))
+ X_dummy = np.zeros((1, x_dim)) # noqa: N806
+ Y_dummy = np.zeros((1, y_dim)) # noqa: N806
# for single fidelity case
self.set_normalizer = True
@@ -612,7 +610,11 @@ def create_gp_model(self):
self.normMeans = [0] * y_dim
self.normVars = [1] * y_dim
self.m_list = [0] * self.y_dim
- self.m_var_list, self.var_str, self.Y_mean = [0] * self.y_dim, [1] * self.y_dim, [0] * self.y_dim
+ self.m_var_list, self.var_str, self.Y_mean = (
+ [0] * self.y_dim,
+ [1] * self.y_dim,
+ [0] * self.y_dim,
+ )
for i in range(y_dim):
self.m_list[i] = self.create_gpy_model(X_dummy, Y_dummy, kr)
@@ -620,147 +622,164 @@ def create_gp_model(self):
self.x_dim = x_dim
self.y_dim = y_dim
-
-
- def predict(self, m_tmp, X, noise=0):
-
+ def predict(self, m_tmp, X, noise=0): # noqa: ARG002, N803, D102
if not self.do_mf:
-
- if all(m_tmp.Y == np.mean(m_tmp.Y, axis=0)):
- return m_tmp.Y[0], 0 # if response is constant - just return constant
- elif self.heteroscedastic:
+ if all(np.mean(m_tmp.Y, axis=0) == m_tmp.Y):
+ return m_tmp.Y[
+ 0
+ ], 0 # if response is constant - just return constant
+ elif self.heteroscedastic: # noqa: RET505
return m_tmp.predict_noiseless(X)
else:
return m_tmp.predict_noiseless(X)
else:
-
- idxHF = np.argwhere(m_tmp.gpy_model.X[:, -1] == 0)
- if all(m_tmp.gpy_model.Y == np.mean(m_tmp.gpy_model.Y[idxHF, :], axis=0)):
+ idxHF = np.argwhere(m_tmp.gpy_model.X[:, -1] == 0) # noqa: N806
+ if all(
+ np.mean(m_tmp.gpy_model.Y[idxHF, :], axis=0) == m_tmp.gpy_model.Y
+ ):
return (
m_tmp.gpy_model.Y[0],
0,
) # if high-fidelity response is constant - just return constant
- else:
- X_list = convert_x_list_to_array([X, X])
- X_list_h = X_list[X.shape[0]:]
+ else: # noqa: RET505
+ X_list = convert_x_list_to_array([X, X]) # noqa: N806, F821
+ X_list_h = X_list[X.shape[0] :] # noqa: N806
return m_tmp.predict(X_list_h)
- def set_XY(self, m_tmp, ny, X_hf, Y_hf, X_lf=float("nan"), Y_lf=float("nan"), enforce_hom=False):
-
+ def set_XY( # noqa: C901, N802, D102
+ self,
+ m_tmp,
+ ny,
+ X_hf, # noqa: N803
+ Y_hf, # noqa: N803
+ X_lf=float('nan'), # noqa: N803
+ Y_lf=float('nan'), # noqa: N803
+ enforce_hom=False, # noqa: FBT002
+ ):
#
# check if X dimension has changed...
#
x_current_dim = self.x_dim
for parname in m_tmp.parameter_names():
- if parname.endswith("lengthscale"):
- exec("x_current_dim = len(m_tmp." + parname +")" )
-
- if not x_current_dim == X_hf.shape[1]:
+ if parname.endswith('lengthscale'):
+ exec('x_current_dim = len(m_tmp.' + parname + ')') # noqa: S102
+
+ if x_current_dim != X_hf.shape[1]:
kr = self.create_kernel(X_hf.shape[1])
- X_dummy = np.zeros((1, X_hf.shape[1]))
- Y_dummy = np.zeros((1,1))
+ X_dummy = np.zeros((1, X_hf.shape[1])) # noqa: N806
+ Y_dummy = np.zeros((1, 1)) # noqa: N806
m_new = self.create_gpy_model(X_dummy, Y_dummy, kr)
m_tmp = m_new.copy()
- #m_tmp.optimize()
-
+ # m_tmp.optimize()
+
if self.do_logtransform:
if np.min(Y_hf) < 0:
msg = 'Error running SimCenterUQ - Response contains negative values. Please uncheck the log-transform option in the UQ tab'
self.exit(msg)
- Y_hfs = np.log(Y_hf)
+ Y_hfs = np.log(Y_hf) # noqa: N806
else:
- Y_hfs = Y_hf
+ Y_hfs = Y_hf # noqa: N806
if self.do_logtransform and self.do_mf:
if np.min(Y_lf) < 0:
msg = 'Error running SimCenterUQ - Response contains negative values. Please uncheck the log-transform option in the UQ tab'
self.exit(msg)
- Y_lfs = np.log(Y_lf)
+ Y_lfs = np.log(Y_lf) # noqa: N806
else:
- Y_lfs = Y_lf
+ Y_lfs = Y_lf # noqa: N806
# # below is dummy
# if np.all(np.isnan(X_lf)) and np.all(np.isnan(Y_lf)):
# X_lf = self.X_lf
# Y_lfs = self.Y_lf
- if not self.do_mf :
-
+ if not self.do_mf:
# if self.heteroscedastic:
# m_tmp = GPy.models.GPHeteroscedasticRegression(
# X_hf, Y_hfs, kernel=self.kg.copy()
# )
- # # TODO: temporary... need to find a way to not calibrate but update the variance
+ # # TODO: temporary... need to find a way to not calibrate but update the variance # noqa: TD002
# m_tmp.optimize()
# self.var_str[ny] = np.ones((m_tmp.Y.shape[0], 1))
- X_new, X_idx, indices, counts = np.unique(X_hf, axis=0, return_index=True, return_counts=True,
- return_inverse=True)
+ X_new, X_idx, indices, counts = np.unique( # noqa: F841, N806
+ X_hf,
+ axis=0,
+ return_index=True,
+ return_counts=True,
+ return_inverse=True,
+ )
n_unique = X_new.shape[0]
- #if n_unique == X_hf.shape[0]: # unique set p - just to homogeneous GP
+ # if n_unique == X_hf.shape[0]: # unique set p - just to homogeneous GP
if not self.stochastic[ny] or enforce_hom:
# just homogeneous GP
m_tmp.set_XY(X_hf, Y_hfs)
self.var_str[ny] = np.ones((m_tmp.Y.shape[0], 1))
self.Y_mean[ny] = Y_hfs
- self.indices_unique = range(0,Y_hfs.shape[0])
+ self.indices_unique = range(Y_hfs.shape[0])
self.n_unique_hf = X_hf.shape[0]
- elif n_unique == X_hf.shape[0]: # no repl
- #Y_mean=Y_hfs[X_idx]
- #Y_mean1, nugget_mean1 = self.predictStoMeans(X_new, Y_mean)
- Y_mean1, nugget_mean1 = self.predictStoMeans(X_hf, Y_hfs)
-
- if np.max(nugget_mean1)<1.e-10:
- self.set_XY(m_tmp,ny, X_hf, Y_hfs,enforce_hom=True)
- return;
-
- else:
- Y_metadata, m_var,norm_var_str = self.predictStoVars(X_hf, (Y_hfs-Y_mean1)**2, X_hf,Y_hfs,counts)
+ elif n_unique == X_hf.shape[0]: # no repl
+ # Y_mean=Y_hfs[X_idx]
+ # Y_mean1, nugget_mean1 = self.predictStoMeans(X_new, Y_mean)
+ Y_mean1, nugget_mean1 = self.predictStoMeans(X_hf, Y_hfs) # noqa: N806
+
+ if np.max(nugget_mean1) < 1.0e-10: # noqa: PLR2004
+ self.set_XY(m_tmp, ny, X_hf, Y_hfs, enforce_hom=True)
+ return None
+ else: # noqa: RET505
+ Y_metadata, m_var, norm_var_str = self.predictStoVars( # noqa: N806
+ X_hf, (Y_hfs - Y_mean1) ** 2, X_hf, Y_hfs, counts
+ )
m_tmp.set_XY2(X_hf, Y_hfs, Y_metadata=Y_metadata)
self.m_var_list[ny] = m_var
self.var_str[ny] = norm_var_str
- self.indices_unique = range(0,Y_hfs.shape[0])
+ self.indices_unique = range(Y_hfs.shape[0])
self.n_unique_hf = X_new.shape[0]
self.Y_mean[ny] = Y_hfs
else:
# nonunique set - check if nugget is zero
- Y_mean, Y_var = np.zeros((n_unique, 1)), np.zeros((n_unique, 1))
+ Y_mean, Y_var = np.zeros((n_unique, 1)), np.zeros((n_unique, 1)) # noqa: N806
for idx in range(n_unique):
- Y_subset = Y_hfs[[i for i in np.where(indices == idx)[0]], :]
+ Y_subset = Y_hfs[[i for i in np.where(indices == idx)[0]], :] # noqa: C416, N806
Y_mean[idx, :] = np.mean(Y_subset, axis=0)
Y_var[idx, :] = np.var(Y_subset, axis=0)
- idx_repl = [i for i in np.where(counts > 1)[0]]
+ idx_repl = [i for i in np.where(counts > 1)[0]] # noqa: C416
- if (np.max(Y_var) / np.var(Y_mean) < 1.e-10):
+ if np.max(Y_var) / np.var(Y_mean) < 1.0e-10: # noqa: PLR2004
# NUGGET IS ZERO - no need for stochastic kriging
if self.do_logtransform:
- Y_mean = np.exp(Y_mean)
+ Y_mean = np.exp(Y_mean) # noqa: N806
- m_tmp = self.set_XY(m_tmp, ny, X_new, Y_mean, X_lf, Y_lf) # send only unique to nonstochastic
+ m_tmp = self.set_XY(
+ m_tmp, ny, X_new, Y_mean, X_lf, Y_lf
+ ) # send only unique to nonstochastic
self.indices_unique = indices
return m_tmp
- else:
- # NUGGET IS NONZERO - Do either stochastic/nonstochastic kriging
- if self.nugget_opt == "Heteroscedastic":
-
- #
- # Constructing secondary GP model - can we make use of the "variance of sample variance"
- #
- # TODO: log-variance
-
- Y_metadata, m_var,norm_var_str = self.predictStoVars(X_new[idx_repl, :],Y_var[idx_repl],X_new,Y_mean,counts)
- '''
+ elif self.nugget_opt == 'Heteroscedastic': # noqa: RET505
+ #
+ # Constructing secondary GP model - can we make use of the "variance of sample variance"
+ #
+ # TODO: log-variance # noqa: TD002
+
+ Y_metadata, m_var, norm_var_str = self.predictStoVars( # noqa: N806
+ X_new[idx_repl, :],
+ Y_var[idx_repl],
+ X_new,
+ Y_mean,
+ counts,
+ )
+ """
kernel_var = GPy.kern.Matern52(input_dim=self.x_dim, ARD=True)
log_vars = np.log(Y_var[idx_repl])
m_var = GPy.models.GPRegression(X_new[idx_repl, :], log_vars, kernel_var, normalizer=True,
@@ -781,29 +800,29 @@ def set_XY(self, m_tmp, ny, X_hf, Y_hf, X_lf=float("nan"), Y_lf=float("nan"), en
# norm_var_str = (X_new+2)**2/max((X_new+2)**2)
Y_metadata = {'variance_structure': norm_var_str / counts}
- '''
- m_tmp.set_XY2(X_new, Y_mean, Y_metadata=Y_metadata)
+ """
+ m_tmp.set_XY2(X_new, Y_mean, Y_metadata=Y_metadata)
- self.m_var_list[ny] = m_var
- self.var_str[ny] = norm_var_str
- self.indices_unique = indices
- self.n_unique_hf = X_new.shape[0]
- self.Y_mean[ny] = Y_mean
+ self.m_var_list[ny] = m_var
+ self.var_str[ny] = norm_var_str
+ self.indices_unique = indices
+ self.n_unique_hf = X_new.shape[0]
+ self.Y_mean[ny] = Y_mean
- else:
- # still nonstochastic gp
- m_tmp.set_XY(X_hf, Y_hfs)
- self.var_str[ny] = np.ones((m_tmp.Y.shape[0],1))
- self.indices_unique = range(0,Y_hfs.shape[0])
- self.Y_mean[ny] = Y_hfs
- self.n_unique_hf = X_hf.shape[0]
- self.stochastic[ny] = False
+ else:
+ # still nonstochastic gp
+ m_tmp.set_XY(X_hf, Y_hfs)
+ self.var_str[ny] = np.ones((m_tmp.Y.shape[0], 1))
+ self.indices_unique = range(Y_hfs.shape[0])
+ self.Y_mean[ny] = Y_hfs
+ self.n_unique_hf = X_hf.shape[0]
+ self.stochastic[ny] = False
else:
(
- X_list_tmp,
- Y_list_tmp,
- ) = emf.convert_lists_to_array.convert_xy_lists_to_arrays(
+ X_list_tmp, # noqa: N806
+ Y_list_tmp, # noqa: N806
+ ) = emf.convert_lists_to_array.convert_xy_lists_to_arrays( # noqa: F821
[X_hf, X_lf], [Y_hfs, Y_lfs]
)
m_tmp.set_data(X=X_list_tmp, Y=Y_list_tmp)
@@ -812,96 +831,116 @@ def set_XY(self, m_tmp, ny, X_hf, Y_hf, X_lf=float("nan"), Y_lf=float("nan"), en
if self.set_normalizer:
if not self.do_mf:
self.normMeans[ny] = m_tmp.normalizer.mean
- self.normVars[ny] = m_tmp.normalizer.std ** 2
+ self.normVars[ny] = m_tmp.normalizer.std**2
else:
self.normMeans[ny] = 0
self.normVars[ny] = 1
-
return m_tmp
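# A minimal sketch of the replicate bookkeeping used in set_XY above: np.unique with
# return_inverse/return_counts groups repeated input rows so per-location sample means
# and variances can be formed for stochastic kriging.  Data below are synthetic.
import numpy as np

X_hf = np.array([[0.0, 1.0], [0.0, 1.0], [2.0, 3.0]])   # first location replicated twice
Y_hf = np.array([[1.0], [3.0], [5.0]])

X_new, X_idx, indices, counts = np.unique(
    X_hf, axis=0, return_index=True, return_inverse=True, return_counts=True
)
Y_mean = np.zeros((X_new.shape[0], 1))
Y_var = np.zeros((X_new.shape[0], 1))
for k in range(X_new.shape[0]):
    Y_sub = Y_hf[np.where(indices == k)[0], :]
    Y_mean[k, :] = np.mean(Y_sub, axis=0)
    Y_var[k, :] = np.var(Y_sub, axis=0)
print(counts)                          # [2 1]
print(Y_mean.ravel(), Y_var.ravel())   # [2. 5.] [1. 0.]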
- def predictStoVars(self, X_repl,Y_var_repl, X_new, Y_mean,counts):
-
-
+ def predictStoVars(self, X_repl, Y_var_repl, X_new, Y_mean, counts): # noqa: N802, N803, D102
my_x_dim = X_repl.shape[1]
- kernel_var = GPy.kern.Matern52(input_dim=my_x_dim, ARD=True) + GPy.kern.Linear(input_dim=my_x_dim, ARD=True)
+ kernel_var = GPy.kern.Matern52(
+ input_dim=my_x_dim, ARD=True
+ ) + GPy.kern.Linear(input_dim=my_x_dim, ARD=True)
log_vars = np.log(Y_var_repl)
- m_var = GPy.models.GPRegression(X_repl, log_vars, kernel_var, normalizer=True, Y_metadata=None)
+ m_var = GPy.models.GPRegression(
+ X_repl, log_vars, kernel_var, normalizer=True, Y_metadata=None
+ )
for parname in m_var.parameter_names():
- if parname.endswith("lengthscale"):
+ if parname.endswith('lengthscale'):
for nx in range(X_repl.shape[1]):
myrange = np.max(X_repl, axis=0) - np.min(X_repl, axis=0)
# m_mean.Mat52.lengthscale[[nx]].constrain_bounded( myrange[nx]/X.shape[0], float("Inf"))
m_var.sum.Mat52.lengthscale[[nx]] = myrange[nx] * 100
- m_var.sum.Mat52.lengthscale[[nx]].constrain_bounded(myrange[nx] / X_repl.shape[0]*10, myrange[nx] * 100,warning=False)
- # TODO change the kernel
+ m_var.sum.Mat52.lengthscale[[nx]].constrain_bounded(
+ myrange[nx] / X_repl.shape[0] * 10,
+ myrange[nx] * 100,
+ warning=False,
+ )
+ # TODO change the kernel # noqa: TD002, TD004
m_var.optimize(max_f_eval=1000)
- m_var.optimize_restarts(self.nopt, parallel=True, num_processes=self.n_processor,verbose=False)
- print(m_var)
+ m_var.optimize_restarts(
+ self.nopt, parallel=True, num_processes=self.n_processor, verbose=False
+ )
+ print(m_var) # noqa: T201
- log_var_pred, dum = m_var.predict(X_new)
+ log_var_pred, dum = m_var.predict(X_new) # noqa: F841
var_pred = np.exp(log_var_pred)
- #
- #
# norm_var_str = (var_pred.T[0]/counts) / max(var_pred.T[0]/counts)
if self.set_normalizer:
- norm_var_str = (var_pred.T[0]) / np.var(Y_mean) # if normalization was used..
+ norm_var_str = (var_pred.T[0]) / np.var(
+ Y_mean
+ ) # if normalization was used..
else:
- norm_var_str = (var_pred.T[0]) # if normalization was used..
+            norm_var_str = var_pred.T[0]  # if normalization was not used
# norm_var_str = (X_new+2)**2/max((X_new+2)**2)
- Y_metadata = {'variance_structure': norm_var_str / counts}
+ Y_metadata = {'variance_structure': norm_var_str / counts} # noqa: N806
- return Y_metadata, m_var,norm_var_str
+ return Y_metadata, m_var, norm_var_str
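# A minimal sketch of the idea in predictStoVars above: fit a secondary GP to the log
# of the replicate sample variances and exponentiate its predictions to get a smooth,
# positive variance field.  All data and dimensions below are synthetic.
import numpy as np

import GPy

rng = np.random.default_rng(0)
X_repl = rng.uniform(0.0, 1.0, size=(30, 2))        # inputs where replicates were observed
Y_var_repl = 0.1 + 0.5 * X_repl[:, [0]] ** 2        # made-up sample variances at those inputs

kernel_var = GPy.kern.Matern52(input_dim=2, ARD=True)
m_var = GPy.models.GPRegression(X_repl, np.log(Y_var_repl), kernel_var, normalizer=True)
m_var.optimize()

X_query = rng.uniform(0.0, 1.0, size=(5, 2))
log_var_pred, _ = m_var.predict(X_query)
var_pred = np.exp(log_var_pred)                     # back on the variance scale, always > 0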
- def predictStoMeans(self, X, Y):
+ def predictStoMeans(self, X, Y): # noqa: N802, N803, D102
# under homoscedasticity
my_x_dim = X.shape[1]
kernel_mean = GPy.kern.Matern52(input_dim=my_x_dim, ARD=True)
- #kernel_mean = GPy.kern.Matern52(input_dim=my_x_dim, ARD=True) + GPy.kern.Linear(input_dim=my_x_dim, ARD=True)
+ # kernel_mean = GPy.kern.Matern52(input_dim=my_x_dim, ARD=True) + GPy.kern.Linear(input_dim=my_x_dim, ARD=True)
if self.do_linear and not self.isEEUQ:
- kernel_mean = kernel_mean + GPy.kern.Linear(input_dim=my_x_dim, ARD=True)
-
- m_mean = GPy.models.GPRegression(X, Y, kernel_mean, normalizer=True, Y_metadata=None)
+ kernel_mean = kernel_mean + GPy.kern.Linear(input_dim=my_x_dim, ARD=True) # noqa: PLR6104
+ m_mean = GPy.models.GPRegression(
+ X, Y, kernel_mean, normalizer=True, Y_metadata=None
+ )
for parname in m_mean.parameter_names():
- if parname.endswith("lengthscale"):
+ if parname.endswith('lengthscale'):
for nx in range(X.shape[1]):
- myrange = np.max(X,axis=0)-np.min(X,axis=0)
- #m_mean.kern.Mat52.lengthscale[[nx]]= myrange[nx]*100
- #m_mean.kern.Mat52.lengthscale[[nx]].constrain_bounded(myrange[nx]/X.shape[0]*50, myrange[nx]*100)
+ myrange = np.max(X, axis=0) - np.min(X, axis=0)
+ # m_mean.kern.Mat52.lengthscale[[nx]]= myrange[nx]*100
+ # m_mean.kern.Mat52.lengthscale[[nx]].constrain_bounded(myrange[nx]/X.shape[0]*50, myrange[nx]*100)
if self.isEEUQ:
- m_mean.kern.lengthscale[[nx]]= myrange[nx]*100
- m_mean.kern.lengthscale[[nx]].constrain_bounded(myrange[nx]/X.shape[0]*50, myrange[nx]*100,warning=False)
+ m_mean.kern.lengthscale[[nx]] = myrange[nx] * 100
+ m_mean.kern.lengthscale[[nx]].constrain_bounded(
+ myrange[nx] / X.shape[0] * 50,
+ myrange[nx] * 100,
+ warning=False,
+ )
+ elif self.do_linear:
+ m_mean.kern.Mat52.lengthscale[[nx]] = myrange[nx] * 5000
+ m_mean.kern.Mat52.lengthscale[[nx]].constrain_bounded(
+ myrange[nx] / X.shape[0] * 50,
+ myrange[nx] * 10000,
+ warning=False,
+ )
else:
- if self.do_linear:
- m_mean.kern.Mat52.lengthscale[[nx]] = myrange[nx] * 5000
- m_mean.kern.Mat52.lengthscale[[nx]].constrain_bounded(myrange[nx] / X.shape[0] * 50, myrange[nx] * 10000,warning=False)
- else:
- m_mean.kern.lengthscale[[nx]] = myrange[nx] * 5000
- m_mean.kern.lengthscale[[nx]].constrain_bounded(myrange[nx] / X.shape[0] * 50, myrange[nx] * 10000,warning=False)
-
- #m_mean.optimize(messages=True, max_f_eval=1000)
- #m_mean.Gaussian_noise.variance = np.var(Y) # First calibrate parameters
- m_mean.optimize_restarts(self.nopt, parallel=True, num_processes=self.n_processor,
- verbose=True) # First calibrate parameters
-
- # m_mean.optimize(messages=True, max_f_eval=1000)
-
- #if self.do_linear:
-
- #m_mean.Gaussian_noise.variance=m_mean.Mat52.variance+m_mean.Gaussian_noise.variance
- #else:
- #m_mean.Gaussian_noise.variance=m_mean.RBF.variance+m_mean.Gaussian_noise.variance
- #m_mean.optimize_restarts(10,parallel=True)
-
+ m_mean.kern.lengthscale[[nx]] = myrange[nx] * 5000
+ m_mean.kern.lengthscale[[nx]].constrain_bounded(
+ myrange[nx] / X.shape[0] * 50,
+ myrange[nx] * 10000,
+ warning=False,
+ )
+
+ # m_mean.optimize(messages=True, max_f_eval=1000)
+ # m_mean.Gaussian_noise.variance = np.var(Y) # First calibrate parameters
+ m_mean.optimize_restarts(
+ self.nopt, parallel=True, num_processes=self.n_processor, verbose=True
+ ) # First calibrate parameters
+
+ # m_mean.optimize(messages=True, max_f_eval=1000)
+
+ # if self.do_linear:
+
+ # m_mean.Gaussian_noise.variance=m_mean.Mat52.variance+m_mean.Gaussian_noise.variance
+ # else:
+ # m_mean.Gaussian_noise.variance=m_mean.RBF.variance+m_mean.Gaussian_noise.variance
+ # m_mean.optimize_restarts(10,parallel=True)
+
mean_pred, mean_var = m_mean.predict(X)
- '''
+ """
import matplotlib.pyplot as plt
print(m_mean)
#print(m_mean.Mat52.lengthscale)
@@ -909,16 +948,15 @@ def predictStoMeans(self, X, Y):
plt.plot(X[:, 4], mean_pred, 'rx');
plt.errorbar(X[:, 4],mean_pred.T[0],yerr=np.sqrt(mean_var.T)[0],fmt='x');
plt.show()
- '''
+ """
return mean_pred, mean_var
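# A small numeric illustration of the lengthscale heuristic in predictStoMeans above:
# each dimension starts from a deliberately long lengthscale (range * 5000) and is then
# bounded to [range / n * 50, range * 10000].  The design below is synthetic.
import numpy as np

X = np.random.default_rng(1).uniform([-2.0, 0.0], [2.0, 10.0], size=(40, 2))
myrange = np.max(X, axis=0) - np.min(X, axis=0)

initial_lengthscale = myrange * 5000
lower_bound = myrange / X.shape[0] * 50
upper_bound = myrange * 10000
print(initial_lengthscale, lower_bound, upper_bound)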
-
- def calibrate(self):
- print("Calibrating in parallel",flush=True)
- warnings.filterwarnings("ignore")
+ def calibrate(self): # noqa: C901, D102
+ print('Calibrating in parallel', flush=True) # noqa: T201
+ warnings.filterwarnings('ignore')
t_opt = time.time()
nugget_opt_tmp = self.nugget_opt
- nopt =self.nopt
+ nopt = self.nopt
parallel_calib = False
# parallel_calib = self.do_parallel
@@ -934,54 +972,67 @@ def calibrate(self):
self.heteroscedastic,
nopt,
ny,
- self.n_processor
+ self.n_processor,
)
for ny in range(self.y_dim)
)
result_objs = list(self.pool.starmap(calibrating, iterables))
for m_tmp, msg, ny in result_objs:
self.m_list[ny] = m_tmp
- if not msg == "":
+ if msg != '': # noqa: PLC1901
self.exit(msg)
- # TODO: terminate it gracefully....
+ # TODO: terminate it gracefully.... # noqa: TD002
# see https://stackoverflow.com/questions/21104997/keyboard-interrupt-with-pythons-multiprocessing
else:
for ny in range(self.y_dim):
- self.m_list[ny], msg, ny = calibrating(copy.deepcopy(self.m_list[ny]), nugget_opt_tmp, self.nuggetVal, self.normVars[ny],
- self.do_mf, self.heteroscedastic, nopt , ny, self.n_processor)
- if not msg == "":
+ self.m_list[ny], msg, ny = calibrating( # noqa: PLW2901
+ copy.deepcopy(self.m_list[ny]),
+ nugget_opt_tmp,
+ self.nuggetVal,
+ self.normVars[ny],
+ self.do_mf,
+ self.heteroscedastic,
+ nopt,
+ ny,
+ self.n_processor,
+ )
+ if msg != '': # noqa: PLC1901
self.exit(msg)
####
-
+
# because EE-UQ results are more likely to have huge nugget.
- #if False:
+ # if False:
if self.isEEUQ:
-
if self.heteroscedastic:
- variance_keyword = "het_Gauss.variance"
+ variance_keyword = 'het_Gauss.variance'
else:
- variance_keyword = "Gaussian_noise.variance"
-
+ variance_keyword = 'Gaussian_noise.variance'
for ny in range(self.y_dim):
for parname in self.m_list[ny].parameter_names():
- if parname.endswith("variance") and (not "Gauss" in parname):
- exec("my_new_var = max(self.m_list[ny]." + variance_keyword + ", 10*self.m_list[ny]." + parname+")")
- exec("self.m_list[ny]." + variance_keyword + "= my_new_var")
+ if parname.endswith('variance') and ('Gauss' not in parname):
+ exec( # noqa: S102
+ 'my_new_var = max(self.m_list[ny].'
+ + variance_keyword
+ + ', 10*self.m_list[ny].'
+ + parname
+ + ')'
+ )
+ exec('self.m_list[ny].' + variance_keyword + '= my_new_var') # noqa: S102
self.m_list[ny].optimize()
self.calib_time = time.time() - t_opt
- print(" Calibration time: {:.2f} s".format(self.calib_time),flush=True)
- Y_preds, Y_pred_vars, Y_pred_vars_w_measures, e2 = self.get_cross_validation_err()
+ print(f' Calibration time: {self.calib_time:.2f} s', flush=True) # noqa: T201
+ Y_preds, Y_pred_vars, Y_pred_vars_w_measures, e2 = ( # noqa: N806
+ self.get_cross_validation_err()
+ )
return Y_preds, Y_pred_vars, Y_pred_vars_w_measures, e2
- def train_surrogate(self, t_init):
-
-
+ def train_surrogate(self, t_init): # noqa: C901, D102, PLR0915
self.nc1 = min(200 * self.x_dim, 2000) # candidate points
self.nq = min(200 * self.x_dim, 2000) # integration points
# FEM index
@@ -989,11 +1040,11 @@ def train_surrogate(self, t_init):
self.id_sim_lf = 0
self.time_hf_tot = 0
self.time_lf_tot = 0
- self.time_hf_avg = float("Inf")
- self.time_lf_avg = float("Inf")
+ self.time_hf_avg = float('Inf')
+ self.time_lf_avg = float('Inf')
self.time_ratio = 1
- x_dim = self.x_dim
+ x_dim = self.x_dim # noqa: F841
y_dim = self.y_dim
#
@@ -1003,31 +1054,35 @@ def train_surrogate(self, t_init):
model_hf = self.modelInfoHF
model_lf = self.modelInfoLF
- self.set_FEM(self.rv_name, self.do_parallel, self.y_dim, t_init, model_hf.thr_t)
-
- def FEM_batch_hf(X, id_sim):
+ self.set_FEM(
+ self.rv_name, self.do_parallel, self.y_dim, t_init, model_hf.thr_t
+ )
- Xstr = X.astype(str) # DiscStr: Xstr will be replaced with the string
+ def FEM_batch_hf(X, id_sim): # noqa: N802, N803
+ # DiscStr: Xstr will be replaced with the string
+ Xstr = X.astype(str) # noqa: N806
for nx in self.rvDiscIdx:
for ns in range(X.shape[0]):
- Xstr[ns][nx] = "\""+self.rvDiscStr[nx][int(X[ns][nx]-1)]+"\""
+ Xstr[ns][nx] = '"' + self.rvDiscStr[nx][int(X[ns][nx] - 1)] + '"'
tmp = time.time()
if model_hf.is_model or model_hf.model_without_sampling:
- res = self.run_FEM_batch(Xstr, id_sim, runIdx=model_hf.runIdx, alterInput=self.rvDiscIdx)
+ res = self.run_FEM_batch(
+ Xstr, id_sim, runIdx=model_hf.runIdx, alterInput=self.rvDiscIdx
+ )
else:
res = np.zeros((0, self.x_dim)), np.zeros((0, self.y_dim)), id_sim
self.time_hf_tot += time.time() - tmp
self.time_hf_avg = (
- np.float64(self.time_hf_tot) / res[2]
+ np.float64(self.time_hf_tot) / res[2]
) # so that it gives inf when divided by zero
self.time_ratio = self.time_hf_avg / self.time_lf_avg
return res
- def FEM_batch_lf(X, id_sim):
-
- Xstr = X.astype(str) # DiscStr: Xstr will be replaced with the string
+ def FEM_batch_lf(X, id_sim): # noqa: N802, N803
+ # DiscStr: Xstr will be replaced with the string
+ Xstr = X.astype(str) # noqa: N806
for nx in self.rvDiscIdx:
for ns in range(X.shape[0]):
@@ -1035,60 +1090,62 @@ def FEM_batch_lf(X, id_sim):
tmp = time.time()
if model_lf.is_model:
- res = self.run_FEM_batch(Xstr, id_sim, runIdx=model_lf.runIdx, alterInput=self.rvDiscIdx)
+ res = self.run_FEM_batch(
+ Xstr, id_sim, runIdx=model_lf.runIdx, alterInput=self.rvDiscIdx
+ )
else:
res = np.zeros((0, self.x_dim)), np.zeros((0, self.y_dim)), id_sim
self.time_lf_tot += time.time() - tmp
- if res[2]>0:
+ if res[2] > 0:
self.time_lf_avg = (
- float(self.time_lf_tot) / res[2]
+ float(self.time_lf_tot) / res[2]
) # so that it gives inf when divided by zero
else:
- self.time_lf_avg = (float('Inf'))
+ self.time_lf_avg = float('Inf')
self.time_ratio = self.time_lf_avg / self.time_lf_avg
return res
- tmp = time.time()
+ tmp = time.time() # noqa: F841
#
# get initial samples for high fidelity modeling
#
- X_hf_tmp = model_hf.sampling(max([model_hf.n_init - model_hf.n_existing, 0]))
+ X_hf_tmp = model_hf.sampling(max([model_hf.n_init - model_hf.n_existing, 0])) # noqa: N806
#
# if X is from a data file & Y is from simulation
#
if model_hf.model_without_sampling:
- X_hf_tmp, model_hf.X_existing = model_hf.X_existing, X_hf_tmp
- X_hf_tmp, Y_hf_tmp, self.id_sim_hf = FEM_batch_hf(X_hf_tmp, self.id_sim_hf)
-
+ X_hf_tmp, model_hf.X_existing = model_hf.X_existing, X_hf_tmp # noqa: N806
+ X_hf_tmp, Y_hf_tmp, self.id_sim_hf = FEM_batch_hf(X_hf_tmp, self.id_sim_hf) # noqa: N806
- if model_hf.X_existing.shape[0]==0:
+ if model_hf.X_existing.shape[0] == 0:
self.X_hf, self.Y_hf = X_hf_tmp, Y_hf_tmp
else:
- if model_hf.X_existing.shape[1]!=X_hf_tmp.shape[1]:
- msg = "Error importing input dimension specified {} is different from the written {}.".format(model_hf.X_existing.shape[1],X_hf_tmp.shape[1])
+ if model_hf.X_existing.shape[1] != X_hf_tmp.shape[1]:
+                msg = f'Error importing input data: the specified dimension {model_hf.X_existing.shape[1]} is different from the written dimension {X_hf_tmp.shape[1]}.'
self.exit(msg)
- self.X_hf, self.Y_hf = np.vstack([model_hf.X_existing, X_hf_tmp]), np.vstack(
- [model_hf.Y_existing, Y_hf_tmp]
+ self.X_hf, self.Y_hf = (
+ np.vstack([model_hf.X_existing, X_hf_tmp]),
+ np.vstack([model_hf.Y_existing, Y_hf_tmp]),
)
- X_lf_tmp = model_lf.sampling(max([model_lf.n_init - model_lf.n_existing, 0]))
+ X_lf_tmp = model_lf.sampling(max([model_lf.n_init - model_lf.n_existing, 0])) # noqa: N806
# Design of experiments - Nearest neighbor sampling
# Giselle Fernández-Godino, M., Park, C., Kim, N. H., & Haftka, R. T. (2019). Issues in deciding whether to use multifidelity surrogates. AIAA Journal, 57(5), 2039-2054.
self.n_LFHFoverlap = 0
new_x_lf_tmp = np.zeros((0, self.x_dim))
- X_tmp = X_lf_tmp
+ X_tmp = X_lf_tmp # noqa: N806
for x_hf in self.X_hf:
if X_tmp.shape[0] > 0:
- id = closest_node(x_hf, X_tmp, self.ll)
+ id = closest_node(x_hf, X_tmp, self.ll) # noqa: A001
new_x_lf_tmp = np.vstack([new_x_lf_tmp, x_hf])
- X_tmp = np.delete(X_tmp, id, axis=0)
+ X_tmp = np.delete(X_tmp, id, axis=0) # noqa: N806
self.n_LFHFoverlap += 1
new_x_lf_tmp = np.vstack([new_x_lf_tmp, X_tmp])
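# A minimal sketch of the nearest-neighbor pairing above: for every high-fidelity point
# the closest remaining low-fidelity candidate is swapped out for the HF location so the
# two designs overlap.  closest_node() is not shown in this hunk, so a plain Euclidean
# argmin is assumed here in its place; the designs themselves are synthetic.
import numpy as np

rng = np.random.default_rng(2)
X_hf_demo = rng.uniform(size=(3, 2))      # hypothetical high-fidelity design
X_lf_demo = rng.uniform(size=(10, 2))     # hypothetical low-fidelity candidates

X_tmp = X_lf_demo.copy()
new_x_lf = np.zeros((0, 2))
for x_hf in X_hf_demo:
    if X_tmp.shape[0] > 0:
        idx = np.argmin(np.sum((X_tmp - x_hf) ** 2, axis=1))   # assumed closest_node
        new_x_lf = np.vstack([new_x_lf, x_hf])                 # reuse the HF location
        X_tmp = np.delete(X_tmp, idx, axis=0)
new_x_lf = np.vstack([new_x_lf, X_tmp])   # remaining LF candidates keep their own positions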
@@ -1096,21 +1153,18 @@ def FEM_batch_lf(X, id_sim):
new_x_lf_tmp, self.id_sim_lf
)
- self.X_lf, self.Y_lf = np.vstack(
- [model_lf.X_existing, new_x_lf_tmp]
- ), np.vstack([model_lf.Y_existing, new_y_lf_tmp])
+ self.X_lf, self.Y_lf = (
+ np.vstack([model_lf.X_existing, new_x_lf_tmp]),
+ np.vstack([model_lf.Y_existing, new_y_lf_tmp]),
+ )
- if self.X_lf.shape[0]!=0:
+ if self.X_lf.shape[0] != 0:
if self.X_hf.shape[1] != self.X_lf.shape[1]:
- msg = "Error importing input data: dimension inconsistent: high fidelity model have {} RV(s) but low fidelity model have {}.".format(
- self.X_hf.shape[1], self.X_lf.shape[1]
- )
+                msg = f'Error importing input data: dimension inconsistent: high fidelity model has {self.X_hf.shape[1]} RV(s) but low fidelity model has {self.X_lf.shape[1]}.'
self.exit(msg)
if self.Y_hf.shape[1] != self.Y_lf.shape[1]:
- msg = "Error importing input data: dimension inconsistent: high fidelity model have {} QoI(s) but low fidelity model have {}.".format(
- self.Y_hf.shape[1], self.Y_lf.shape[1]
- )
+                msg = f'Error importing input data: dimension inconsistent: high fidelity model has {self.Y_hf.shape[1]} QoI(s) but low fidelity model has {self.Y_lf.shape[1]}.'
self.exit(msg)
for i in range(y_dim):
@@ -1130,8 +1184,7 @@ def FEM_batch_lf(X, id_sim):
self.NRMSE_hist = np.zeros((1, y_dim), float)
self.NRMSE_idx = np.zeros((1, 1), int)
- print("======== RUNNING GP DoE ===========",flush=True)
-
+ print('======== RUNNING GP DoE ===========', flush=True) # noqa: T201
#
# Run Design of experiments
@@ -1141,110 +1194,115 @@ def FEM_batch_lf(X, id_sim):
nc1 = self.nc1
nq = self.nq
n_new = 0
- while exit_flag == False:
+ while exit_flag == False: # noqa: E712
# Initial calibration
# Calibrate self.m_list
- self.Y_cvs, self.Y_cv_vars, self.Y_cv_var_w_measures, e2 = self.calibrate()
+ self.Y_cvs, self.Y_cv_vars, self.Y_cv_var_w_measures, e2 = (
+ self.calibrate()
+ )
if self.do_logtransform:
# self.Y_cv = np.exp(2*self.Y_cvs+self.Y_cv_vars)*(np.exp(self.Y_cv_vars)-1) # in linear space
- # TODO: Let us use median instead of mean?
+ # TODO: Let us use median instead of mean? # noqa: TD002
self.Y_cv = np.exp(self.Y_cvs)
self.Y_cv_var = np.exp(2 * self.Y_cvs + self.Y_cv_vars) * (
- np.exp(self.Y_cv_vars) - 1
+ np.exp(self.Y_cv_vars) - 1
) # in linear space
- self.Y_cv_var_w_measure = np.exp(2 * self.Y_cvs + self.Y_cv_var_w_measures) * (
- np.exp(self.Y_cv_var_w_measures) - 1
- ) # in linear space
+ self.Y_cv_var_w_measure = np.exp(
+ 2 * self.Y_cvs + self.Y_cv_var_w_measures
+ ) * (np.exp(self.Y_cv_var_w_measures) - 1) # in linear space
else:
self.Y_cv = self.Y_cvs
self.Y_cv_var = self.Y_cv_vars
self.Y_cv_var_w_measure = self.Y_cv_var_w_measures
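# Worked sketch of the lognormal back-transform applied above: if
# log(Y) ~ N(mu, s2), then exp(mu) is the median of Y (the quantity kept in
# Y_cv) and Var[Y] = exp(2*mu + s2) * (exp(s2) - 1). Values below are
# illustrative only.
import numpy as np

mu, s2 = 1.2, 0.3**2
median_y = np.exp(mu)
var_y = np.exp(2 * mu + s2) * (np.exp(s2) - 1)

# Monte Carlo cross-check of the identity
samples = np.exp(np.random.default_rng(1).normal(mu, np.sqrt(s2), 200_000))
print(median_y, np.median(samples))  # close
print(var_y, samples.var())          # close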
if self.n_unique_hf < model_hf.thr_count:
- if self.doeIdx == "HF":
- tmp_doeIdx = self.doeIdx # single fideility
+ if self.doeIdx == 'HF':
+                    tmp_doeIdx = self.doeIdx  # single fidelity  # noqa: N806
else:
- tmp_doeIdx = "HFHF" # HF in multifideility
+                    tmp_doeIdx = 'HFHF'  # HF in multi-fidelity  # noqa: N806
- [x_new_hf, y_idx_hf, score_hf] = self.run_design_of_experiments(
+ [x_new_hf, y_idx_hf, score_hf] = self.run_design_of_experiments( # noqa: F841
nc1, nq, e2, tmp_doeIdx
)
else:
score_hf = 0
if self.id_sim_lf < model_lf.thr_count:
- [x_new_lf, y_idx_lf, score_lf] = self.run_design_of_experiments(
- nc1, nq, e2, "LF"
+ [x_new_lf, y_idx_lf, score_lf] = self.run_design_of_experiments( # noqa: F841
+ nc1, nq, e2, 'LF'
)
else:
score_lf = 0 # score : reduced amount of variance
- if self.doeIdx == "HFLF":
- fideilityIdx = np.argmax(
+ if self.doeIdx == 'HFLF':
+ fideilityIdx = np.argmax( # noqa: N806
[score_hf / self.time_hf_avg, score_lf / self.time_lf_avg]
)
if fideilityIdx == 0:
- tmp_doeIdx = "HF"
+ tmp_doeIdx = 'HF' # noqa: N806
else:
- tmp_doeIdx = "LF"
+ tmp_doeIdx = 'LF' # noqa: N806
else:
- tmp_doeIdx = self.doeIdx
+ tmp_doeIdx = self.doeIdx # noqa: N806
if self.do_logtransform:
- Y_hfs = np.log(self.Y_hf)
+ Y_hfs = np.log(self.Y_hf) # noqa: N806
else:
- Y_hfs = self.Y_hf
+ Y_hfs = self.Y_hf # noqa: N806
- NRMSE_val = self.normalized_mean_sq_error(self.Y_cvs, Y_hfs)
+ NRMSE_val = self.normalized_mean_sq_error(self.Y_cvs, Y_hfs) # noqa: N806
self.NRMSE_hist = np.vstack((self.NRMSE_hist, np.array(NRMSE_val)))
self.NRMSE_idx = np.vstack((self.NRMSE_idx, i))
if (
- self.n_unique_hf >= model_hf.thr_count # self.id_sim_hf >= model_hf.thr_count
- and self.id_sim_lf >= model_lf.thr_count
+ self.n_unique_hf
+ >= model_hf.thr_count # self.id_sim_hf >= model_hf.thr_count
+ and self.id_sim_lf >= model_lf.thr_count
):
-
n_iter = i
- self.exit_code = "count"
+ self.exit_code = 'count'
if self.id_sim_hf == 0 and self.id_sim_lf == 0:
- self.exit_code = "data"
+ self.exit_code = 'data'
exit_flag = True
break
-
- if self.X_hf.shape[0] == model_hf.thr_count and np.sum(self.stochastic)==0:
+
+ if (
+ self.X_hf.shape[0] == model_hf.thr_count
+ and np.sum(self.stochastic) == 0
+ ):
# This is when replicated unwantedly
n_iter = i
- self.exit_code = "count"
+ self.exit_code = 'count'
exit_flag = True
break
-
+
if np.max(NRMSE_val) < model_hf.thr_NRMSE:
n_iter = i
- self.exit_code = "accuracy"
+ self.exit_code = 'accuracy'
exit_flag = True
break
if time.time() - t_init > model_hf.thr_t - self.calib_time:
- n_iter = i
- self.exit_code = "time"
- doe_off = True
+ n_iter = i # noqa: F841
+ self.exit_code = 'time'
+ doe_off = True # noqa: F841
break
- if tmp_doeIdx.startswith("HF"):
+ if tmp_doeIdx.startswith('HF'):
n_new = x_new_hf.shape[0]
- if n_new +self.n_unique_hf > model_hf.thr_count:
- n_new = model_hf.thr_count -self.n_unique_hf
+ if n_new + self.n_unique_hf > model_hf.thr_count:
+ n_new = model_hf.thr_count - self.n_unique_hf
x_new_hf = x_new_hf[0:n_new, :]
x_hf_new, y_hf_new, self.id_sim_hf = FEM_batch_hf(
x_new_hf, self.id_sim_hf
)
self.X_hf = np.vstack([self.X_hf, x_hf_new])
self.Y_hf = np.vstack([self.Y_hf, y_hf_new])
- i =self.n_unique_hf + n_new
+ i = self.n_unique_hf + n_new
- if tmp_doeIdx.startswith("LF"):
+ if tmp_doeIdx.startswith('LF'):
n_new = x_new_lf.shape[0]
if n_new + self.id_sim_lf > model_lf.thr_count:
n_new = model_lf.thr_count - self.id_sim_lf
@@ -1255,7 +1313,7 @@ def FEM_batch_lf(X, id_sim):
self.X_lf = np.vstack([self.X_lf, x_lf_new])
self.Y_lf = np.vstack([self.Y_lf, y_lf_new])
i = self.id_sim_lf + n_new
- # TODO
+ # TODO # noqa: TD002, TD004
# print(">> {:.2f} s".format(time.time() - t_init))
@@ -1274,13 +1332,13 @@ def FEM_batch_lf(X, id_sim):
self.verify()
self.verify_nugget()
- print("my exit code = {}".format(self.exit_code),flush=True)
- print("1. count = {}".format(self.id_sim_hf),flush=True)
- print("1. count_unique = {}".format(self.n_unique_hf),flush=True)
- print("2. max(NRMSE) = {}".format(np.max(self.NRMSE_val)),flush=True)
- print("3. time = {:.2f} s".format(self.sim_time),flush=True)
+ print(f'my exit code = {self.exit_code}', flush=True) # noqa: T201
+ print(f'1. count = {self.id_sim_hf}', flush=True) # noqa: T201
+ print(f'1. count_unique = {self.n_unique_hf}', flush=True) # noqa: T201
+ print(f'2. max(NRMSE) = {np.max(self.NRMSE_val)}', flush=True) # noqa: T201
+ print(f'3. time = {self.sim_time:.2f} s', flush=True) # noqa: T201
- """
+ r"""
self.inbound50
self.Gausspvalue
@@ -1457,7 +1515,7 @@ def FEM_batch_lf(X, id_sim):
plt.xlabel("CV")
plt.ylabel("Exact")
plt.show()
- """
+ """ # noqa: W291, W293
# plt.show()
# plt.plot(self.Y_cv[:, 1],Y_exact[:,1],'x')
# plt.plot(Y_exact[:, 1],Y_exact[:, 1],'x')
@@ -1472,14 +1530,14 @@ def FEM_batch_lf(X, id_sim):
#
# self.m_list[i].predict()
- def verify(self):
- Y_cv = self.Y_cv
- Y = self.Y_hf
+ def verify(self): # noqa: D102
+ Y_cv = self.Y_cv # noqa: N806
+ Y = self.Y_hf # noqa: N806
model_hf = self.modelInfoHF
if model_hf.is_model:
n_err = 1000
- Xerr = model_hf.resampling(self.m_list[0].X,n_err)
+ Xerr = model_hf.resampling(self.m_list[0].X, n_err) # noqa: N806
y_pred_var = np.zeros((n_err, self.y_dim))
y_data_var = np.zeros((n_err, self.y_dim))
@@ -1487,7 +1545,7 @@ def verify(self):
y_base_var = np.zeros((self.y_dim,))
for ny in range(self.y_dim):
m_tmp = self.m_list[ny]
- #y_data_var[:, ny] = np.var(Y[:, ny])
+ # y_data_var[:, ny] = np.var(Y[:, ny])
y_data_var[:, ny] = np.var(self.m_list[ny].Y)
# if self.do_logtransform:
# log_mean = np.mean(np.log(Y[:, ny]))
@@ -1497,22 +1555,26 @@ def verify(self):
# y_var_vals = np.var(Y[:, ny])
for ns in range(n_err):
- y_preds, y_pred_vars = self.predict(m_tmp, Xerr[ns, :][np.newaxis])
+ y_preds, y_pred_vars = self.predict(
+ m_tmp, Xerr[ns, :][np.newaxis]
+ )
y_pred_var[ns, ny] = y_pred_vars
y_pred_mean[ns, ny] = y_preds
-
- #dummy, y_base_var[ny] = self.predict(m_tmp, Xerr[ns, :][np.newaxis]*10000)
- dummy, y_base_var[ny] = self.predict(m_tmp, Xerr[ns, :][np.newaxis] * 10000)
- #if self.do_logtransform:
- # y_pred_var[ns, ny] = np.exp(2 * y_preds + y_pred_vars) * (
- # np.exp(y_pred_vars) - 1
- # )
- #else:
- # y_pred_var[ns, ny] = y_pred_vars
+ # dummy, y_base_var[ny] = self.predict(m_tmp, Xerr[ns, :][np.newaxis]*10000)
+ dummy, y_base_var[ny] = self.predict( # noqa: F841
+ m_tmp, Xerr[ns, :][np.newaxis] * 10000
+ )
+
+ # if self.do_logtransform:
+ # y_pred_var[ns, ny] = np.exp(2 * y_preds + y_pred_vars) * (
+ # np.exp(y_pred_vars) - 1
+ # )
+ # else:
+ # y_pred_var[ns, ny] = y_pred_vars
- error_ratio2_Pr = y_pred_var / y_data_var
- print(np.max(error_ratio2_Pr, axis=0),flush=True)
+ error_ratio2_Pr = y_pred_var / y_data_var # noqa: N806
+ print(np.max(error_ratio2_Pr, axis=0), flush=True) # noqa: T201
perc_thr_tmp = np.hstack(
[np.array([1]), np.arange(10, 1000, 50), np.array([999])]
@@ -1529,7 +1591,7 @@ def verify(self):
self.perc_val = 0
corr_val = np.zeros((self.y_dim,))
- R2_val = np.zeros((self.y_dim,))
+ R2_val = np.zeros((self.y_dim,)) # noqa: N806
for ny in range(self.y_dim):
corr_val[ny] = np.corrcoef(Y[:, ny], Y_cv[:, ny])[0, 1]
R2_val[ny] = 1 - np.sum(pow(Y_cv[:, ny] - Y[:, ny], 2)) / np.sum(
@@ -1542,27 +1604,33 @@ def verify(self):
self.corr_val = corr_val
self.R2_val = R2_val
-
-
- def verify_nugget(self):
- Y_cv = self.Y_cv
- Y_cv_var_w_measure = self.Y_cv_var_w_measure
- Y = self.Y_hf
- model_hf = self.modelInfoHF
+ def verify_nugget(self): # noqa: D102
+ Y_cv = self.Y_cv # noqa: N806
+ Y_cv_var_w_measure = self.Y_cv_var_w_measure # noqa: N806
+ Y = self.Y_hf # noqa: N806
+ model_hf = self.modelInfoHF # noqa: F841
self.inbound50 = np.zeros((self.y_dim,))
self.Gausspvalue = np.zeros((self.y_dim,))
if not self.do_mf:
-
for ny in range(self.y_dim):
if not self.do_logtransform:
+ PI_lb = norm.ppf( # noqa: N806
+ 0.25,
+ loc=Y_cv[:, ny],
+ scale=np.sqrt(Y_cv_var_w_measure[:, ny]),
+ )
+ PI_ub = norm.ppf( # noqa: N806
+ 0.75,
+ loc=Y_cv[:, ny],
+ scale=np.sqrt(Y_cv_var_w_measure[:, ny]),
+ )
+ num_in_bound = np.sum((Y[:, ny] > PI_lb) * (Y[:, ny] < PI_ub))
- PI_lb = norm.ppf(0.25, loc=Y_cv[:, ny], scale=np.sqrt(Y_cv_var_w_measure[:, ny]))
- PI_ub = norm.ppf(0.75, loc=Y_cv[:, ny], scale=np.sqrt(Y_cv_var_w_measure[:, ny]))
-                    num_in_bound = np.sum((Y[:, ny]>PI_lb)*(Y[:, ny]<PI_ub))
-                    num_in_bound = np.sum((np.log(Y[:, ny])>PI_lb)*(np.log(Y[:, ny])<PI_ub))
+                    num_in_bound = np.sum(
+                        (np.log(Y[:, ny]) > PI_lb) * (np.log(Y[:, ny]) < PI_ub)
+ )
- norm_residual = (np.log(Y[:, ny]) - log_Y_cv) / np.sqrt(log_Y_cv_var_w_measure)
+ norm_residual = (np.log(Y[:, ny]) - log_Y_cv) / np.sqrt(
+ log_Y_cv_var_w_measure
+ )
stats = cramervonmises(norm_residual, 'norm')
- self.inbound50[ny] = num_in_bound/Y.shape[0]
+ self.inbound50[ny] = num_in_bound / Y.shape[0]
self.Gausspvalue[ny] = stats.pvalue
else:
pass
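# Sketch of the two calibration checks computed above, on illustrative 1-D
# arrays: (1) the fraction of observations inside the central 50% prediction
# interval (near 0.5 for a well-calibrated surrogate), and (2) a
# Cramer-von Mises test that the standardized residuals are standard normal.
import numpy as np
from scipy.stats import cramervonmises, norm

rng = np.random.default_rng(2)
y_true = rng.normal(0.0, 1.0, 500)
y_pred = y_true + rng.normal(0.0, 0.2, 500)  # stand-in for CV predictions
pred_var = np.full(500, 0.2**2)              # predictive variance incl. noise

pi_lb = norm.ppf(0.25, loc=y_pred, scale=np.sqrt(pred_var))
pi_ub = norm.ppf(0.75, loc=y_pred, scale=np.sqrt(pred_var))
coverage_50 = np.mean((y_true > pi_lb) & (y_true < pi_ub))

residual = (y_true - y_pred) / np.sqrt(pred_var)
print(coverage_50, cramervonmises(residual, 'norm').pvalue)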
-
-
- def save_model(self, filename):
-
+ def save_model(self, filename): # noqa: C901, D102, PLR0915
if self.isEEUQ:
- self.rv_name_new=[]
+ self.rv_name_new = []
for nx in range(self.x_dim):
- if (self.modelInfoHF.xDistTypeArr[nx] == "U"):
+ if self.modelInfoHF.xDistTypeArr[nx] == 'U':
self.rv_name_new += [self.rv_name[nx]]
-
- if len(self.IM_names)>0:
- self.rv_name_new += self.IM_names
- self.rv_name=self.rv_name_new
- self.x_dim=len(self.rv_name_new)
+ if len(self.IM_names) > 0:
+ self.rv_name_new += self.IM_names
+
+ self.rv_name = self.rv_name_new
+ self.x_dim = len(self.rv_name_new)
if self.do_mf:
- with open(self.work_dir + "/" + filename + ".pkl", "wb") as file:
+ with open(self.work_dir + '/' + filename + '.pkl', 'wb') as file: # noqa: PTH123
pickle.dump(self.m_list, file)
- header_string_x = " " + " ".join([str(elem) for elem in self.rv_name]) + " "
- header_string_y = " " + " ".join([str(elem) for elem in self.g_name])
+ header_string_x = ' ' + ' '.join([str(elem) for elem in self.rv_name]) + ' '
+ header_string_y = ' ' + ' '.join([str(elem) for elem in self.g_name])
header_string = header_string_x + header_string_y
xy_data = np.concatenate(
- (np.asmatrix(np.arange(1, self.X_hf.shape[0] + 1)).T, self.X_hf, self.Y_hf),
+ (
+ np.asmatrix(np.arange(1, self.X_hf.shape[0] + 1)).T,
+ self.X_hf,
+ self.Y_hf,
+ ),
axis=1,
)
xy_data = xy_data.astype(float)
- self.X_hf = self.X_hf.astype(float)
- self.Y_hf = self.Y_hf.astype(float)
+ self.X_hf = self.X_hf.astype(float)
+ self.Y_hf = self.Y_hf.astype(float)
np.savetxt(
- self.work_dir + "/dakotaTab.out",
+ self.work_dir + '/dakotaTab.out',
xy_data,
header=header_string,
- fmt="%1.4e",
- comments="%",
+ fmt='%1.4e',
+ comments='%',
)
np.savetxt(
- self.work_dir + "/inputTab.out",
+ self.work_dir + '/inputTab.out',
self.X_hf,
header=header_string_x,
- fmt="%1.4e",
- comments="%",
+ fmt='%1.4e',
+ comments='%',
)
np.savetxt(
- self.work_dir + "/outputTab.out",
+ self.work_dir + '/outputTab.out',
self.Y_hf,
header=header_string_y,
- fmt="%1.4e",
- comments="%",
+ fmt='%1.4e',
+ comments='%',
)
y_ub = np.zeros(self.Y_cv.shape)
y_lb = np.zeros(self.Y_cv.shape)
- y_ubm = np.zeros(self.Y_cv.shape) # with measruement
+        y_ubm = np.zeros(self.Y_cv.shape)  # with measurement
y_lbm = np.zeros(self.Y_cv.shape)
if not self.do_logtransform:
@@ -1656,10 +1733,14 @@ def save_model(self, filename):
0.95, loc=self.Y_cv[:, ny], scale=np.sqrt(self.Y_cv_var[:, ny])
).tolist()
y_lbm[:, ny] = norm.ppf(
- 0.05, loc=self.Y_cv[:, ny], scale=np.sqrt(self.Y_cv_var_w_measure[:, ny])
+ 0.05,
+ loc=self.Y_cv[:, ny],
+ scale=np.sqrt(self.Y_cv_var_w_measure[:, ny]),
).tolist()
y_ubm[:, ny] = norm.ppf(
- 0.95, loc=self.Y_cv[:, ny], scale=np.sqrt(self.Y_cv_var_w_measure[:, ny])
+ 0.95,
+ loc=self.Y_cv[:, ny],
+ scale=np.sqrt(self.Y_cv_var_w_measure[:, ny]),
).tolist()
else:
for ny in range(self.y_dim):
@@ -1671,143 +1752,162 @@ def save_model(self, filename):
y_ub[:, ny] = lognorm.ppf(0.95, s=sig, scale=np.exp(mu)).tolist()
sig_m = np.sqrt(
- np.log(self.Y_cv_var_w_measure[:, ny] / pow(self.Y_cv[:, ny], 2) + 1)
+ np.log(
+ self.Y_cv_var_w_measure[:, ny] / pow(self.Y_cv[:, ny], 2) + 1
+ )
)
y_lbm[:, ny] = lognorm.ppf(0.05, s=sig_m, scale=np.exp(mu)).tolist()
y_ubm[:, ny] = lognorm.ppf(0.95, s=sig_m, scale=np.exp(mu)).tolist()
- xy_sur_data = np.hstack((xy_data, self.Y_cv, y_lb, y_ub, self.Y_cv_var, y_lbm, y_ubm, self.Y_cv_var_w_measure))
+ xy_sur_data = np.hstack(
+ (
+ xy_data,
+ self.Y_cv,
+ y_lb,
+ y_ub,
+ self.Y_cv_var,
+ y_lbm,
+ y_ubm,
+ self.Y_cv_var_w_measure,
+ )
+ )
g_name_sur = self.g_name
header_string_sur = (
- header_string
- + " "
- + ".median ".join(g_name_sur)
- + ".median "
- + ".q5 ".join(g_name_sur)
- + ".q5 "
- + ".q95 ".join(g_name_sur)
- + ".q95 "
- + ".var ".join(g_name_sur)
- + ".var "
- + ".q5_w_mnoise ".join(g_name_sur)
- + ".q5_w_mnoise "
- + ".q95_w_mnoise ".join(g_name_sur)
- + ".q95_w_mnoise "
- + ".var_w_mnoise ".join(g_name_sur)
- + ".var_w_mnoise "
+ header_string
+ + ' '
+ + '.median '.join(g_name_sur)
+ + '.median '
+ + '.q5 '.join(g_name_sur)
+ + '.q5 '
+ + '.q95 '.join(g_name_sur)
+ + '.q95 '
+ + '.var '.join(g_name_sur)
+ + '.var '
+ + '.q5_w_mnoise '.join(g_name_sur)
+ + '.q5_w_mnoise '
+ + '.q95_w_mnoise '.join(g_name_sur)
+ + '.q95_w_mnoise '
+ + '.var_w_mnoise '.join(g_name_sur)
+ + '.var_w_mnoise '
)
np.savetxt(
- self.work_dir + "/surrogateTab.out",
+ self.work_dir + '/surrogateTab.out',
xy_sur_data,
header=header_string_sur,
- fmt="%1.4e",
- comments="%",
+ fmt='%1.4e',
+ comments='%',
)
results = {}
- hfJson = {}
- hfJson["doSampling"] = self.modelInfoHF.is_model
- hfJson["doSimulation"] = self.modelInfoHF.is_model
- hfJson["DoEmethod"] = self.modelInfoHF.doe_method
- hfJson["thrNRMSE"] = self.modelInfoHF.thr_NRMSE
- hfJson["valSamp"] = self.modelInfoHF.n_existing + self.id_sim_hf
- hfJson["valSampUnique"] = self.n_unique_hf
- hfJson["valSim"] = self.id_sim_hf
-
- constIdx = []
- constVal = []
+ hfJson = {} # noqa: N806
+ hfJson['doSampling'] = self.modelInfoHF.is_model
+ hfJson['doSimulation'] = self.modelInfoHF.is_model
+ hfJson['DoEmethod'] = self.modelInfoHF.doe_method
+ hfJson['thrNRMSE'] = self.modelInfoHF.thr_NRMSE
+ hfJson['valSamp'] = self.modelInfoHF.n_existing + self.id_sim_hf
+ hfJson['valSampUnique'] = self.n_unique_hf
+ hfJson['valSim'] = self.id_sim_hf
+
+ constIdx = [] # noqa: N806
+ constVal = [] # noqa: N806
for ny in range(self.y_dim):
if np.var(self.Y_hf[:, ny]) == 0:
- constIdx += [ny]
- constVal += [np.mean((self.Y_hf[:, ny]))]
+ constIdx += [ny] # noqa: N806
+ constVal += [np.mean(self.Y_hf[:, ny])] # noqa: N806
- hfJson["constIdx"] = constIdx
- hfJson["constVal"] = constVal
+ hfJson['constIdx'] = constIdx
+ hfJson['constVal'] = constVal
- results["inpData"] = self.modelInfoHF.inpData
- results["outData"] = self.modelInfoHF.outData
- results["valSamp"] = self.X_hf.shape[0]
- results["doStochastic"] = self.stochastic
- results["doNormalization"] = self.set_normalizer
- results["isEEUQ"] = self.isEEUQ
+ results['inpData'] = self.modelInfoHF.inpData
+ results['outData'] = self.modelInfoHF.outData
+ results['valSamp'] = self.X_hf.shape[0]
+ results['doStochastic'] = self.stochastic
+ results['doNormalization'] = self.set_normalizer
+ results['isEEUQ'] = self.isEEUQ
if self.isEEUQ:
- if len(self.IM_names)>0:
- IM_sub_Json = {}
- IM_sub_Json["IntensityMeasure"] = self.intensityMeasure
- IM_sub_Json["GeneralInformation"] = {"units":self.unitInfo}
- IM_sub_Json["Events"] = {}
+ if len(self.IM_names) > 0:
+ IM_sub_Json = {} # noqa: N806
+ IM_sub_Json['IntensityMeasure'] = self.intensityMeasure
+ IM_sub_Json['GeneralInformation'] = {'units': self.unitInfo}
+ IM_sub_Json['Events'] = {}
- results["intensityMeasureInfo"] = IM_sub_Json
+ results['intensityMeasureInfo'] = IM_sub_Json
- results["highFidelityInfo"] = hfJson
+ results['highFidelityInfo'] = hfJson
- lfJson = {}
+ lfJson = {} # noqa: N806
if self.do_mf:
- lfJson["doSampling"] = self.modelInfoLF.is_data
- lfJson["doSimulation"] = self.modelInfoLF.is_model
- lfJson["DoEmethod"] = self.modelInfoLF.doe_method
- lfJson["thrNRMSE"] = self.modelInfoLF.thr_NRMSE
- lfJson["valSamp"] = self.modelInfoLF.n_existing + self.id_sim_lf
- lfJson["valSim"] = self.id_sim_lf
- results["inpData"] = self.modelInfoLF.inpData
- results["outData"] = self.modelInfoLF.outData
- results["valSamp"] = self.X_lf.shape[0]
-
- results["lowFidelityInfo"] = lfJson
+ lfJson['doSampling'] = self.modelInfoLF.is_data
+ lfJson['doSimulation'] = self.modelInfoLF.is_model
+ lfJson['DoEmethod'] = self.modelInfoLF.doe_method
+ lfJson['thrNRMSE'] = self.modelInfoLF.thr_NRMSE
+ lfJson['valSamp'] = self.modelInfoLF.n_existing + self.id_sim_lf
+ lfJson['valSim'] = self.id_sim_lf
+ results['inpData'] = self.modelInfoLF.inpData
+ results['outData'] = self.modelInfoLF.outData
+ results['valSamp'] = self.X_lf.shape[0]
+
+ results['lowFidelityInfo'] = lfJson
else:
- results["lowFidelityInfo"] = "None"
-
- results["doLogtransform"] = self.do_logtransform
- results["doLinear"] = self.do_linear
- results["doMultiFidelity"] = self.do_mf
- results["kernName"] = self.kernel
- results["terminationCode"] = self.exit_code
- results["valTime"] = self.sim_time
- results["xdim"] = self.x_dim
- results["ydim"] = self.y_dim
- results["xlabels"] = self.rv_name
- results["ylabels"] = self.g_name
- results["yExact"] = {}
- results["yPredict"] = {}
- results["valNRMSE"] = {}
- results["valR2"] = {}
- results["valCorrCoeff"] = {}
- results["valIQratio"] = {}
- results["valPval"] = {}
- results["yPredict_PI_lb"] = {}
- results["yPredict_PI_ub"] = {}
- results["xExact"] = {}
- results["valNugget"] = {}
- results["valNugget1"] = {}
- results["valNugget2"] = {}
+ results['lowFidelityInfo'] = 'None'
+
+ results['doLogtransform'] = self.do_logtransform
+ results['doLinear'] = self.do_linear
+ results['doMultiFidelity'] = self.do_mf
+ results['kernName'] = self.kernel
+ results['terminationCode'] = self.exit_code
+ results['valTime'] = self.sim_time
+ results['xdim'] = self.x_dim
+ results['ydim'] = self.y_dim
+ results['xlabels'] = self.rv_name
+ results['ylabels'] = self.g_name
+ results['yExact'] = {}
+ results['yPredict'] = {}
+ results['valNRMSE'] = {}
+ results['valR2'] = {}
+ results['valCorrCoeff'] = {}
+ results['valIQratio'] = {}
+ results['valPval'] = {}
+ results['yPredict_PI_lb'] = {}
+ results['yPredict_PI_ub'] = {}
+ results['xExact'] = {}
+ results['valNugget'] = {}
+ results['valNugget1'] = {}
+ results['valNugget2'] = {}
for nx in range(self.x_dim):
- results["xExact"][self.rv_name[nx]] = self.X_hf[:, nx].tolist()
+ results['xExact'][self.rv_name[nx]] = self.X_hf[:, nx].tolist()
for ny in range(self.y_dim):
- results["yExact"][self.g_name[ny]] = self.Y_hf[:, ny].tolist()
- results["yPredict"][self.g_name[ny]] = self.Y_cv[:, ny].tolist()
+ results['yExact'][self.g_name[ny]] = self.Y_hf[:, ny].tolist()
+ results['yPredict'][self.g_name[ny]] = self.Y_cv[:, ny].tolist()
if not self.do_logtransform:
- results["yPredict_PI_lb"][self.g_name[ny]] = norm.ppf(
- 0.25, loc=self.Y_cv[:, ny], scale=np.sqrt(self.Y_cv_var_w_measure[:, ny])
+ results['yPredict_PI_lb'][self.g_name[ny]] = norm.ppf(
+ 0.25,
+ loc=self.Y_cv[:, ny],
+ scale=np.sqrt(self.Y_cv_var_w_measure[:, ny]),
).tolist()
- results["yPredict_PI_ub"][self.g_name[ny]] = norm.ppf(
- 0.75, loc=self.Y_cv[:, ny], scale=np.sqrt(self.Y_cv_var_w_measure[:, ny])
+ results['yPredict_PI_ub'][self.g_name[ny]] = norm.ppf(
+ 0.75,
+ loc=self.Y_cv[:, ny],
+ scale=np.sqrt(self.Y_cv_var_w_measure[:, ny]),
).tolist()
else:
mu = np.log(self.Y_cv[:, ny])
sigm = np.sqrt(
- np.log(self.Y_cv_var_w_measure[:, ny] / pow(self.Y_cv[:, ny], 2) + 1)
+ np.log(
+ self.Y_cv_var_w_measure[:, ny] / pow(self.Y_cv[:, ny], 2) + 1
+ )
)
- results["yPredict_PI_lb"][self.g_name[ny]] = lognorm.ppf(
+ results['yPredict_PI_lb'][self.g_name[ny]] = lognorm.ppf(
0.25, s=sigm, scale=np.exp(mu)
).tolist()
- results["yPredict_PI_ub"][self.g_name[ny]] = lognorm.ppf(
+ results['yPredict_PI_ub'][self.g_name[ny]] = lognorm.ppf(
0.75, s=sigm, scale=np.exp(mu)
).tolist()
@@ -1817,206 +1917,195 @@ def save_model(self, filename):
# nuggetVal_linear = np.exp(2*log_mean+log_var)*(np.exp(log_var)-1) # in linear space
if self.do_mf:
- results["valNugget1"][self.g_name[ny]] = float(
- self.m_list[ny].gpy_model["mixed_noise.Gaussian_noise.variance"]*self.normVars[ny]
+ results['valNugget1'][self.g_name[ny]] = float(
+ self.m_list[ny].gpy_model['mixed_noise.Gaussian_noise.variance']
+ * self.normVars[ny]
)
- results["valNugget2"][self.g_name[ny]] = float(
- self.m_list[ny].gpy_model["mixed_noise.Gaussian_noise_1.variance"]*self.normVars[ny]
+ results['valNugget2'][self.g_name[ny]] = float(
+ self.m_list[ny].gpy_model[
+ 'mixed_noise.Gaussian_noise_1.variance'
+ ]
+ * self.normVars[ny]
+ )
+ elif not self.heteroscedastic:
+ results['valNugget'][self.g_name[ny]] = float(
+ self.m_list[ny]['Gaussian_noise.variance'] * self.normVars[ny]
)
- else:
- if not self.heteroscedastic:
- results["valNugget"][self.g_name[ny]] = float(
- self.m_list[ny]["Gaussian_noise.variance"]*self.normVars[ny]
- )
-
- results["valNRMSE"][self.g_name[ny]] = self.NRMSE_val[ny]
- results["valR2"][self.g_name[ny]] = self.R2_val[ny]
- results["valCorrCoeff"][self.g_name[ny]] = self.corr_val[ny]
- results["valIQratio"][self.g_name[ny]] = self.inbound50[ny]
- results["valPval"][self.g_name[ny]] = self.Gausspvalue[ny]
+ results['valNRMSE'][self.g_name[ny]] = self.NRMSE_val[ny]
+ results['valR2'][self.g_name[ny]] = self.R2_val[ny]
+ results['valCorrCoeff'][self.g_name[ny]] = self.corr_val[ny]
+ results['valIQratio'][self.g_name[ny]] = self.inbound50[ny]
+ results['valPval'][self.g_name[ny]] = self.Gausspvalue[ny]
if np.isnan(self.NRMSE_val[ny]) or np.isinf(self.NRMSE_val[ny]):
- results["valNRMSE"][self.g_name[ny]] = 'null'
+ results['valNRMSE'][self.g_name[ny]] = 'null'
if np.isnan(self.R2_val[ny]) or np.isinf(self.R2_val[ny]):
- results["valR2"][self.g_name[ny]] = 'null'
- if np.isnan(self.corr_val[ny])or np.isinf(self.corr_val[ny]):
- results["valCorrCoeff"][self.g_name[ny]] = 'null'
+ results['valR2'][self.g_name[ny]] = 'null'
+ if np.isnan(self.corr_val[ny]) or np.isinf(self.corr_val[ny]):
+ results['valCorrCoeff'][self.g_name[ny]] = 'null'
- results["predError"] = {}
- results["predError"]["percent"] = self.perc_thr
- results["predError"]["value"] = self.perc_val
+ results['predError'] = {}
+ results['predError']['percent'] = self.perc_thr
+ results['predError']['value'] = self.perc_val
# results["fem"] = self.femInfo
rv_list = []
for nx in range(len(self.rvName)):
rvs = {}
- rvs["name"] = self.rvName[nx]
- rvs["distribution"] = self.rvDist[nx]
- rvs["value"] = self.rvVal[nx]
- rv_list = rv_list + [rvs]
- results["randomVariables"] = rv_list
-
- ### Used for surrogate
- results["modelInfo"] = {}
+ rvs['name'] = self.rvName[nx]
+ rvs['distribution'] = self.rvDist[nx]
+ rvs['value'] = self.rvVal[nx]
+ rv_list = rv_list + [rvs] # noqa: PLR6104, RUF005
+ results['randomVariables'] = rv_list
+ # Used for surrogate
+ results['modelInfo'] = {}
for ny in range(self.y_dim):
if self.stochastic[ny]:
- results["modelInfo"][self.g_name[ny]+"_Var"] = {}
+ results['modelInfo'][self.g_name[ny] + '_Var'] = {}
for parname in self.m_var_list[ny].parameter_names():
- results["modelInfo"][self.g_name[ny]+"_Var"][parname] = list(
- eval("self.m_var_list[ny]." + parname)
+ results['modelInfo'][self.g_name[ny] + '_Var'][parname] = list(
+ eval('self.m_var_list[ny].' + parname) # noqa: S307
)
- results["modelInfo"][self.g_name[ny] + "_Var"]["TrainingSamplesY"] = self.m_var_list[ny].Y.flatten().tolist()
+ results['modelInfo'][self.g_name[ny] + '_Var'][
+ 'TrainingSamplesY'
+ ] = self.m_var_list[ny].Y.flatten().tolist()
else:
- results["modelInfo"][self.g_name[ny]+"_Var"] = 0
+ results['modelInfo'][self.g_name[ny] + '_Var'] = 0
if not self.do_mf:
for ny in range(self.y_dim):
- results["modelInfo"][self.g_name[ny]] = {}
+ results['modelInfo'][self.g_name[ny]] = {}
for parname in self.m_list[ny].parameter_names():
- results["modelInfo"][self.g_name[ny]][parname] = list(
- eval("self.m_list[ny]." + parname)
+ results['modelInfo'][self.g_name[ny]][parname] = list(
+ eval('self.m_list[ny].' + parname) # noqa: S307
)
if self.isEEUQ:
# read SAM.json
- SAMpath = self.work_dir + "/templatedir/SAM.json"
+ SAMpath = self.work_dir + '/templatedir/SAM.json' # noqa: N806
try:
- with open(SAMpath, 'r', encoding='utf-8') as f:
- SAMjson = json.load(f)
- except Exception as e:
- with open(SAMpath+".sc", 'r', encoding='utf-8') as f:
- SAMjson = json.load(f)
-
- EDPpath = self.work_dir + "/templatedir/EDP.json"
- with open(EDPpath, 'r', encoding='utf-8') as f:
- EDPjson = json.load(f)
- results["SAM"] = SAMjson
- results["EDP"] = EDPjson
-
- with open(self.work_dir + "/dakota.out", "w", encoding='utf-8') as fp:
+ with open(SAMpath, encoding='utf-8') as f: # noqa: PTH123
+ SAMjson = json.load(f) # noqa: N806
+ except Exception: # noqa: BLE001
+ with open(SAMpath + '.sc', encoding='utf-8') as f: # noqa: PTH123
+ SAMjson = json.load(f) # noqa: N806
+
+ EDPpath = self.work_dir + '/templatedir/EDP.json' # noqa: N806
+ with open(EDPpath, encoding='utf-8') as f: # noqa: PTH123
+ EDPjson = json.load(f) # noqa: N806
+ results['SAM'] = SAMjson
+ results['EDP'] = EDPjson
+
+ with open(self.work_dir + '/dakota.out', 'w', encoding='utf-8') as fp: # noqa: PTH123
json.dump(results, fp, indent=1)
- with open(self.work_dir + "/GPresults.out", "w") as file:
-
- file.write("* Problem setting\n")
- file.write(" - dimension of x : {}\n".format(self.x_dim))
- file.write(" - dimension of y : {}\n".format(self.y_dim))
+ with open(self.work_dir + '/GPresults.out', 'w') as file: # noqa: PLR1702, PLW1514, PTH123
+ file.write('* Problem setting\n')
+ file.write(f' - dimension of x : {self.x_dim}\n')
+ file.write(f' - dimension of y : {self.y_dim}\n')
if self.doe_method:
- file.write(" - design of experiments : {} \n".format(self.doe_method))
+ file.write(f' - design of experiments : {self.doe_method} \n')
# if not self.do_doe:
# if self.do_simulation and self.do_sampling:
# file.write(
# " - design of experiments (DoE) turned off - DoE evaluation time exceeds the model simulation time \n")
- file.write("\n")
+ file.write('\n')
- file.write("* High-fidelity model\n")
+ file.write('* High-fidelity model\n')
# file.write(" - sampling : {}\n".format(self.modelInfoHF.is_model))
- file.write(" - simulation : {}\n".format(self.modelInfoHF.is_model))
- file.write("\n")
+ file.write(f' - simulation : {self.modelInfoHF.is_model}\n')
+ file.write('\n')
if self.do_mf:
- file.write("* Low-fidelity model\n")
+ file.write('* Low-fidelity model\n')
# file.write(" - sampling : {}\n".format(self.modelInfoLF.is_model))
- file.write(" - simulation : {}\n".format(self.modelInfoLF.is_model))
- file.write("\n")
+ file.write(f' - simulation : {self.modelInfoLF.is_model}\n')
+ file.write('\n')
- file.write("* Convergence\n")
- file.write(' - exit code : "{}"\n'.format(self.exit_code))
- file.write(" analysis terminated ")
- if self.exit_code == "count":
+ file.write('* Convergence\n')
+ file.write(f' - exit code : "{self.exit_code}"\n')
+ file.write(' analysis terminated ')
+ if self.exit_code == 'count':
file.write(
- "as number of counts reached the maximum (HFmax={})\n".format(
- self.modelInfoHF.thr_count
- )
+                        f'as the number of simulations reached the maximum (HFmax={self.modelInfoHF.thr_count})\n'
)
if self.do_mf:
file.write(
- "as number of counts reached the maximum (HFmax={}, LFmax={})\n".format(
- self.modelInfoHF.thr_count, self.modelInfoLF.thr_count
- )
+                        f'as the number of simulations reached the maximum (HFmax={self.modelInfoHF.thr_count}, LFmax={self.modelInfoLF.thr_count})\n'
)
- elif self.exit_code == "accuracy":
+ elif self.exit_code == 'accuracy':
file.write(
- 'as minimum accuracy level (NRMSE={:.2f}) is achieved"\n'.format(
- self.thr_NRMSE
- )
+                    f'as the minimum accuracy level (NRMSE={self.thr_NRMSE:.2f}) was achieved\n'
)
- elif self.exit_code == "time":
+ elif self.exit_code == 'time':
file.write(
- 'as maximum running time (t={:.1f}s) reached"\n'.format(self.thr_t)
+                    f'as the maximum running time (t={self.thr_t:.1f}s) was reached\n'
)
- elif self.exit_code == "data":
- file.write("without simulation\n")
+ elif self.exit_code == 'data':
+ file.write('without simulation\n')
else:
- file.write("- cannot identify the exit code\n")
+ file.write('- cannot identify the exit code\n')
- file.write(" - number of HF simulations : {}\n".format(self.id_sim_hf))
+ file.write(f' - number of HF simulations : {self.id_sim_hf}\n')
if self.do_mf:
- file.write(" - number of LF simulations : {}\n".format(self.id_sim_lf))
+ file.write(f' - number of LF simulations : {self.id_sim_lf}\n')
file.write(
- " - maximum normalized root-mean-squared error (NRMSE): {:.5f}\n".format(
- np.max(self.NRMSE_val)
- )
+ f' - maximum normalized root-mean-squared error (NRMSE): {np.max(self.NRMSE_val):.5f}\n'
)
for ny in range(self.y_dim):
- file.write(
- " {} : {:.2f}\n".format(self.g_name[ny], self.NRMSE_val[ny])
- )
+ file.write(f' {self.g_name[ny]} : {self.NRMSE_val[ny]:.2f}\n')
- file.write(" - analysis time : {:.1f} sec\n".format(self.sim_time))
- file.write(" - calibration interval : {}\n".format(self.cal_interval))
- file.write("\n")
+ file.write(f' - analysis time : {self.sim_time:.1f} sec\n')
+ file.write(f' - calibration interval : {self.cal_interval}\n')
+ file.write('\n')
- file.write("* GP parameters\n".format(self.y_dim))
- file.write(" - Kernel : {}\n".format(self.kernel))
- file.write(" - Linear : {}\n\n".format(self.do_linear))
+            file.write('* GP parameters\n')
+ file.write(f' - Kernel : {self.kernel}\n')
+ file.write(f' - Linear : {self.do_linear}\n\n')
if not self.do_mf:
for ny in range(self.y_dim):
- file.write(" [{}]\n".format(self.g_name[ny]))
+ file.write(f' [{self.g_name[ny]}]\n')
m_tmp = self.m_list[ny]
for parname in m_tmp.parameter_names():
- file.write(" - {} ".format(parname))
- parvals = eval("m_tmp." + parname)
+ file.write(f' - {parname} ')
+ parvals = eval('m_tmp.' + parname) # noqa: S307
if len(parvals) == self.x_dim:
- file.write("\n")
+ file.write('\n')
for nx in range(self.x_dim):
file.write(
- " {} : {:.2e}\n".format(
- self.rv_name[nx], parvals[nx]
- )
+ f' {self.rv_name[nx]} : {parvals[nx]:.2e}\n'
)
else:
- file.write(" : {:.2e}\n".format(parvals[0]))
- file.write("\n".format(self.g_name[ny]))
+ file.write(f' : {parvals[0]:.2e}\n')
+                file.write('\n')
- print("Results Saved",flush=True)
+ print('Results Saved', flush=True) # noqa: T201
return 0
- def run_design_of_experiments(self, nc1, nq, e2, doeIdx="HF"):
-
- if doeIdx == "LF":
- lfset = set([tuple(x) for x in self.X_lf.tolist()])
- hfset = set([tuple(x) for x in self.X_hf.tolist()])
+ def run_design_of_experiments(self, nc1, nq, e2, doeIdx='HF'): # noqa: C901, D102, N803, PLR0912, PLR0914, PLR0915
+ if doeIdx == 'LF':
+ lfset = set([tuple(x) for x in self.X_lf.tolist()]) # noqa: C403
+ hfset = set([tuple(x) for x in self.X_hf.tolist()]) # noqa: C403
hfsamples = hfset - lfset
if len(hfsamples) == 0:
lf_additional_candi = np.zeros((0, self.x_dim))
else:
lf_additional_candi = np.array([np.array(x) for x in hfsamples])
- def sampling(N):
+ def sampling(N): # noqa: N803
return model_lf.sampling(N)
else:
- def sampling(N):
+ def sampling(N): # noqa: N803
return model_hf.sampling(N)
# doeIdx = 0
@@ -2027,17 +2116,17 @@ def sampling(N):
model_hf = self.modelInfoHF
model_lf = self.modelInfoLF
- X_hf = self.X_hf
- Y_hf = self.Y_hf
- X_lf = self.X_lf
- Y_lf = self.Y_lf
- ll = self.ll # Todo which ll?
+ X_hf = self.X_hf # noqa: N806
+ Y_hf = self.Y_hf # noqa: N806
+ X_lf = self.X_lf # noqa: N806
+ Y_lf = self.Y_lf # noqa: N806
+ ll = self.ll # TODO which ll? # noqa: TD002, TD004
y_var = np.var(Y_hf, axis=0) # normalization
y_idx = np.argmax(np.sum(e2 / y_var, axis=0))
if np.max(y_var) == 0:
# if this Y is constant
- self.doe_method = "none"
+ self.doe_method = 'none'
self.doe_stop = True
# dimension of interest
@@ -2046,13 +2135,11 @@ def sampling(N):
r = 1
- if self.doe_method == "none":
-
+ if self.doe_method == 'none': # noqa: PLR1702
update_point = sampling(self.cal_interval)
score = 0
- elif self.doe_method == "pareto":
-
+ elif self.doe_method == 'pareto':
#
# Initial candidates
#
@@ -2060,7 +2147,7 @@ def sampling(N):
xc1 = sampling(nc1) # same for hf/lf
xq = sampling(nq) # same for hf/lf
- if doeIdx.startswith("LF"):
+ if doeIdx.startswith('LF'):
xc1 = np.vstack([xc1, lf_additional_candi])
nc1 = xc1.shape[0]
#
@@ -2076,11 +2163,11 @@ def sampling(N):
# cri2[i] = sum(e2[:, y_idx] / Y_pred_var[:, y_idx] * wei.T)
cri2[i] = sum(e2[:, y_idx] * wei.T)
- VOI = np.zeros(yc1_pred.shape)
+ VOI = np.zeros(yc1_pred.shape) # noqa: N806
for i in range(nc1):
pdfvals = (
- m_stack.kern.K(np.array([xq[i]]), xq) ** 2
- / m_stack.kern.K(np.array([xq[0]])) ** 2
+ m_stack.kern.K(np.array([xq[i]]), xq) ** 2
+ / m_stack.kern.K(np.array([xq[0]])) ** 2
)
VOI[i] = np.mean(pdfvals) * np.prod(
np.diff(model_hf.xrange, axis=1)
@@ -2093,31 +2180,31 @@ def sampling(N):
logcrimi2 = np.log(cri2[:, 0])
rankid = np.zeros(nc1)
- varRank = np.zeros(nc1)
- biasRank = np.zeros(nc1)
- for id in range(nc1):
+ varRank = np.zeros(nc1) # noqa: N806
+ biasRank = np.zeros(nc1) # noqa: N806
+ for id in range(nc1): # noqa: A001
idx_tmp = np.argwhere(
(logcrimi1 >= logcrimi1[id]) * (logcrimi2 >= logcrimi2[id])
)
- varRank[id] = np.sum((logcrimi1 >= logcrimi1[id]))
- biasRank[id] = np.sum((logcrimi2 >= logcrimi2[id]))
+ varRank[id] = np.sum(logcrimi1 >= logcrimi1[id])
+ biasRank[id] = np.sum(logcrimi2 >= logcrimi2[id])
rankid[id] = idx_tmp.size
num_1rank = np.sum(rankid == 1)
idx_1rank = list((np.argwhere(rankid == 1)).flatten())
- if doeIdx.startswith("HF"):
- X_stack = X_hf
- Y_stack = Y_hf[:, y_idx][np.newaxis].T
- elif doeIdx.startswith("LF"):
- X_stack = X_lf
- Y_stack = Y_lf[:, y_idx][np.newaxis].T
+ if doeIdx.startswith('HF'):
+ X_stack = X_hf # noqa: N806
+ Y_stack = Y_hf[:, y_idx][np.newaxis].T # noqa: N806
+ elif doeIdx.startswith('LF'):
+ X_stack = X_lf # noqa: N806
+ Y_stack = Y_lf[:, y_idx][np.newaxis].T # noqa: N806
if num_1rank < self.cal_interval:
# When number of pareto is smaller than cal_interval
prob = np.ones((nc1,))
prob[list(rankid == 1)] = 0
- prob = prob / sum(prob)
+ prob = prob / sum(prob) # noqa: PLR6104
idx_pareto = idx_1rank + list(
np.random.choice(nc1, self.cal_interval - num_1rank, p=prob)
)
@@ -2134,56 +2221,59 @@ def sampling(N):
idx_pareto_new = [best_global]
del idx_pareto_candi[best_local]
- for i in range(self.cal_interval - 1):
- X_stack = np.vstack([X_stack, xc1[best_global, :][np.newaxis]])
- Y_stack = np.vstack([Y_stack, np.zeros((1, 1))]) # any variables
+ for i in range(self.cal_interval - 1): # noqa: B007
+ X_stack = np.vstack([X_stack, xc1[best_global, :][np.newaxis]]) # noqa: N806
+ # any variables
+ Y_stack = np.vstack([Y_stack, np.zeros((1, 1))]) # noqa: N806
- if doeIdx.startswith("HF"):
+ if doeIdx.startswith('HF'):
m_stack = self.set_XY(m_stack, y_idx, X_stack, Y_stack)
- elif doeIdx.startswith("LF"): # any variables
- m_tmp = self.set_XY(m_tmp, y_idx, self.X_hf, self.Y_hf, X_stack, Y_stack)
+ elif doeIdx.startswith('LF'): # any variables
+ m_tmp = self.set_XY(
+ m_tmp, y_idx, self.X_hf, self.Y_hf, X_stack, Y_stack
+ )
- dummy, Yq_var = self.predict(m_stack, xc1[idx_pareto_candi, :])
+ dummy, Yq_var = self.predict(m_stack, xc1[idx_pareto_candi, :]) # noqa: N806
cri1 = Yq_var * VOI[idx_pareto_candi]
cri1 = (cri1 - np.min(cri1)) / (np.max(cri1) - np.min(cri1))
score_tmp = (
- cri1 * cri2[idx_pareto_candi]
+ cri1 * cri2[idx_pareto_candi]
) # only update the variance
best_local = np.argsort(-np.squeeze(score_tmp))[0]
best_global = idx_pareto_candi[best_local]
- idx_pareto_new = idx_pareto_new + [best_global]
+ idx_pareto_new = idx_pareto_new + [best_global] # noqa: PLR6104, RUF005
del idx_pareto_candi[best_local]
idx_pareto = idx_pareto_new
update_point = xc1[idx_pareto, :]
score = 0
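# Sketch of the Pareto-front test used by the 'pareto' DoE branch above: a
# candidate is retained when no other candidate scores at least as high on
# BOTH acquisition criteria, which is exactly the rankid == 1 condition.
# The two criteria below are random placeholders, not the real variance- and
# bias-based scores.
import numpy as np

rng = np.random.default_rng(3)
cri_var = rng.random(50)   # stands in for the variance-based criterion
cri_bias = rng.random(50)  # stands in for the CV-error-based criterion

on_front = []
for i in range(cri_var.size):
    dominators = np.sum((cri_var >= cri_var[i]) & (cri_bias >= cri_bias[i]))
    if dominators == 1:    # only the candidate itself -> non-dominated
        on_front.append(i)
print(on_front)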
- elif self.doe_method == "imse":
+ elif self.doe_method == 'imse':
update_point = np.zeros((self.cal_interval, self.x_dim))
update_score = np.zeros((self.cal_interval, 1))
- if doeIdx.startswith("HF"):
- X_stack = X_hf
- Y_stack = Y_hf[:, y_idx][np.newaxis].T
- elif doeIdx.startswith("LF"):
- X_stack = X_lf
- Y_stack = Y_lf[:, y_idx][np.newaxis].T
+ if doeIdx.startswith('HF'):
+ X_stack = X_hf # noqa: N806
+ Y_stack = Y_hf[:, y_idx][np.newaxis].T # noqa: N806
+ elif doeIdx.startswith('LF'):
+ X_stack = X_lf # noqa: N806
+ Y_stack = Y_lf[:, y_idx][np.newaxis].T # noqa: N806
for ni in range(self.cal_interval):
#
# Initial candidates
#
xc1 = sampling(nc1) # same for hf/lf
- if doeIdx.startswith("LF"):
+ if doeIdx.startswith('LF'):
xc1 = np.vstack([xc1, lf_additional_candi])
nc1 = xc1.shape[0]
xq = sampling(nq) # same for hf/lf
- dummy, Yq_var = self.predict(m_stack, xq)
+ dummy, Yq_var = self.predict(m_stack, xq) # noqa: N806
if ni == 0:
- IMSEbase = 1 / xq.shape[0] * sum(Yq_var.flatten())
+ IMSEbase = 1 / xq.shape[0] * sum(Yq_var.flatten()) # noqa: N806
tmp = time.time()
if self.do_parallel:
@@ -2200,27 +2290,25 @@ def sampling(N):
for i in range(nc1)
)
result_objs = list(self.pool.starmap(imse, iterables))
- IMSEc1 = np.zeros(nc1)
- for IMSE_val, idx in result_objs:
+ IMSEc1 = np.zeros(nc1) # noqa: N806
+ for IMSE_val, idx in result_objs: # noqa: N806
IMSEc1[idx] = IMSE_val
- print(
- "IMSE: finding the next DOE {} - parallel .. time = {:.2f}".format(
- ni, time.time() - tmp
- ,flush=True)
+ print( # noqa: T201
+ f'IMSE: finding the next DOE {ni} - parallel .. time = {time.time() - tmp:.2f}'
) # 7s # 3-4s
- # TODO: terminate it gracefully....
+ # TODO: terminate it gracefully.... # noqa: TD002
# see https://stackoverflow.com/questions/21104997/keyboard-interrupt-with-pythons-multiprocessing
try:
while True:
time.sleep(0.5)
- if all([r.ready() for r in result]):
+ if all([r.ready() for r in result]): # noqa: C419, F821
break
except KeyboardInterrupt:
- pool.terminate()
- pool.join()
+ pool.terminate() # noqa: F821
+ pool.join() # noqa: F821
else:
- IMSEc1 = np.zeros(nc1)
+ IMSEc1 = np.zeros(nc1) # noqa: N806
for i in range(nc1):
IMSEc1[i], dummy = imse(
copy.deepcopy(m_stack),
@@ -2231,20 +2319,19 @@ def sampling(N):
y_idx,
doeIdx,
)
- print(
- "IMSE: finding the next DOE {} - serial .. time = {}".format(
- ni, time.time() - tmp
- ,flush=True)
+ print( # noqa: T201
+ f'IMSE: finding the next DOE {ni} - serial .. time = {time.time() - tmp}'
) # 4s
new_idx = np.argmin(IMSEc1, axis=0)
x_point = xc1[new_idx, :][np.newaxis]
- X_stack = np.vstack([X_stack, x_point])
- Y_stack = np.vstack([Y_stack, np.zeros((1, 1))]) # any variables
+ X_stack = np.vstack([X_stack, x_point]) # noqa: N806
+ # any variables
+ Y_stack = np.vstack([Y_stack, np.zeros((1, 1))]) # noqa: N806
update_point[ni, :] = x_point
- if doeIdx == "HFHF":
+ if doeIdx == 'HFHF':
m_stack = self.set_XY(
m_stack,
y_idx,
@@ -2253,9 +2340,9 @@ def sampling(N):
self.X_lf,
self.Y_lf[:, y_idx][np.newaxis].T,
)
- elif doeIdx == "HF":
+ elif doeIdx == 'HF':
m_stack = self.set_XY(m_stack, y_idx, X_stack, Y_stack)
- elif doeIdx == "LF": # any variables
+ elif doeIdx == 'LF': # any variables
m_stack = self.set_XY(
m_stack,
y_idx,
@@ -2267,23 +2354,23 @@ def sampling(N):
score = IMSEbase - np.min(IMSEc1, axis=0)
- elif self.doe_method == "imsew":
+ elif self.doe_method == 'imsew':
update_point = np.zeros((self.cal_interval, self.x_dim))
- update_score = np.zeros((self.cal_interval, 1))
+ update_score = np.zeros((self.cal_interval, 1)) # noqa: F841
- if doeIdx.startswith("HF"):
- X_stack = X_hf
- Y_stack = Y_hf[:, y_idx][np.newaxis].T
- elif doeIdx.startswith("LF"):
- X_stack = X_lf
- Y_stack = Y_lf[:, y_idx][np.newaxis].T
+ if doeIdx.startswith('HF'):
+ X_stack = X_hf # noqa: N806
+ Y_stack = Y_hf[:, y_idx][np.newaxis].T # noqa: N806
+ elif doeIdx.startswith('LF'):
+ X_stack = X_lf # noqa: N806
+ Y_stack = Y_lf[:, y_idx][np.newaxis].T # noqa: N806
for ni in range(self.cal_interval):
#
# Initial candidates
#
xc1 = sampling(nc1) # same for hf/lf
- if doeIdx.startswith("LF"):
+ if doeIdx.startswith('LF'):
xc1 = np.vstack([xc1, lf_additional_candi])
nc1 = xc1.shape[0]
@@ -2294,9 +2381,11 @@ def sampling(N):
phiq[i, :] = e2[closest_node(xq[i, :], X_hf, ll)]
phiqr = pow(phiq[:, y_idx], r)
- dummy, Yq_var = self.predict(m_stack, xq)
+ dummy, Yq_var = self.predict(m_stack, xq) # noqa: N806
if ni == 0:
- IMSEbase = 1 / xq.shape[0] * sum(phiqr.flatten() * Yq_var.flatten())
+ IMSEbase = ( # noqa: N806
+ 1 / xq.shape[0] * sum(phiqr.flatten() * Yq_var.flatten())
+ )
tmp = time.time()
if self.do_parallel:
@@ -2313,18 +2402,16 @@ def sampling(N):
for i in range(nc1)
)
result_objs = list(self.pool.starmap(imse, iterables))
- IMSEc1 = np.zeros(nc1)
- for IMSE_val, idx in result_objs:
+ IMSEc1 = np.zeros(nc1) # noqa: N806
+ for IMSE_val, idx in result_objs: # noqa: N806
IMSEc1[idx] = IMSE_val
- print(
- "IMSE: finding the next DOE {} - parallel .. time = {:.2f}".format(
- ni, time.time() - tmp
- ,flush=True)
+ print( # noqa: T201
+ f'IMSE: finding the next DOE {ni} - parallel .. time = {time.time() - tmp:.2f}'
) # 7s # 3-4s
else:
- IMSEc1 = np.zeros(nc1)
+ IMSEc1 = np.zeros(nc1) # noqa: N806
for i in range(nc1):
- IMSEc1[i], dummy = imse(
+ IMSEc1[i], dummy = imse( # noqa: F841
copy.deepcopy(m_stack),
xc1[i, :][np.newaxis],
xq,
@@ -2334,21 +2421,21 @@ def sampling(N):
doeIdx,
)
if np.mod(i, 200) == 0:
- print("IMSE iter {}, candi {}/{}".format(ni, i, nc1)) # 4s
- print(
- "IMSE: finding the next DOE {} - serial .. time = {}".format(
- ni, time.time() - tmp
- ,flush=True)
+ # 4s
+ print(f'IMSE iter {ni}, candi {i}/{nc1}') # noqa: T201
+ print( # noqa: T201
+ f'IMSE: finding the next DOE {ni} - serial .. time = {time.time() - tmp}'
) # 4s
new_idx = np.argmin(IMSEc1, axis=0)
x_point = xc1[new_idx, :][np.newaxis]
- X_stack = np.vstack([X_stack, x_point])
- Y_stack = np.vstack([Y_stack, np.zeros((1, 1))]) # any variables
+ X_stack = np.vstack([X_stack, x_point]) # noqa: N806
+ # any variables
+ Y_stack = np.vstack([Y_stack, np.zeros((1, 1))]) # noqa: N806
update_point[ni, :] = x_point
- if doeIdx == "HFHF":
+ if doeIdx == 'HFHF':
m_stack = self.set_XY(
m_stack,
y_idx,
@@ -2357,9 +2444,9 @@ def sampling(N):
self.X_lf,
self.Y_lf[:, y_idx][np.newaxis].T,
)
- elif doeIdx == "HF":
+ elif doeIdx == 'HF':
m_stack = self.set_XY(m_stack, y_idx, X_stack, Y_stack)
- elif doeIdx == "LF": # any variables
+ elif doeIdx == 'LF': # any variables
m_stack = self.set_XY(
m_stack,
y_idx,
@@ -2371,20 +2458,19 @@ def sampling(N):
score = IMSEbase - np.min(IMSEc1, axis=0)
- elif self.doe_method == "mmsew":
- if doeIdx.startswith("HF"):
- X_stack = X_hf
- Y_stack = Y_hf[:, y_idx][np.newaxis].T
- elif doeIdx.startswith("LF"):
- X_stack = X_lf
- Y_stack = Y_lf[:, y_idx][np.newaxis].T
+ elif self.doe_method == 'mmsew':
+ if doeIdx.startswith('HF'):
+ X_stack = X_hf # noqa: N806
+ Y_stack = Y_hf[:, y_idx][np.newaxis].T # noqa: N806
+ elif doeIdx.startswith('LF'):
+ X_stack = X_lf # noqa: N806
+ Y_stack = Y_lf[:, y_idx][np.newaxis].T # noqa: N806
update_point = np.zeros((self.cal_interval, self.x_dim))
for ni in range(self.cal_interval):
-
xc1 = sampling(nc1) # same for hf/lf
- if doeIdx.startswith("LF"):
+ if doeIdx.startswith('LF'):
xc1 = np.vstack([xc1, lf_additional_candi])
nc1 = xc1.shape[0]
@@ -2394,46 +2480,48 @@ def sampling(N):
phicr = pow(phic[:, y_idx], r)
yc1_pred, yc1_var = self.predict(m_stack, xc1) # use only variance
- MMSEc1 = yc1_var.flatten() * phicr.flatten()
+ MMSEc1 = yc1_var.flatten() * phicr.flatten() # noqa: N806
new_idx = np.argmax(MMSEc1, axis=0)
x_point = xc1[new_idx, :][np.newaxis]
- X_stack = np.vstack([X_stack, x_point])
- Y_stack = np.vstack([Y_stack, np.zeros((1, 1))]) # any variables
+ X_stack = np.vstack([X_stack, x_point]) # noqa: N806
+ # any variables
+ Y_stack = np.vstack([Y_stack, np.zeros((1, 1))]) # noqa: N806
# m_stack.set_XY(X=X_stack, Y=Y_stack)
- if doeIdx.startswith("HF"):
+ if doeIdx.startswith('HF'):
m_stack = self.set_XY(m_stack, y_idx, X_stack, Y_stack)
- elif doeIdx.startswith("LF"): # any variables
- m_tmp = self.set_XY(m_tmp, y_idx,self.X_hf, self.Y_hf, X_stack, Y_stack)
+ elif doeIdx.startswith('LF'): # any variables
+ m_tmp = self.set_XY(
+ m_tmp, y_idx, self.X_hf, self.Y_hf, X_stack, Y_stack
+ )
update_point[ni, :] = x_point
score = np.max(MMSEc1, axis=0)
- elif self.doe_method == "mmse":
-
- if doeIdx.startswith("HF"):
- X_stack = X_hf
- Y_stack = Y_hf[:, y_idx][np.newaxis].T
- elif doeIdx.startswith("LF"):
- X_stack = X_lf
- Y_stack = Y_lf[:, y_idx][np.newaxis].T
+ elif self.doe_method == 'mmse':
+ if doeIdx.startswith('HF'):
+ X_stack = X_hf # noqa: N806
+ Y_stack = Y_hf[:, y_idx][np.newaxis].T # noqa: N806
+ elif doeIdx.startswith('LF'):
+ X_stack = X_lf # noqa: N806
+ Y_stack = Y_lf[:, y_idx][np.newaxis].T # noqa: N806
update_point = np.zeros((self.cal_interval, self.x_dim))
for ni in range(self.cal_interval):
-
xc1 = sampling(nc1) # same for hf/lf
- if doeIdx.startswith("LF"):
+ if doeIdx.startswith('LF'):
xc1 = np.vstack([xc1, lf_additional_candi])
nc1 = xc1.shape[0]
yc1_pred, yc1_var = self.predict(m_stack, xc1) # use only variance
- MMSEc1 = yc1_var.flatten()
+ MMSEc1 = yc1_var.flatten() # noqa: N806
new_idx = np.argmax(MMSEc1, axis=0)
x_point = xc1[new_idx, :][np.newaxis]
- X_stack = np.vstack([X_stack, x_point])
- Y_stack = np.vstack([Y_stack, np.zeros((1, 1))]) # any variables
+ X_stack = np.vstack([X_stack, x_point]) # noqa: N806
+ # any variables
+ Y_stack = np.vstack([Y_stack, np.zeros((1, 1))]) # noqa: N806
# m_stack.set_XY(X=X_stack, Y=Y_stack)
# if doeIdx.startswith("HF"):
@@ -2441,7 +2529,7 @@ def sampling(N):
# elif doeIdx.startswith("LF"): # any variables
# self.set_XY(m_stack, self.X_hf, self.Y_hf, X_stack, Y_stack)
- if doeIdx == "HFHF":
+ if doeIdx == 'HFHF':
m_stack = self.set_XY(
m_stack,
y_idx,
@@ -2450,9 +2538,9 @@ def sampling(N):
self.X_lf,
self.Y_lf[:, y_idx][np.newaxis].T,
)
- elif doeIdx == "HF":
- m_stack = self.set_XY(m_stack, y_idx,X_stack, Y_stack)
- elif doeIdx == "LF": # any variables
+ elif doeIdx == 'HF':
+ m_stack = self.set_XY(m_stack, y_idx, X_stack, Y_stack)
+ elif doeIdx == 'LF': # any variables
m_stack = self.set_XY(
m_stack,
y_idx,
@@ -2467,85 +2555,104 @@ def sampling(N):
score = np.max(MMSEc1, axis=0)
else:
msg = (
- "Error running SimCenterUQ: cannot identify the doe method <"
- + self.doe_method
- + ">"
+ 'Error running SimCenterUQ: cannot identify the doe method <'
+ + self.doe_method
+ + '>'
)
self.exit(msg)
return update_point, y_idx, score
- def normalized_mean_sq_error(self, yp, ye):
+ def normalized_mean_sq_error(self, yp, ye): # noqa: D102, PLR6301
n = yp.shape[0]
data_bound = np.max(ye, axis=0) - np.min(ye, axis=0)
- RMSE = np.sqrt(1 / n * np.sum(pow(yp - ye, 2), axis=0))
- NRMSE = RMSE / data_bound
- NRMSE[np.argwhere((data_bound == 0))] = 0
+ RMSE = np.sqrt(1 / n * np.sum(pow(yp - ye, 2), axis=0)) # noqa: N806
+ NRMSE = RMSE / data_bound # noqa: N806
+ NRMSE[np.argwhere(data_bound == 0)] = 0
return NRMSE
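# Sketch of the NRMSE measure above: per-output root-mean-square error of the
# cross-validation predictions normalized by the data range, with constant
# outputs (zero range) mapped to zero. Arrays below are illustrative.
import numpy as np


def nrmse(y_pred, y_exact):
    data_range = np.max(y_exact, axis=0) - np.min(y_exact, axis=0)
    rmse = np.sqrt(np.mean((y_pred - y_exact) ** 2, axis=0))
    return np.divide(
        rmse, data_range, out=np.zeros_like(rmse), where=data_range != 0
    )


y_exact = np.array([[1.0, 5.0], [2.0, 5.0], [3.0, 5.0]])
print(nrmse(y_exact + 0.1, y_exact))  # [0.05 0.  ]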
- def get_cross_validation_err(self):
-
- print("Calculating cross validation errors",flush=True)
- time_tmp = time.time();
- X_hf = self.X_hf # contains separate samples
- Y_hf = self.Y_hf
+ def get_cross_validation_err(self): # noqa: D102
+ print('Calculating cross validation errors', flush=True) # noqa: T201
+ time_tmp = time.time()
+ X_hf = self.X_hf # contains separate samples # noqa: N806
+ Y_hf = self.Y_hf # noqa: N806
e2 = np.zeros(Y_hf.shape) # only for unique...
- Y_pred = np.zeros(Y_hf.shape)
- Y_pred_var = np.zeros(Y_hf.shape)
- Y_pred_var_w_measure = np.zeros(Y_hf.shape)
+ Y_pred = np.zeros(Y_hf.shape) # noqa: N806
+ Y_pred_var = np.zeros(Y_hf.shape) # noqa: N806
+ Y_pred_var_w_measure = np.zeros(Y_hf.shape) # noqa: N806
#
# Efficient cross validation TODO: check if it works for heteroskedacstic
#
- if (not self.do_mf) and (not self.heteroscedastic): # note: heteroscedastic is not our stochastic kriging
- #X_unique, dum, indices, dum = np.unique(X_hf, axis=0, return_index=True, return_counts=True,
+ if (not self.do_mf) and (
+ not self.heteroscedastic
+ ): # note: heteroscedastic is not our stochastic kriging
+ # X_unique, dum, indices, dum = np.unique(X_hf, axis=0, return_index=True, return_counts=True,
# return_inverse=True)
- #self.n_unique_hf = indices.shape[0]
+ # self.n_unique_hf = indices.shape[0]
indices = self.indices_unique
for ny in range(Y_hf.shape[1]):
-
- Xm = self.m_list[ny].X # contains unique samples
- Ym = self.m_list[ny].Y
+ Xm = self.m_list[ny].X # contains unique samples # noqa: N806
+ Ym = self.m_list[ny].Y # noqa: N806
# works both for stochastic/stochastic
- nugget_mat = np.diag(np.squeeze(self.var_str[ny])) * self.m_list[ny].Gaussian_noise.parameters
+ nugget_mat = (
+ np.diag(np.squeeze(self.var_str[ny]))
+ * self.m_list[ny].Gaussian_noise.parameters
+ )
- Rmat = self.m_list[ny].kern.K(Xm)
- Rinv = np.linalg.inv(Rmat + nugget_mat)
- e = np.squeeze(np.matmul(Rinv, (Ym-self.normMeans[ny])))/np.squeeze(np.diag(Rinv))
- #e = np.squeeze(np.matmul(Rinv, (Ym))) / np.squeeze(np.diag(Rinv))
+ Rmat = self.m_list[ny].kern.K(Xm) # noqa: N806
+ Rinv = np.linalg.inv(Rmat + nugget_mat) # noqa: N806
+ e = np.squeeze(
+ np.matmul(Rinv, (Ym - self.normMeans[ny]))
+ ) / np.squeeze(np.diag(Rinv))
+ # e = np.squeeze(np.matmul(Rinv, (Ym))) / np.squeeze(np.diag(Rinv))
# works both for stochastic/stochastic
for nx in range(X_hf.shape[0]):
e2[nx, ny] = e[indices[nx]] ** 2
- #Y_pred_var[nx, ny] = 1 / np.diag(Rinv)[indices[nx]] * self.normVars[ny]
- Y_pred[nx, ny] = self.Y_mean[ny][indices[nx]] - e[indices[nx]]
- #Y_pred_var_w_measure[nx, ny] = Y_pred_var[nx, ny] + self.m_list[ny].Gaussian_noise.parameters[0]*self.var_str[ny][indices[nx]] * self.normVars[ny]
- Y_pred_var_w_measure[nx, ny] = 1 / np.diag(Rinv)[indices[nx]] * self.normVars[ny]
- Y_pred_var[nx, ny] = max(0, Y_pred_var_w_measure[nx, ny] - self.m_list[ny].Gaussian_noise.parameters[0] * self.var_str[ny][indices[nx]] * self.normVars[ny])
+ # Y_pred_var[nx, ny] = 1 / np.diag(Rinv)[indices[nx]] * self.normVars[ny]
+ Y_pred[nx, ny] = self.Y_mean[ny][indices[nx]] - e[indices[nx]]
+ # Y_pred_var_w_measure[nx, ny] = Y_pred_var[nx, ny] + self.m_list[ny].Gaussian_noise.parameters[0]*self.var_str[ny][indices[nx]] * self.normVars[ny]
+ Y_pred_var_w_measure[nx, ny] = (
+ 1 / np.diag(Rinv)[indices[nx]] * self.normVars[ny]
+ )
+ Y_pred_var[nx, ny] = max(
+ 0,
+ Y_pred_var_w_measure[nx, ny]
+ - self.m_list[ny].Gaussian_noise.parameters[0]
+ * self.var_str[ny][indices[nx]]
+ * self.normVars[ny],
+ )
else:
- Y_pred2 = np.zeros(Y_hf.shape)
- Y_pred_var2 = np.zeros(Y_hf.shape)
+ Y_pred2 = np.zeros(Y_hf.shape) # noqa: N806
+ Y_pred_var2 = np.zeros(Y_hf.shape) # noqa: N806
e22 = np.zeros(Y_hf.shape)
for ny in range(Y_hf.shape[1]):
m_tmp = copy.deepcopy(self.m_list[ny])
for ns in range(X_hf.shape[0]):
- X_tmp = np.delete(X_hf, ns, axis=0)
- Y_tmp = np.delete(Y_hf, ns, axis=0)
+ X_tmp = np.delete(X_hf, ns, axis=0) # noqa: N806
+ Y_tmp = np.delete(Y_hf, ns, axis=0) # noqa: N806
if self.stochastic:
- Y_meta_tmp = m_tmp.Y_metadata
- Y_meta_tmp['variance_structure']= np.delete(m_tmp.Y_metadata['variance_structure'], ns, axis=0)
- m_tmp.set_XY2(X_tmp,Y_tmp[:, ny][np.newaxis].transpose(),Y_metadata = Y_meta_tmp)
+ Y_meta_tmp = m_tmp.Y_metadata # noqa: N806
+ Y_meta_tmp['variance_structure'] = np.delete(
+ m_tmp.Y_metadata['variance_structure'], ns, axis=0
+ )
+ m_tmp.set_XY2(
+ X_tmp,
+ Y_tmp[:, ny][np.newaxis].transpose(),
+ Y_metadata=Y_meta_tmp,
+ )
else:
m_tmp.set_XY(X_tmp, Y_tmp[:, ny][np.newaxis].transpose())
- print(ns)
+ print(ns) # noqa: T201
# m_tmp = self.set_XY(
# m_tmp,
# ny,
@@ -2556,100 +2663,127 @@ def get_cross_validation_err(self):
# )
x_loo = X_hf[ns, :][np.newaxis]
- Y_pred_tmp, Y_err_tmp = self.predict(m_tmp, x_loo)
+ Y_pred_tmp, Y_err_tmp = self.predict(m_tmp, x_loo) # noqa: N806
Y_pred2[ns, ny] = Y_pred_tmp
Y_pred_var2[ns, ny] = Y_err_tmp
if self.do_logtransform:
- Y_exact = np.log(Y_hf[ns, ny])
+ Y_exact = np.log(Y_hf[ns, ny]) # noqa: N806
else:
- Y_exact = Y_hf[ns, ny]
+ Y_exact = Y_hf[ns, ny] # noqa: N806
e22[ns, ny] = pow((Y_pred_tmp - Y_exact), 2) # for nD outputs
- Y_pred = Y_pred2
- Y_pred_var = Y_pred_var2
+ Y_pred = Y_pred2 # noqa: N806
+ Y_pred_var = Y_pred_var2 # noqa: N806
if not self.do_mf:
- Y_pred_var_w_measure[:, ny] = Y_pred_var2[:, ny] + self.m_list[ny].Gaussian_noise.parameters * self.normVars[ny]
+ Y_pred_var_w_measure[:, ny] = (
+ Y_pred_var2[:, ny]
+ + self.m_list[ny].Gaussian_noise.parameters
+ * self.normVars[ny]
+ )
else:
- # TODO account for Gaussian_noise.parameters as well
- Y_pred_var_w_measure[:, ny] = Y_pred_var2[:, ny] + self.m_list[ny].gpy_model.mixed_noise.Gaussian_noise_1.parameters * self.normVars[ny]
+ # TODO account for Gaussian_noise.parameters as well # noqa: TD002, TD004
+ Y_pred_var_w_measure[:, ny] = (
+ Y_pred_var2[:, ny]
+ + self.m_list[
+ ny
+ ].gpy_model.mixed_noise.Gaussian_noise_1.parameters
+ * self.normVars[ny]
+ )
e2 = e22
# np.hstack([Y_pred_var,Y_pred_var2])
# np.hstack([e2,e22])
- '''
+ r"""
import matplotlib.pyplot as plt
plt.plot(Y_pred_var/self.normVars[ny]); plt.plot(Y_pred_var2/self.normVars[ny]);
plt.title("With nugget (Linear)"); plt.xlabel("Training sample id"); plt.ylabel("LOOCV variance (before multiplying $\sigma_z^2$)"); plt.legend(["Closedform","iteration"]);
plt.show();
- '''
- print(" Cross validation calculation time: {:.2f} s".format(time.time() - time_tmp),flush=True)
+ """ # noqa: W291, W293
+ print( # noqa: T201
+ f' Cross validation calculation time: {time.time() - time_tmp:.2f} s',
+ flush=True,
+ )
return Y_pred, Y_pred_var, Y_pred_var_w_measure, e2
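# Sketch of the closed-form leave-one-out identity used in the efficient
# branch above: with the (kernel + nugget) matrix R and a known mean m, the
# LOO residual at training point i is e_i = [R^-1 (y - m)]_i / [R^-1]_ii and
# the LOO variance is 1 / [R^-1]_ii (times the process variance when R is a
# correlation matrix), so no model refits are needed. Kernel and data below
# are illustrative only.
import numpy as np

rng = np.random.default_rng(4)
x = np.sort(rng.random(12))[:, None]
y = np.sin(4 * x) + 0.05 * rng.normal(size=x.shape)


def rbf(a, b, ell=0.3):
    return np.exp(-0.5 * ((a - b.T) / ell) ** 2)


r_mat = rbf(x, x) + 1e-4 * np.eye(x.shape[0])  # unit-variance kernel + nugget
r_inv = np.linalg.inv(r_mat)

e_loo = (r_inv @ (y - y.mean())).ravel() / np.diag(r_inv)  # LOO residuals
var_loo = 1.0 / np.diag(r_inv)                             # LOO variances
y_loo = y.ravel() - e_loo                                  # LOO predictions
print(np.round(y_loo, 3))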
-def imse(m_tmp, xcandi, xq, phiqr, i, y_idx, doeIdx="HF"):
- if doeIdx == "HF":
- X = m_tmp.X
- Y = m_tmp.Y
- X_tmp = np.vstack([X, xcandi])
- Y_tmp = np.vstack([Y, np.zeros((1, Y.shape[1]))]) # any variables
+def imse(m_tmp, xcandi, xq, phiqr, i, y_idx, doeIdx='HF'): # noqa: ARG001, N803, D103
+ if doeIdx == 'HF':
+ X = m_tmp.X # noqa: N806
+ Y = m_tmp.Y # noqa: N806
+ X_tmp = np.vstack([X, xcandi]) # noqa: N806
+ # any variables
+ Y_tmp = np.vstack([Y, np.zeros((1, Y.shape[1]))]) # noqa: N806
# self.set_XY(m_tmp, X_tmp, Y_tmp)
m_tmp.set_XY(X_tmp, Y_tmp)
- dummy, Yq_var = m_tmp.predict(xq)
-
- elif doeIdx == "HFHF":
- idxHF = np.argwhere(m_tmp.gpy_model.X[:, -1] == 0).T[0]
- idxLF = np.argwhere(m_tmp.gpy_model.X[:, -1] == 1).T[0]
- X_hf = m_tmp.gpy_model.X[idxHF, :-1]
- Y_hf = m_tmp.gpy_model.Y[idxHF, :]
- X_lf = m_tmp.gpy_model.X[idxLF, :-1]
- Y_lf = m_tmp.gpy_model.Y[idxLF, :]
- X_tmp = np.vstack([X_hf, xcandi])
- Y_tmp = np.vstack([Y_hf, np.zeros((1, Y_hf.shape[1]))]) # any variables
+ dummy, Yq_var = m_tmp.predict(xq) # noqa: N806
+
+ elif doeIdx == 'HFHF':
+ idxHF = np.argwhere(m_tmp.gpy_model.X[:, -1] == 0).T[0] # noqa: N806
+ idxLF = np.argwhere(m_tmp.gpy_model.X[:, -1] == 1).T[0] # noqa: N806
+ X_hf = m_tmp.gpy_model.X[idxHF, :-1] # noqa: N806
+ Y_hf = m_tmp.gpy_model.Y[idxHF, :] # noqa: N806
+ X_lf = m_tmp.gpy_model.X[idxLF, :-1] # noqa: N806
+ Y_lf = m_tmp.gpy_model.Y[idxLF, :] # noqa: N806
+ X_tmp = np.vstack([X_hf, xcandi]) # noqa: N806
+ # any variables
+ Y_tmp = np.vstack([Y_hf, np.zeros((1, Y_hf.shape[1]))]) # noqa: N806
# self.set_XY(m_tmp, X_tmp, Y_tmp, X_lf, Y_lf)
- X_list_tmp, Y_list_tmp = emf.convert_lists_to_array.convert_xy_lists_to_arrays(
- [X_tmp, X_lf], [Y_tmp, Y_lf]
+ X_list_tmp, Y_list_tmp = ( # noqa: N806
+ emf.convert_lists_to_array.convert_xy_lists_to_arrays( # noqa: F821
+ [X_tmp, X_lf], [Y_tmp, Y_lf]
+ )
)
m_tmp.set_data(X=X_list_tmp, Y=Y_list_tmp)
- xq_list = convert_x_list_to_array([xq, np.zeros((0, xq.shape[1]))])
- dummy, Yq_var = m_tmp.predict(xq_list)
-
- elif doeIdx.startswith("LF"):
- idxHF = np.argwhere(m_tmp.gpy_model.X[:, -1] == 0).T[0]
- idxLF = np.argwhere(m_tmp.gpy_model.X[:, -1] == 1).T[0]
- X_hf = m_tmp.gpy_model.X[idxHF, :-1]
- Y_hf = m_tmp.gpy_model.Y[idxHF, :]
- X_lf = m_tmp.gpy_model.X[idxLF, :-1]
- Y_lf = m_tmp.gpy_model.Y[idxLF, :]
- X_tmp = np.vstack([X_lf, xcandi])
- Y_tmp = np.vstack([Y_lf, np.zeros((1, Y_lf.shape[1]))]) # any variables
+ xq_list = convert_x_list_to_array([xq, np.zeros((0, xq.shape[1]))]) # noqa: F821
+ dummy, Yq_var = m_tmp.predict(xq_list) # noqa: N806
+
+ elif doeIdx.startswith('LF'):
+ idxHF = np.argwhere(m_tmp.gpy_model.X[:, -1] == 0).T[0] # noqa: N806
+ idxLF = np.argwhere(m_tmp.gpy_model.X[:, -1] == 1).T[0] # noqa: N806
+ X_hf = m_tmp.gpy_model.X[idxHF, :-1] # noqa: N806
+ Y_hf = m_tmp.gpy_model.Y[idxHF, :] # noqa: N806
+ X_lf = m_tmp.gpy_model.X[idxLF, :-1] # noqa: N806
+ Y_lf = m_tmp.gpy_model.Y[idxLF, :] # noqa: N806
+ X_tmp = np.vstack([X_lf, xcandi]) # noqa: N806
+ # any variables
+ Y_tmp = np.vstack([Y_lf, np.zeros((1, Y_lf.shape[1]))]) # noqa: N806
# self.set_XY(m_tmp, X_hf, Y_hf, X_tmp, Y_tmp)
- X_list_tmp, Y_list_tmp = emf.convert_lists_to_array.convert_xy_lists_to_arrays(
- [X_hf, X_tmp], [Y_hf, Y_tmp]
+ X_list_tmp, Y_list_tmp = ( # noqa: N806
+ emf.convert_lists_to_array.convert_xy_lists_to_arrays( # noqa: F821
+ [X_hf, X_tmp], [Y_hf, Y_tmp]
+ )
)
m_tmp.set_data(X=X_list_tmp, Y=Y_list_tmp)
- xq_list = convert_x_list_to_array([xq, np.zeros((0, xq.shape[1]))])
- dummy, Yq_var = m_tmp.predict(xq_list)
+ xq_list = convert_x_list_to_array([xq, np.zeros((0, xq.shape[1]))]) # noqa: F821
+ dummy, Yq_var = m_tmp.predict(xq_list) # noqa: F841, N806
else:
- print("doe method <{}> is not supported".format(doeIdx),flush=True)
+ print(f'doe method <{doeIdx}> is not supported', flush=True) # noqa: T201
# dummy, Yq_var = self.predict(m_tmp,xq)
- IMSEc1 = 1 / xq.shape[0] * sum(phiqr.flatten() * Yq_var.flatten())
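+ # IMSE estimate: mean predictive variance over the integration points xq, weighted by phiqr.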
+ IMSEc1 = 1 / xq.shape[0] * sum(phiqr.flatten() * Yq_var.flatten()) # noqa: N806
return IMSEc1, i
-class model_info:
- def __init__(
- self, surrogateJson, rvJson, work_dir, x_dim, y_dim, n_processor, idx=0
+class model_info: # noqa: D101
+ def __init__( # noqa: C901
+ self,
+ surrogateJson, # noqa: N803
+ rvJson, # noqa: N803
+ work_dir,
+ x_dim,
+ y_dim,
+ n_processor,
+ idx=0,
):
def exit_tmp(msg):
- print(msg)
- print(msg, file=sys.stderr)
- exit(-1)
+ print(msg) # noqa: T201
+ print(msg, file=sys.stderr) # noqa: T201
+ exit(-1) # noqa: PLR1722
# idx = -1 : no info (dummy) paired with 0
# idx = 0 : single fidelity
@@ -2665,105 +2799,98 @@ def exit_tmp(msg):
self.model_without_sampling = False # default
if idx == 0:
# not MF
- if surrogateJson["method"] == "Sampling and Simulation":
+ if surrogateJson['method'] == 'Sampling and Simulation':
self.is_model = True
- self.is_data = surrogateJson["existingDoE"]
- elif surrogateJson["method"] == "Import Data File":
+ self.is_data = surrogateJson['existingDoE']
+ elif surrogateJson['method'] == 'Import Data File':
self.is_model = False
self.is_data = True
- if not surrogateJson["outputData"]:
+ if not surrogateJson['outputData']:
self.model_without_sampling = True # checkbox not checked...
else:
msg = 'Error reading json: either select "Import Data File" or "Sampling and Simulation"'
exit_tmp(msg)
- elif idx == 1 or idx == 2:
+ elif idx == 1 or idx == 2: # noqa: PLR1714, PLR2004
# MF
self.is_data = True # default
- self.is_model = surrogateJson["fromModel"]
+ self.is_model = surrogateJson['fromModel']
if self.is_model:
- self.is_data = surrogateJson["existingDoE"]
+ self.is_data = surrogateJson['existingDoE']
elif idx == -1:
self.is_data = False
self.is_model = False
if idx == 0:
# single model
- input_file = "templatedir/inpFile.in"
- output_file = "templatedir/outFile.in"
+ input_file = 'templatedir/inpFile.in'
+ output_file = 'templatedir/outFile.in'
elif idx == 1:
# high-fidelity
- input_file = "templatedir/inpFile_HF.in"
- output_file = "templatedir/outFile_HF.in"
- elif idx == 2:
+ input_file = 'templatedir/inpFile_HF.in'
+ output_file = 'templatedir/outFile_HF.in'
+ elif idx == 2: # noqa: PLR2004
# low-fidelity
- input_file = "templatedir/inpFile_LF.in"
- output_file = "templatedir/outFile_LF.in"
+ input_file = 'templatedir/inpFile_LF.in'
+ output_file = 'templatedir/outFile_LF.in'
if self.is_data:
- self.inpData = os.path.join(work_dir, input_file)
- self.outData = os.path.join(work_dir, output_file)
+ self.inpData = os.path.join(work_dir, input_file) # noqa: PTH118
+ self.outData = os.path.join(work_dir, output_file) # noqa: PTH118
self.X_existing = read_txt(self.inpData, exit_tmp)
self.n_existing = self.X_existing.shape[0]
- if not (self.X_existing.shape[1] == self.x_dim):
- msg = "Error importing input data - dimension inconsistent: have {} RV(s) but have {} column(s).".format(
- self.x_dim, self.X_existing.shape[1]
- )
+ if self.X_existing.shape[1] != self.x_dim:
+ msg = f'Error importing input data - dimension inconsistent: have {self.x_dim} RV(s) but have {self.X_existing.shape[1]} column(s).'
exit_tmp(msg)
if not self.model_without_sampling: # i.e. check box clicked
self.Y_existing = read_txt(self.outData, exit_tmp)
- if not (self.Y_existing.shape[1] == self.y_dim):
- msg = "Error importing input data - dimension inconsistent: have {} QoI(s) but have {} column(s).".format(
- self.y_dim, self.Y_existing.shape[1]
- )
+ if self.Y_existing.shape[1] != self.y_dim:
+ msg = f'Error importing input data - dimension inconsistent: have {self.y_dim} QoI(s) but have {self.Y_existing.shape[1]} column(s).'
exit_tmp(msg)
- if not (self.Y_existing.shape[0] == self.X_existing.shape[0]):
- msg = "Error importing input data: numbers of samples of inputs ({}) and outputs ({}) are inconsistent".format(
- self.X_existing.shape[0], self.Y_existing.shape[0]
- )
+ if self.Y_existing.shape[0] != self.X_existing.shape[0]:
+ msg = f'Error importing input data: numbers of samples of inputs ({self.X_existing.shape[0]}) and outputs ({self.Y_existing.shape[0]}) are inconsistent'
exit_tmp(msg)
else:
self.Y_existing = np.zeros((0, y_dim))
else:
- self.inpData = ""
- self.outData = ""
+ self.inpData = ''
+ self.outData = ''
self.X_existing = np.zeros((0, x_dim))
self.Y_existing = np.zeros((0, y_dim))
self.n_existing = 0
if self.is_model:
- self.doe_method = surrogateJson["DoEmethod"]
- self.doe_method = surrogateJson["DoEmethod"]
+ self.doe_method = surrogateJson['DoEmethod']
- self.thr_count = surrogateJson["samples"] # number of samples
- if (self.thr_count==1):
- msg = "The number of samples should be greater."
+ self.thr_count = surrogateJson['samples'] # number of samples
+ if self.thr_count == 1:
+ msg = 'The number of samples should be greater than 1.'
exit_tmp(msg)
- if self.doe_method == "None":
+ if self.doe_method == 'None':
self.user_init = self.thr_count
else:
try:
- self.user_init = surrogateJson["initialDoE"]
- except:
+ self.user_init = surrogateJson['initialDoE']
+ except: # noqa: E722
self.user_init = -1 # automate
- self.nugget_opt = surrogateJson["nuggetOpt"]
- if self.nugget_opt == "Heteroscedastic":
-
- self.numSampToBeRepl = surrogateJson["numSampToBeRepl"]
- self.numRepl = surrogateJson["numRepl"]
+ self.nugget_opt = surrogateJson['nuggetOpt']
+ if self.nugget_opt == 'Heteroscedastic':
+ self.numSampToBeRepl = surrogateJson['numSampToBeRepl']
+ self.numRepl = surrogateJson['numRepl']
self.numSampRepldone = False
if self.numRepl == -1: # use default
self.numRepl = 10
- #elif self.numRepl < 2 :
+ # elif self.numRepl < 2 :
# msg = "Error reading json: number of replications should be greater than 1 and a value greater than 5 is recommended"
# exit_tmp(msg)
@@ -2777,42 +2904,42 @@ def exit_tmp(msg):
self.numSampToBeRepl = 0
self.numRepl = 0
self.numSampRepldone = True
-
- ## convergence criteria
- self.thr_NRMSE = surrogateJson["accuracyLimit"]
- self.thr_t = surrogateJson["timeLimit"] * 60
+
+ # convergence criteria
+ self.thr_NRMSE = surrogateJson['accuracyLimit']
+ self.thr_t = surrogateJson['timeLimit'] * 60
self.xrange = np.empty((0, 2), float)
- self.xDistTypeArr=[]
+ self.xDistTypeArr = []
for rv in rvJson:
- if rv["distribution"]=="Uniform":
- self.xrange = np.vstack(
- (self.xrange, [rv["lowerbound"], rv["upperbound"]])
- )
- self.xDistTypeArr += ["U"]
- elif rv["distribution"]=="discrete_design_set_string":
+ if rv['distribution'] == 'Uniform':
self.xrange = np.vstack(
- (self.xrange, [1, len(rv["elements"])])
+ (self.xrange, [rv['lowerbound'], rv['upperbound']])
)
- self.xDistTypeArr += ["DS"]
+ self.xDistTypeArr += ['U']
+ elif rv['distribution'] == 'discrete_design_set_string':
+ self.xrange = np.vstack((self.xrange, [1, len(rv['elements'])]))
+ self.xDistTypeArr += ['DS']
else:
- msg = "Error in input RV: all RV should be set to Uniform distribution"
+ msg = 'Error in input RV: all RVs should be set to the Uniform distribution'
exit_tmp(msg)
-
else:
- self.doe_method = "None"
+ self.doe_method = 'None'
self.user_init = 0
self.thr_count = 0
self.thr_NRMSE = 0.02
- self.thr_t = float("inf")
+ self.thr_t = float('inf')
if self.is_data:
self.xrange = np.vstack(
- [np.min(self.X_existing, axis=0), np.max(self.X_existing, axis=0)]
+ [
+ np.min(self.X_existing, axis=0),
+ np.max(self.X_existing, axis=0),
+ ]
).T
else:
self.xrange = np.zeros((self.x_dim, 2))
- # TODO should I use "effective" number of dims?
+ # TODO should I use "effective" number of dims? # noqa: TD002, TD004
self.ll = self.xrange[:, 1] - self.xrange[:, 0]
if self.user_init <= 0: # automated choice 8*D
n_init_tmp = int(np.ceil(8 * self.x_dim / n_processor) * n_processor)
@@ -2824,57 +2951,57 @@ def exit_tmp(msg):
# self.n_init = 4
self.doe_method = self.doe_method.lower()
-
-
-
-
- def sampling(self, n):
+ def sampling(self, n): # noqa: D102
# n is "total" samples
if n > 0:
- X_samples = np.zeros((n, self.x_dim))
- ## LHS
+ X_samples = np.zeros((n, self.x_dim)) # noqa: N806
+ # LHS
sampler = qmc.LatinHypercube(d=self.x_dim)
- U = sampler.random(n=n)
+ U = sampler.random(n=n) # noqa: N806
for nx in range(self.x_dim):
-
- if (self.xDistTypeArr[nx]=="U"):
+ if self.xDistTypeArr[nx] == 'U':
X_samples[:, nx] = (
- U[:, nx] * (self.xrange[nx, 1] - self.xrange[nx, 0])
- + self.xrange[nx, 0]
+ U[:, nx] * (self.xrange[nx, 1] - self.xrange[nx, 0])
+ + self.xrange[nx, 0]
)
else:
- X_samples[:, nx] = np.ceil(U[:, nx]*self.xrange[nx, 1])
-
- if (self.numRepl)*self.numSampToBeRepl >0 and not self.numSampRepldone:
- X_samples = np.vstack([X_samples,np.tile(X_samples[0:self.numSampToBeRepl,:],(self.numRepl-1,1))])
- self.numSampRepldone = True;
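+ # Discrete design-set variable: map LHS samples in (0, 1] to integer category indices 1..len(elements).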
+ X_samples[:, nx] = np.ceil(U[:, nx] * self.xrange[nx, 1])
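+ # Replicate the first numSampToBeRepl samples (numRepl - 1) additional times; used with the heteroscedastic nugget option.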
+ if (
+ self.numRepl
+ ) * self.numSampToBeRepl > 0 and not self.numSampRepldone:
+ X_samples = np.vstack( # noqa: N806
+ [
+ X_samples,
+ np.tile(
+ X_samples[0 : self.numSampToBeRepl, :],
+ (self.numRepl - 1, 1),
+ ),
+ ]
+ )
+ self.numSampRepldone = True
else:
- X_samples = np.zeros((0, self.x_dim))
-
+ X_samples = np.zeros((0, self.x_dim)) # noqa: N806
return X_samples
- def resampling(self, X, n):
+ def resampling(self, X, n): # noqa: D102, N803, PLR6301
# n is "total" samples
# cube bounds obtained from data
dim = X.shape[1]
- minvals = np.min(X,axis=0)
- maxvals = np.max(X,axis=0)
- print(dim)
- X_samples = np.zeros((n, dim))
+ minvals = np.min(X, axis=0)
+ maxvals = np.max(X, axis=0)
+ print(dim) # noqa: T201
+ X_samples = np.zeros((n, dim)) # noqa: N806
sampler = qmc.LatinHypercube(d=dim)
- U = sampler.random(n=n)
+ U = sampler.random(n=n) # noqa: N806
for nx in range(dim):
- X_samples[:, nx] = (
- U[:, nx] * (maxvals[nx] - minvals[nx]) + minvals[nx]
- )
-
- return X_samples
+ X_samples[:, nx] = U[:, nx] * (maxvals[nx] - minvals[nx]) + minvals[nx]
+ return X_samples
# def set_FEM(self, rv_name, do_parallel, y_dim, t_init):
# self.rv_name = rv_name
@@ -2897,16 +3024,16 @@ def resampling(self, X, n):
# return X, Y, id_sim
-### Additional functions
+# Additional functions
-def weights_node2(node, nodes, ls):
+def weights_node2(node, nodes, ls): # noqa: D103
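+ # Gaussian weights of nodes relative to node, with per-dimension distances scaled by the lengthscales ls; normalized to sum to 1.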
nodes = np.asarray(nodes)
deltas = nodes - node
deltas_norm = np.zeros(deltas.shape)
for nx in range(ls.shape[0]):
deltas_norm[:, nx] = (
- (deltas[:, nx]) / ls[nx] * nodes.shape[0]
+ (deltas[:, nx]) / ls[nx] * nodes.shape[0]
) # additional weights?
dist_ls = np.sqrt(np.sum(pow(deltas_norm, 2), axis=1))
weig = np.exp(-pow(dist_ls, 2))
@@ -2915,67 +3042,81 @@ def weights_node2(node, nodes, ls):
return weig / sum(weig)
-def calibrating(m_tmp, nugget_opt_tmp, nuggetVal, normVar, do_mf, do_heteroscedastic, nopt, ny, n_processor): # nuggetVal = self.nuggetVal[ny]
-
- msg = ""
+def calibrating( # noqa: C901, D103
+ m_tmp,
+ nugget_opt_tmp,
+ nuggetVal, # noqa: N803
+ normVar, # noqa: N803
+ do_mf,
+ do_heteroscedastic,
+ nopt,
+ ny,
+ n_processor,
+): # nuggetVal = self.nuggetVal[ny]
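+ # Configure the GP noise (nugget) term according to nugget_opt_tmp, then optimize hyperparameters with parallel restarts.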
+ msg = ''
if do_heteroscedastic:
- variance_keyword = "het_Gauss.variance"
+ variance_keyword = 'het_Gauss.variance'
else:
- variance_keyword = "Gaussian_noise.variance"
+ variance_keyword = 'Gaussian_noise.variance'
if not do_mf:
- if nugget_opt_tmp == "Optimize":
- #m_tmp[variance_keyword].unfix()
- X = m_tmp.X
- for parname in m_tmp.parameter_names():
- if parname.endswith("lengthscale"):
- for nx in range(X.shape[1]):
- myrange = np.max(X, axis=0) - np.min(X, axis=0)
- exec('m_tmp.'+parname+'[[nx]] = myrange[nx]')
-
- elif nugget_opt_tmp == "Fixed Values":
- m_tmp[variance_keyword].constrain_fixed(nuggetVal[ny]/normVar,warning=False)
- elif nugget_opt_tmp == "Fixed Bounds":
- m_tmp[variance_keyword].constrain_bounded(nuggetVal[ny][0]/normVar, nuggetVal[ny][1]/normVar,warning=False)
- elif nugget_opt_tmp == "Zero":
- m_tmp[variance_keyword].constrain_fixed(0,warning=False)
- X = m_tmp.X
+ if nugget_opt_tmp == 'Optimize':
+ # m_tmp[variance_keyword].unfix()
+ X = m_tmp.X # noqa: N806
for parname in m_tmp.parameter_names():
- if parname.endswith("lengthscale"):
- for nx in range(X.shape[1]):
+ if parname.endswith('lengthscale'):
+ for nx in range(X.shape[1]): # noqa: B007
myrange = np.max(X, axis=0) - np.min(X, axis=0)
- exec('m_tmp.'+parname+'[[nx]] = myrange[nx]')
- elif nugget_opt_tmp == "Heteroscedastic":
+ exec('m_tmp.' + parname + '[[nx]] = myrange[nx]') # noqa: S102
- X = m_tmp.X
+ elif nugget_opt_tmp == 'Fixed Values':
+ m_tmp[variance_keyword].constrain_fixed(
+ nuggetVal[ny] / normVar, warning=False
+ )
+ elif nugget_opt_tmp == 'Fixed Bounds':
+ m_tmp[variance_keyword].constrain_bounded(
+ nuggetVal[ny][0] / normVar, nuggetVal[ny][1] / normVar, warning=False
+ )
+ elif nugget_opt_tmp == 'Zero':
+ m_tmp[variance_keyword].constrain_fixed(0, warning=False)
+ X = m_tmp.X # noqa: N806
for parname in m_tmp.parameter_names():
- if parname.endswith("lengthscale"):
- for nx in range(X.shape[1]):
+ if parname.endswith('lengthscale'):
+ for nx in range(X.shape[1]): # noqa: B007
myrange = np.max(X, axis=0) - np.min(X, axis=0)
- exec('m_tmp.'+parname+'[[nx]] = myrange[nx]*100')
- exec('m_tmp.'+parname+'[[nx]].constrain_bounded(myrange[nx] / X.shape[0], myrange[nx]*100,warning=False)')
- #m_tmp[parname][nx] = myrange[nx]*100
- #m_tmp[parname][nx].constrain_bounded(myrange[nx] / X.shape[0], myrange[nx]*100)
- # TODO change the kernel
+ exec('m_tmp.' + parname + '[[nx]] = myrange[nx]') # noqa: S102
+ elif nugget_opt_tmp == 'Heteroscedastic':
+ X = m_tmp.X # noqa: N806
+ for parname in m_tmp.parameter_names():
+ if parname.endswith('lengthscale'):
+ for nx in range(X.shape[1]): # noqa: B007
+ myrange = np.max(X, axis=0) - np.min(X, axis=0) # noqa: F841
+ exec('m_tmp.' + parname + '[[nx]] = myrange[nx]*100') # noqa: S102
+ exec( # noqa: S102
+ 'm_tmp.'
+ + parname
+ + '[[nx]].constrain_bounded(myrange[nx] / X.shape[0], myrange[nx]*100,warning=False)'
+ )
+ # m_tmp[parname][nx] = myrange[nx]*100
+ # m_tmp[parname][nx].constrain_bounded(myrange[nx] / X.shape[0], myrange[nx]*100)
+ # TODO change the kernel # noqa: TD002, TD004
else:
- msg = "Nugget keyword not identified: " + nugget_opt_tmp
-
+ msg = 'Nugget keyword not identified: ' + nugget_opt_tmp
if do_mf:
- # TODO: is this right?
- if nugget_opt_tmp == "Optimize":
+ # TODO: is this right? # noqa: TD002
+ if nugget_opt_tmp == 'Optimize':
m_tmp.gpy_model.mixed_noise.Gaussian_noise.unfix()
m_tmp.gpy_model.mixed_noise.Gaussian_noise_1.unfix()
- elif nugget_opt_tmp == "Fixed Values":
+ elif nugget_opt_tmp == 'Fixed Values':
# m_tmp.gpy_model.mixed_noise.Gaussian_noise.constrain_fixed(self.nuggetVal[ny])
# m_tmp.gpy_model.mixed_noise.Gaussian_noise_1.constrain_fixed(self.nuggetVal[ny])
msg = 'Currently Nugget Fixed Values option is not supported'
# self.exit(msg)
-
- elif nugget_opt_tmp == "Fixed Bounds":
+ elif nugget_opt_tmp == 'Fixed Bounds':
# m_tmp.gpy_model.mixed_noise.Gaussian_noise.constrain_bounded(self.nuggetVal[ny][0],
# self.nuggetVal[ny][1])
# m_tmp.gpy_model.mixed_noise.Gaussian_noise_1.constrain_bounded(self.nuggetVal[ny][0],
@@ -2983,88 +3124,101 @@ def calibrating(m_tmp, nugget_opt_tmp, nuggetVal, normVar, do_mf, do_heterosceda
msg = 'Currently Nugget Fixed Bounds option is not supported'
# self.exit(msg)
- elif nugget_opt_tmp == "Zero":
- m_tmp.gpy_model.mixed_noise.Gaussian_noise.constrain_fixed(0,warning=False)
- m_tmp.gpy_model.mixed_noise.Gaussian_noise_1.constrain_fixed(0,warning=False)
+ elif nugget_opt_tmp == 'Zero':
+ m_tmp.gpy_model.mixed_noise.Gaussian_noise.constrain_fixed(
+ 0, warning=False
+ )
+ m_tmp.gpy_model.mixed_noise.Gaussian_noise_1.constrain_fixed(
+ 0, warning=False
+ )
- if msg == "":
+ if msg == '': # noqa: PLC1901
m_tmp.optimize()
- #n=0;
+ # n=0;
if not do_mf:
-
- m_tmp.optimize_restarts(num_restarts=nopt, parallel=True, num_processes=n_processor,verbose=True)
+ m_tmp.optimize_restarts(
+ num_restarts=nopt,
+ parallel=True,
+ num_processes=n_processor,
+ verbose=True,
+ )
else:
- m_tmp.gpy_model.optimize_restarts(num_restarts=nopt, parallel=True, num_processes=n_processor,verbose=False)
- print(m_tmp)
+ m_tmp.gpy_model.optimize_restarts(
+ num_restarts=nopt,
+ parallel=True,
+ num_processes=n_processor,
+ verbose=False,
+ )
+ print(m_tmp) # noqa: T201
# while n+20 <= nopt:
# m_tmp.optimize_restarts(num_restarts=20)
# n = n+20
# if not nopt==n:
# m_tmp.optimize_restarts(num_restarts=nopt-n)
-
- print("",flush=True)
+ print(flush=True) # noqa: T201
return m_tmp, msg, ny
-def closest_node(x, X, ll):
- X = np.asarray(X)
+def closest_node(x, X, ll): # noqa: N803, D103
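+ # Return the index of the row of X closest to x, with each dimension scaled by the lengthscale ll.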
+ X = np.asarray(X) # noqa: N806
deltas = X - x
deltas_norm = np.zeros(deltas.shape)
for nx in range(X.shape[1]):
deltas_norm[:, nx] = deltas[:, nx] / ll[nx]
- dist_2 = np.einsum("ij,ij->i", deltas_norm, deltas_norm) # square sum
+ dist_2 = np.einsum('ij,ij->i', deltas_norm, deltas_norm) # square sum
return np.argmin(dist_2)
-def read_txt(text_dir, exit_fun):
- if not os.path.exists(text_dir):
- msg = "Error: file does not exist: " + text_dir
+def read_txt(text_dir, exit_fun): # noqa: D103
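+ # Read a whitespace- or comma-delimited table; leading header lines starting with '%' are skipped.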
+ if not os.path.exists(text_dir): # noqa: PTH110
+ msg = 'Error: file does not exist: ' + text_dir
exit_fun(msg)
- with open(text_dir) as f:
+ with open(text_dir) as f: # noqa: PLW1514, PTH123
# Iterate through the file until the table starts
header_count = 0
for line in f:
- if line.replace(" ", "").startswith("%"):
- header_count = header_count + 1
+ if line.replace(' ', '').startswith('%'):
+ header_count = header_count + 1 # noqa: PLR6104
else:
break
# print(line)
try:
- with open(text_dir) as f:
- X = np.loadtxt(f, skiprows=header_count)
+ with open(text_dir) as f: # noqa: PLW1514, PLW2901, PTH123
+ X = np.loadtxt(f, skiprows=header_count) # noqa: N806
except ValueError:
- with open(text_dir) as f:
+ with open(text_dir) as f: # noqa: PLW1514, PLW2901, PTH123
try:
- X = np.genfromtxt(f, skip_header=header_count, delimiter=",")
- X=np.atleast_2d(X)
+ X = np.genfromtxt(f, skip_header=header_count, delimiter=',') # noqa: N806
+ X = np.atleast_2d(X) # noqa: N806
# if there are extra delimiter, remove nan
if np.isnan(X[-1, -1]):
- X = np.delete(X, -1, 1)
+ X = np.delete(X, -1, 1) # noqa: N806
# X = np.loadtxt(f, skiprows=header_count, delimiter=',')
except ValueError:
- msg = "Error: unsupported file format " + text_dir
+ msg = 'Error: unsupported file format ' + text_dir
exit_fun(msg)
if np.isnan(X).any():
- msg = "Error: unsupported file format " + text_dir + ".\nThe header should have % character in front."
+ msg = (
+ 'Error: unsupported file format '
+ + text_dir
+ + '.\nHeader lines should start with the % character.'
+ )
exit_fun(msg)
if X.ndim == 1:
- X = np.array([X]).transpose()
+ X = np.array([X]).transpose() # noqa: N806
return X
-
-
-if __name__ == "__main__":
+if __name__ == '__main__':
main(sys.argv)
sys.stderr.close()
-
# try:
# main(sys.argv)
# open(os.path.join(os.getcwd(), errFileName ), 'w').close()
diff --git a/modules/performUQ/UCSD_UQ/UCSD_UQ.py b/modules/performUQ/UCSD_UQ/UCSD_UQ.py
index 16c2c4327..e77adbd13 100644
--- a/modules/performUQ/UCSD_UQ/UCSD_UQ.py
+++ b/modules/performUQ/UCSD_UQ/UCSD_UQ.py
@@ -1,57 +1,56 @@
-import argparse
+import argparse # noqa: CPY001, D100, INP001
import os
import platform
+import shlex
import stat
-import subprocess
-from pathlib import Path
+import subprocess # noqa: S404
import sys
-import shlex
+from pathlib import Path
-def main(args):
+def main(args): # noqa: D103
parser = argparse.ArgumentParser()
- parser.add_argument("--workflowInput")
- parser.add_argument("--workflowOutput")
- parser.add_argument("--driverFile")
- parser.add_argument("--runType")
-
- args, unknowns = parser.parse_known_args()
+ parser.add_argument('--workflowInput')
+ parser.add_argument('--workflowOutput')
+ parser.add_argument('--driverFile')
+ parser.add_argument('--runType')
- workflowInput = args.workflowInput
- workflowOutput = args.workflowOutput
- driverFile = args.driverFile
- runType = args.runType
+ args, unknowns = parser.parse_known_args() # noqa: F841
- if runType in ["runningLocal"]:
+ workflowInput = args.workflowInput # noqa: N806
+ workflowOutput = args.workflowOutput # noqa: N806, F841
+ driverFile = args.driverFile # noqa: N806
+ runType = args.runType # noqa: N806
- if platform.system() == "Windows":
- pythonCommand = "python"
- driverFile = driverFile + ".bat"
+ if runType == 'runningLocal':
+ if platform.system() == 'Windows':
+ pythonCommand = 'python' # noqa: N806
+ driverFile = driverFile + '.bat' # noqa: N806, PLR6104
else:
- pythonCommand = "python3"
+ pythonCommand = 'python3' # noqa: N806
- mainScriptDir = os.path.dirname(os.path.realpath(__file__))
- mainScript = os.path.join(mainScriptDir, "mainscript.py")
- templateDir = os.getcwd()
- tmpSimCenterDir = str(Path(templateDir).parents[0])
+ mainScriptDir = os.path.dirname(os.path.realpath(__file__)) # noqa: PTH120, N806
+ mainScript = os.path.join(mainScriptDir, 'mainscript.py') # noqa: PTH118, N806
+ templateDir = os.getcwd() # noqa: PTH109, N806
+ tmpSimCenterDir = str(Path(templateDir).parents[0]) # noqa: N806
# Change permission of driver file
- os.chmod(driverFile, stat.S_IXUSR | stat.S_IRUSR | stat.S_IXOTH)
- st = os.stat(driverFile)
- os.chmod(driverFile, st.st_mode | stat.S_IEXEC)
- driverFile = "./" + driverFile
- print("WORKFLOW: " + driverFile)
+ os.chmod(driverFile, stat.S_IXUSR | stat.S_IRUSR | stat.S_IXOTH) # noqa: PTH101
+ st = os.stat(driverFile) # noqa: PTH116
+ os.chmod(driverFile, st.st_mode | stat.S_IEXEC) # noqa: PTH101
+ driverFile = './' + driverFile # noqa: N806
+ print('WORKFLOW: ' + driverFile) # noqa: T201
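+ # Build the command that launches mainscript.py with the working directories, run type, driver file, and workflow input.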
command = (
f'"{pythonCommand}" "{mainScript}" "{tmpSimCenterDir}"'
f' "{templateDir}" {runType} {driverFile} {workflowInput}'
)
- print(command)
+ print(command) # noqa: T201
command_list = shlex.split(command)
- result = subprocess.run(
+ result = subprocess.run( # noqa: S603, UP022
command_list,
stdout=subprocess.PIPE,
stderr=subprocess.PIPE,
@@ -59,18 +58,18 @@ def main(args):
text=True,
)
- err_file = Path(tmpSimCenterDir) / "UCSD_UQ.err"
+ err_file = Path(tmpSimCenterDir) / 'UCSD_UQ.err'
err_file.touch()
try:
result.check_returncode()
except subprocess.CalledProcessError:
- with open(err_file, "a") as f:
- f.write(f"ERROR: {result.stderr}\n\n")
- f.write(f"The command was: {result.args}\n\n")
- f.write(f"The return code was: {result.returncode}\n\n")
- f.write(f"The output of the command was: {result.stdout}\n\n")
+ with open(err_file, 'a') as f: # noqa: PLW1514, PTH123
+ f.write(f'ERROR: {result.stderr}\n\n')
+ f.write(f'The command was: {result.args}\n\n')
+ f.write(f'The return code was: {result.returncode}\n\n')
+ f.write(f'The output of the command was: {result.stdout}\n\n')
-if __name__ == "__main__":
+if __name__ == '__main__':
main(sys.argv[1:])
diff --git a/modules/performUQ/UCSD_UQ/calibration_utilities.py b/modules/performUQ/UCSD_UQ/calibration_utilities.py
index 7ddd6d4b6..882060ca3 100644
--- a/modules/performUQ/UCSD_UQ/calibration_utilities.py
+++ b/modules/performUQ/UCSD_UQ/calibration_utilities.py
@@ -1,38 +1,39 @@
-import numpy as np
-from scipy.linalg import block_diag
-import os
+import os # noqa: CPY001, D100, INP001
import shutil
+import sys
import time
-from numpy.typing import NDArray
-from typing import Callable
-from typing import TextIO
from importlib import import_module
-import sys
+from typing import Callable, TextIO
+import numpy as np
import pdfs
+from numpy.typing import NDArray
+from scipy.linalg import block_diag
class DataProcessingError(Exception):
"""Raised when errors found when processing user-supplied calibration and covariance data.
- Attributes:
+ Attributes
+ ----------
message -- explanation of the error
+
"""
def __init__(self, message):
self.message = message
-class CovarianceMatrixPreparer:
+class CovarianceMatrixPreparer: # noqa: D101
def __init__(
self,
- calibrationData: np.ndarray,
- edpLengthsList: list[int],
- edpNamesList: list[str],
- workdirMain: str,
- numExperiments: int,
- logFile: TextIO,
- runType: str,
+ calibrationData: np.ndarray, # noqa: N803
+ edpLengthsList: list[int], # noqa: FA102, N803
+ edpNamesList: list[str], # noqa: FA102, N803
+ workdirMain: str, # noqa: N803
+ numExperiments: int, # noqa: N803
+ logFile: TextIO, # noqa: N803
+ runType: str, # noqa: N803
) -> None:
self.calibrationData = calibrationData
self.edpLengthsList = edpLengthsList
@@ -42,256 +43,215 @@ def __init__(
self.logFile = logFile
self.runType = runType
- self.logFile.write("\n\n==========================")
- self.logFile.write("\nProcessing options for variance/covariance:")
+ self.logFile.write('\n\n==========================')
+ self.logFile.write('\nProcessing options for variance/covariance:')
self.logFile.write(
- "\n\tOne variance value or covariance matrix will be used per response quantity per experiment."
+ '\n\tOne variance value or covariance matrix will be used per response quantity per experiment.'
)
self.logFile.write(
- "\n\tIf the user does not supply variance or covariance data, a default variance value will be\n\t"
- "used per response quantity, which is constant across experiments. The default variance is\n\t"
- "computed as the variance of the transformed data, if there is data from more than one "
- "experiment.\n\t"
- "If there is data from only one experiment, then a default variance value is computed by \n\t"
- "assuming that the standard deviation of the error is 5% of the absolute maximum value of \n\t"
- "the corresponding transformed response data."
+ '\n\tIf the user does not supply variance or covariance data, a default variance value will be\n\t'
+ 'used per response quantity, which is constant across experiments. The default variance is\n\t'
+ 'computed as the variance of the transformed data, if there is data from more than one '
+ 'experiment.\n\t'
+ 'If there is data from only one experiment, then a default variance value is computed by \n\t'
+ 'assuming that the standard deviation of the error is 5% of the absolute maximum value of \n\t'
+ 'the corresponding transformed response data.'
)
- def getDefaultErrorVariances(self):
+ def getDefaultErrorVariances(self): # noqa: N802, D102
# For each response variable, compute the variance of the data. These will be the default error variance
# values used in the calibration process. Values of the multiplier on these default error variance values will be
# calibrated. There will be one such error variance value per response quantity. If there is only data from one
# experiment,then the default error std.dev. value is assumed to be 5% of the absolute maximum value of the data
# corresponding to that response quantity.
- defaultErrorVariances = 1e-12 * np.ones_like(
+ defaultErrorVariances = 1e-12 * np.ones_like( # noqa: N806
self.edpLengthsList, dtype=float
)
# defaultErrorVariances = np.zeros_like(self.edpLengthsList, dtype=float)
if (
np.shape(self.calibrationData)[0] > 1
): # if there are more than 1 rows of data, i.e. data from multiple experiments
- currentIndex = 0
+ currentIndex = 0 # noqa: N806
for i in range(len(self.edpLengthsList)):
- dataSlice = self.calibrationData[
+ dataSlice = self.calibrationData[ # noqa: N806
:, currentIndex : currentIndex + self.edpLengthsList[i]
]
v = np.nanvar(dataSlice)
if v != 0:
defaultErrorVariances[i] = v
- currentIndex += self.edpLengthsList[i]
+ currentIndex += self.edpLengthsList[i] # noqa: N806
else:
- currentIndex = 0
+ currentIndex = 0 # noqa: N806
for i in range(len(self.edpLengthsList)):
- dataSlice = self.calibrationData[
+ dataSlice = self.calibrationData[ # noqa: N806
:, currentIndex : currentIndex + self.edpLengthsList[i]
]
v = np.max(np.absolute(dataSlice))
if v != 0:
defaultErrorVariances[i] = (0.05 * v) ** 2
- currentIndex += self.edpLengthsList[i]
+ currentIndex += self.edpLengthsList[i] # noqa: N806
self.defaultErrorVariances = defaultErrorVariances
- def createCovarianceMatrix(self):
- covarianceMatrixList = []
- covarianceTypeList = []
+ def createCovarianceMatrix(self): # noqa: C901, N802, D102
+ covarianceMatrixList = [] # noqa: N806
+ covarianceTypeList = [] # noqa: N806
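+ # Accepted per-EDP covariance input shapes: 1x1 (scalar), 1xN or Nx1 (diagonal), NxN (full matrix), where N is the EDP length.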
- logFile = self.logFile
- edpNamesList = self.edpNamesList
- workdirMain = self.workdirMain
- numExperiments = self.numExperiments
+ logFile = self.logFile # noqa: N806
+ edpNamesList = self.edpNamesList # noqa: N806
+ workdirMain = self.workdirMain # noqa: N806
+ numExperiments = self.numExperiments # noqa: N806
- logFile.write("\n\nLooping over the experiments and EDPs")
+ logFile.write('\n\nLooping over the experiments and EDPs')
# First, check if the user has passed in any covariance matrix data
- for expNum in range(1, numExperiments + 1):
- logFile.write("\n\nExperiment number: {}".format(expNum))
- for i, edpName in enumerate(edpNamesList):
- logFile.write("\n\tEDP: {}".format(edpName))
- covarianceFileName = "{}.{}.sigma".format(edpName, expNum)
- covarianceFile = os.path.join(workdirMain, covarianceFileName)
+ for expNum in range(1, numExperiments + 1): # noqa: N806, PLR1702
+ logFile.write(f'\n\nExperiment number: {expNum}')
+ for i, edpName in enumerate(edpNamesList): # noqa: N806
+ logFile.write(f'\n\tEDP: {edpName}')
+ covarianceFileName = f'{edpName}.{expNum}.sigma' # noqa: N806
+ covarianceFile = os.path.join(workdirMain, covarianceFileName) # noqa: PTH118, N806
logFile.write(
- "\n\t\tChecking to see if user-supplied file '{}' exists in '{}'".format(
- covarianceFileName, workdirMain
- )
+ f"\n\t\tChecking to see if user-supplied file '{covarianceFileName}' exists in '{workdirMain}'"
)
- if os.path.isfile(covarianceFile):
- logFile.write("\n\t\tFound a user supplied file.")
- if self.runType == "runningLocal":
+ if os.path.isfile(covarianceFile): # noqa: PTH113
+ logFile.write('\n\t\tFound a user supplied file.')
+ if self.runType == 'runningLocal':
src = covarianceFile
- dst = os.path.join(workdirMain, covarianceFileName)
+ dst = os.path.join(workdirMain, covarianceFileName) # noqa: PTH118
logFile.write(
- "\n\t\tCopying user-supplied covariance file from {} to {}".format(
- src, dst
- )
+ f'\n\t\tCopying user-supplied covariance file from {src} to {dst}'
)
shutil.copyfile(src, dst)
- covarianceFile = dst
+ covarianceFile = dst # noqa: N806
logFile.write(
- "\n\t\tReading in user supplied covariance matrix from file: '{}'".format(
- covarianceFile
- )
+ f"\n\t\tReading in user supplied covariance matrix from file: '{covarianceFile}'"
)
# Check the data in the covariance matrix file
- tmpCovFile = os.path.join(
- workdirMain, "quoFEMTempCovMatrixFile.sigma"
+ tmpCovFile = os.path.join( # noqa: PTH118, N806
+ workdirMain, 'quoFEMTempCovMatrixFile.sigma'
)
- numRows = 0
- numCols = 0
+ numRows = 0 # noqa: N806
+ numCols = 0 # noqa: N806
linenum = 0
- with open(tmpCovFile, "w") as f1:
- with open(covarianceFile, "r") as f:
+ with open(tmpCovFile, 'w') as f1: # noqa: PLW1514, PTH123, SIM117
+ with open(covarianceFile) as f: # noqa: PLW1514, PTH123
for line in f:
linenum += 1
if len(line.strip()) == 0:
continue
- else:
- line = line.replace(",", " ")
+ else: # noqa: RET507
+ line = line.replace(',', ' ') # noqa: PLW2901
# Check the length of the line
words = line.split()
if numRows == 0:
- numCols = len(words)
- else:
- if numCols != len(words):
- logFile.write(
- "\nERROR: The number of columns in line {} do not match the "
- "number of columns in line {} of file {}.".format(
- numRows,
- numRows - 1,
- covarianceFile,
- )
- )
- raise DataProcessingError(
- "ERROR: The number of columns in line {} do not match the "
- "number of columns in line {} of file {}.".format(
- numRows,
- numRows - 1,
- covarianceFile,
- )
- )
- tempLine = ""
+ numCols = len(words) # noqa: N806
+ elif numCols != len(words):
+ logFile.write(
+ f'\nERROR: The number of columns in line {numRows} do not match the '
+ f'number of columns in line {numRows - 1} of file {covarianceFile}.'
+ )
+ raise DataProcessingError( # noqa: TRY003
+ f'ERROR: The number of columns in line {numRows} do not match the ' # noqa: EM102
+ f'number of columns in line {numRows - 1} of file {covarianceFile}.'
+ )
+ tempLine = '' # noqa: N806
for w in words:
- tempLine += "{} ".format(w)
+ tempLine += f'{w} ' # noqa: N806
# logFile.write("\ncovMatrixLine {}: ".format(linenum), tempLine)
if numRows == 0:
f1.write(tempLine)
else:
- f1.write("\n")
+ f1.write('\n')
f1.write(tempLine)
- numRows += 1
- covMatrix = np.genfromtxt(tmpCovFile)
+ numRows += 1 # noqa: N806
+ covMatrix = np.genfromtxt(tmpCovFile) # noqa: N806
covarianceMatrixList.append(covMatrix)
# os.remove(tmpCovFile)
logFile.write(
- "\n\t\tFinished reading the file. Checking the dimensions of the covariance data."
+ '\n\t\tFinished reading the file. Checking the dimensions of the covariance data.'
)
if numRows == 1:
if numCols == 1:
- covarianceTypeList.append("scalar")
+ covarianceTypeList.append('scalar')
logFile.write(
- "\n\t\tScalar variance value provided. The covariance matrix is an identity matrix "
- "multiplied by this value."
+ '\n\t\tScalar variance value provided. The covariance matrix is an identity matrix '
+ 'multiplied by this value.'
)
elif numCols == self.edpLengthsList[i]:
- covarianceTypeList.append("diagonal")
+ covarianceTypeList.append('diagonal')
logFile.write(
- "\n\t\tA row vector provided. This will be treated as the diagonal entries of the "
- "covariance matrix."
+ '\n\t\tA row vector provided. This will be treated as the diagonal entries of the '
+ 'covariance matrix.'
)
else:
logFile.write(
- "\nERROR: The number of columns of data in the covariance matrix file {}"
- " must be either 1 or {}. Found {} columns".format(
- covarianceFile,
- self.edpLengthsList[i],
- numCols,
- )
+ f'\nERROR: The number of columns of data in the covariance matrix file {covarianceFile}'
+ f' must be either 1 or {self.edpLengthsList[i]}. Found {numCols} columns'
)
- raise DataProcessingError(
- "ERROR: The number of columns of data in the covariance matrix file {}"
- " must be either 1 or {}. Found {} columns".format(
- covarianceFile,
- self.edpLengthsList[i],
- numCols,
- )
+ raise DataProcessingError( # noqa: TRY003
+ f'ERROR: The number of columns of data in the covariance matrix file {covarianceFile}' # noqa: EM102
+ f' must be either 1 or {self.edpLengthsList[i]}. Found {numCols} columns'
)
elif numRows == self.edpLengthsList[i]:
if numCols == 1:
- covarianceTypeList.append("diagonal")
+ covarianceTypeList.append('diagonal')
logFile.write(
- "\t\tA column vector provided. This will be treated as the diagonal entries of the "
- "covariance matrix."
+ '\t\tA column vector provided. This will be treated as the diagonal entries of the '
+ 'covariance matrix.'
)
elif numCols == self.edpLengthsList[i]:
- covarianceTypeList.append("matrix")
- logFile.write(
- "\n\t\tA full covariance matrix provided."
- )
+ covarianceTypeList.append('matrix')
+ logFile.write('\n\t\tA full covariance matrix provided.')
else:
logFile.write(
- "\nERROR: The number of columns of data in the covariance matrix file {}"
- " must be either 1 or {}. Found {} columns".format(
- covarianceFile,
- self.edpLengthsList[i],
- numCols,
- )
+ f'\nERROR: The number of columns of data in the covariance matrix file {covarianceFile}'
+ f' must be either 1 or {self.edpLengthsList[i]}. Found {numCols} columns'
)
- raise DataProcessingError(
- "ERROR: The number of columns of data in the covariance matrix file {}"
- " must be either 1 or {}. Found {} columns".format(
- covarianceFile,
- self.edpLengthsList[i],
- numCols,
- )
+ raise DataProcessingError( # noqa: TRY003
+ f'ERROR: The number of columns of data in the covariance matrix file {covarianceFile}' # noqa: EM102
+ f' must be either 1 or {self.edpLengthsList[i]}. Found {numCols} columns'
)
else:
logFile.write(
- "\nERROR: The number of rows of data in the covariance matrix file {}"
- " must be either 1 or {}. Found {} rows".format(
- covarianceFile, self.edpLengthsList[i], numCols
- )
+ f'\nERROR: The number of rows of data in the covariance matrix file {covarianceFile}'
+ f' must be either 1 or {self.edpLengthsList[i]}. Found {numCols} rows'
)
- raise DataProcessingError(
- "ERROR: The number of rows of data in the covariance matrix file {}"
- " must be either 1 or {}. Found {} rows".format(
- covarianceFile, self.edpLengthsList[i], numCols
- )
+ raise DataProcessingError( # noqa: TRY003
+ f'ERROR: The number of rows of data in the covariance matrix file {covarianceFile}' # noqa: EM102
+ f' must be either 1 or {self.edpLengthsList[i]}. Found {numCols} rows'
)
- logFile.write(
- "\n\t\tCovariance matrix: {}".format(covMatrix)
- )
+ logFile.write(f'\n\t\tCovariance matrix: {covMatrix}')
else:
logFile.write(
- "\n\t\tDid not find a user supplied file. Using the default variance value."
+ '\n\t\tDid not find a user supplied file. Using the default variance value.'
)
logFile.write(
- "\n\t\tThe covariance matrix is an identity matrix multiplied by this value."
+ '\n\t\tThe covariance matrix is an identity matrix multiplied by this value.'
)
- scalarVariance = np.array(self.defaultErrorVariances[i])
+ scalarVariance = np.array(self.defaultErrorVariances[i]) # noqa: N806
covarianceMatrixList.append(scalarVariance)
- covarianceTypeList.append("scalar")
- logFile.write(
- "\n\t\tCovariance matrix: {}".format(scalarVariance)
- )
+ covarianceTypeList.append('scalar')
+ logFile.write(f'\n\t\tCovariance matrix: {scalarVariance}')
self.covarianceMatrixList = covarianceMatrixList
self.covarianceTypeList = covarianceTypeList
logFile.write(
- f"\n\nThe covariance matrix for prediction errors being used is:"
+ '\n\nThe covariance matrix for prediction errors being used is:'
)
tmp = block_diag(*covarianceMatrixList)
for row in tmp:
- rowString = " ".join([f"{col:14.8g}" for col in row])
- logFile.write("\n\t{}".format(rowString))
+ rowString = ' '.join([f'{col:14.8g}' for col in row]) # noqa: N806
+ logFile.write(f'\n\t{rowString}')
return self.covarianceMatrixList
-class CalDataPreparer:
+class CalDataPreparer: # noqa: D101
def __init__(
self,
- workdirMain: str,
- workdirTemplate: str,
- calDataFileName: str,
- edpNamesList: list[str],
- edpLengthsList: list[int],
- logFile: TextIO,
+ workdirMain: str, # noqa: N803
+ workdirTemplate: str, # noqa: N803
+ calDataFileName: str, # noqa: N803
+ edpNamesList: list[str], # noqa: FA102, N803
+ edpLengthsList: list[int], # noqa: FA102, N803
+ logFile: TextIO, # noqa: N803
) -> None:
self.workdirMain = workdirMain
self.workdirTemplate = workdirTemplate
@@ -302,78 +262,64 @@ def __init__(
self.lineLength = sum(edpLengthsList)
self.moveCalDataFile(self.calDataFileName)
- def moveCalDataFile(self, calDataFileName):
- os.rename(
- os.path.join(self.workdirTemplate, calDataFileName),
- os.path.join(self.workdirMain, calDataFileName),
+ def moveCalDataFile(self, calDataFileName): # noqa: N802, N803, D102
+ os.rename( # noqa: PTH104
+ os.path.join(self.workdirTemplate, calDataFileName), # noqa: PTH118
+ os.path.join(self.workdirMain, calDataFileName), # noqa: PTH118
)
- def createHeadings(self):
- self.logFile.write("\n\tCreating headings")
- headings = "Exp_num interface "
- for i, edpName in enumerate(self.edpNamesList):
+ def createHeadings(self): # noqa: N802, D102
+ self.logFile.write('\n\tCreating headings')
+ headings = 'Exp_num interface '
+ for i, edpName in enumerate(self.edpNamesList): # noqa: N806
if self.edpLengthsList[i] == 1:
- headings += "{} ".format(edpName)
+ headings += f'{edpName} '
else:
for comp in range(self.edpLengthsList[i]):
- headings += "{}_{} ".format(edpName, comp + 1)
- self.logFile.write("\n\t\tThe headings are: \n\t\t{}".format(headings))
+ headings += f'{edpName}_{comp + 1} '
+ self.logFile.write(f'\n\t\tThe headings are: \n\t\t{headings}')
return headings
- def createTempCalDataFile(self, calDataFile):
- self.tempCalDataFile = os.path.join(
- self.workdirMain, "quoFEMTempCalibrationDataFile.cal"
+ def createTempCalDataFile(self, calDataFile): # noqa: N802, N803, D102
+ self.tempCalDataFile = os.path.join( # noqa: PTH118
+ self.workdirMain, 'quoFEMTempCalibrationDataFile.cal'
)
- f1 = open(self.tempCalDataFile, "w")
+ f1 = open(self.tempCalDataFile, 'w') # noqa: PLW1514, PTH123, SIM115
headings = self.createHeadings()
f1.write(headings)
interface = 1
self.numExperiments = 0
linenum = 0
- with open(calDataFile, "r") as f:
+ with open(calDataFile) as f: # noqa: PLW1514, PTH123
for line in f:
linenum += 1
if len(line.strip()) == 0:
continue
- else:
- line = line.replace(",", " ")
+ else: # noqa: RET507
+ line = line.replace(',', ' ') # noqa: PLW2901
# Check length of each line
words = line.split()
if len(words) == self.lineLength:
self.numExperiments += 1
- tempLine = "{} {} ".format(
- self.numExperiments, interface
- )
+ tempLine = f'{self.numExperiments} {interface} ' # noqa: N806
for w in words:
- tempLine += "{} ".format(w)
+ tempLine += f'{w} ' # noqa: N806
self.logFile.write(
- "\n\tLine {}, length {}: \n\t\t{}".format(
- linenum, len(words), tempLine
- )
+ f'\n\tLine {linenum}, length {len(words)}: \n\t\t{tempLine}'
)
- f1.write("\n{}".format(tempLine))
+ f1.write(f'\n{tempLine}')
else:
self.logFile.write(
- "\nERROR: The number of entries ({}) in line num {} of the file '{}' "
- "does not match the expected length {}".format(
- len(words),
- linenum,
- calDataFile,
- self.lineLength,
- )
+ f"\nERROR: The number of entries ({len(words)}) in line num {linenum} of the file '{calDataFile}' "
+ f'does not match the expected length {self.lineLength}'
)
- raise DataProcessingError(
- "ERROR: The number of entries ({}) in line num {} of the file '{}' "
- "does not match the expected length {}".format(
- len(words),
- linenum,
- calDataFile,
- self.lineLength,
- )
+ raise DataProcessingError( # noqa: TRY003
+ f"ERROR: The number of entries ({len(words)}) in line num {linenum} of the file '{calDataFile}' " # noqa: EM102
+ f'does not match the expected length {self.lineLength}'
)
f1.close()
- def readCleanedCalData(self):
+ def readCleanedCalData(self): # noqa: N802, D102
self.calibrationData = np.atleast_2d(
np.genfromtxt(
self.tempCalDataFile,
@@ -382,121 +328,113 @@ def readCleanedCalData(self):
)
)
- def getCalibrationData(self):
- calDataFile = os.path.join(self.workdirMain, self.calDataFileName)
+ def getCalibrationData(self): # noqa: N802, D102
+ calDataFile = os.path.join(self.workdirMain, self.calDataFileName) # noqa: PTH118, N806
self.logFile.write(
- "\nCalibration data file being processed: \n\t{}\n".format(
- calDataFile
- )
+ f'\nCalibration data file being processed: \n\t{calDataFile}\n'
)
self.createTempCalDataFile(calDataFile)
self.readCleanedCalData()
return self.calibrationData, self.numExperiments
-def transform_data_function(
+def transform_data_function( # noqa: D103
data_to_transform: np.ndarray,
- list_of_data_segment_lengths: list[int],
- list_of_scale_factors: list[float],
- list_of_shift_factors: list[float],
+ list_of_data_segment_lengths: list[int], # noqa: FA102
+ list_of_scale_factors: list[float], # noqa: FA102
+ list_of_shift_factors: list[float], # noqa: FA102
):
- currentPosition = 0
+ currentPosition = 0 # noqa: N806
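+ # For each response segment: add its shift factor, then divide by its scale factor (transforms the data in place).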
for j in range(len(list_of_data_segment_lengths)):
slice_of_data = data_to_transform[
:,
- currentPosition : currentPosition
- + list_of_data_segment_lengths[j],
+ currentPosition : currentPosition + list_of_data_segment_lengths[j],
]
- slice_of_data = slice_of_data + list_of_shift_factors[j]
+ slice_of_data = slice_of_data + list_of_shift_factors[j] # noqa: PLR6104
data_to_transform[
:,
- currentPosition : currentPosition
- + list_of_data_segment_lengths[j],
- ] = (
- slice_of_data / list_of_scale_factors[j]
- )
- currentPosition += list_of_data_segment_lengths[j]
+ currentPosition : currentPosition + list_of_data_segment_lengths[j],
+ ] = slice_of_data / list_of_scale_factors[j]
+ currentPosition += list_of_data_segment_lengths[j] # noqa: N806
return data_to_transform
-class DataTransformer:
- def __init__(self, transformStrategy: str, logFile: TextIO) -> None:
+class DataTransformer: # noqa: D101
+ def __init__(self, transformStrategy: str, logFile: TextIO) -> None: # noqa: N803
self.logFile = logFile
- self.transformStrategyList = ["absMaxScaling", "standardize"]
+ self.transformStrategyList = ['absMaxScaling', 'standardize']
if transformStrategy not in self.transformStrategyList:
- string = " or ".join(self.transformStrategyList)
- raise ValueError(f"transform strategy must be one of {string}")
- else:
+ string = ' or '.join(self.transformStrategyList)
+ raise ValueError(f'transform strategy must be one of {string}') # noqa: EM102, TRY003
+ else: # noqa: RET506
self.transformStrategy = transformStrategy
logFile.write(
- "\n\nFor numerical convenience, a transformation is applied to the calibration data \nand model "
- "prediction corresponding to each response quantity. \nThe calibration data and model prediction for "
- "each response variable will \nfirst be shifted (a scalar value will be added to the data and "
- "prediction) and \nthen scaled (the data and prediction will be divided by a positive scalar value)."
+ '\n\nFor numerical convenience, a transformation is applied to the calibration data \nand model '
+ 'prediction corresponding to each response quantity. \nThe calibration data and model prediction for '
+ 'each response variable will \nfirst be shifted (a scalar value will be added to the data and '
+ 'prediction) and \nthen scaled (the data and prediction will be divided by a positive scalar value).'
)
- def computeScaleAndShiftFactors(
- self, calibrationData: np.ndarray, edpLengthsList: list[int]
+ def computeScaleAndShiftFactors( # noqa: N802, D102
+ self,
+ calibrationData: np.ndarray, # noqa: N803
+ edpLengthsList: list[int], # noqa: FA102, N803
):
self.calibrationData = calibrationData
self.edpLengthsList = edpLengthsList
- shiftFactors = []
- scaleFactors = []
- currentPosition = 0
- locShift = 0.0
- if self.transformStrategy in ["absMaxScaling"]:
+ shiftFactors = [] # noqa: N806
+ scaleFactors = [] # noqa: N806
+ currentPosition = 0 # noqa: N806
+ locShift = 0.0 # noqa: N806
+ if self.transformStrategy == 'absMaxScaling':
# Compute the scale factors - absolute maximum of the data for each response variable
self.logFile.write(
- "\n\nComputing scale and shift factors. "
- "\n\tThe shift factors are set to 0.0 by default."
- "\n\tThe scale factors used are the absolute maximum of the data for each response variable."
- "\n\tIf the absolute maximum of the data for any response variable is 0.0, "
- "\n\tthen the scale factor is set to 1.0, and the shift factor is set to 1.0."
+ '\n\nComputing scale and shift factors. '
+ '\n\tThe shift factors are set to 0.0 by default.'
+ '\n\tThe scale factors used are the absolute maximum of the data for each response variable.'
+ '\n\tIf the absolute maximum of the data for any response variable is 0.0, '
+ '\n\tthen the scale factor is set to 1.0, and the shift factor is set to 1.0.'
)
for j in range(len(self.edpLengthsList)):
- calibrationDataSlice = calibrationData[
+ calibrationDataSlice = calibrationData[ # noqa: N806
:,
currentPosition : currentPosition + self.edpLengthsList[j],
]
- absMax = np.absolute(np.max(calibrationDataSlice))
- if (
- absMax == 0
- ): # This is to handle the case if abs max of data = 0.
- locShift = 1.0
- absMax = 1.0
+ absMax = np.absolute(np.max(calibrationDataSlice)) # noqa: N806
+ if absMax == 0: # This is to handle the case if abs max of data = 0.
+ locShift = 1.0 # noqa: N806
+ absMax = 1.0 # noqa: N806
shiftFactors.append(locShift)
scaleFactors.append(absMax)
- currentPosition += self.edpLengthsList[j]
+ currentPosition += self.edpLengthsList[j] # noqa: N806
else:
self.logFile.write(
- "\n\nComputing scale and shift factors. "
- "\n\tThe shift factors are set to the negative of the mean value for each response variable."
- "\n\tThe scale factors used are the standard deviation of the data for each response variable."
- "\n\tIf the standard deviation of the data for any response variable is 0.0, "
- "\n\tthen the scale factor is set to 1.0."
+ '\n\nComputing scale and shift factors. '
+ '\n\tThe shift factors are set to the negative of the mean value for each response variable.'
+ '\n\tThe scale factors used are the standard deviation of the data for each response variable.'
+ '\n\tIf the standard deviation of the data for any response variable is 0.0, '
+ '\n\tthen the scale factor is set to 1.0.'
)
for j in range(len(self.edpLengthsList)):
- calibrationDataSlice = calibrationData[
+ calibrationDataSlice = calibrationData[ # noqa: N806
:,
currentPosition : currentPosition + self.edpLengthsList[j],
]
- meanValue = np.nanmean(calibrationDataSlice)
- stdValue = np.nanstd(calibrationDataSlice)
- if (
- stdValue == 0
- ): # This is to handle the case if stdev of data = 0.
- stdValue = 1.0
+ meanValue = np.nanmean(calibrationDataSlice) # noqa: N806
+ stdValue = np.nanstd(calibrationDataSlice) # noqa: N806
+ if stdValue == 0: # This is to handle the case if stdev of data = 0.
+ stdValue = 1.0 # noqa: N806
scaleFactors.append(stdValue)
shiftFactors.append(-meanValue)
- currentPosition += self.edpLengthsList[j]
+ currentPosition += self.edpLengthsList[j] # noqa: N806
self.scaleFactors = scaleFactors
self.shiftFactors = shiftFactors
return scaleFactors, shiftFactors
- def transformData(self):
+ def transformData(self): # noqa: N802, D102
return transform_data_function(
self.calibrationData,
self.edpLengthsList,
@@ -505,57 +443,53 @@ def transformData(self):
)
-def createLogFile(where: str, logfile_name: str):
- logfile = open(os.path.join(where, logfile_name), "w")
+def createLogFile(where: str, logfile_name: str): # noqa: N802, D103
+ logfile = open(os.path.join(where, logfile_name), 'w') # noqa: PLW1514, PTH118, PTH123, SIM115
logfile.write(
- "Starting analysis at: {}".format(
- time.strftime("%a, %d %b %Y %H:%M:%S", time.localtime())
+ 'Starting analysis at: {}'.format(
+ time.strftime('%a, %d %b %Y %H:%M:%S', time.localtime())
)
)
logfile.write("\nRunning quoFEM's UCSD_UQ engine workflow")
- logfile.write("\nCWD: {}".format(os.path.abspath(".")))
+ logfile.write('\nCWD: {}'.format(os.path.abspath('.'))) # noqa: PTH100
return logfile
-def syncLogFile(logFile: TextIO):
+def syncLogFile(logFile: TextIO): # noqa: N802, N803, D103
logFile.flush()
os.fsync(logFile.fileno())
-def make_distributions(variables):
-
+def make_distributions(variables): # noqa: C901, D103
all_distributions_list = []
- for i in range(len(variables["names"])):
-
- if variables["distributions"][i] == "Uniform":
- lower_limit = float(variables["Par1"][i])
- upper_limit = float(variables["Par2"][i])
+ for i in range(len(variables['names'])):
+ if variables['distributions'][i] == 'Uniform':
+ lower_limit = float(variables['Par1'][i])
+ upper_limit = float(variables['Par2'][i])
all_distributions_list.append(
pdfs.Uniform(lower=lower_limit, upper=upper_limit)
)
- if variables["distributions"][i] == "Normal":
- mean = float(variables["Par1"][i])
- standard_deviation = float(variables["Par2"][i])
+ if variables['distributions'][i] == 'Normal':
+ mean = float(variables['Par1'][i])
+ standard_deviation = float(variables['Par2'][i])
all_distributions_list.append(
pdfs.Normal(mu=mean, sig=standard_deviation)
)
- if variables["distributions"][i] == "Half-Normal":
- standard_deviation = float(variables["Par1"][i])
+ if variables['distributions'][i] == 'Half-Normal':
+ standard_deviation = float(variables['Par1'][i])
- all_distributions_list.append(
- pdfs.Halfnormal(sig=standard_deviation)
- )
+ all_distributions_list.append(pdfs.Halfnormal(sig=standard_deviation))
- if variables["distributions"][i] == "Truncated-Normal":
- mean = float(variables["Par1"][i])
- standard_deviation = float(variables["Par2"][i])
- lower_limit = float(variables["Par3"][i])
- upper_limit = float(variables["Par4"][i])
+ if variables['distributions'][i] == 'Truncated-Normal':
+ mean = float(variables['Par1'][i])
+ standard_deviation = float(variables['Par2'][i])
+ lower_limit = float(variables['Par3'][i])
+ upper_limit = float(variables['Par4'][i])
all_distributions_list.append(
pdfs.TrunNormal(
@@ -566,17 +500,17 @@ def make_distributions(variables):
)
)
- if variables["distributions"][i] == "InvGamma":
- a = float(variables["Par1"][i])
- b = float(variables["Par2"][i])
+ if variables['distributions'][i] == 'InvGamma':
+ a = float(variables['Par1'][i])
+ b = float(variables['Par2'][i])
all_distributions_list.append(pdfs.InvGamma(a=a, b=b))
- if variables["distributions"][i] == "Beta":
- alpha = float(variables["Par1"][i])
- beta = float(variables["Par2"][i])
- lower_limit = float(variables["Par3"][i])
- upper_limit = float(variables["Par4"][i])
+ if variables['distributions'][i] == 'Beta':
+ alpha = float(variables['Par1'][i])
+ beta = float(variables['Par2'][i])
+ lower_limit = float(variables['Par3'][i])
+ upper_limit = float(variables['Par4'][i])
all_distributions_list.append(
pdfs.BetaDist(
@@ -587,37 +521,33 @@ def make_distributions(variables):
)
)
- if variables["distributions"][i] == "Lognormal":
- mu = float(variables["Par1"][i])
- sigma = float(variables["Par2"][i])
+ if variables['distributions'][i] == 'Lognormal':
+ mu = float(variables['Par1'][i])
+ sigma = float(variables['Par2'][i])
all_distributions_list.append(pdfs.LogNormDist(mu=mu, sigma=sigma))
- if variables["distributions"][i] == "Gumbel":
- alpha = float(variables["Par1"][i])
- beta = float(variables["Par2"][i])
+ if variables['distributions'][i] == 'Gumbel':
+ alpha = float(variables['Par1'][i])
+ beta = float(variables['Par2'][i])
- all_distributions_list.append(
- pdfs.GumbelDist(alpha=alpha, beta=beta)
- )
+ all_distributions_list.append(pdfs.GumbelDist(alpha=alpha, beta=beta))
- if variables["distributions"][i] == "Weibull":
- shape = float(variables["Par1"][i])
- scale = float(variables["Par2"][i])
+ if variables['distributions'][i] == 'Weibull':
+ shape = float(variables['Par1'][i])
+ scale = float(variables['Par2'][i])
- all_distributions_list.append(
- pdfs.WeibullDist(shape=shape, scale=scale)
- )
+ all_distributions_list.append(pdfs.WeibullDist(shape=shape, scale=scale))
- if variables["distributions"][i] == "Exponential":
- lamda = float(variables["Par1"][i])
+ if variables['distributions'][i] == 'Exponential':
+ lamda = float(variables['Par1'][i])
all_distributions_list.append(pdfs.ExponentialDist(lamda=lamda))
- if variables["distributions"][i] == "Truncated exponential":
- lamda = float(variables["Par1"][i])
- lower_limit = float(variables["Par2"][i])
- upper_limit = float(variables["Par3"][i])
+ if variables['distributions'][i] == 'Truncated exponential':
+ lamda = float(variables['Par1'][i])
+ lower_limit = float(variables['Par2'][i])
+ upper_limit = float(variables['Par3'][i])
all_distributions_list.append(
pdfs.TruncatedExponentialDist(
@@ -627,26 +557,24 @@ def make_distributions(variables):
)
)
- if variables["distributions"][i] == "Gamma":
- k = float(variables["Par1"][i])
- lamda = float(variables["Par2"][i])
+ if variables['distributions'][i] == 'Gamma':
+ k = float(variables['Par1'][i])
+ lamda = float(variables['Par2'][i])
all_distributions_list.append(pdfs.GammaDist(k=k, lamda=lamda))
- if variables["distributions"][i] == "Chisquare":
- k = float(variables["Par1"][i])
+ if variables['distributions'][i] == 'Chisquare':
+ k = float(variables['Par1'][i])
all_distributions_list.append(pdfs.ChiSquareDist(k=k))
- if variables["distributions"][i] == "Discrete":
- if variables["Par2"][i] is None:
- value = variables["Par1"][i]
- all_distributions_list.append(
- pdfs.ConstantInteger(value=value)
- )
+ if variables['distributions'][i] == 'Discrete':
+ if variables['Par2'][i] is None:
+ value = variables['Par1'][i]
+ all_distributions_list.append(pdfs.ConstantInteger(value=value))
else:
- values = float(variables["Par1"][i])
- weights = float(variables["Par2"][i])
+ values = float(variables['Par1'][i])
+ weights = float(variables['Par2'][i])
all_distributions_list.append(
pdfs.DiscreteDist(values=values, weights=weights)
)
@@ -654,17 +582,17 @@ def make_distributions(variables):
return all_distributions_list
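The variables argument is expected to carry parallel per-parameter lists keyed by 'distributions' and 'Par1' through 'Par4'; a minimal sketch of a call, with made-up parameter values (only the keys visible above are shown), might look like:

    # Hypothetical input: one Lognormal and one Exponential prior.
    variables = {
        'distributions': ['Lognormal', 'Exponential'],
        'Par1': [0.5, 2.0],   # mu for Lognormal, lamda for Exponential
        'Par2': [0.1, None],  # sigma for Lognormal
        'Par3': [None, None],
        'Par4': [None, None],
    }
    priors = make_distributions(variables)  # one pdfs.* object per parameter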
-class LogLikelihoodHandler:
+class LogLikelihoodHandler: # noqa: D101
def __init__(
self,
data: NDArray,
- covariance_matrix_blocks_list: list[NDArray],
- list_of_data_segment_lengths: list[int],
- list_of_scale_factors: list[float],
- list_of_shift_factors: list[float],
+ covariance_matrix_blocks_list: list[NDArray], # noqa: FA102
+ list_of_data_segment_lengths: list[int], # noqa: FA102
+ list_of_scale_factors: list[float], # noqa: FA102
+ list_of_shift_factors: list[float], # noqa: FA102
workdir_main,
full_path_to_tmcmc_code_directory: str,
- log_likelihood_file_name: str = "",
+ log_likelihood_file_name: str = '',
) -> None:
self.data = data
self.covariance_matrix_list = covariance_matrix_blocks_list
@@ -672,9 +600,7 @@ def __init__(
self.list_of_scale_factors = list_of_scale_factors
self.list_of_shift_factors = list_of_shift_factors
self.workdir_main = workdir_main
- self.full_path_to_tmcmc_code_directory = (
- full_path_to_tmcmc_code_directory
- )
+ self.full_path_to_tmcmc_code_directory = full_path_to_tmcmc_code_directory
self.log_likelihood_file_name = log_likelihood_file_name
sys.path.append(self.workdir_main)
self._copy_log_likelihood_module()
@@ -686,19 +612,17 @@ def _copy_log_likelihood_module(self):
if (
len(self.log_likelihood_file_name) == 0
): # if the log-likelihood file is an empty string
- self.log_likelihood_file_name = "defaultLogLikeScript.py"
- src = os.path.join(
+ self.log_likelihood_file_name = 'defaultLogLikeScript.py'
+ src = os.path.join( # noqa: PTH118
self.full_path_to_tmcmc_code_directory,
self.log_likelihood_file_name,
)
- dst = os.path.join(
- self.workdir_main, self.log_likelihood_file_name
- )
+ dst = os.path.join(self.workdir_main, self.log_likelihood_file_name) # noqa: PTH118
try:
shutil.copyfile(src, dst)
- except Exception:
+ except Exception: # noqa: BLE001
msg = f"ERROR: The log-likelihood script '{src}' cannot be copied to '{dst}'."
- raise Exception(msg)
+ raise Exception(msg) # noqa: B904, TRY002
def _get_num_experiments(self) -> int:
return np.shape(self.data)[0]
@@ -711,17 +635,15 @@ def _import_log_likelihood_module(
) -> Callable:
try:
module = import_module(log_likelihood_module_name)
- except:
- msg = "\n\t\t\t\tERROR: The log-likelihood script '{}' cannot be imported.".format(
- os.path.join(self.workdir_main, self.log_likelihood_file_name)
- )
- raise ImportError(msg)
- return module # type: ignore
-
- def get_log_likelihood_function(self) -> Callable:
- log_likelihood_module_name = os.path.splitext(
- self.log_likelihood_file_name
- )[0]
+ except: # noqa: E722
+ msg = f"\n\t\t\t\tERROR: The log-likelihood script '{os.path.join(self.workdir_main, self.log_likelihood_file_name)}' cannot be imported." # noqa: PTH118
+ raise ImportError(msg) # noqa: B904
+ return module # type: ignore
+
+ def get_log_likelihood_function(self) -> Callable: # noqa: D102
+ log_likelihood_module_name = os.path.splitext(self.log_likelihood_file_name)[ # noqa: PTH122
+ 0
+ ]
module = self._import_log_likelihood_module(log_likelihood_module_name)
return module.log_likelihood
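Any script handed to LogLikelihoodHandler only needs to expose a log_likelihood(residuals, mean, cov) callable, since that is what the handler invokes per residual block below; a minimal stand-in, assuming an i.i.d. Gaussian whose variance is read from the covariance block, might be:

    import numpy as np

    def log_likelihood(residuals, mean, cov):
        # i.i.d. Gaussian; variance taken from the (possibly 1x1) covariance block
        var = np.atleast_2d(cov)[0, 0]
        r = np.asarray(residuals) - np.asarray(mean)
        return -0.5 * (len(r) * np.log(2 * np.pi * var) + np.dot(r, r) / var)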
@@ -737,10 +659,12 @@ def _compute_residuals(self, transformed_prediction: NDArray) -> NDArray:
return transformed_prediction - self.data
def _make_mean(self, response_num: int) -> NDArray:
- return np.zeros((self.list_of_data_segment_lengths[response_num]))
+ return np.zeros(self.list_of_data_segment_lengths[response_num])
def _make_covariance(self, response_num, cov_multiplier) -> NDArray:
- return cov_multiplier * np.atleast_2d(self.covariance_matrix_list[response_num])
+ return cov_multiplier * np.atleast_2d(
+ self.covariance_matrix_list[response_num]
+ )
def _make_input_for_log_likelihood_function(self, prediction) -> list:
return [
@@ -753,19 +677,17 @@ def _loop_for_log_likelihood(
list_of_covariance_multipliers,
):
transformed_prediction = self._transform_prediction(prediction)
- allResiduals = self._compute_residuals(transformed_prediction)
+ allResiduals = self._compute_residuals(transformed_prediction) # noqa: N806
loglike = 0
for i in range(self.num_experiments):
- currentPosition = 0
+ currentPosition = 0 # noqa: N806
for j in range(self.num_response_quantities):
length = self.list_of_data_segment_lengths[j]
residuals = allResiduals[
i, currentPosition : currentPosition + length
]
- currentPosition = currentPosition + length
- cov = self._make_covariance(
- j, list_of_covariance_multipliers[j]
- )
+ currentPosition = currentPosition + length # noqa: N806, PLR6104
+ cov = self._make_covariance(j, list_of_covariance_multipliers[j])
mean = self._make_mean(j)
ll = self.log_likelihood_function(residuals, mean, cov)
if not np.isnan(ll):
@@ -774,8 +696,10 @@ def _loop_for_log_likelihood(
loglike += -np.inf
return loglike
- def evaluate_log_likelihood(
- self, prediction: NDArray, list_of_covariance_multipliers: list[float]
+ def evaluate_log_likelihood( # noqa: D102
+ self,
+ prediction: NDArray,
+ list_of_covariance_multipliers: list[float], # noqa: FA102
) -> float:
return self._loop_for_log_likelihood(
prediction=prediction,
diff --git a/modules/performUQ/UCSD_UQ/defaultLogLikeScript.py b/modules/performUQ/UCSD_UQ/defaultLogLikeScript.py
index 09c1eb6c8..2a455bcdb 100644
--- a/modules/performUQ/UCSD_UQ/defaultLogLikeScript.py
+++ b/modules/performUQ/UCSD_UQ/defaultLogLikeScript.py
@@ -1,20 +1,31 @@
-import numpy as np
+import numpy as np # noqa: CPY001, D100, INP001
class CovError(Exception):
"""Raised when the number of covariance matrix terms are incorrect.
- Attributes:
+ Attributes
+ ----------
message -- explanation of the error
+
"""
def __init__(self, message):
self.message = message
-def log_likelihood(calibrationData, prediction, numExperiments, covarianceMatrixList, edpNamesList, edpLengthsList,
- covarianceMultiplierList, scaleFactors, shiftFactors):
- """ Compute the log-likelihood
+def log_likelihood(
+ calibrationData, # noqa: N803
+ prediction,
+ numExperiments, # noqa: N803
+ covarianceMatrixList, # noqa: N803
+ edpNamesList, # noqa: ARG001, N803
+ edpLengthsList, # noqa: N803
+ covarianceMultiplierList, # noqa: N803
+ scaleFactors, # noqa: N803
+ shiftFactors, # noqa: N803
+):
+ """Compute the log-likelihood
:param calibrationData: Calibration data consisting of the measured values of response. Each row contains the data
from one experiment. The length of each row equals the sum of the lengths of all response quantities.
@@ -56,49 +67,63 @@ def log_likelihood(calibrationData, prediction, numExperiments, covarianceMatrix
distribution and a user-supplied covariance structure. Block-diagonal covariance structures are supported. The value
of multipliers on the covariance block corresponding to each response quantity is also calibrated.
:rtype: float
- """
+ """ # noqa: D400
# Check if the correct number of covariance terms has been passed in
- numResponses = len(edpLengthsList)
+ numResponses = len(edpLengthsList) # noqa: N806
if len(covarianceMatrixList) != numExperiments * numResponses:
- print("ERROR: The expected number of covariance matrices is {}, but only {} were passed "
- "in.".format(numExperiments * numResponses, len(covarianceMatrixList)))
- raise CovError("ERROR: The expected number of covariance matrices is {}, but only {} were passed "
- "in.".format(numExperiments * numResponses, len(covarianceMatrixList)))
+ print( # noqa: T201
+ f'ERROR: The expected number of covariance matrices is {numExperiments * numResponses}, but only {len(covarianceMatrixList)} were passed '
+ 'in.'
+ )
+ raise CovError( # noqa: DOC501, TRY003
+ f'ERROR: The expected number of covariance matrices is {numExperiments * numResponses}, but only {len(covarianceMatrixList)} were passed ' # noqa: EM102
+ 'in.'
+ )
# Shift and normalize the prediction
- currentPosition = 0
+ currentPosition = 0 # noqa: N806
for j in range(len(edpLengthsList)):
- prediction[:, currentPosition:currentPosition + edpLengthsList[j]] = prediction[:, currentPosition:currentPosition + edpLengthsList[j]] + shiftFactors[j]
- prediction[:, currentPosition:currentPosition + edpLengthsList[j]] = prediction[:, currentPosition:currentPosition + edpLengthsList[j]] / scaleFactors[j]
- currentPosition = currentPosition + edpLengthsList[j]
+ prediction[:, currentPosition : currentPosition + edpLengthsList[j]] = ( # noqa: PLR6104
+ prediction[:, currentPosition : currentPosition + edpLengthsList[j]]
+ + shiftFactors[j]
+ )
+ prediction[:, currentPosition : currentPosition + edpLengthsList[j]] = ( # noqa: PLR6104
+ prediction[:, currentPosition : currentPosition + edpLengthsList[j]]
+ / scaleFactors[j]
+ )
+ currentPosition = currentPosition + edpLengthsList[j] # noqa: N806, PLR6104
# Compute the normalized residuals
- allResiduals = prediction - calibrationData
+ allResiduals = prediction - calibrationData # noqa: N806
# Loop over the normalized residuals to compute the log-likelihood
loglike = 0
- covListIndex = 0
+ covListIndex = 0 # noqa: N806
for i in range(numExperiments):
- currentPosition = 0
+ currentPosition = 0 # noqa: N806
for j in range(numResponses):
# Get the residuals corresponding to this response variable
length = edpLengthsList[j]
- residuals = allResiduals[i, currentPosition:currentPosition + length]
- currentPosition = currentPosition + length
+ residuals = allResiduals[i, currentPosition : currentPosition + length]
+ currentPosition = currentPosition + length # noqa: N806, PLR6104
# Get the covariance matrix corresponding to this response variable
cov = np.atleast_2d(covarianceMatrixList[covListIndex])
- covListIndex = covListIndex + 1
+ covListIndex = covListIndex + 1 # noqa: N806, PLR6104
# Multiply the covariance matrix by the value of the covariance multiplier
- cov = cov * covarianceMultiplierList[j]
+ cov = cov * covarianceMultiplierList[j] # noqa: PLR6104
if np.shape(cov)[0] == np.shape(cov)[1] == 1:
# If there is a single variance value that is constant for all residual terms, then this is the case of
# having a sample of i.i.d. zero-mean normally distributed observations, and the log-likelihood can be
# computed more efficiently
var = cov[0][0]
- ll = - length / 2 * np.log(var) - length / 2 * np.log(2 * np.pi) - 1 / (2 * var) * np.sum(residuals ** 2)
+ ll = (
+ -length / 2 * np.log(var)
+ - length / 2 * np.log(2 * np.pi)
+ - 1 / (2 * var) * np.sum(residuals**2)
+ )
else:
if np.shape(cov)[0] != np.shape(cov)[1]:
cov = np.diag(cov.flatten())
@@ -107,11 +132,11 @@ def log_likelihood(calibrationData, prediction, numExperiments, covarianceMatrix
# Mahalanobis distance]
# = -1/2*[t1 + t2 + t3]
t1 = length * np.log(2 * np.pi)
- eigenValues, eigenVectors = np.linalg.eigh(cov)
+ eigenValues, eigenVectors = np.linalg.eigh(cov) # noqa: N806
logdet = np.sum(np.log(eigenValues))
- eigenValuesReciprocal = 1. / eigenValues
+ eigenValuesReciprocal = 1.0 / eigenValues # noqa: N806
z = eigenVectors * np.sqrt(eigenValuesReciprocal)
- mahalanobisDistance = np.square(np.dot(residuals, z)).sum()
+ mahalanobisDistance = np.square(np.dot(residuals, z)).sum() # noqa: N806
ll = -0.5 * (t1 + logdet + mahalanobisDistance)
if not np.isnan(ll):
loglike += ll
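For a 1x1 covariance block the branch above reduces to the i.i.d. Gaussian formula; a quick check with made-up residuals that it matches the full multivariate density:

    import numpy as np
    from scipy.stats import multivariate_normal

    residuals = np.array([0.2, -0.1, 0.4])
    var = 0.5
    length = len(residuals)
    ll_fast = (
        -length / 2 * np.log(var)
        - length / 2 * np.log(2 * np.pi)
        - 1 / (2 * var) * np.sum(residuals**2)
    )
    ll_full = multivariate_normal.logpdf(
        residuals, mean=np.zeros(length), cov=var * np.eye(length)
    )
    assert np.isclose(ll_fast, ll_full)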
diff --git a/modules/performUQ/UCSD_UQ/loglike_script.py b/modules/performUQ/UCSD_UQ/loglike_script.py
index 5be07baab..8732b7bec 100644
--- a/modules/performUQ/UCSD_UQ/loglike_script.py
+++ b/modules/performUQ/UCSD_UQ/loglike_script.py
@@ -1,18 +1,23 @@
-# from scipy.stats import multivariate_normal
+# from scipy.stats import multivariate_normal # noqa: CPY001, D100, INP001
# def log_likelihood(residuals, mean, cov):
# return multivariate_normal.logpdf(residuals, mean=mean, cov=cov)
import numpy as np
-def log_likelihood(residuals, mean, cov):
+
+def log_likelihood(residuals, mean, cov): # noqa: ARG001, D103
length = len(residuals)
if np.shape(cov)[0] == np.shape(cov)[1] == 1:
# If there is a single variance value that is constant for all residual terms, then this is the case of
# having a sample of i.i.d. zero-mean normally distributed observations, and the log-likelihood can be
# computed more efficiently
var = cov[0][0]
- ll = - length / 2 * np.log(var) - length / 2 * np.log(2 * np.pi) - 1 / (2 * var) * np.sum(residuals ** 2)
+ ll = (
+ -length / 2 * np.log(var)
+ - length / 2 * np.log(2 * np.pi)
+ - 1 / (2 * var) * np.sum(residuals**2)
+ )
else:
if np.shape(cov)[0] != np.shape(cov)[1]:
cov = np.diag(cov.flatten())
@@ -21,11 +26,11 @@ def log_likelihood(residuals, mean, cov):
# Mahalanobis distance]
# = -1/2*[t1 + t2 + t3]
t1 = length * np.log(2 * np.pi)
- eigenValues, eigenVectors = np.linalg.eigh(cov)
+ eigenValues, eigenVectors = np.linalg.eigh(cov) # noqa: N806
logdet = np.sum(np.log(eigenValues))
- eigenValuesReciprocal = 1. / eigenValues
+ eigenValuesReciprocal = 1.0 / eigenValues # noqa: N806
z = eigenVectors * np.sqrt(eigenValuesReciprocal)
- mahalanobisDistance = np.square(np.dot(residuals, z)).sum()
+ mahalanobisDistance = np.square(np.dot(residuals, z)).sum() # noqa: N806
ll = -0.5 * (t1 + logdet + mahalanobisDistance)
-
- return ll
\ No newline at end of file
+
+ return ll
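As the commented-out header hints, the eigendecomposition path is equivalent to multivariate_normal.logpdf with a zero mean; a small cross-check with an arbitrary diagonal covariance:

    import numpy as np
    from scipy.stats import multivariate_normal

    rng = np.random.default_rng(1)
    residuals = rng.normal(size=4)
    cov = np.diag([0.5, 1.0, 1.5, 2.0])
    eigenValues, eigenVectors = np.linalg.eigh(cov)
    z = eigenVectors * np.sqrt(1.0 / eigenValues)
    ll_manual = -0.5 * (
        len(residuals) * np.log(2 * np.pi)
        + np.sum(np.log(eigenValues))
        + np.square(np.dot(residuals, z)).sum()
    )
    ll_scipy = multivariate_normal.logpdf(residuals, mean=np.zeros(4), cov=cov)
    assert np.isclose(ll_manual, ll_scipy)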
diff --git a/modules/performUQ/UCSD_UQ/mainscript.py b/modules/performUQ/UCSD_UQ/mainscript.py
index fff3b705b..d3a5f0285 100644
--- a/modules/performUQ/UCSD_UQ/mainscript.py
+++ b/modules/performUQ/UCSD_UQ/mainscript.py
@@ -1,23 +1,20 @@
-"""
-authors: Mukesh Kumar Ramancha, Maitreya Manoj Kurumbhati, Prof. J.P. Conte, Aakash Bangalore Satish*
+"""authors: Mukesh Kumar Ramancha, Maitreya Manoj Kurumbhati, Prof. J.P. Conte, Aakash Bangalore Satish*
affiliation: University of California, San Diego, *SimCenter, University of California, Berkeley
-"""
+""" # noqa: CPY001, D205, D400, INP001
# ======================================================================================================================
-import sys
import json
-from pathlib import Path
import shlex
+import sys
+from pathlib import Path
-path_to_common_uq = Path(__file__).parent.parent / "common"
+path_to_common_uq = Path(__file__).parent.parent / 'common'
sys.path.append(str(path_to_common_uq))
-import uq_utilities
# ======================================================================================================================
-def main(input_args):
-
+def main(input_args): # noqa: D103
# # Initialize analysis
# path_to_UCSD_UQ_directory = Path(input_args[2]).resolve().parent
# path_to_working_directory = Path(input_args[3]).resolve()
@@ -27,28 +24,28 @@ def main(input_args):
# input_file_name = input_args[7]
# Initialize analysis
- path_to_UCSD_UQ_directory = Path(input_args[0]).resolve().parent
+ path_to_UCSD_UQ_directory = Path(input_args[0]).resolve().parent # noqa: N806, F841
path_to_working_directory = Path(input_args[1]).resolve()
path_to_template_directory = Path(input_args[2]).resolve()
run_type = input_args[3] # either "runningLocal" or "runningRemote"
driver_file_name = input_args[4]
input_file_name = input_args[5]
- Path("dakotaTab.out").unlink(missing_ok=True)
- Path("dakotaTabPrior.out").unlink(missing_ok=True)
+ Path('dakotaTab.out').unlink(missing_ok=True)
+ Path('dakotaTabPrior.out').unlink(missing_ok=True)
input_file_full_path = path_to_template_directory / input_file_name
- with open(input_file_full_path, 'r', encoding='utf-8') as f:
+ with open(input_file_full_path, encoding='utf-8') as f: # noqa: PTH123
inputs = json.load(f)
- uq_inputs = inputs["UQ"]
- if uq_inputs["uqType"] == "Metropolis Within Gibbs Sampler":
- import mainscript_hierarchical_bayesian
+ uq_inputs = inputs['UQ']
+ if uq_inputs['uqType'] == 'Metropolis Within Gibbs Sampler':
+ import mainscript_hierarchical_bayesian # noqa: PLC0415
main_function = mainscript_hierarchical_bayesian.main
else:
- import mainscript_tmcmc
+ import mainscript_tmcmc # noqa: PLC0415
main_function = mainscript_tmcmc.main
@@ -62,7 +59,7 @@ def main(input_args):
# ======================================================================================================================
-if __name__ == "__main__":
+if __name__ == '__main__':
input_args = sys.argv
main(input_args)
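Given the positional parsing above (index 0 is only used to locate the UCSD_UQ directory), a local invocation with made-up paths would pass arguments in this order:

    # Hypothetical paths; the input JSON must live inside the template directory
    # and contain the 'UQ' section whose 'uqType' selects the sampler.
    main([
        '/path/to/UCSD_UQ/mainscript.py',
        '/path/to/working_directory',
        '/path/to/templatedir',
        'runningLocal',   # or 'runningRemote'
        'driver',         # workflow driver file name
        'input.json',     # parsed for inputs['UQ']['uqType']
    ])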
diff --git a/modules/performUQ/UCSD_UQ/mainscript_hierarchical_bayesian.py b/modules/performUQ/UCSD_UQ/mainscript_hierarchical_bayesian.py
index 246ab1264..72293e379 100644
--- a/modules/performUQ/UCSD_UQ/mainscript_hierarchical_bayesian.py
+++ b/modules/performUQ/UCSD_UQ/mainscript_hierarchical_bayesian.py
@@ -1,20 +1,19 @@
-import json
+import json # noqa: CPY001, D100, INP001
import sys
from pathlib import Path
import numpy as np
+import preprocess_hierarchical_bayesian
import scipy.linalg
import scipy.stats
-import preprocess_hierarchical_bayesian
-
-path_to_common_uq = Path(__file__).parent.parent / "common"
+path_to_common_uq = Path(__file__).parent.parent / 'common'
sys.path.append(str(path_to_common_uq))
-import uq_utilities
-import mwg_sampler
+import mwg_sampler # noqa: E402
+import uq_utilities # noqa: E402
-def generate_initial_states(
+def generate_initial_states( # noqa: D103
num_edp,
num_rv,
num_datasets,
@@ -33,23 +32,23 @@ def generate_initial_states(
restart_file_path = Path(restart_file)
with restart_file_path.open(mode='r', encoding='utf-8') as f:
restart_data = json.load(f)
- if "new_states" in restart_data:
+ if 'new_states' in restart_data:
list_of_initial_states_of_model_parameters = []
- states_list = restart_data["new_states"]
+ states_list = restart_data['new_states']
for state in states_list:
list_of_initial_states_of_model_parameters.append(
np.array(state).reshape((num_rv, 1))
)
- if "error_variances_scaled" in restart_data:
- list_of_initial_states_of_error_variance_per_dataset = (
- restart_data["error_variances_scaled"]
- )
- if "hyper_covariance" in restart_data:
+ if 'error_variances_scaled' in restart_data:
+ list_of_initial_states_of_error_variance_per_dataset = restart_data[
+ 'error_variances_scaled'
+ ]
+ if 'hyper_covariance' in restart_data:
initial_state_of_hypercovariance = np.array(
- restart_data["hyper_covariance"]
+ restart_data['hyper_covariance']
)
- if "hyper_mean" in restart_data:
- initial_state_of_hypermean = np.array(restart_data["hyper_mean"])
+ if 'hyper_mean' in restart_data:
+ initial_state_of_hypermean = np.array(restart_data['hyper_mean'])
return (
list_of_initial_states_of_model_parameters,
@@ -59,7 +58,7 @@ def generate_initial_states(
)
-def loglikelihood_function(residual, error_variance_sample):
+def loglikelihood_function(residual, error_variance_sample): # noqa: D103
mean = 0
var = error_variance_sample
standard_deviation = np.sqrt(var)
@@ -69,22 +68,22 @@ def loglikelihood_function(residual, error_variance_sample):
return ll
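generate_initial_states only overrides its defaults for the keys it finds in the restart file; a restart JSON consistent with the reads above, for two datasets and two random variables (all values made up), would look like:

    restart_data = {
        'new_states': [[0.1, 2.0], [0.3, 1.8]],        # one entry per dataset, num_rv values each
        'error_variances_scaled': [0.05, 0.07],        # one scaled error variance per dataset
        'hyper_covariance': [[1.0, 0.0], [0.0, 1.0]],  # num_rv x num_rv
        'hyper_mean': [0.2, 1.9],                      # num_rv values
    }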
-def main(input_args):
+def main(input_args): # noqa: D103, PLR0914
# Initialize analysis
working_directory = Path(input_args[0]).resolve()
- template_directory = Path(input_args[1]).resolve()
+ template_directory = Path(input_args[1]).resolve() # noqa: F841
run_type = input_args[2] # either "runningLocal" or "runningRemote"
- workflow_driver = input_args[3]
+ workflow_driver = input_args[3] # noqa: F841
input_file = input_args[4]
# input_file_full_path = template_directory / input_file
- with open(input_file, 'r', encoding='utf-8') as f:
+ with open(input_file, encoding='utf-8') as f: # noqa: PTH123
inputs = json.load(f)
- uq_inputs = inputs["UQ"]
- rv_inputs = inputs["randomVariables"]
- edp_inputs = inputs["EDP"]
+ uq_inputs = inputs['UQ']
+ rv_inputs = inputs['randomVariables']
+ edp_inputs = inputs['EDP']
(
parallel_pool,
@@ -125,12 +124,12 @@ def main(input_args):
restart_file,
)
- # TODO: get_initial_states():
+ # TODO: get_initial_states(): # noqa: TD002
# either:
# read them from file or
# use LHS to explore the space and find the best starting points out of
# those sampled values for the different chains
- # TODO: get_initial_proposal_covariance_matrix():
+ # TODO: get_initial_proposal_covariance_matrix(): # noqa: TD002
# either:
# read them from file or
# adaptively tune the proposal covariance matrix by running the chain for
@@ -150,9 +149,7 @@ def main(input_args):
list_of_proposal_covariance_kernels = []
list_of_cholesky_of_proposal_covariance_matrix = []
for dataset_number in range(num_datasets):
- proposal_covariance_matrix = (
- proposal_scale_list[dataset_number] * cov_kernel
- )
+ proposal_covariance_matrix = proposal_scale_list[dataset_number] * cov_kernel
list_of_proposal_covariance_kernels.append(cov_kernel)
cholesky_of_proposal_covariance_matrix = scipy.linalg.cholesky(
@@ -167,9 +164,7 @@ def main(input_args):
list_of_prior_logpdf_values = []
iterable = []
for model_number in range(len(list_of_datasets)):
- initial_state = list_of_initial_states_of_model_parameters[
- model_number
- ]
+ initial_state = list_of_initial_states_of_model_parameters[model_number]
x = transformation_function(initial_state)
logpdf_of_initial_state = uq_utilities.multivariate_normal_logpdf(
initial_state,
@@ -195,21 +190,17 @@ def main(input_args):
list_of_loglikelihood_at_initial_state = []
list_of_prior_logpdf_at_initial_state = []
for dataset_number, dataset in enumerate(list_of_datasets):
- scaled_residual = (
- list_of_model_outputs[dataset_number] - dataset
- ) / np.std(dataset)
+ scaled_residual = (list_of_model_outputs[dataset_number] - dataset) / np.std(
+ dataset
+ )
error_variance_sample_scaled = (
- list_of_initial_states_of_error_variance_per_dataset[
- dataset_number
- ]
+ list_of_initial_states_of_error_variance_per_dataset[dataset_number]
)
log_likelihood_at_initial_state = loglikelihood_function(
scaled_residual,
error_variance_sample_scaled,
)
- prior_logpdf_at_initial_state = list_of_prior_logpdf_values[
- dataset_number
- ]
+ prior_logpdf_at_initial_state = list_of_prior_logpdf_values[dataset_number]
unnormalized_posterior_logpdf_at_initial_state = (
log_likelihood_at_initial_state + prior_logpdf_at_initial_state
)
@@ -219,24 +210,20 @@ def main(input_args):
list_of_loglikelihood_at_initial_state.append(
log_likelihood_at_initial_state
)
- list_of_prior_logpdf_at_initial_state.append(
- prior_logpdf_at_initial_state
- )
+ list_of_prior_logpdf_at_initial_state.append(prior_logpdf_at_initial_state)
- results_directory_name = "sampling_results"
+ results_directory_name = 'sampling_results'
results_directory_path = working_directory / results_directory_name
results_directory_path.mkdir(parents=True, exist_ok=False)
tabular_results_file_base_name = (
- working_directory / "posterior_samples_table.out"
+ working_directory / 'posterior_samples_table.out'
)
results_to_write = {}
- results_to_write["log_priors"] = list_of_prior_logpdf_at_initial_state
- results_to_write["log_likelihoods"] = (
- list_of_loglikelihood_at_initial_state
- )
- results_to_write["unnormalized_log_posteriors"] = (
+ results_to_write['log_priors'] = list_of_prior_logpdf_at_initial_state
+ results_to_write['log_likelihoods'] = list_of_loglikelihood_at_initial_state
+ results_to_write['unnormalized_log_posteriors'] = (
list_of_unnormalized_posterior_logpdf_at_initial_state
)
new_states_list = []
@@ -245,31 +232,31 @@ def main(input_args):
new_states_list.append(item)
else:
new_states_list.append(item.tolist())
- results_to_write["new_states"] = new_states_list
- results_to_write["error_variances_scaled"] = (
+ results_to_write['new_states'] = new_states_list
+ results_to_write['error_variances_scaled'] = (
list_of_initial_states_of_error_variance_per_dataset
)
- with open(results_directory_path / f"sample_0.json", "w", encoding='utf-8') as f:
+ with open(results_directory_path / 'sample_0.json', 'w', encoding='utf-8') as f: # noqa: PTH123
json.dump(results_to_write, f, indent=4)
adaptivity_results = {}
# adaptivity_results["list_of_acceptance_rates"] = (
# list_of_acceptance_rates
# )
- adaptivity_results["proposal_scale_list"] = proposal_scale_list
+ adaptivity_results['proposal_scale_list'] = proposal_scale_list
cov_kernels_list = []
for cov_kernel in list_of_proposal_covariance_kernels:
- cov_kernels_list.append(cov_kernel.tolist())
- adaptivity_results["list_of_proposal_covariance_kernels"] = (
- cov_kernels_list
- )
- with open(
- results_directory_path.parent / f"adaptivity_results_{0}.json", "w", encoding='utf-8'
+ cov_kernels_list.append(cov_kernel.tolist()) # noqa: PERF401
+ adaptivity_results['list_of_proposal_covariance_kernels'] = cov_kernels_list
+ with open( # noqa: PTH123
+ results_directory_path.parent / f'adaptivity_results_{0}.json',
+ 'w',
+ encoding='utf-8',
) as f:
json.dump(adaptivity_results, f, indent=4)
- samples = mwg_sampler.metropolis_within_gibbs_sampler(
+ samples = mwg_sampler.metropolis_within_gibbs_sampler( # noqa: F841
uq_inputs,
parallel_evaluation_function,
function_to_evaluate,
@@ -300,11 +287,12 @@ def main(input_args):
list_of_proposal_covariance_kernels,
)
- if run_type == "runningRemote":
- from mpi4py import MPI
+ if run_type == 'runningRemote':
+ from mpi4py import MPI # noqa: PLC0415
+
MPI.COMM_WORLD.Abort(0)
-if __name__ == "__main__":
+if __name__ == '__main__':
input_args = sys.argv
main(input_args)
diff --git a/modules/performUQ/UCSD_UQ/mainscript_tmcmc.py b/modules/performUQ/UCSD_UQ/mainscript_tmcmc.py
index 946280510..d09297dac 100644
--- a/modules/performUQ/UCSD_UQ/mainscript_tmcmc.py
+++ b/modules/performUQ/UCSD_UQ/mainscript_tmcmc.py
@@ -1,35 +1,57 @@
-"""
-authors: Mukesh Kumar Ramancha, Maitreya Manoj Kurumbhati, Prof. J.P. Conte, Aakash Bangalore Satish*
+"""authors: Mukesh Kumar Ramancha, Maitreya Manoj Kurumbhati, Prof. J.P. Conte, Aakash Bangalore Satish*
affiliation: University of California, San Diego, *SimCenter, University of California, Berkeley
-"""
+""" # noqa: CPY001, D205, D400, INP001
+
# ======================================================================================================================
import os
import sys
import time
from typing import TextIO
-import numpy as np
+import numpy as np
+from calibration_utilities import (
+ CalDataPreparer,
+ CovarianceMatrixPreparer,
+ DataTransformer,
+ LogLikelihoodHandler,
+ createLogFile,
+ make_distributions,
+ syncLogFile,
+)
from parseData import parseDataFunction
from runTMCMC import run_TMCMC
-from calibration_utilities import CovarianceMatrixPreparer, CalDataPreparer, DataTransformer, createLogFile, syncLogFile, make_distributions, LogLikelihoodHandler
# ======================================================================================================================
-def computeModelPosteriorProbabilities(modelPriorProbabilities, modelEvidences):
+
+def computeModelPosteriorProbabilities(modelPriorProbabilities, modelEvidences): # noqa: N802, N803, D103
denominator = np.dot(modelPriorProbabilities, modelEvidences)
- return modelPriorProbabilities*modelEvidences/denominator
+ return modelPriorProbabilities * modelEvidences / denominator
-def computeModelPosteriorProbabilitiesUsingLogEvidences(modelPriorProbabilities, modelLogEvidences):
+def computeModelPosteriorProbabilitiesUsingLogEvidences( # noqa: N802, D103
+ modelPriorProbabilities, # noqa: N803
+ modelLogEvidences, # noqa: N803
+):
deltas = modelLogEvidences - np.min(modelLogEvidences)
denominator = np.dot(modelPriorProbabilities, np.exp(deltas))
- return modelPriorProbabilities*np.exp(deltas)/denominator
+ return modelPriorProbabilities * np.exp(deltas) / denominator
+
# ======================================================================================================================
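computeModelPosteriorProbabilitiesUsingLogEvidences shifts by the smallest log-evidence before exponentiating, so only relative evidences matter and the ratios stay numerically well behaved; a small worked example with made-up numbers:

    import numpy as np

    priors = np.array([0.5, 0.5])
    log_evidences = np.array([-100.0, -102.0])
    # deltas = [2, 0]; denominator = 0.5*e**2 + 0.5*1
    posteriors = computeModelPosteriorProbabilitiesUsingLogEvidences(priors, log_evidences)
    # posteriors ~ [0.881, 0.119]; feeding np.exp(log_evidences) to
    # computeModelPosteriorProbabilities gives the same answer, but the shifted
    # form stays finite even for very negative log-evidences.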
-class TMCMC_Data:
- def __init__(self, mainscriptPath: str, workdirMain: str, runType: str, workflowDriver: str, logFile: TextIO, numBurnInSteps: int = 10) -> None:
+
+class TMCMC_Data: # noqa: D101
+ def __init__(
+ self,
+ mainscriptPath: str, # noqa: N803
+ workdirMain: str, # noqa: N803
+ runType: str, # noqa: N803
+ workflowDriver: str, # noqa: N803
+ logFile: TextIO, # noqa: N803
+ numBurnInSteps: int = 10, # noqa: N803
+ ) -> None:
self.mainscriptPath = mainscriptPath
self.workdirMain = workdirMain
self.runType = runType
@@ -43,46 +65,52 @@ def __init__(self, mainscriptPath: str, workdirMain: str, runType: str, workflow
self.numBurnInSteps = numBurnInSteps
self.numSkipSteps = 1
- def getMPI_size(self):
- if self.runType == "runningRemote":
- from mpi4py import MPI
+ def getMPI_size(self): # noqa: N802, D102
+ if self.runType == 'runningRemote':
+ from mpi4py import MPI # noqa: PLC0415
+
self.comm = MPI.COMM_WORLD
self.MPI_size = self.comm.Get_size()
- def updateUQInfo(self, numberOfSamples, seedVal):
+ def updateUQInfo(self, numberOfSamples, seedVal): # noqa: N802, N803, D102
self.numberOfSamples = numberOfSamples
self.seedVal = seedVal
-
- def findNumProcessorsAvailable(self):
- if self.runType == "runningLocal":
- import multiprocessing as mp
+
+ def findNumProcessorsAvailable(self): # noqa: N802, D102
+ if self.runType == 'runningLocal':
+ import multiprocessing as mp # noqa: PLC0415
+
self.numProcessors = mp.cpu_count()
- elif self.runType == "runningRemote":
- from mpi4py import MPI
+ elif self.runType == 'runningRemote':
+ from mpi4py import MPI # noqa: PLC0415
+
self.comm = MPI.COMM_WORLD
self.numProcessors = self.comm.Get_size()
else:
self.numProcessors = 1
-
- def getNumChains(self, numberOfSamples, runType, numProcessors):
- if runType == "runningLocal":
+
+ def getNumChains(self, numberOfSamples, runType, numProcessors): # noqa: N802, N803, D102
+ if runType == 'runningLocal':
self.numChains = int(min(numProcessors, self.recommendedNumChains))
- elif runType == "runningRemote":
+ elif runType == 'runningRemote':
self.numChains = int(max(numProcessors, self.recommendedNumChains))
else:
self.numChains = self.recommendedNumChains
-
- if self.numChains < numberOfSamples:
- self.numChains = numberOfSamples
-
- def getNumStepsPerChainAfterBurnIn(self, numParticles, numChains):
- self.numStepsAfterBurnIn = int(np.ceil(numParticles/numChains)) * self.numSkipSteps
+
+ self.numChains = max(self.numChains, numberOfSamples)
+
+ def getNumStepsPerChainAfterBurnIn(self, numParticles, numChains): # noqa: N802, N803, D102
+ self.numStepsAfterBurnIn = (
+ int(np.ceil(numParticles / numChains)) * self.numSkipSteps
+ )
# self.numStepsPerChain = numBurnInSteps + numStepsAfterBurnIn
+
# ======================================================================================================================
+
# ======================================================================================================================
-def main(input_args):
+def main(input_args): # noqa: D103
t1 = time.time()
# Initialize analysis
@@ -93,20 +121,20 @@ def main(input_args):
# driver_file = input_args[4]
# input_json_filename = input_args[5]
- mainscript_path = os.path.abspath(__file__)
- working_directory = os.path.abspath(input_args[0])
- template_directory = os.path.abspath(input_args[1])
+ mainscript_path = os.path.abspath(__file__) # noqa: PTH100
+ working_directory = os.path.abspath(input_args[0]) # noqa: PTH100
+ template_directory = os.path.abspath(input_args[1]) # noqa: PTH100
run_type = input_args[2] # either "runningLocal" or "runningRemote"
driver_file = input_args[3]
input_json_filename = input_args[4]
- logfile_name = "logFileTMCMC.txt"
+ logfile_name = 'logFileTMCMC.txt'
logfile = createLogFile(where=working_directory, logfile_name=logfile_name)
# Remove dakotaTab and dakotaTabPrior files if they already exist in the working directory
try:
- os.remove('dakotaTab.out')
- os.remove('dakotTabPrior.out')
+ os.remove('dakotaTab.out') # noqa: PTH107
+ os.remove('dakotaTabPrior.out')  # noqa: PTH107
except OSError:
pass
@@ -115,116 +143,167 @@ def main(input_args):
# Process input json file
# input_json_filename_full_path = os.path.join(os.path.abspath(template_directory), input_json_filename)
input_json_filename_full_path = input_json_filename
- logfile.write("\n\n==========================")
- logfile.write("\nParsing the json input file {}".format(input_json_filename_full_path))
- (number_of_samples, seed_value, calibration_data_filename, loglikelihood_module, write_outputs, variables_list,
- edp_names_list, edp_lengths_list, models_dict, total_number_of_models_in_ensemble) = parseDataFunction(input_json_filename_full_path,
- logfile, working_directory,
- os.path.dirname(mainscript_path))
+ logfile.write('\n\n==========================')
+ logfile.write(f'\nParsing the json input file {input_json_filename_full_path}')
+ (
+ number_of_samples,
+ seed_value,
+ calibration_data_filename,
+ loglikelihood_module, # noqa: F841
+ write_outputs, # noqa: F841
+ variables_list,
+ edp_names_list,
+ edp_lengths_list,
+ models_dict, # noqa: F841
+ total_number_of_models_in_ensemble,
+ ) = parseDataFunction(
+ input_json_filename_full_path,
+ logfile,
+ working_directory,
+ os.path.dirname(mainscript_path), # noqa: PTH120
+ )
syncLogFile(logfile)
# # ================================================================================================================
# Initialize TMCMC object
- tmcmc_data_instance = TMCMC_Data(mainscript_path, working_directory, run_type, driver_file, logfile, numBurnInSteps=4)
- tmcmc_data_instance.updateUQInfo(number_of_samples, seed_value)
- tmcmc_data_instance.findNumProcessorsAvailable()
- tmcmc_data_instance.getNumChains(number_of_samples, run_type, tmcmc_data_instance.numProcessors)
- tmcmc_data_instance.getNumStepsPerChainAfterBurnIn(number_of_samples, tmcmc_data_instance.numChains)
+ tmcmc_data_instance = TMCMC_Data(
+ mainscript_path,
+ working_directory,
+ run_type,
+ driver_file,
+ logfile,
+ numBurnInSteps=4,
+ )
+ tmcmc_data_instance.updateUQInfo(number_of_samples, seed_value)
+ tmcmc_data_instance.findNumProcessorsAvailable()
+ tmcmc_data_instance.getNumChains(
+ number_of_samples, run_type, tmcmc_data_instance.numProcessors
+ )
+ tmcmc_data_instance.getNumStepsPerChainAfterBurnIn(
+ number_of_samples, tmcmc_data_instance.numChains
+ )
# # ================================================================================================================
# Read calibration data
- data_preparer_instance = CalDataPreparer(working_directory, template_directory, calibration_data_filename,
- edp_names_list, edp_lengths_list, logfile)
- calibration_data, number_of_experiments = data_preparer_instance.getCalibrationData()
+ data_preparer_instance = CalDataPreparer(
+ working_directory,
+ template_directory,
+ calibration_data_filename,
+ edp_names_list,
+ edp_lengths_list,
+ logfile,
+ )
+ calibration_data, number_of_experiments = (
+ data_preparer_instance.getCalibrationData()
+ )
# # ================================================================================================================
# Transform the data depending on the option chosen by the user
- transformation = "absMaxScaling"
- data_transformer_instance = DataTransformer(transformStrategy=transformation, logFile=logfile)
+ transformation = 'absMaxScaling'
+ data_transformer_instance = DataTransformer(
+ transformStrategy=transformation, logFile=logfile
+ )
- scale_factors, shift_factors = data_transformer_instance.computeScaleAndShiftFactors(calibration_data, edp_lengths_list)
- logfile.write("\n\n\tThe scale and shift factors computed are: ")
+ scale_factors, shift_factors = (
+ data_transformer_instance.computeScaleAndShiftFactors(
+ calibration_data, edp_lengths_list
+ )
+ )
+ logfile.write('\n\n\tThe scale and shift factors computed are: ')
for j in range(len(edp_names_list)):
logfile.write(
- "\n\t\tEDP: {}, scale factor: {}, shift factor: {}".format(edp_names_list[j], scale_factors[j], shift_factors[j])
+ f'\n\t\tEDP: {edp_names_list[j]}, scale factor: {scale_factors[j]}, shift factor: {shift_factors[j]}'
)
transformed_calibration_data = data_transformer_instance.transformData()
- logfile.write("\n\nThe transformed calibration data: \n{}".format(transformed_calibration_data))
+ logfile.write(
+ f'\n\nThe transformed calibration data: \n{transformed_calibration_data}'
+ )
# ======================================================================================================================
# Process covariance matrix options
- cov_matrix_options_instance = CovarianceMatrixPreparer(transformed_calibration_data, edp_lengths_list, edp_names_list,
- working_directory, number_of_experiments, logfile, run_type)
- defaultErrorVariances = cov_matrix_options_instance.getDefaultErrorVariances()
+ cov_matrix_options_instance = CovarianceMatrixPreparer(
+ transformed_calibration_data,
+ edp_lengths_list,
+ edp_names_list,
+ working_directory,
+ number_of_experiments,
+ logfile,
+ run_type,
+ )
+ defaultErrorVariances = cov_matrix_options_instance.getDefaultErrorVariances() # noqa: N806, F841
covariance_matrix_list = cov_matrix_options_instance.createCovarianceMatrix()
# ======================================================================================================================
# Get log-likelihood function
- LL_Handler = LogLikelihoodHandler(data=transformed_calibration_data,
- covariance_matrix_blocks_list=covariance_matrix_list,
- list_of_data_segment_lengths=edp_lengths_list,
- list_of_scale_factors=scale_factors,
- list_of_shift_factors=shift_factors,
- workdir_main=working_directory,
- full_path_to_tmcmc_code_directory=mainscript_path,
- log_likelihood_file_name="loglike_script.py")
+ LL_Handler = LogLikelihoodHandler( # noqa: N806
+ data=transformed_calibration_data,
+ covariance_matrix_blocks_list=covariance_matrix_list,
+ list_of_data_segment_lengths=edp_lengths_list,
+ list_of_scale_factors=scale_factors,
+ list_of_shift_factors=shift_factors,
+ workdir_main=working_directory,
+ full_path_to_tmcmc_code_directory=mainscript_path,
+ log_likelihood_file_name='loglike_script.py',
+ )
log_likelihood_function = LL_Handler.evaluate_log_likelihood
# ======================================================================================================================
# Start TMCMC workflow
- logfile.write("\n\n==========================")
- logfile.write("\nSetting up the TMCMC algorithm")
+ logfile.write('\n\n==========================')
+ logfile.write('\nSetting up the TMCMC algorithm')
# sys.path.append(workdirMain)
- logfile.write("\n\tResults path: {}".format(working_directory))
+ logfile.write(f'\n\tResults path: {working_directory}')
# number of particles: Np
number_of_samples = tmcmc_data_instance.numberOfSamples
- logfile.write("\n\tNumber of particles: {}".format(number_of_samples))
+ logfile.write(f'\n\tNumber of particles: {number_of_samples}')
# number of max MCMC steps
- number_of_MCMC_steps = tmcmc_data_instance.numBurnInSteps + tmcmc_data_instance.numStepsAfterBurnIn
- max_number_of_MCMC_steps = 10
- logfile.write("\n\tNumber of MCMC steps in first stage: {}".format(number_of_MCMC_steps))
+ number_of_MCMC_steps = ( # noqa: N806
+ tmcmc_data_instance.numBurnInSteps + tmcmc_data_instance.numStepsAfterBurnIn
+ )
+ max_number_of_MCMC_steps = 10 # noqa: N806
+ logfile.write(f'\n\tNumber of MCMC steps in first stage: {number_of_MCMC_steps}')
logfile.write(
- "\n\tMax. number of MCMC steps in any stage: {}".format(max_number_of_MCMC_steps)
+ f'\n\tMax. number of MCMC steps in any stage: {max_number_of_MCMC_steps}'
)
syncLogFile(logfile)
# ======================================================================================================================
# Initialize variables to store prior model probability and evidence
- model_prior_probabilities = np.ones((len(variables_list),))/len(variables_list)
+ model_prior_probabilities = np.ones((len(variables_list),)) / len(variables_list)
model_evidences = np.ones_like(model_prior_probabilities)
- logfile.write("\n\n==========================")
- logfile.write("\nLooping over each model")
+ logfile.write('\n\n==========================')
+ logfile.write('\nLooping over each model')
# For each model:
for model_number, parameters_of_model in enumerate(variables_list):
- logfile.write("\n\n\t==========================")
- logfile.write("\n\tStarting analysis for model {}".format(model_number+1))
- logfile.write("\n\t==========================")
+ logfile.write('\n\n\t==========================')
+ logfile.write(f'\n\tStarting analysis for model {model_number + 1}')
+ logfile.write('\n\t==========================')
# Assign probability distributions to the parameters of the model
- logfile.write("\n\t\tAssigning probability distributions to the parameters")
+ logfile.write('\n\t\tAssigning probability distributions to the parameters')
all_distributions_list = make_distributions(variables=parameters_of_model)
# Run the Algorithm
- logfile.write("\n\n\t==========================")
- logfile.write("\n\tRunning the TMCMC algorithm")
- logfile.write("\n\t==========================")
+ logfile.write('\n\n\t==========================')
+ logfile.write('\n\tRunning the TMCMC algorithm')
+ logfile.write('\n\t==========================')
# set the seed
np.random.seed(tmcmc_data_instance.seedVal)
- logfile.write("\n\tSeed: {}".format(tmcmc_data_instance.seedVal))
+ logfile.write(f'\n\tSeed: {tmcmc_data_instance.seedVal}')
syncLogFile(logfile)
- mytrace, log_evidence = run_TMCMC(
+ mytrace, log_evidence = run_TMCMC( # noqa: F841
number_of_samples,
number_of_samples,
all_distributions_list,
@@ -248,33 +327,33 @@ def main(input_args):
driver_file,
tmcmc_data_instance.parallelizeMCMC,
model_number,
- total_number_of_models_in_ensemble
+ total_number_of_models_in_ensemble,
)
- logfile.write("\n\n\t==========================")
- logfile.write("\n\tTMCMC algorithm finished running")
- logfile.write("\n\t==========================")
+ logfile.write('\n\n\t==========================')
+ logfile.write('\n\tTMCMC algorithm finished running')
+ logfile.write('\n\t==========================')
syncLogFile(logfile)
- logfile.write("\n\n\t==========================")
- logfile.write("\n\tStarting post-processing")
+ logfile.write('\n\n\t==========================')
+ logfile.write('\n\tStarting post-processing')
# Compute model evidence
- logfile.write("\n\n\t\tComputing the model evidence")
+ logfile.write('\n\n\t\tComputing the model evidence')
# evidence = 1
# for i in range(len(mytrace)):
# Wm = mytrace[i][2]
# evidence *= np.mean(Wm)
# logfile.write("\n\t\t\tModel evidence: {:g}".format(evidence))
evidence = np.exp(log_evidence)
- logfile.write("\n\t\t\tModel evidence: {:g}".format(evidence))
- logfile.write("\n\t\t\tModel log_evidence: {:g}".format(log_evidence))
+ logfile.write(f'\n\t\t\tModel evidence: {evidence:g}')
+ logfile.write(f'\n\t\t\tModel log_evidence: {log_evidence:g}')
syncLogFile(logfile)
- logfile.write("\n\n\t==========================")
- logfile.write("\n\tPost processing finished")
- logfile.write("\n\t==========================")
+ logfile.write('\n\n\t==========================')
+ logfile.write('\n\tPost processing finished')
+ logfile.write('\n\t==========================')
syncLogFile(logfile)
@@ -289,31 +368,35 @@ def main(input_args):
model_evidences[model_number] = evidence
- logfile.write("\n\n\t==========================")
- logfile.write("\n\tCompleted analysis for model {}".format(model_number+1))
- logfile.write("\n\t==========================")
+ logfile.write('\n\n\t==========================')
+ logfile.write(f'\n\tCompleted analysis for model {model_number + 1}')
+ logfile.write('\n\t==========================')
syncLogFile(logfile)
- modelPosteriorProbabilities = computeModelPosteriorProbabilities(model_prior_probabilities, model_evidences)
+ modelPosteriorProbabilities = computeModelPosteriorProbabilities( # noqa: N806
+ model_prior_probabilities, model_evidences
+ )
- logfile.write("\n\n==========================")
- logfile.write("\nFinished looping over each model")
- logfile.write("\n==========================\n")
+ logfile.write('\n\n==========================')
+ logfile.write('\nFinished looping over each model')
+ logfile.write('\n==========================\n')
- logfile.write("\nThe posterior model probabilities are:")
+ logfile.write('\nThe posterior model probabilities are:')
for model_number in range(len(variables_list)):
- logfile.write(f"\nModel number {model_number+1}: {modelPosteriorProbabilities[model_number]*100:15g}%")
+ logfile.write(
+ f'\nModel number {model_number + 1}: {modelPosteriorProbabilities[model_number] * 100:15g}%'
+ )
# ======================================================================================================================
- logfile.write("\nUCSD_UQ engine workflow complete!\n")
- logfile.write("\nTime taken: {:0.2f} minutes\n\n".format((time.time() - t1) / 60))
+ logfile.write('\nUCSD_UQ engine workflow complete!\n')
+ logfile.write(f'\nTime taken: {(time.time() - t1) / 60:0.2f} minutes\n\n')
syncLogFile(logfile)
logfile.close()
- if run_type == "runningRemote":
+ if run_type == 'runningRemote':
tmcmc_data_instance.comm.Abort(0)
# ======================================================================================================================
@@ -321,8 +404,8 @@ def main(input_args):
# ======================================================================================================================
-if __name__ == "__main__":
- inputArgs = sys.argv
+if __name__ == '__main__':
+ inputArgs = sys.argv # noqa: N816
main(inputArgs)
-# ======================================================================================================================
\ No newline at end of file
+# ======================================================================================================================
diff --git a/modules/performUQ/UCSD_UQ/mwg_sampler.py b/modules/performUQ/UCSD_UQ/mwg_sampler.py
index a3e7dff3b..8e4cfdcd0 100644
--- a/modules/performUQ/UCSD_UQ/mwg_sampler.py
+++ b/modules/performUQ/UCSD_UQ/mwg_sampler.py
@@ -1,14 +1,14 @@
-import json
+import json # noqa: CPY001, D100, INP001
+from pathlib import Path
+
import numpy as np
import scipy
-from pathlib import Path
-
-path_to_common_uq = Path(__file__).parent.parent / "common"
-import sys
+path_to_common_uq = Path(__file__).parent.parent / 'common'
+import sys # noqa: E402
sys.path.append(str(path_to_common_uq))
-import uq_utilities
+import uq_utilities # noqa: E402
def _update_parameters_of_normal_inverse_wishart_distribution(
@@ -43,7 +43,7 @@ def _update_parameters_of_inverse_gamma_distribution(
return alpha_n, beta_n
-def _draw_one_sample(
+def _draw_one_sample( # noqa: PLR0913, PLR0914, PLR0917
sample_number,
random_state,
num_rv,
@@ -94,9 +94,9 @@ def _draw_one_sample(
move = cholesky_decomposition_of_proposal_covariance_matrix @ np.array(
standard_normal_random_variates
).reshape((-1, 1))
- current_state = np.array(
- list_of_current_states[dataset_number]
- ).reshape((-1, 1))
+ current_state = np.array(list_of_current_states[dataset_number]).reshape(
+ (-1, 1)
+ )
proposed_state = current_state + move
x = transformation_function(proposed_state)
model_iterable = [0, x]
@@ -126,22 +126,18 @@ def _draw_one_sample(
for dataset_number, dataset in enumerate(list_of_datasets):
proposed_state = list_of_proposed_states[dataset_number]
- prior_logpdf_at_proposed_state = (
- uq_utilities.multivariate_normal_logpdf(
- proposed_state,
- current_mean_sample,
- current_cov_sample,
- )
- )
- list_of_logpdf_of_proposed_states.append(
- prior_logpdf_at_proposed_state
+ prior_logpdf_at_proposed_state = uq_utilities.multivariate_normal_logpdf(
+ proposed_state,
+ current_mean_sample,
+ current_cov_sample,
)
- scaled_residual = (
- list_of_model_outputs[dataset_number] - dataset
- ) / np.std(dataset)
- error_variance_sample_scaled = (
- list_of_current_error_variance_samples_scaled[dataset_number]
+ list_of_logpdf_of_proposed_states.append(prior_logpdf_at_proposed_state)
+ scaled_residual = (list_of_model_outputs[dataset_number] - dataset) / np.std(
+ dataset
)
+ error_variance_sample_scaled = list_of_current_error_variance_samples_scaled[
+ dataset_number
+ ]
log_likelihood_at_proposed_state = loglikelihood_function(
scaled_residual,
error_variance_sample_scaled,
@@ -150,9 +146,7 @@ def _draw_one_sample(
log_likelihood_at_proposed_state + prior_logpdf_at_proposed_state
)
unnormalized_posterior_logpdf_at_current_state = (
- list_of_unnormalized_posterior_logpdf_at_current_state[
- dataset_number
- ]
+ list_of_unnormalized_posterior_logpdf_at_current_state[dataset_number]
)
log_hastings_ratio = (
unnormalized_posterior_logpdf_at_proposed_state
@@ -160,9 +154,7 @@ def _draw_one_sample(
)
list_of_log_hastings_ratios.append(log_hastings_ratio)
list_of_log_likelihoods.append(log_likelihood_at_proposed_state)
- standard_uniform_random_variate = prngs_algorithm[
- dataset_number
- ].uniform()
+ standard_uniform_random_variate = prngs_algorithm[dataset_number].uniform()
proposed_state = list_of_proposed_states[dataset_number]
current_state = list_of_current_states[dataset_number]
if (log_hastings_ratio >= 0) | (
@@ -174,9 +166,7 @@ def _draw_one_sample(
)
list_of_log_likes.append(log_likelihood_at_proposed_state)
list_of_log_priors.append(prior_logpdf_at_proposed_state)
- num_accepts_list[dataset_number] = (
- num_accepts_list[dataset_number] + 1
- )
+ num_accepts_list[dataset_number] = num_accepts_list[dataset_number] + 1 # noqa: PLR6104
else:
new_state = current_state
list_of_log_posteriors.append(
@@ -190,9 +180,7 @@ def _draw_one_sample(
)
new_state = np.array(new_state).reshape((-1, 1))
list_of_new_states.append(new_state)
- list_of_parameter_samples.append(
- transformation_function(new_state).tolist()
- )
+ list_of_parameter_samples.append(transformation_function(new_state).tolist())
sse = scaled_residual @ scaled_residual
list_of_sse.append(sse)
@@ -222,7 +210,7 @@ def _draw_one_sample(
).reshape((-1, 1))
s = np.zeros((num_rv, num_rv))
for new_state in list_of_new_states:
- s = s + (new_state - theta_bar) @ (new_state - theta_bar).T
+ s = s + (new_state - theta_bar) @ (new_state - theta_bar).T # noqa: PLR6104
mu_n, lambda_n, nu_n, psi_n = (
_update_parameters_of_normal_inverse_wishart_distribution(
niw_prior_parameters,
@@ -232,10 +220,10 @@ def _draw_one_sample(
)
)
updated_parameters = {}
- updated_parameters["mu_n"] = mu_n.flatten().tolist()
- updated_parameters["lambda_n"] = lambda_n
- updated_parameters["nu_n"] = nu_n
- updated_parameters["psi_n"] = psi_n.tolist()
+ updated_parameters['mu_n'] = mu_n.flatten().tolist()
+ updated_parameters['lambda_n'] = lambda_n
+ updated_parameters['nu_n'] = nu_n
+ updated_parameters['psi_n'] = psi_n.tolist()
covariance_sample = scipy.stats.invwishart(
df=nu_n,
@@ -247,62 +235,58 @@ def _draw_one_sample(
).rvs(random_state=prngs_algorithm[-1])
one_sample = {}
- one_sample["new_states"] = list_of_new_states
- one_sample["error_variances_scaled"] = (
- list_of_error_variance_samples_scaled
- )
- one_sample["hyper_covariance"] = covariance_sample
- one_sample["hyper_mean"] = mean_sample
+ one_sample['new_states'] = list_of_new_states
+ one_sample['error_variances_scaled'] = list_of_error_variance_samples_scaled
+ one_sample['hyper_covariance'] = covariance_sample
+ one_sample['hyper_mean'] = mean_sample
results_to_write = {}
- results_to_write["log_priors"] = list_of_logpdf_of_proposed_states
- results_to_write["log_likelihoods"] = list_of_log_likes
- results_to_write["unnormalized_log_posteriors"] = list_of_log_posteriors
+ results_to_write['log_priors'] = list_of_logpdf_of_proposed_states
+ results_to_write['log_likelihoods'] = list_of_log_likes
+ results_to_write['unnormalized_log_posteriors'] = list_of_log_posteriors
new_states_list = []
for item in list_of_new_states:
if isinstance(item, list):
new_states_list.append(item)
else:
new_states_list.append(item.tolist())
- results_to_write["new_states"] = new_states_list
- results_to_write["error_variances_scaled"] = (
+ results_to_write['new_states'] = new_states_list
+ results_to_write['error_variances_scaled'] = (
list_of_error_variance_samples_scaled
)
- results_to_write["hyper_covariance"] = covariance_sample.tolist()
- results_to_write["hyper_mean"] = mean_sample.tolist()
- results_to_write[
- "updated_parameters_of_normal_inverse_wishart_distribution"
- ] = updated_parameters
+ results_to_write['hyper_covariance'] = covariance_sample.tolist()
+ results_to_write['hyper_mean'] = mean_sample.tolist()
+ results_to_write['updated_parameters_of_normal_inverse_wishart_distribution'] = (
+ updated_parameters
+ )
for dataset_number in range(num_datasets):
x = list_of_parameter_samples[dataset_number]
x.append(list_of_error_variance_samples[dataset_number])
y = list_of_model_outputs[dataset_number]
list_of_strings_to_write = []
- list_of_strings_to_write.append(f"{sample_number+1}")
- list_of_strings_to_write.append(f"{dataset_number+1}")
+ list_of_strings_to_write.append(f'{sample_number + 1}') # noqa: FURB113
+ list_of_strings_to_write.append(f'{dataset_number + 1}')
x_string_list = []
for x_val in x:
- x_string_list.append(f"{x_val}")
- list_of_strings_to_write.append("\t".join(x_string_list))
+ x_string_list.append(f'{x_val}') # noqa: PERF401
+ list_of_strings_to_write.append('\t'.join(x_string_list))
y_string_list = []
for y_val in y:
- y_string_list.append(f"{y_val}")
- list_of_strings_to_write.append("\t".join(y_string_list))
+ y_string_list.append(f'{y_val}') # noqa: PERF401
+ list_of_strings_to_write.append('\t'.join(y_string_list))
tabular_results_file_name = (
- uq_utilities._get_tabular_results_file_name_for_dataset(
+ uq_utilities._get_tabular_results_file_name_for_dataset( # noqa: SLF001
tabular_results_file_base_name, dataset_number
)
)
- string_to_write = "\t".join(list_of_strings_to_write) + "\n"
- uq_utilities._write_to_tabular_results_file(
+ string_to_write = '\t'.join(list_of_strings_to_write) + '\n'
+ uq_utilities._write_to_tabular_results_file( # noqa: SLF001
tabular_results_file_name, string_to_write
)
- with open(
- results_directory_path / f"sample_{sample_number+1}.json", "w"
- ) as f:
+ with open(results_directory_path / f'sample_{sample_number + 1}.json', 'w') as f: # noqa: PLW1514, PTH123
json.dump(results_to_write, f, indent=4)
return one_sample, results_to_write
@@ -317,23 +301,22 @@ def _get_tabular_results_file_name_for_hyperparameters(
tabular_results_file = (
tabular_results_parent
- / f"{tabular_results_stem}_hyperparameters{tabular_results_extension}"
+ / f'{tabular_results_stem}_hyperparameters{tabular_results_extension}'
)
- return tabular_results_file
+ return tabular_results_file # noqa: RET504
-def get_states_from_samples_list(samples_list, dataset_number):
+def get_states_from_samples_list(samples_list, dataset_number): # noqa: D103
sample_values = []
for sample_number in range(len(samples_list)):
- sample_values.append(
- samples_list[sample_number]["new_states"][dataset_number].flatten()
+ sample_values.append( # noqa: PERF401
+ samples_list[sample_number]['new_states'][dataset_number].flatten()
)
return sample_values
def tune(scale, acc_rate):
- """
- Tunes the scaling parameter for the proposal distribution
+ """Tunes the scaling parameter for the proposal distribution
according to the acceptance rate over the last tune_interval:
Rate Variance adaptation
---- -------------------
@@ -343,23 +326,23 @@ def tune(scale, acc_rate):
>0.5 x 1.1
>0.75 x 2
>0.95 x 10
- """
- if acc_rate < 0.01:
+ """ # noqa: D205, D400
+ if acc_rate < 0.01: # noqa: PLR2004
return scale * 0.01
- elif acc_rate < 0.05:
+ elif acc_rate < 0.05: # noqa: RET505, PLR2004
return scale * 0.1
- elif acc_rate < 0.2:
+ elif acc_rate < 0.2: # noqa: PLR2004
return scale * 0.5
- elif acc_rate > 0.95:
+ elif acc_rate > 0.95: # noqa: PLR2004
return scale * 100.0
- elif acc_rate > 0.75:
+ elif acc_rate > 0.75: # noqa: PLR2004
return scale * 10.0
- elif acc_rate > 0.5:
+ elif acc_rate > 0.5: # noqa: PLR2004
return scale * 2
return scale
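A few spot checks of the tuning rule as implemented above (starting from a made-up scale of 1.0):

    assert tune(1.0, 0.03) == 0.1    # low acceptance shrinks the proposal scale
    assert tune(1.0, 0.30) == 1.0    # mid-range acceptance leaves it unchanged
    assert tune(1.0, 0.99) == 100.0  # very high acceptance inflates it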
-def metropolis_within_gibbs_sampler(
+def metropolis_within_gibbs_sampler( # noqa: C901, D103, PLR0913, PLR0914, PLR0917
uq_inputs,
parallel_evaluation_function,
function_to_evaluate,
@@ -384,82 +367,78 @@ def metropolis_within_gibbs_sampler(
current_mean_sample,
current_covariance_sample,
list_of_current_error_variance_samples_scaled,
- parent_distribution,
+ parent_distribution, # noqa: ARG001
num_accepts_list,
proposal_scale_list,
list_of_proposal_covariance_kernels,
):
num_datasets = len(list_of_datasets)
- random_state = uq_inputs["Random State"]
+ random_state = uq_inputs['Random State']
tuning_interval = 200
- if "Tuning Interval" in uq_inputs:
- tuning_interval = uq_inputs["Tuning Interval"]
+ if 'Tuning Interval' in uq_inputs:
+ tuning_interval = uq_inputs['Tuning Interval']
tuning_period = 1000
- if "Tuning Period" in uq_inputs:
- tuning_period = uq_inputs["Tuning Period"]
- num_samples = uq_inputs["Sample Size"] + tuning_period
+ if 'Tuning Period' in uq_inputs:
+ tuning_period = uq_inputs['Tuning Period']
+ num_samples = uq_inputs['Sample Size'] + tuning_period
parent_distribution_prng = (
uq_utilities.get_list_of_pseudo_random_number_generators(
10 * random_state, 1
)[0]
)
- initial_list_of_proposal_covariance_kernels = (
- list_of_proposal_covariance_kernels
- )
+ initial_list_of_proposal_covariance_kernels = list_of_proposal_covariance_kernels # noqa: F841
for dataset_number in range(num_datasets):
tabular_results_file_name = (
- uq_utilities._get_tabular_results_file_name_for_dataset(
+ uq_utilities._get_tabular_results_file_name_for_dataset( # noqa: SLF001
tabular_results_file_base_name, dataset_number
)
)
rv_string_list = []
for rv in rv_inputs:
- rv_string_list.append(rv["name"])
+ rv_string_list.append(rv['name']) # noqa: PERF401
error_var_string_list = []
edp_string_list = []
edp = edp_inputs[dataset_number]
error_var_string_list.append(f'{edp["name"]}.PredictionErrorVariance')
edp_components_list = []
- for edp_component in range(edp["length"]):
- edp_components_list.append(f'{edp["name"]}_{edp_component+1}')
- edp_string_list.append("\t".join(edp_components_list))
+ for edp_component in range(edp['length']):
+ edp_components_list.append(f'{edp["name"]}_{edp_component + 1}') # noqa: PERF401
+ edp_string_list.append('\t'.join(edp_components_list))
list_of_header_strings = []
- list_of_header_strings.append("eval_id")
- list_of_header_strings.append("interface")
- list_of_header_strings.append("\t".join(rv_string_list))
- list_of_header_strings.append("\t".join(error_var_string_list))
- list_of_header_strings.append("\t".join(edp_string_list))
- string_to_write = "\t".join(list_of_header_strings) + "\n"
+ list_of_header_strings.append('eval_id') # noqa: FURB113
+ list_of_header_strings.append('interface')
+ list_of_header_strings.append('\t'.join(rv_string_list))
+ list_of_header_strings.append('\t'.join(error_var_string_list))
+ list_of_header_strings.append('\t'.join(edp_string_list))
+ string_to_write = '\t'.join(list_of_header_strings) + '\n'
tabular_results_file_name.touch()
- uq_utilities._write_to_tabular_results_file(
+ uq_utilities._write_to_tabular_results_file( # noqa: SLF001
tabular_results_file_name, string_to_write
)
list_of_hyperparameter_header_strings = []
- list_of_hyperparameter_header_strings.append("eval_id")
- list_of_hyperparameter_header_strings.append("interface")
+ list_of_hyperparameter_header_strings.append('eval_id') # noqa: FURB113
+ list_of_hyperparameter_header_strings.append('interface')
rv_mean_string_list = []
rv_names_list = []
for rv in rv_inputs:
rv_mean_string_list.append(f'mean_{rv["name"]}')
- rv_names_list.append(rv["name"])
- list_of_hyperparameter_header_strings.append(
- "\t".join(rv_mean_string_list)
- )
+ rv_names_list.append(rv['name'])
+ list_of_hyperparameter_header_strings.append('\t'.join(rv_mean_string_list))
rv_covariance_string_list = []
for i in range(len(rv_names_list)):
for j in range(i, len(rv_names_list)):
- rv_covariance_string_list.append(
- f"cov_{rv_names_list[i]}_{rv_names_list[j]}"
+ rv_covariance_string_list.append( # noqa: PERF401
+ f'cov_{rv_names_list[i]}_{rv_names_list[j]}'
)
list_of_hyperparameter_header_strings.append(
- "\t".join(rv_covariance_string_list)
+ '\t'.join(rv_covariance_string_list)
)
hyperparameter_header_string = (
- "\t".join(list_of_hyperparameter_header_strings) + "\n"
+ '\t'.join(list_of_hyperparameter_header_strings) + '\n'
)
hyperparameter_tabular_results_file_name = (
_get_tabular_results_file_name_for_hyperparameters(
@@ -467,23 +446,23 @@ def metropolis_within_gibbs_sampler(
)
)
hyperparameter_tabular_results_file_name.touch()
- uq_utilities._write_to_tabular_results_file(
+ uq_utilities._write_to_tabular_results_file( # noqa: SLF001
hyperparameter_tabular_results_file_name,
hyperparameter_header_string,
)
list_of_predictive_distribution_sample_header_strings = []
- list_of_predictive_distribution_sample_header_strings.append("eval_id")
- list_of_predictive_distribution_sample_header_strings.append("interface")
+ list_of_predictive_distribution_sample_header_strings.append('eval_id') # noqa: FURB113
+ list_of_predictive_distribution_sample_header_strings.append('interface')
list_of_predictive_distribution_sample_header_strings.append(
- "\t".join(rv_names_list)
+ '\t'.join(rv_names_list)
)
predictive_distribution_sample_header_string = (
- "\t".join(list_of_predictive_distribution_sample_header_strings) + "\n"
+ '\t'.join(list_of_predictive_distribution_sample_header_strings) + '\n'
)
tabular_results_file_base_name.touch()
- uq_utilities._write_to_tabular_results_file(
+ uq_utilities._write_to_tabular_results_file( # noqa: SLF001
tabular_results_file_base_name,
predictive_distribution_sample_header_string,
)
@@ -518,18 +497,18 @@ def metropolis_within_gibbs_sampler(
num_accepts_list,
)
samples.append(one_sample)
- list_of_current_states = one_sample["new_states"]
+ list_of_current_states = one_sample['new_states']
list_of_current_error_variance_samples_scaled = one_sample[
- "error_variances_scaled"
+ 'error_variances_scaled'
]
- current_mean_sample = one_sample["hyper_mean"]
- current_covariance_sample = one_sample["hyper_covariance"]
+ current_mean_sample = one_sample['hyper_mean']
+ current_covariance_sample = one_sample['hyper_covariance']
list_of_unnormalized_posterior_logpdf_at_current_state = results[
- "unnormalized_log_posteriors"
+ 'unnormalized_log_posteriors'
]
- list_of_loglikelihood_at_current_state = results["log_likelihoods"]
- list_of_prior_logpdf_at_current_state = results["log_priors"]
+ list_of_loglikelihood_at_current_state = results['log_likelihoods']
+ list_of_prior_logpdf_at_current_state = results['log_priors']
if (
(sample_number >= tuning_interval)
@@ -544,97 +523,81 @@ def metropolis_within_gibbs_sampler(
proposal_scale = proposal_scale_list[dataset_number]
proposal_scale = tune(proposal_scale, acc_rate)
proposal_scale_list[dataset_number] = proposal_scale
- cov_kernel = list_of_proposal_covariance_kernels[
- dataset_number
- ]
+ cov_kernel = list_of_proposal_covariance_kernels[dataset_number]
if num_accepts > num_rv:
- states = get_states_from_samples_list(
- samples, dataset_number
- )
+ states = get_states_from_samples_list(samples, dataset_number)
samples_array = np.array(states[-tuning_interval:]).T
try:
cov_kernel = np.cov(samples_array)
- except Exception as exc:
- print(
- f"Sample number: {sample_number}, dataset number:"
- f" {dataset_number}, Exception in covariance"
- f" calculation: {exc}"
+ except Exception as exc: # noqa: BLE001
+ print( # noqa: T201
+ f'Sample number: {sample_number}, dataset number:'
+ f' {dataset_number}, Exception in covariance'
+ f' calculation: {exc}'
)
cov_kernel = list_of_proposal_covariance_kernels[
dataset_number
]
proposal_covariance_matrix = cov_kernel * proposal_scale
try:
- cholesky_of_proposal_covariance_matrix = (
- scipy.linalg.cholesky(
- proposal_covariance_matrix, lower=True
- )
+ cholesky_of_proposal_covariance_matrix = scipy.linalg.cholesky(
+ proposal_covariance_matrix, lower=True
)
- except Exception as exc:
- print(
- f"Sample number: {sample_number}, dataset number:"
- f" {dataset_number}, Exception in cholesky"
- f" calculation: {exc}"
+ except Exception as exc: # noqa: BLE001
+ print( # noqa: T201
+ f'Sample number: {sample_number}, dataset number:'
+ f' {dataset_number}, Exception in cholesky'
+ f' calculation: {exc}'
)
- cov_kernel = list_of_proposal_covariance_kernels[
- dataset_number
- ]
+ cov_kernel = list_of_proposal_covariance_kernels[dataset_number]
proposal_covariance_matrix = cov_kernel * proposal_scale
- cholesky_of_proposal_covariance_matrix = (
- scipy.linalg.cholesky(
- proposal_covariance_matrix, lower=True
- )
+ cholesky_of_proposal_covariance_matrix = scipy.linalg.cholesky(
+ proposal_covariance_matrix, lower=True
)
- list_of_cholesky_of_proposal_covariance_matrix[
- dataset_number
- ] = cholesky_of_proposal_covariance_matrix
- list_of_proposal_covariance_kernels[dataset_number] = (
- cov_kernel
+ list_of_cholesky_of_proposal_covariance_matrix[dataset_number] = (
+ cholesky_of_proposal_covariance_matrix
)
+ list_of_proposal_covariance_kernels[dataset_number] = cov_kernel
num_accepts_list = [0] * num_datasets
adaptivity_results = {}
- adaptivity_results["list_of_acceptance_rates"] = (
- list_of_acceptance_rates
- )
- adaptivity_results["proposal_scale_list"] = proposal_scale_list
+ adaptivity_results['list_of_acceptance_rates'] = list_of_acceptance_rates
+ adaptivity_results['proposal_scale_list'] = proposal_scale_list
cov_kernels_list = []
for cov_kernel in list_of_proposal_covariance_kernels:
- cov_kernels_list.append(cov_kernel.tolist())
- adaptivity_results["list_of_proposal_covariance_kernels"] = (
+ cov_kernels_list.append(cov_kernel.tolist()) # noqa: PERF401
+ adaptivity_results['list_of_proposal_covariance_kernels'] = (
cov_kernels_list
)
- with open(
+ with open( # noqa: PLW1514, PTH123
results_directory_path.parent
- / f"adaptivity_results_{sample_number}.json",
- "w",
+ / f'adaptivity_results_{sample_number}.json',
+ 'w',
) as f:
json.dump(adaptivity_results, f, indent=4)
hyper_mean_string_list = []
hyper_mean = current_mean_sample
for val in hyper_mean:
- hyper_mean_string_list.append(f"{val}")
+ hyper_mean_string_list.append(f'{val}') # noqa: PERF401
hyper_covariance_string_list = []
hyper_covariance = current_covariance_sample
for i in range(len(rv_names_list)):
for j in range(i, len(rv_names_list)):
- hyper_covariance_string_list.append(
- f"{hyper_covariance[i][j]}"
- )
+ hyper_covariance_string_list.append(f'{hyper_covariance[i][j]}') # noqa: PERF401
list_of_hyperparameter_value_strings = []
- list_of_hyperparameter_value_strings.append(f"{sample_number+1}")
- list_of_hyperparameter_value_strings.append("0")
+ list_of_hyperparameter_value_strings.append(f'{sample_number + 1}') # noqa: FURB113
+ list_of_hyperparameter_value_strings.append('0')
list_of_hyperparameter_value_strings.append(
- "\t".join(hyper_mean_string_list)
+ '\t'.join(hyper_mean_string_list)
)
list_of_hyperparameter_value_strings.append(
- "\t".join(hyper_covariance_string_list)
+ '\t'.join(hyper_covariance_string_list)
)
hyperparameter_value_string = (
- "\t".join(list_of_hyperparameter_value_strings) + "\n"
+ '\t'.join(list_of_hyperparameter_value_strings) + '\n'
)
- uq_utilities._write_to_tabular_results_file(
+ uq_utilities._write_to_tabular_results_file( # noqa: SLF001
hyperparameter_tabular_results_file_name,
hyperparameter_value_string,
)
@@ -652,27 +615,25 @@ def metropolis_within_gibbs_sampler(
- n_samples_for_mean_of_updated_predictive_distribution_parameters,
num_samples,
):
- with open(results_directory_path / f"sample_{i+1}.json", "r") as f:
+ with open(results_directory_path / f'sample_{i + 1}.json') as f: # noqa: PLW1514, PTH123
data = json.load(f)
updated_parameters = data[
- "updated_parameters_of_normal_inverse_wishart_distribution"
+ 'updated_parameters_of_normal_inverse_wishart_distribution'
]
- mu_n.append(updated_parameters["mu_n"])
- lambda_n.append(updated_parameters["lambda_n"])
- nu_n.append(updated_parameters["nu_n"])
- psi_n.append(updated_parameters["psi_n"])
+ mu_n.append(updated_parameters['mu_n'])
+ lambda_n.append(updated_parameters['lambda_n'])
+ nu_n.append(updated_parameters['nu_n'])
+ psi_n.append(updated_parameters['psi_n'])
mu_n_mean = np.mean(np.array(mu_n), axis=0)
lambda_n_mean = np.mean(np.array(lambda_n), axis=0)
nu_n_mean = np.mean(np.array(nu_n), axis=0)
psi_n_mean = np.mean(np.array(psi_n), axis=0)
- df = nu_n_mean - num_datasets + 1
+ df = nu_n_mean - num_datasets + 1 # noqa: PD901
loc = mu_n_mean
shape = (lambda_n_mean + 1) / (lambda_n_mean * df) * psi_n_mean
- predictive_distribution = scipy.stats.multivariate_t(
- loc=loc, shape=shape, df=df
- )
+ predictive_distribution = scipy.stats.multivariate_t(loc=loc, shape=shape, df=df)
for sample_number in range(num_samples):
sample_from_predictive_t_distribution = predictive_distribution.rvs(
random_state=parent_distribution_prng
@@ -680,33 +641,28 @@ def metropolis_within_gibbs_sampler(
sample_from_predictive_distribution = transformation_function(
sample_from_predictive_t_distribution
)
- while (
- np.sum(np.isfinite(sample_from_predictive_distribution)) < num_rv
- ):
- sample_from_predictive_t_distribution = (
- predictive_distribution.rvs(
- random_state=parent_distribution_prng
- )
+ while np.sum(np.isfinite(sample_from_predictive_distribution)) < num_rv:
+ sample_from_predictive_t_distribution = predictive_distribution.rvs(
+ random_state=parent_distribution_prng
)
sample_from_predictive_distribution = transformation_function(
sample_from_predictive_t_distribution
)
predictive_distribution_sample_values_list = []
for val in sample_from_predictive_distribution:
- predictive_distribution_sample_values_list.append(f"{val}")
+ predictive_distribution_sample_values_list.append(f'{val}') # noqa: PERF401
list_of_predictive_distribution_sample_value_strings = []
- list_of_predictive_distribution_sample_value_strings.append(
- f"{sample_number+1}"
+ list_of_predictive_distribution_sample_value_strings.append( # noqa: FURB113
+ f'{sample_number + 1}'
)
- list_of_predictive_distribution_sample_value_strings.append("0")
+ list_of_predictive_distribution_sample_value_strings.append('0')
list_of_predictive_distribution_sample_value_strings.append(
- "\t".join(predictive_distribution_sample_values_list)
+ '\t'.join(predictive_distribution_sample_values_list)
)
predictive_distribution_sample_value_string = (
- "\t".join(list_of_predictive_distribution_sample_value_strings)
- + "\n"
+ '\t'.join(list_of_predictive_distribution_sample_value_strings) + '\n'
)
- uq_utilities._write_to_tabular_results_file(
+ uq_utilities._write_to_tabular_results_file( # noqa: SLF001
tabular_results_file_base_name,
predictive_distribution_sample_value_string,
)
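The posterior-predictive step in the hunk above averages the updated Normal-Inverse-Wishart hyperparameters and turns them into a multivariate Student-t distribution via scipy. A self-contained sketch of that construction with placeholder values (variable names mirror the code; the numbers are illustrative only):

    import numpy as np
    import scipy.stats

    mu_n_mean = np.zeros(2)      # averaged posterior mean vector
    lambda_n_mean = 10.0         # averaged pseudo-observation count
    nu_n_mean = 12.0             # averaged degrees of freedom
    psi_n_mean = np.eye(2)       # averaged scale matrix
    num_datasets = 2             # same dimension term as in the code above

    df = nu_n_mean - num_datasets + 1
    shape = (lambda_n_mean + 1) / (lambda_n_mean * df) * psi_n_mean
    predictive = scipy.stats.multivariate_t(loc=mu_n_mean, shape=shape, df=df)
    sample = predictive.rvs(random_state=np.random.default_rng(0))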
diff --git a/modules/performUQ/UCSD_UQ/parseData.py b/modules/performUQ/UCSD_UQ/parseData.py
index b12f43270..a5cfc6f81 100644
--- a/modules/performUQ/UCSD_UQ/parseData.py
+++ b/modules/performUQ/UCSD_UQ/parseData.py
@@ -1,44 +1,42 @@
-"""
-authors: Mukesh Kumar Ramancha, Maitreya Manoj Kurumbhati, Prof. J.P. Conte, and Aakash Bangalore Satish*
+"""authors: Mukesh Kumar Ramancha, Maitreya Manoj Kurumbhati, Prof. J.P. Conte, and Aakash Bangalore Satish*
affiliation: University of California, San Diego, *SimCenter, University of California, Berkeley
-"""
+""" # noqa: CPY001, D205, D400, INP001
+import itertools
import json
import os
import sys
-import time
from importlib import import_module
-from shutil import copyfile
-import numpy as np
-import itertools
class DataProcessingError(Exception):
"""Raised when errors found when processing user-supplied calibration and covariance data.
- Attributes:
+ Attributes
+ ----------
message -- explanation of the error
+
"""
def __init__(self, message):
self.message = message
-def parseDataFunction(dakotaJsonFile, logFile, tmpSimCenterDir, mainscriptDir):
+def parseDataFunction(dakotaJsonFile, logFile, tmpSimCenterDir, mainscriptDir): # noqa: C901, N802, N803, D103, PLR0915
# Read in the json object
- logFile.write("\n\tReading the json file")
- with open(dakotaJsonFile, "r") as f:
- jsonInputs = json.load(f)
- logFile.write(" ... Done")
+ logFile.write('\n\tReading the json file')
+ with open(dakotaJsonFile) as f: # noqa: PLW1514, PTH123
+ jsonInputs = json.load(f) # noqa: N806
+ logFile.write(' ... Done')
# Read in the data of the objects within the json file
- logFile.write("\n\tParsing the inputs read in from json file")
- applications = jsonInputs["Applications"]
- edpInputs = jsonInputs["EDP"]
- uqInputs = jsonInputs["UQ"]
- femInputs = jsonInputs["FEM"]
- rvInputs = jsonInputs["randomVariables"]
+ logFile.write('\n\tParsing the inputs read in from json file')
+ applications = jsonInputs['Applications']
+ edpInputs = jsonInputs['EDP'] # noqa: N806
+ uqInputs = jsonInputs['UQ'] # noqa: N806
+ femInputs = jsonInputs['FEM'] # noqa: N806, F841
+ rvInputs = jsonInputs['randomVariables'] # noqa: N806
# localAppDirInputs = jsonInputs['localAppDir']
# pythonInputs = jsonInputs['python']
# remoteAppDirInputs = jsonInputs['remoteAppDir']
@@ -52,135 +50,125 @@ def parseDataFunction(dakotaJsonFile, logFile, tmpSimCenterDir, mainscriptDir):
# numCol = spreadsheet['numCol']
# numRow = spreadsheet['numRow']
# summary = uqResultsInputs['summary']
- workingDir = jsonInputs["workingDir"]
+ workingDir = jsonInputs['workingDir'] # noqa: N806, F841
# Processing UQ inputs
- logFile.write("\n\t\tProcessing UQ inputs")
- seedValue = uqInputs["seed"]
- nSamples = uqInputs["numParticles"]
- #maxRunTime = uqInputs["maxRunTime"]
- if "maxRunTime" in uqInputs.keys():
- maxRunTime = uqInputs["maxRunTime"]
+ logFile.write('\n\t\tProcessing UQ inputs')
+ seedValue = uqInputs['seed'] # noqa: N806
+ nSamples = uqInputs['numParticles'] # noqa: N806
+ # maxRunTime = uqInputs["maxRunTime"]
+ if 'maxRunTime' in uqInputs.keys(): # noqa: SIM118
+ maxRunTime = uqInputs['maxRunTime'] # noqa: N806
else:
- maxRunTime = float("inf")
- logLikelihoodFile = uqInputs["logLikelihoodFile"]
- calDataFile = uqInputs["calDataFile"]
+ maxRunTime = float('inf') # noqa: N806, F841
+ logLikelihoodFile = uqInputs['logLikelihoodFile'] # noqa: N806
+ calDataFile = uqInputs['calDataFile'] # noqa: N806
- parallelizeMCMC = True
- if "parallelExecution" in uqInputs:
- parallelizeMCMC = uqInputs["parallelExecution"]
+ parallelizeMCMC = True # noqa: N806
+ if 'parallelExecution' in uqInputs:
+ parallelizeMCMC = uqInputs['parallelExecution'] # noqa: N806, F841
- logFile.write("\n\t\t\tProcessing the log-likelihood script options")
+ logFile.write('\n\t\t\tProcessing the log-likelihood script options')
# If log-likelihood script is provided, use that, otherwise, use default log-likelihood function
- if len(logLikelihoodFile) > 0: # if the log-likelihood file is not an empty string
+ if (
+ len(logLikelihoodFile) > 0
+ ): # if the log-likelihood file is not an empty string
logFile.write(
- "\n\t\t\t\tSearching for a user-defined log-likelihood script '{}'".format(
- logLikelihoodFile
- )
+ f"\n\t\t\t\tSearching for a user-defined log-likelihood script '{logLikelihoodFile}'"
)
- if os.path.exists(os.path.join(tmpSimCenterDir, logLikelihoodFile)):
+ if os.path.exists(os.path.join(tmpSimCenterDir, logLikelihoodFile)): # noqa: PTH110, PTH118
logFile.write(
- "\n\t\t\t\tFound log-likelihood file '{}' in {}.".format(
- logLikelihoodFile, tmpSimCenterDir
- )
+ f"\n\t\t\t\tFound log-likelihood file '{logLikelihoodFile}' in {tmpSimCenterDir}."
)
- logLikeModuleName = os.path.splitext(logLikelihoodFile)[0]
+ logLikeModuleName = os.path.splitext(logLikelihoodFile)[0] # noqa: PTH122, N806
try:
import_module(logLikeModuleName)
except:
logFile.write(
- "\n\t\t\t\tERROR: The log-likelihood script '{}' cannot be imported.".format(
- os.path.join(tmpSimCenterDir, logLikelihoodFile)
- )
+ f"\n\t\t\t\tERROR: The log-likelihood script '{os.path.join(tmpSimCenterDir, logLikelihoodFile)}' cannot be imported." # noqa: PTH118
)
raise
else:
logFile.write(
- "\n\t\t\t\tERROR: The log-likelihood script '{}' cannot be found in {}.".format(
- logLikelihoodFile, tmpSimCenterDir
- )
+ f"\n\t\t\t\tERROR: The log-likelihood script '{logLikelihoodFile}' cannot be found in {tmpSimCenterDir}."
)
- raise FileNotFoundError(
- "ERROR: The log-likelihood script '{}' cannot be found in {}.".format(
- logLikelihoodFile, tmpSimCenterDir
- )
+ raise FileNotFoundError( # noqa: TRY003
+ f"ERROR: The log-likelihood script '{logLikelihoodFile}' cannot be found in {tmpSimCenterDir}." # noqa: EM102
)
else:
- defaultLogLikeFileName = "defaultLogLikeScript.py"
- defaultLogLikeDirectoryPath = mainscriptDir
+ defaultLogLikeFileName = 'defaultLogLikeScript.py' # noqa: N806
+ defaultLogLikeDirectoryPath = mainscriptDir # noqa: N806
sys.path.append(defaultLogLikeDirectoryPath)
- logLikeModuleName = os.path.splitext(defaultLogLikeFileName)[0]
- logFile.write("\n\t\t\t\tLog-likelihood script not provided.")
+ logLikeModuleName = os.path.splitext(defaultLogLikeFileName)[0] # noqa: PTH122, N806
+ logFile.write('\n\t\t\t\tLog-likelihood script not provided.')
logFile.write(
- "\n\t\t\t\tUsing the default log-likelihood script: \n\t\t\t\t\t{}".format(
- os.path.join(defaultLogLikeDirectoryPath, defaultLogLikeFileName)
- )
+ f'\n\t\t\t\tUsing the default log-likelihood script: \n\t\t\t\t\t{os.path.join(defaultLogLikeDirectoryPath, defaultLogLikeFileName)}' # noqa: PTH118
)
try:
import_module(logLikeModuleName)
except:
logFile.write(
- "\n\t\t\t\tERROR: The log-likelihood script '{}' cannot be imported.".format(
- os.path.join(tmpSimCenterDir, logLikelihoodFile)
- )
+ f"\n\t\t\t\tERROR: The log-likelihood script '{os.path.join(tmpSimCenterDir, logLikelihoodFile)}' cannot be imported." # noqa: PTH118
)
raise
- logLikeModule = import_module(logLikeModuleName)
+ logLikeModule = import_module(logLikeModuleName) # noqa: N806
# Processing EDP inputs
- logFile.write("\n\n\t\tProcessing EDP inputs")
- edpNamesList = []
- edpLengthsList = []
+ logFile.write('\n\n\t\tProcessing EDP inputs')
+ edpNamesList = [] # noqa: N806
+ edpLengthsList = [] # noqa: N806
# Get list of EDPs and their lengths
for edp in edpInputs:
- edpNamesList.append(edp["name"])
- edpLengthsList.append(edp["length"])
+ edpNamesList.append(edp['name'])
+ edpLengthsList.append(edp['length'])
- logFile.write("\n\t\t\tThe EDPs defined are:")
- printString = "\n\t\t\t\t"
+ logFile.write('\n\t\t\tThe EDPs defined are:')
+ printString = '\n\t\t\t\t' # noqa: N806
for i in range(len(edpInputs)):
- printString += "Name: '{}', Length: {}\n\t\t\t\t".format(
- edpNamesList[i], edpLengthsList[i]
+ printString += ( # noqa: N806
+ f"Name: '{edpNamesList[i]}', Length: {edpLengthsList[i]}\n\t\t\t\t"
)
logFile.write(printString)
# logFile.write("\tExpected length of each line in data file: {}".format(lineLength))
# Processing model inputs
- logFile.write("\n\n\t\tProcessing application inputs")
+ logFile.write('\n\n\t\tProcessing application inputs')
# Processing number of models
# Check if this is a multi-model analysis
- runMultiModel = False
- modelsDict = {}
- modelIndicesList = []
- modelRVNamesList = []
- applications = jsonInputs["Applications"]
- for app, appInputs in applications.items():
- logFile.write(f"\n\t\t\tApp: {app}")
- if app.lower() not in ["events"]:
- appl = appInputs["Application"].lower()
+ runMultiModel = False # noqa: N806, F841
+ modelsDict = {} # noqa: N806
+ modelIndicesList = [] # noqa: N806
+ modelRVNamesList = [] # noqa: N806
+ applications = jsonInputs['Applications']
+ for app, appInputs in applications.items(): # noqa: N806
+ logFile.write(f'\n\t\t\tApp: {app}')
+ if app.lower() != 'events':
+ appl = appInputs['Application'].lower()
else:
- appl = appInputs[0]["Application"].lower()
- if appl in ["multimodel"]:
+ appl = appInputs[0]['Application'].lower()
+ if appl == 'multimodel':
# runMultiModel = True
- logFile.write(f'\n\t\t\t\tFound a multimodel application - {app}: {appInputs["Application"]}')
- modelRVName = jsonInputs[app]["modelToRun"][3:]
- appModels = jsonInputs[app]["models"]
- nM = len(appModels)
+ logFile.write(
+ f'\n\t\t\t\tFound a multimodel application - {app}: {appInputs["Application"]}'
+ )
+ modelRVName = jsonInputs[app]['modelToRun'][3:] # noqa: N806
+ appModels = jsonInputs[app]['models'] # noqa: N806
+ nM = len(appModels) # noqa: N806
logFile.write(f'\n\t\t\t\t\tThere are {nM} {app} models')
- modelData = {}
- modelData["nModels"] = nM
- modelData["values"] = [i+1 for i in range(nM)]
- modelData["weights"] = [model["belief"] for model in appModels]
- modelData["name"] = modelRVName
+ modelData = {} # noqa: N806
+ modelData['nModels'] = nM
+ modelData['values'] = [i + 1 for i in range(nM)]
+ modelData['weights'] = [model['belief'] for model in appModels]
+ modelData['name'] = modelRVName
modelsDict[app] = modelData
- modelIndicesList.append(modelData["values"])
+ modelIndicesList.append(modelData['values'])
modelRVNamesList.append(modelRVName)
else:
logFile.write('\n\t\t\t\tNot a multimodel application')
- nModels = 1
- for _, data in modelsDict.items():
- nModels = nModels*data["nModels"]
- cartesianProductOfModelIndices = list(itertools.product(*modelIndicesList))
+ nModels = 1 # noqa: N806
+ for _, data in modelsDict.items(): # noqa: PERF102
+ nModels = nModels * data['nModels'] # noqa: N806, PLR6104
+ cartesianProductOfModelIndices = list(itertools.product(*modelIndicesList)) # noqa: N806
# logFile.write("\n\t\t\tNO LONGER Getting the number of models")
# inputFileList = []
# nModels = femInputs['numInputs']
@@ -192,148 +180,156 @@ def parseDataFunction(dakotaJsonFile, logFile, tmpSimCenterDir, mainscriptDir):
# else:
# inputFileList.append(femInputs['inputFile'])
# logFile.write('\n\t\t\t\tThe number of models is: {}'.format(nModels))
- writeFEMOutputs = True
+ writeFEMOutputs = True # noqa: N806
# Variables
- variablesList = []
+ variablesList = [] # noqa: N806
for _ in range(nModels):
- variablesList.append({
- "names": [],
- "distributions": [],
- "Par1": [],
- "Par2": [],
- "Par3": [],
- "Par4": [],
- })
+ variablesList.append( # noqa: PERF401
+ {
+ 'names': [],
+ 'distributions': [],
+ 'Par1': [],
+ 'Par2': [],
+ 'Par3': [],
+ 'Par4': [],
+ }
+ )
- logFile.write("\n\n\t\t\tLooping over the models")
+ logFile.write('\n\n\t\t\tLooping over the models')
for ind in range(nModels):
- logFile.write("\n\t\t\t\tModel number: {}".format(ind))
+ logFile.write(f'\n\t\t\t\tModel number: {ind}')
# Processing RV inputs
- logFile.write("\n\t\t\t\t\tCreating priors for model number {}".format(ind))
- logFile.write("\n\t\t\t\t\t\tProcessing RV inputs")
+ logFile.write(f'\n\t\t\t\t\tCreating priors for model number {ind}')
+ logFile.write('\n\t\t\t\t\t\tProcessing RV inputs')
for i, rv in enumerate(rvInputs):
- variablesList[ind]["names"].append(rv["name"])
- variablesList[ind]["distributions"].append(rv["distribution"])
- paramString = ""
- if rv["distribution"] == "Uniform":
- variablesList[ind]["Par1"].append(rv["lowerbound"])
- variablesList[ind]["Par2"].append(rv["upperbound"])
- variablesList[ind]["Par3"].append(None)
- variablesList[ind]["Par4"].append(None)
- paramString = "params: {}, {}".format(
- rv["lowerbound"], rv["upperbound"]
+ variablesList[ind]['names'].append(rv['name'])
+ variablesList[ind]['distributions'].append(rv['distribution'])
+ paramString = '' # noqa: N806
+ if rv['distribution'] == 'Uniform':
+ variablesList[ind]['Par1'].append(rv['lowerbound'])
+ variablesList[ind]['Par2'].append(rv['upperbound'])
+ variablesList[ind]['Par3'].append(None)
+ variablesList[ind]['Par4'].append(None)
+ paramString = 'params: {}, {}'.format( # noqa: N806
+ rv['lowerbound'], rv['upperbound']
)
- elif rv["distribution"] == "Normal":
- variablesList[ind]["Par1"].append(rv["mean"])
- variablesList[ind]["Par2"].append(rv["stdDev"])
- variablesList[ind]["Par3"].append(None)
- variablesList[ind]["Par4"].append(None)
- paramString = "params: {}, {}".format(rv["mean"], rv["stdDev"])
- elif rv["distribution"] == "Half-Normal":
- variablesList[ind]["Par1"].append(rv["Standard Deviation"])
- variablesList[ind]["Par2"].append(rv["Upper Bound"])
- variablesList[ind]["Par3"].append(None)
- variablesList[ind]["Par4"].append(None)
- paramString = "params: {}, {}".format(
- rv["Standard Deviation"], rv["Upper Bound"]
+ elif rv['distribution'] == 'Normal':
+ variablesList[ind]['Par1'].append(rv['mean'])
+ variablesList[ind]['Par2'].append(rv['stdDev'])
+ variablesList[ind]['Par3'].append(None)
+ variablesList[ind]['Par4'].append(None)
+ paramString = 'params: {}, {}'.format(rv['mean'], rv['stdDev']) # noqa: N806
+ elif rv['distribution'] == 'Half-Normal':
+ variablesList[ind]['Par1'].append(rv['Standard Deviation'])
+ variablesList[ind]['Par2'].append(rv['Upper Bound'])
+ variablesList[ind]['Par3'].append(None)
+ variablesList[ind]['Par4'].append(None)
+ paramString = 'params: {}, {}'.format( # noqa: N806
+ rv['Standard Deviation'], rv['Upper Bound']
)
- elif rv["distribution"] == "Truncated-Normal":
- variablesList[ind]["Par1"].append(rv["Mean"])
- variablesList[ind]["Par2"].append(rv["Standard Deviation"])
- variablesList[ind]["Par3"].append(rv["a"])
- variablesList[ind]["Par4"].append(rv["b"])
- paramString = "params: {}, {}, {}, {}".format(
- rv["Mean"], rv["Standard Deviation"], rv["a"], rv["b"]
+ elif rv['distribution'] == 'Truncated-Normal':
+ variablesList[ind]['Par1'].append(rv['Mean'])
+ variablesList[ind]['Par2'].append(rv['Standard Deviation'])
+ variablesList[ind]['Par3'].append(rv['a'])
+ variablesList[ind]['Par4'].append(rv['b'])
+ paramString = 'params: {}, {}, {}, {}'.format( # noqa: N806
+ rv['Mean'], rv['Standard Deviation'], rv['a'], rv['b']
)
- elif rv["distribution"] == "Beta":
- variablesList[ind]["Par1"].append(rv["alphas"])
- variablesList[ind]["Par2"].append(rv["betas"])
- variablesList[ind]["Par3"].append(rv["lowerbound"])
- variablesList[ind]["Par4"].append(rv["upperbound"])
- paramString = "params: {}, {}, {}, {}".format(
- rv["alphas"], rv["betas"], rv["lowerbound"], rv["upperbound"]
+ elif rv['distribution'] == 'Beta':
+ variablesList[ind]['Par1'].append(rv['alphas'])
+ variablesList[ind]['Par2'].append(rv['betas'])
+ variablesList[ind]['Par3'].append(rv['lowerbound'])
+ variablesList[ind]['Par4'].append(rv['upperbound'])
+ paramString = 'params: {}, {}, {}, {}'.format( # noqa: N806
+ rv['alphas'], rv['betas'], rv['lowerbound'], rv['upperbound']
)
- elif rv["distribution"] == "Lognormal":
+ elif rv['distribution'] == 'Lognormal':
# meanValue = rv["mean"]
# stdevValue = rv["stdDev"]
# mu = np.log(
# pow(meanValue, 2) / np.sqrt(pow(stdevValue, 2) + pow(meanValue, 2))
# )
# sig = np.sqrt(np.log(pow(stdevValue / meanValue, 2) + 1))
- mu = rv["lambda"]
- sigma = rv["zeta"]
- variablesList[ind]["Par1"].append(mu)
- variablesList[ind]["Par2"].append(sigma)
- variablesList[ind]["Par3"].append(None)
- variablesList[ind]["Par4"].append(None)
- paramString = "params: {}, {}".format(mu, sigma)
- elif rv["distribution"] == "Gumbel":
- variablesList[ind]["Par1"].append(rv["alphaparam"])
- variablesList[ind]["Par2"].append(rv["betaparam"])
- variablesList[ind]["Par3"].append(None)
- variablesList[ind]["Par4"].append(None)
- paramString = "params: {}, {}".format(rv["alphaparam"], rv["betaparam"])
- elif rv["distribution"] == "Weibull":
- variablesList[ind]["Par1"].append(rv["shapeparam"])
- variablesList[ind]["Par2"].append(rv["scaleparam"])
- variablesList[ind]["Par3"].append(None)
- variablesList[ind]["Par4"].append(None)
- paramString = "params: {}, {}".format(
- rv["shapeparam"], rv["scaleparam"]
+ mu = rv['lambda']
+ sigma = rv['zeta']
+ variablesList[ind]['Par1'].append(mu)
+ variablesList[ind]['Par2'].append(sigma)
+ variablesList[ind]['Par3'].append(None)
+ variablesList[ind]['Par4'].append(None)
+ paramString = f'params: {mu}, {sigma}' # noqa: N806
+ elif rv['distribution'] == 'Gumbel':
+ variablesList[ind]['Par1'].append(rv['alphaparam'])
+ variablesList[ind]['Par2'].append(rv['betaparam'])
+ variablesList[ind]['Par3'].append(None)
+ variablesList[ind]['Par4'].append(None)
+ paramString = 'params: {}, {}'.format( # noqa: N806
+ rv['alphaparam'], rv['betaparam']
+ )
+ elif rv['distribution'] == 'Weibull':
+ variablesList[ind]['Par1'].append(rv['shapeparam'])
+ variablesList[ind]['Par2'].append(rv['scaleparam'])
+ variablesList[ind]['Par3'].append(None)
+ variablesList[ind]['Par4'].append(None)
+ paramString = 'params: {}, {}'.format( # noqa: N806
+ rv['shapeparam'], rv['scaleparam']
)
- elif rv["distribution"] == "Exponential":
- variablesList[ind]["Par1"].append(rv["lambda"])
- variablesList[ind]["Par2"].append(None)
- variablesList[ind]["Par3"].append(None)
- variablesList[ind]["Par4"].append(None)
- paramString = "params: {}".format(rv["lambda"])
- elif rv["distribution"] == "Gamma":
- variablesList[ind]["Par1"].append(rv["k"])
- variablesList[ind]["Par2"].append(rv["lambda"])
- variablesList[ind]["Par3"].append(None)
- variablesList[ind]["Par4"].append(None)
- paramString = "params: {}, {}".format(rv["k"], rv["lambda"])
- elif rv["distribution"] == "Chisquare":
- variablesList[ind]["Par1"].append(rv["k"])
- variablesList[ind]["Par2"].append(None)
- variablesList[ind]["Par3"].append(None)
- variablesList[ind]["Par4"].append(None)
- paramString = "params: {}".format(rv["k"])
- elif rv["distribution"] == "Truncated exponential":
- variablesList[ind]["Par1"].append(rv["lambda"])
- variablesList[ind]["Par2"].append(rv["a"])
- variablesList[ind]["Par3"].append(rv["b"])
- variablesList[ind]["Par4"].append(None)
- paramString = "params: {}, {}, {}".format(
- rv["lambda"], rv["a"], rv["b"]
+ elif rv['distribution'] == 'Exponential':
+ variablesList[ind]['Par1'].append(rv['lambda'])
+ variablesList[ind]['Par2'].append(None)
+ variablesList[ind]['Par3'].append(None)
+ variablesList[ind]['Par4'].append(None)
+ paramString = 'params: {}'.format(rv['lambda']) # noqa: N806
+ elif rv['distribution'] == 'Gamma':
+ variablesList[ind]['Par1'].append(rv['k'])
+ variablesList[ind]['Par2'].append(rv['lambda'])
+ variablesList[ind]['Par3'].append(None)
+ variablesList[ind]['Par4'].append(None)
+ paramString = 'params: {}, {}'.format(rv['k'], rv['lambda']) # noqa: N806
+ elif rv['distribution'] == 'Chisquare':
+ variablesList[ind]['Par1'].append(rv['k'])
+ variablesList[ind]['Par2'].append(None)
+ variablesList[ind]['Par3'].append(None)
+ variablesList[ind]['Par4'].append(None)
+ paramString = 'params: {}'.format(rv['k']) # noqa: N806
+ elif rv['distribution'] == 'Truncated exponential':
+ variablesList[ind]['Par1'].append(rv['lambda'])
+ variablesList[ind]['Par2'].append(rv['a'])
+ variablesList[ind]['Par3'].append(rv['b'])
+ variablesList[ind]['Par4'].append(None)
+ paramString = 'params: {}, {}, {}'.format( # noqa: N806
+ rv['lambda'], rv['a'], rv['b']
)
- elif rv["distribution"] == "Discrete":
- if "multimodel" in rv["name"].lower():
+ elif rv['distribution'] == 'Discrete':
+ if 'multimodel' in rv['name'].lower():
try:
- index = modelRVNamesList.index(rv["name"])
- variablesList[ind]["Par1"].append(cartesianProductOfModelIndices[ind][index])
- variablesList[ind]["Par2"].append(None)
- variablesList[ind]["Par3"].append(None)
- variablesList[ind]["Par4"].append(None)
- paramString = "value: {}".format(
+ index = modelRVNamesList.index(rv['name'])
+ variablesList[ind]['Par1'].append(
cartesianProductOfModelIndices[ind][index]
)
+ variablesList[ind]['Par2'].append(None)
+ variablesList[ind]['Par3'].append(None)
+ variablesList[ind]['Par4'].append(None)
+ paramString = ( # noqa: N806
+ f'value: {cartesianProductOfModelIndices[ind][index]}'
+ )
except ValueError:
- logFile.write(f"{rv['name']} not found in list of model RV names")
-
+ logFile.write(
+ f"{rv['name']} not found in list of model RV names"
+ )
+
else:
- variablesList[ind]["Par1"].append(rv["Values"])
- variablesList[ind]["Par2"].append(rv["Weights"])
- variablesList[ind]["Par3"].append(None)
- variablesList[ind]["Par4"].append(None)
- paramString = "values: {}, weights: {}".format(
- rv["Values"], rv["Weights"]
+ variablesList[ind]['Par1'].append(rv['Values'])
+ variablesList[ind]['Par2'].append(rv['Weights'])
+ variablesList[ind]['Par3'].append(None)
+ variablesList[ind]['Par4'].append(None)
+ paramString = 'values: {}, weights: {}'.format( # noqa: N806
+ rv['Values'], rv['Weights']
)
logFile.write(
- "\n\t\t\t\t\t\t\tRV number: {}, name: {}, dist: {}, {}".format(
- i, rv["name"], rv["distribution"], paramString
+ '\n\t\t\t\t\t\t\tRV number: {}, name: {}, dist: {}, {}'.format(
+ i, rv['name'], rv['distribution'], paramString
)
)
# if runMultiModel:
@@ -341,8 +337,8 @@ def parseDataFunction(dakotaJsonFile, logFile, tmpSimCenterDir, mainscriptDir):
# Adding one prior distribution per EDP for the error covariance multiplier term
logFile.write(
- "\n\t\t\t\t\t\tAdding one prior distribution per EDP for the error covariance multiplier "
- "term"
+ '\n\t\t\t\t\t\tAdding one prior distribution per EDP for the error covariance multiplier '
+ 'term'
)
# logFile.write("\n\t\t\tThe prior on the error covariance multipliers is an inverse gamma distribution \n"
# "\t\twith parameters a and b set to 100. This corresponds to a variable whose mean \n"
@@ -353,22 +349,22 @@ def parseDataFunction(dakotaJsonFile, logFile, tmpSimCenterDir, mainscriptDir):
a = 3
b = 2
for i, edp in enumerate(edpInputs):
- name = edp["name"] + ".CovMultiplier"
- variablesList[ind]["names"].append(name)
- variablesList[ind]["distributions"].append("InvGamma")
- variablesList[ind]["Par1"].append(a)
- variablesList[ind]["Par2"].append(b)
- variablesList[ind]["Par3"].append(None)
- variablesList[ind]["Par4"].append(None)
- paramString = "params: {}, {}".format(a, b)
+ name = edp['name'] + '.CovMultiplier'
+ variablesList[ind]['names'].append(name)
+ variablesList[ind]['distributions'].append('InvGamma')
+ variablesList[ind]['Par1'].append(a)
+ variablesList[ind]['Par2'].append(b)
+ variablesList[ind]['Par3'].append(None)
+ variablesList[ind]['Par4'].append(None)
+ paramString = f'params: {a}, {b}' # noqa: N806
logFile.write(
- "\n\t\t\t\t\t\t\tEDP number: {}, name: {}, dist: {}, {}".format(
- i, name, "InvGamma", paramString
+ '\n\t\t\t\t\t\t\tEDP number: {}, name: {}, dist: {}, {}'.format(
+ i, name, 'InvGamma', paramString
)
)
-
- logFile.write("\n\n\tCompleted parsing the inputs")
- logFile.write("\n\n==========================")
+
+ logFile.write('\n\n\tCompleted parsing the inputs')
+ logFile.write('\n\n==========================')
logFile.flush()
os.fsync(logFile.fileno())
return (
@@ -381,5 +377,5 @@ def parseDataFunction(dakotaJsonFile, logFile, tmpSimCenterDir, mainscriptDir):
edpNamesList,
edpLengthsList,
modelsDict,
- nModels
+ nModels,
)
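When multiple "multimodel" applications are present, the parser above enumerates every combination of their model indices with itertools.product and multiplies the per-application counts to obtain nModels. A small illustration with made-up inputs (hypothetical values, not taken from the repository):

    import itertools

    modelIndicesList = [[1, 2], [1, 2, 3]]    # hypothetical: two apps with 2 and 3 models
    nModels = 1
    for n in (2, 3):                          # mirrors data['nModels'] per application
        nModels = nModels * n
    combos = list(itertools.product(*modelIndicesList))
    assert nModels == len(combos) == 6        # one variablesList entry per combination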
diff --git a/modules/performUQ/UCSD_UQ/pdfs.py b/modules/performUQ/UCSD_UQ/pdfs.py
index d49ae6004..ceda795ba 100644
--- a/modules/performUQ/UCSD_UQ/pdfs.py
+++ b/modules/performUQ/UCSD_UQ/pdfs.py
@@ -1,54 +1,53 @@
-"""
-@author: Mukesh, Maitreya, Conte, Aakash
-
-"""
+"""@author: Mukesh, Maitreya, Conte, Aakash""" # noqa: CPY001, D400, INP001
import numpy as np
-import scipy.stats as stats
+from scipy import stats
-class Dist:
+class Dist: # noqa: D101
def __init__(self, dist_name, params=None, moments=None, data=None):
self.dist_name = dist_name
self.params = params
self.moments = moments
self.data = data
if (params is None) and (moments is None) and (data is None):
- raise RuntimeError(
- "Atleast one of parameters, moments, or data must be specified when creating a random variable"
+ raise RuntimeError( # noqa: TRY003
+ 'At least one of parameters, moments, or data must be specified when creating a random variable' # noqa: EM101
)
-class Uniform:
+class Uniform: # noqa: D101
# Method with in this uniform class
def __init__(
- self, lower, upper
+ self,
+ lower,
+ upper,
): # method receives instance as first argument automatically
# the below are the instance variables
self.lower = lower
self.upper = upper
# Method to generate random numbers
- def generate_rns(self, N):
+ def generate_rns(self, N): # noqa: N803, D102
return (self.upper - self.lower) * np.random.rand(N) + self.lower
# Method to compute log of the pdf at x
- def log_pdf_eval(self, x):
+ def log_pdf_eval(self, x): # noqa: D102
if (x - self.upper) * (x - self.lower) <= 0:
lp = np.log(1 / (self.upper - self.lower))
else:
- lp = -np.Inf
+ lp = -np.inf
return lp
-class Halfnormal:
+class Halfnormal: # noqa: D101
def __init__(self, sig):
self.sig = sig
- def generate_rns(self, N):
+ def generate_rns(self, N): # noqa: N803, D102
return self.sig * np.abs(np.random.randn(N))
- def log_pdf_eval(self, x):
+ def log_pdf_eval(self, x): # noqa: D102
if x >= 0:
lp = (
-np.log(self.sig)
@@ -56,35 +55,35 @@ def log_pdf_eval(self, x):
- ((x * x) / (2 * self.sig * self.sig))
)
else:
- lp = -np.Inf
+ lp = -np.inf
return lp
-class Normal:
+class Normal: # noqa: D101
def __init__(self, mu, sig):
self.mu = mu
self.sig = sig
- def generate_rns(self, N):
+ def generate_rns(self, N): # noqa: N803, D102
return self.sig * np.random.randn(N) + self.mu
- def log_pdf_eval(self, x):
+ def log_pdf_eval(self, x): # noqa: D102
lp = (
-0.5 * np.log(2 * np.pi)
- np.log(self.sig)
- 0.5 * (((x - self.mu) / self.sig) ** 2)
)
- return lp
+ return lp # noqa: RET504
-class TrunNormal:
+class TrunNormal: # noqa: D101
def __init__(self, mu, sig, a, b):
self.mu = mu
self.sig = sig
self.a = a
self.b = b
- def generate_rns(self, N):
+ def generate_rns(self, N): # noqa: N803, D102
return stats.truncnorm(
(self.a - self.mu) / self.sig,
(self.b - self.mu) / self.sig,
@@ -92,28 +91,28 @@ def generate_rns(self, N):
scale=self.sig,
).rvs(N)
- def log_pdf_eval(self, x):
+ def log_pdf_eval(self, x): # noqa: D102
lp = stats.truncnorm(
(self.a - self.mu) / self.sig,
(self.b - self.mu) / self.sig,
loc=self.mu,
scale=self.sig,
).logpdf(x)
- return lp
+ return lp # noqa: RET504
-class mvNormal:
- def __init__(self, mu, E):
+class mvNormal: # noqa: D101
+ def __init__(self, mu, E): # noqa: N803
self.mu = mu
self.E = E
self.d = len(mu)
self.logdetE = np.log(np.linalg.det(self.E))
self.Einv = np.linalg.inv(E)
- def generate_rns(self, N):
+ def generate_rns(self, N): # noqa: N803, D102
return np.random.multivariate_normal(self.mu, self.E, N)
- def log_pdf_eval(self, x):
+ def log_pdf_eval(self, x): # noqa: D102
xc = x - self.mu
return (
-(0.5 * self.d * np.log(2 * np.pi))
@@ -122,35 +121,37 @@ def log_pdf_eval(self, x):
)
-class InvGamma:
+class InvGamma: # noqa: D101
def __init__(self, a, b):
self.a = a
self.b = b
self.dist = stats.invgamma(self.a, scale=self.b)
- def generate_rns(self, N):
+ def generate_rns(self, N): # noqa: N803, D102
return self.dist.rvs(size=N)
- def log_pdf_eval(self, x):
+ def log_pdf_eval(self, x): # noqa: D102
return self.dist.logpdf(x)
-class BetaDist:
+class BetaDist: # noqa: D101
def __init__(self, alpha, beta, lowerbound, upperbound):
self.alpha = alpha
self.beta = beta
self.lowerbound = lowerbound
self.upperbound = upperbound
- self.dist = stats.beta(self.alpha, self.beta, self.lowerbound, self.upperbound)
+ self.dist = stats.beta(
+ self.alpha, self.beta, self.lowerbound, self.upperbound
+ )
- def generate_rns(self, N):
+ def generate_rns(self, N): # noqa: N803, D102
return self.dist.rvs(size=N)
- def log_pdf_eval(self, x):
+ def log_pdf_eval(self, x): # noqa: D102
return self.dist.logpdf(x)
-class LogNormDist:
+class LogNormDist: # noqa: D101
def __init__(self, mu, sigma):
# self.sigma = np.sqrt(np.log(zeta**2/lamda**2 + 1))
# self.mu = np.log(lamda) - 1/2*self.sigma**2
@@ -159,53 +160,53 @@ def __init__(self, mu, sigma):
self.scale = np.exp(mu)
self.dist = stats.lognorm(s=self.s, loc=self.loc, scale=self.scale)
- def generate_rns(self, N):
+ def generate_rns(self, N): # noqa: N803, D102
return self.dist.rvs(size=N)
- def log_pdf_eval(self, x):
+ def log_pdf_eval(self, x): # noqa: D102
return self.dist.logpdf(x)
-class GumbelDist:
+class GumbelDist: # noqa: D101
def __init__(self, alpha, beta):
self.alpha = alpha
self.beta = beta
self.dist = stats.gumbel_r(loc=self.beta, scale=(1 / self.alpha))
- def generate_rns(self, N):
+ def generate_rns(self, N): # noqa: N803, D102
return self.dist.rvs(size=N)
- def log_pdf_eval(self, x):
+ def log_pdf_eval(self, x): # noqa: D102
return self.dist.logpdf(x)
-class WeibullDist:
+class WeibullDist: # noqa: D101
def __init__(self, shape, scale):
self.shape = shape
self.scale = scale
self.dist = stats.weibull_min(c=self.shape, scale=self.scale)
- def generate_rns(self, N):
+ def generate_rns(self, N): # noqa: N803, D102
return self.dist.rvs(size=N)
- def log_pdf_eval(self, x):
+ def log_pdf_eval(self, x): # noqa: D102
return self.dist.logpdf(x)
-class ExponentialDist:
+class ExponentialDist: # noqa: D101
def __init__(self, lamda):
self.lamda = lamda
self.scale = 1 / self.lamda
self.dist = stats.expon(scale=self.scale)
- def generate_rns(self, N):
+ def generate_rns(self, N): # noqa: N803, D102
return self.dist.rvs(size=N)
- def log_pdf_eval(self, x):
+ def log_pdf_eval(self, x): # noqa: D102
return self.dist.logpdf(x)
-class TruncatedExponentialDist:
+class TruncatedExponentialDist: # noqa: D101
def __init__(self, lamda, lower, upper):
self.lower = lower
self.upper = upper
@@ -215,14 +216,14 @@ def __init__(self, lamda, lower, upper):
self.b = (self.upper - self.lower) / self.scale
self.dist = stats.truncexpon(b=self.b, loc=self.loc, scale=self.scale)
- def generate_rns(self, N):
+ def generate_rns(self, N): # noqa: N803, D102
return self.dist.rvs(size=N)
- def log_pdf_eval(self, x):
+ def log_pdf_eval(self, x): # noqa: D102
return self.dist.logpdf(x)
-class GammaDist:
+class GammaDist: # noqa: D101
def __init__(self, k, lamda):
self.k = k
self.lamda = lamda
@@ -231,26 +232,26 @@ def __init__(self, k, lamda):
self.scale = 1 / self.beta
self.dist = stats.gamma(a=self.alpha, scale=self.scale)
- def generate_rns(self, N):
+ def generate_rns(self, N): # noqa: N803, D102
return self.dist.rvs(size=N)
- def log_pdf_eval(self, x):
+ def log_pdf_eval(self, x): # noqa: D102
return self.dist.logpdf(x)
-class ChiSquareDist:
+class ChiSquareDist: # noqa: D101
def __init__(self, k):
self.k = k
self.dist = stats.chi2(k=self.k)
- def generate_rns(self, N):
+ def generate_rns(self, N): # noqa: N803, D102
return self.dist.rvs(size=N)
- def log_pdf_eval(self, x):
+ def log_pdf_eval(self, x): # noqa: D102
return self.dist.logpdf(x)
-class DiscreteDist:
+class DiscreteDist: # noqa: D101
def __init__(self, values, weights):
self.values = values
self.weights = weights
@@ -258,10 +259,10 @@ def __init__(self, values, weights):
self.log_probabilities = np.log(self.weights) - np.log(np.sum(self.weights))
self.rng = np.random.default_rng()
- def generate_rns(self, N):
+ def generate_rns(self, N): # noqa: N803, D102
return self.rng.choice(self.values, N, p=self.probabilities)
- def U2X(self, u):
+ def U2X(self, u): # noqa: N802, D102
cumsum_prob = np.cumsum(self.probabilities)
cumsum_prob = np.insert(cumsum_prob, 0, 0)
cumsum_prob = cumsum_prob[:-1]
@@ -271,19 +272,20 @@ def U2X(self, u):
x[i] = self.values[np.where(cumsum_prob <= cdf_val)[0][-1]]
return x
- def log_pdf_eval(self, u):
+ def log_pdf_eval(self, u): # noqa: D102
x = self.U2X(u)
lp = np.zeros_like(x)
for i, x_comp in enumerate(x):
lp[i] = self.log_probabilities[np.where(self.values == x_comp)]
return lp
-class ConstantInteger:
+
+class ConstantInteger: # noqa: D101
def __init__(self, value) -> None:
self.value = value
-
- def generate_rns(self, N):
+
+ def generate_rns(self, N): # noqa: N803, D102
return np.array([self.value for _ in range(N)], dtype=int)
-
- def log_pdf_eval(self, x):
+
+ def log_pdf_eval(self, x): # noqa: ARG002, D102, PLR6301
return 0.0
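Every prior class in pdfs.py exposes the same two methods, generate_rns(N) and log_pdf_eval(x), which is what the samplers rely on. A minimal usage sketch (assumed workflow, with arbitrary parameter values):

    import numpy as np

    prior = Normal(mu=0.0, sig=1.0)           # any of the classes above works the same way
    draws = prior.generate_rns(5)             # five samples from the prior
    logp = np.array([prior.log_pdf_eval(x) for x in draws])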
diff --git a/modules/performUQ/UCSD_UQ/preprocess_hierarchical_bayesian.py b/modules/performUQ/UCSD_UQ/preprocess_hierarchical_bayesian.py
index eeb7a162a..64c4b9ae2 100644
--- a/modules/performUQ/UCSD_UQ/preprocess_hierarchical_bayesian.py
+++ b/modules/performUQ/UCSD_UQ/preprocess_hierarchical_bayesian.py
@@ -1,29 +1,29 @@
-import argparse
+import argparse # noqa: CPY001, D100, INP001
import json
import shutil
import sys
from pathlib import Path
-from typing import Literal, Union
+from typing import Literal
import numpy as np
-path_to_common_uq = Path(__file__).parent.parent / "common"
+path_to_common_uq = Path(__file__).parent.parent / 'common'
sys.path.append(str(path_to_common_uq))
-import uq_utilities
+import uq_utilities # noqa: E402
InputsType = tuple[
Path,
Path,
- Literal["runningLocal", "runningRemote"],
+ Literal['runningLocal', 'runningRemote'],
Path,
dict,
]
-class CommandLineArguments:
+class CommandLineArguments: # noqa: D101
working_directory_path: Path
template_directory_path: Path
- run_type: Union[Literal["runningLocal"], Literal["runningRemote"]]
+ run_type: Literal['runningLocal', 'runningRemote']
driver_file: Path
input_file: Path
@@ -36,7 +36,7 @@ def _handle_arguments(
run_type = command_line_arguments.run_type
driver_file = command_line_arguments.driver_file
input_file = command_line_arguments.input_file
- with open(input_file, "r") as f:
+ with open(input_file) as f: # noqa: PLW1514, PTH123
inputs = json.load(f)
return (
working_directory_path,
@@ -50,69 +50,68 @@ def _handle_arguments(
def _create_parser() -> argparse.ArgumentParser:
parser = argparse.ArgumentParser(
description=(
- "Preprocess the inputs to the hierarchical Bayesian updating"
- " algorithm"
+ 'Preprocess the inputs to the hierarchical Bayesian updating'
+ ' algorithm'
)
)
parser.add_argument(
- "working_directory_path",
+ 'working_directory_path',
help=(
- "path to the working directory where the analysis will be"
- " conducted"
+ 'path to the working directory where the analysis will be conducted'
),
type=Path,
)
parser.add_argument(
- "template_directory_path",
+ 'template_directory_path',
help=(
- "path to the template directory containing the model, data, and"
- " any other files required for the analysis"
+ 'path to the template directory containing the model, data, and'
+ ' any other files required for the analysis'
),
type=Path,
)
parser.add_argument(
- "run_type",
+ 'run_type',
help=(
- "string indicating whether the analysis is being run locally or"
+ 'string indicating whether the analysis is being run locally or'
" remotely on DesignSafe's computing infrastructure"
),
type=str,
)
parser.add_argument(
- "driver_file",
+ 'driver_file',
help=(
- "path to the driver file containing the commands to perform one"
- " evaluation of the model"
+ 'path to the driver file containing the commands to perform one'
+ ' evaluation of the model'
),
type=Path,
)
parser.add_argument(
- "input_file",
+ 'input_file',
help=(
- "path to the JSON file containing the user provided inputs to run"
- " the hierarchical Bayesian analysis"
+ 'path to the JSON file containing the user provided inputs to run'
+ ' the hierarchical Bayesian analysis'
),
type=Path,
)
return parser
-def _print_start_message(demarcation_string: str = "=", start_space: str = ""):
+def _print_start_message(demarcation_string: str = '=', start_space: str = ''):
msg = f"'{Path(__file__).name}' started running"
- print()
- print(start_space + demarcation_string * len(msg))
- print(start_space + msg)
- print()
+ print() # noqa: T201
+ print(start_space + demarcation_string * len(msg)) # noqa: T201
+ print(start_space + msg) # noqa: T201
+ print() # noqa: T201
-def _print_end_message(demarcation_string: str = "=", start_space: str = ""):
+def _print_end_message(demarcation_string: str = '=', start_space: str = ''):
msg = f"'{Path(__file__).name}' finished running"
- print()
- print(start_space + msg)
- print(start_space + demarcation_string * len(msg))
+ print() # noqa: T201
+ print(start_space + msg) # noqa: T201
+ print(start_space + demarcation_string * len(msg)) # noqa: T201
-def main(arguments: InputsType):
+def main(arguments: InputsType): # noqa: D103
(
working_directory_path,
template_directory_path,
@@ -121,12 +120,12 @@ def main(arguments: InputsType):
inputs,
) = arguments
# applications_inputs = inputs["Applications"]
- edp_inputs = inputs["EDP"]
+ edp_inputs = inputs['EDP']
# fem_inputs = inputs["FEM"]
- uq_inputs = inputs["UQ"]
- correlation_matrix_inputs = inputs["correlationMatrix"]
+ uq_inputs = inputs['UQ']
+ correlation_matrix_inputs = inputs['correlationMatrix']
# local_applications_directory = inputs["localAppDir"]
- rv_inputs = inputs["randomVariables"]
+ rv_inputs = inputs['randomVariables']
# remote_applications_directory = inputs["remoteAppDir"]
# run_type = inputs["runType"]
# working_directory = inputs["workingDir"]
@@ -145,8 +144,8 @@ def main(arguments: InputsType):
num_rv = len(rv_inputs)
num_edp = len(edp_inputs)
- list_of_dataset_subdirs = uq_inputs["List Of Dataset Subdirectories"]
- calibration_data_file_name = uq_inputs["Calibration Data File Name"]
+ list_of_dataset_subdirs = uq_inputs['List Of Dataset Subdirectories']
+ calibration_data_file_name = uq_inputs['Calibration Data File Name']
list_of_models = []
list_of_model_evaluation_functions = []
@@ -154,7 +153,7 @@ def main(arguments: InputsType):
list_of_dataset_lengths = []
for sample_number, dir_name_string in enumerate(list_of_dataset_subdirs):
- dir_name_string = list_of_dataset_subdirs[sample_number]
+ dir_name_string = list_of_dataset_subdirs[sample_number] # noqa: PLW2901
dir_name = Path(dir_name_string).stem
source_dir_name = template_directory_path / dir_name
destination_dir_name = working_directory_path / dir_name
@@ -168,7 +167,7 @@ def main(arguments: InputsType):
destination_dir_name / calibration_data_file_name, dtype=float
)
list_of_datasets.append(data)
- list_of_dataset_lengths.append(edp_data["length"])
+ list_of_dataset_lengths.append(edp_data['length'])
model = uq_utilities.get_default_model(
list_of_rv_data=rv_inputs,
@@ -176,7 +175,7 @@ def main(arguments: InputsType):
list_of_dir_names_to_copy_files_from=list_of_dir_names_to_copy_files_from,
run_directory=working_directory_path,
driver_filename=str(driver_file),
- workdir_prefix=f"{dir_name}.workdir",
+ workdir_prefix=f'{dir_name}.workdir',
)
list_of_models.append(model)
@@ -188,7 +187,7 @@ def main(arguments: InputsType):
# parallel_evaluation_function = parallel_pool.run
function_to_evaluate = uq_utilities.model_evaluation_function
- restart_file_name = Path(uq_inputs["Restart File Name"]).name
+ restart_file_name = Path(uq_inputs['Restart File Name']).name
restart_file_path = template_directory_path / restart_file_name
if not restart_file_path.is_file():
restart_file_path = None
@@ -211,15 +210,15 @@ def _parse_arguments(args) -> InputsType:
command_line_arguments = CommandLineArguments()
parser.parse_args(args=args, namespace=command_line_arguments)
arguments = _handle_arguments(command_line_arguments)
- return arguments
+ return arguments # noqa: RET504
-def preprocess_arguments(args):
+def preprocess_arguments(args): # noqa: D103
arguments = _parse_arguments(args)
return main(arguments=arguments)
-if __name__ == "__main__":
+if __name__ == '__main__':
_print_start_message()
(
parallel_evaluation_function,
diff --git a/modules/performUQ/UCSD_UQ/processInputs.py b/modules/performUQ/UCSD_UQ/processInputs.py
index 126898e27..1054001d7 100644
--- a/modules/performUQ/UCSD_UQ/processInputs.py
+++ b/modules/performUQ/UCSD_UQ/processInputs.py
@@ -1,69 +1,71 @@
-import sys
-import os
+import argparse # noqa: CPY001, D100, INP001
import json
-import stat
-import subprocess
+import os
import platform
-import argparse
+import stat
+import subprocess # noqa: S404
+from pathlib import Path
if __name__ == '__main__':
-
parser = argparse.ArgumentParser()
parser.add_argument('--workflowInput')
- parser.add_argument('--workflowOutput')
+ parser.add_argument('--workflowOutput')
parser.add_argument('--driverFile')
parser.add_argument('--runType')
- args,unknowns = parser.parse_known_args()
+ args, unknowns = parser.parse_known_args()
+
+ inputFile = args.workflowInput # noqa: N816
+ runType = args.runType # noqa: N816
+ workflowDriver = args.driverFile # noqa: N816
+ outputFile = args.workflowOutput # noqa: N816
- inputFile = args.workflowInput
- runType = args.runType
- workflowDriver = args.driverFile
- outputFile = args.workflowOutput
+ cwd = os.getcwd() # noqa: PTH109
+ workdir_main = str(Path(cwd).parents[0])
- cwd = os.getcwd()
- workdir_main = str(Path(cwd).parents[0])
-
- #mainScriptPath = inputArgs[0]
- #tmpSimCenterDir = inputArgs[1]
- #templateDir = inputArgs[2]
- #runType = inputArgs[3] # either "runningLocal" or "runningRemote"
+ # mainScriptPath = inputArgs[0]
+ # tmpSimCenterDir = inputArgs[1]
+ # templateDir = inputArgs[2]
+ # runType = inputArgs[3] # either "runningLocal" or "runningRemote"
+
+ mainScriptPath = os.path.dirname(os.path.realpath(__file__)) # noqa: PTH120, N816
+ templateDir = cwd # noqa: N816
+ tmpSimCenterDir = str(Path(cwd).parents[0]) # noqa: N816
- mainScriptPath = os.path.dirname(os.path.realpath(__file__))
- templateDir = cwd
- tmpSimCenterDir = str(Path(cwd).parents[0])
-
# Change permission of workflow driver
- if platform.system() != "Windows":
- workflowDriverFile = os.path.join(templateDir, workflowDriver)
- if runType in ['runningLocal']:
- os.chmod(workflowDriverFile, stat.S_IWUSR | stat.S_IXUSR | stat.S_IRUSR | stat.S_IXOTH)
- st = os.stat(workflowDriverFile)
- os.chmod(workflowDriverFile, st.st_mode | stat.S_IEXEC)
- pythonCommand = "python3"
+ if platform.system() != 'Windows':
+ workflowDriverFile = os.path.join(templateDir, workflowDriver) # noqa: PTH118, N816
+ if runType == 'runningLocal':
+ os.chmod( # noqa: PTH101
+ workflowDriverFile,
+ stat.S_IWUSR | stat.S_IXUSR | stat.S_IRUSR | stat.S_IXOTH,
+ )
+ st = os.stat(workflowDriverFile) # noqa: PTH116
+ os.chmod(workflowDriverFile, st.st_mode | stat.S_IEXEC) # noqa: PTH101
+ pythonCommand = 'python3' # noqa: N816
else:
- pythonCommand = "python"
- workflowDriver = workflowDriver + ".bat"
+ pythonCommand = 'python' # noqa: N816
+ workflowDriver = workflowDriver + '.bat' # noqa: N816, PLR6104
- if runType in ["runningLocal"]:
+ if runType == 'runningLocal':
# Get path to python from dakota.json file
- dakotaJsonFile = os.path.join(os.path.abspath(templateDir), inputFile)
- with open(dakotaJsonFile, 'r') as f:
- jsonInputs = json.load(f)
+ dakotaJsonFile = os.path.join(os.path.abspath(templateDir), inputFile) # noqa: PTH100, PTH118, N816
+ with open(dakotaJsonFile) as f: # noqa: PLW1514, PTH123
+ jsonInputs = json.load(f) # noqa: N816
- if "python" in jsonInputs.keys():
- pythonCommand = jsonInputs["python"]
+ if 'python' in jsonInputs.keys(): # noqa: SIM118
+ pythonCommand = jsonInputs['python'] # noqa: N816
# Get the path to the mainscript.py of TMCMC
# mainScriptDir = os.path.split(mainScriptPath)[0]
- mainScript = os.path.join(mainScriptPath, "mainscript.py")
- command = "{} {} {} {} {} {} {}".format(pythonCommand, mainScript, tmpSimCenterDir, templateDir, runType, workflowDriver, inputFile)
+ mainScript = os.path.join(mainScriptPath, 'mainscript.py') # noqa: PTH118, N816
+ command = f'{pythonCommand} {mainScript} {tmpSimCenterDir} {templateDir} {runType} {workflowDriver} {inputFile}'
try:
- result = subprocess.check_output(command, stderr=subprocess.STDOUT, shell=True)
- returnCode = 0
+ result = subprocess.check_output( # noqa: S602
+ command, stderr=subprocess.STDOUT, shell=True
+ )
+ returnCode = 0 # noqa: N816
except subprocess.CalledProcessError as e:
result = e.output
- returnCode = e.returncode
-
+ returnCode = e.returncode # noqa: N816
diff --git a/modules/performUQ/UCSD_UQ/runFEM.py b/modules/performUQ/UCSD_UQ/runFEM.py
index 8c1fc1be8..d2c92799e 100644
--- a/modules/performUQ/UCSD_UQ/runFEM.py
+++ b/modules/performUQ/UCSD_UQ/runFEM.py
@@ -1,92 +1,104 @@
-"""
-authors: Dr. Frank McKenna*, Aakash Bangalore Satish*, Mukesh Kumar Ramancha, Maitreya Manoj Kurumbhati,
+"""authors: Dr. Frank McKenna*, Aakash Bangalore Satish*, Mukesh Kumar Ramancha, Maitreya Manoj Kurumbhati,
and Prof. J.P. Conte
affiliation: SimCenter*; University of California, San Diego
-"""
+""" # noqa: CPY001, D205, D400, INP001
import os
-import subprocess
import shutil
+import subprocess # noqa: S404
+
import numpy as np
-def copytree(src, dst, symlinks=False, ignore=None):
- if not os.path.exists(dst):
- os.makedirs(dst)
+def copytree(src, dst, symlinks=False, ignore=None): # noqa: FBT002, D103
+ if not os.path.exists(dst): # noqa: PTH110
+ os.makedirs(dst) # noqa: PTH103
for item in os.listdir(src):
- s = os.path.join(src, item)
- d = os.path.join(dst, item)
- if os.path.isdir(s):
+ s = os.path.join(src, item) # noqa: PTH118
+ d = os.path.join(dst, item) # noqa: PTH118
+ if os.path.isdir(s): # noqa: PTH112
copytree(s, d, symlinks, ignore)
else:
try:
- if not os.path.exists(d) or os.stat(s).st_mtime - os.stat(d).st_mtime > 1:
+ if (
+ not os.path.exists(d) # noqa: PTH110
+ or os.stat(s).st_mtime - os.stat(d).st_mtime > 1 # noqa: PTH116
+ ):
shutil.copy2(s, d)
- except Exception as ex:
- msg = f"Could not copy {s}. The following error occurred: \n{ex}"
- return msg
- return "0"
-
-
-def runFEM(particleNumber, parameterSampleValues, variables, workdirMain, log_likelihood_function, calibrationData, numExperiments,
- covarianceMatrixList, edpNamesList, edpLengthsList, scaleFactors, shiftFactors, workflowDriver):
- """
- this function runs FE model (model.tcl) for each parameter value (par)
+ except Exception as ex: # noqa: BLE001
+ msg = f'Could not copy {s}. The following error occurred: \n{ex}'
+ return msg # noqa: RET504
+ return '0'
+
+
+def runFEM( # noqa: N802
+ particleNumber, # noqa: N803
+ parameterSampleValues, # noqa: N803
+ variables,
+ workdirMain, # noqa: N803
+ log_likelihood_function,
+ calibrationData, # noqa: ARG001, N803
+ numExperiments, # noqa: ARG001, N803
+ covarianceMatrixList, # noqa: ARG001, N803
+ edpNamesList, # noqa: N803
+ edpLengthsList, # noqa: N803
+ scaleFactors, # noqa: ARG001, N803
+ shiftFactors, # noqa: ARG001, N803
+ workflowDriver, # noqa: N803
+):
+ """This function runs FE model (model.tcl) for each parameter value (par)
model.tcl should take parameter input
model.tcl should output 'output$PN.txt' -> column vector of size 'Ny'
- """
-
- workdirName = ("workdir." + str(particleNumber + 1))
- analysisPath = os.path.join(workdirMain, workdirName)
+ """ # noqa: D205, D400, D401, D404
+ workdirName = 'workdir.' + str(particleNumber + 1) # noqa: N806
+ analysisPath = os.path.join(workdirMain, workdirName) # noqa: PTH118, N806
- if os.path.isdir(analysisPath):
- os.chmod(os.path.join(analysisPath, workflowDriver), 0o777)
+ if os.path.isdir(analysisPath): # noqa: PTH112
+ os.chmod(os.path.join(analysisPath, workflowDriver), 0o777) # noqa: S103, PTH101, PTH118
shutil.rmtree(analysisPath)
-
- os.mkdir(analysisPath)
+
+ os.mkdir(analysisPath) # noqa: PTH102
# copy templatefiles
- templateDir = os.path.join(workdirMain, "templatedir")
+ templateDir = os.path.join(workdirMain, 'templatedir') # noqa: PTH118, N806
copytree(templateDir, analysisPath)
# change to analysis directory
os.chdir(analysisPath)
# write input file and covariance multiplier values list
- covarianceMultiplierList = []
- parameterNames = variables["names"]
- with open("params.in", "w") as f:
- f.write('{}\n'.format(len(parameterSampleValues) - len(edpNamesList)))
+ covarianceMultiplierList = [] # noqa: N806
+ parameterNames = variables['names'] # noqa: N806
+ with open('params.in', 'w') as f: # noqa: PLW1514, PTH123
+ f.write(f'{len(parameterSampleValues) - len(edpNamesList)}\n')
for i in range(len(parameterSampleValues)):
name = str(parameterNames[i])
value = str(parameterSampleValues[i])
if name.split('.')[-1] != 'CovMultiplier':
- f.write('{} {}\n'.format(name, value))
+ f.write(f'{name} {value}\n')
else:
covarianceMultiplierList.append(parameterSampleValues[i])
- #subprocess.run(workflowDriver, stderr=subprocess.PIPE, shell=True)
-
- returnCode = subprocess.call(
- os.path.join(analysisPath, workflowDriver),
- shell=True,
- stdout=subprocess.DEVNULL,
- stderr=subprocess.STDOUT,
- ) # subprocess.check_call(workflow_run_command, shell=True, stdout=FNULL, stderr=subprocess.STDOUT)
-
+ # subprocess.run(workflowDriver, stderr=subprocess.PIPE, shell=True)
+ returnCode = subprocess.call( # noqa: S602, N806, F841
+ os.path.join(analysisPath, workflowDriver), # noqa: PTH118
+ shell=True,
+ stdout=subprocess.DEVNULL,
+ stderr=subprocess.STDOUT,
+ ) # subprocess.check_call(workflow_run_command, shell=True, stdout=FNULL, stderr=subprocess.STDOUT)
# Read in the model prediction
- if os.path.exists('results.out'):
- with open('results.out', 'r') as f:
+ if os.path.exists('results.out'): # noqa: PTH110
+ with open('results.out') as f: # noqa: PLW1514, PTH123
prediction = np.atleast_2d(np.genfromtxt(f)).reshape((1, -1))
preds = prediction.copy()
- os.chdir("../")
+ os.chdir('../')
ll = log_likelihood_function(prediction, covarianceMultiplierList)
else:
- os.chdir("../")
- preds = np.atleast_2d([-np.inf]*sum(edpLengthsList)).reshape((1, -1))
+ os.chdir('../')
+ preds = np.atleast_2d([-np.inf] * sum(edpLengthsList)).reshape((1, -1))
ll = -np.inf
- return (ll, preds)
\ No newline at end of file
+ return (ll, preds)
diff --git a/modules/performUQ/UCSD_UQ/runTMCMC.py b/modules/performUQ/UCSD_UQ/runTMCMC.py
index f07bd561e..c689e5fc4 100644
--- a/modules/performUQ/UCSD_UQ/runTMCMC.py
+++ b/modules/performUQ/UCSD_UQ/runTMCMC.py
@@ -1,106 +1,152 @@
-"""
-authors: Mukesh Kumar Ramancha, Maitreya Manoj Kurumbhati, and Prof. J.P. Conte
+"""authors: Mukesh Kumar Ramancha, Maitreya Manoj Kurumbhati, and Prof. J.P. Conte
affiliation: University of California, San Diego
modified: Aakash Bangalore Satish, NHERI SimCenter, UC Berkeley
-"""
+""" # noqa: CPY001, D205, D400, INP001
-import numpy as np
-import tmcmcFunctions
+import csv
import multiprocessing as mp
+import os
from multiprocessing import Pool
-from runFEM import runFEM
+
+import numpy as np
+import tmcmcFunctions
from numpy.random import SeedSequence, default_rng
-import os
-import csv
+from runFEM import runFEM
-def write_stage_start_info_to_logfile(logfile, stage_number, beta, effective_sample_size,
- scale_factor_for_proposal_covariance, log_evidence, number_of_samples):
+def write_stage_start_info_to_logfile( # noqa: D103
+ logfile,
+ stage_number,
+ beta,
+ effective_sample_size,
+ scale_factor_for_proposal_covariance,
+ log_evidence,
+ number_of_samples,
+):
logfile.write('\n\n\t\t==========================')
- logfile.write("\n\t\tStage number: {}".format(stage_number))
+ logfile.write(f'\n\t\tStage number: {stage_number}')
if stage_number == 0:
- logfile.write("\n\t\tSampling from prior")
- logfile.write("\n\t\tbeta = 0")
+ logfile.write('\n\t\tSampling from prior')
+ logfile.write('\n\t\tbeta = 0')
else:
- logfile.write("\n\t\tbeta = %9.8g" % beta)
- logfile.write("\n\t\tESS = %d" % effective_sample_size)
- logfile.write("\n\t\tscalem = %.2g" % scale_factor_for_proposal_covariance)
- logfile.write(f"\n\t\tlog-evidence = {log_evidence:<9.8g}")
- logfile.write("\n\n\t\tNumber of model evaluations in this stage: {}".format(number_of_samples))
+ logfile.write('\n\t\tbeta = %9.8g' % beta) # noqa: UP031
+ logfile.write('\n\t\tESS = %d' % effective_sample_size)
+ logfile.write('\n\t\tscalem = %.2g' % scale_factor_for_proposal_covariance) # noqa: UP031
+ logfile.write(f'\n\t\tlog-evidence = {log_evidence:<9.8g}')
+ logfile.write(
+ f'\n\n\t\tNumber of model evaluations in this stage: {number_of_samples}'
+ )
logfile.flush()
os.fsync(logfile.fileno())
-def write_eval_data_to_logfile(logfile, parallelize_MCMC, run_type, proc_count=1, MPI_size=1, stage_num=0):
+
+def write_eval_data_to_logfile( # noqa: D103
+ logfile,
+ parallelize_MCMC, # noqa: N803
+ run_type,
+ proc_count=1,
+ MPI_size=1, # noqa: N803
+ stage_num=0,
+):
if stage_num == 0:
- logfile.write("\n\n\t\tRun type: {}".format(run_type))
+ logfile.write(f'\n\n\t\tRun type: {run_type}')
if parallelize_MCMC:
- if run_type == "runningLocal":
+ if run_type == 'runningLocal':
if stage_num == 0:
- logfile.write("\n\n\t\tCreated multiprocessing pool for runType: {}".format(run_type))
+ logfile.write(
+ f'\n\n\t\tCreated multiprocessing pool for runType: {run_type}'
+ )
else:
- logfile.write("\n\n\t\tLocal run - MCMC steps")
- logfile.write("\n\t\t\tNumber of processors being used: {}".format(proc_count))
+ logfile.write('\n\n\t\tLocal run - MCMC steps')
+ logfile.write(f'\n\t\t\tNumber of processors being used: {proc_count}')
else:
if stage_num == 0:
- logfile.write("\n\n\t\tCreated mpi4py executor pool for runType: {}".format(run_type))
+ logfile.write(
+ f'\n\n\t\tCreated mpi4py executor pool for runType: {run_type}'
+ )
else:
- logfile.write("\n\n\t\tRemote run - MCMC steps")
- logfile.write("\n\t\t\tmax_workers: {}".format(MPI_size))
+ logfile.write('\n\n\t\tRemote run - MCMC steps')
+ logfile.write(f'\n\t\t\tmax_workers: {MPI_size}')
else:
if stage_num == 0:
- logfile.write("\n\n\t\tNot parallelized")
+ logfile.write('\n\n\t\tNot parallelized')
else:
- logfile.write("\n\n\t\tLocal run - MCMC steps, not parallelized")
- logfile.write("\n\t\t\tNumber of processors being used: {}".format(1))
-
-
-def create_headings(logfile, model_number, model_parameters, edp_names_list, edp_lengths_list, writeOutputs):
+ logfile.write('\n\n\t\tLocal run - MCMC steps, not parallelized')
+ logfile.write(f'\n\t\t\tNumber of processors being used: {1}')
+
+
+def create_headings( # noqa: D103
+ logfile,
+ model_number,
+ model_parameters,
+ edp_names_list,
+ edp_lengths_list,
+ writeOutputs, # noqa: N803
+):
# Create the headings, which will be the first line of the file
headings = 'eval_id\tinterface\t'
if model_number == 0:
- logfile.write("\n\t\t\tCreating headings")
+ logfile.write('\n\t\t\tCreating headings')
for v in model_parameters['names']:
- headings += '{}\t'.format(v)
+ headings += f'{v}\t'
if writeOutputs: # create headings for outputs
for i, edp in enumerate(edp_names_list):
if edp_lengths_list[i] == 1:
- headings += '{}\t'.format(edp)
+ headings += f'{edp}\t'
else:
for comp in range(edp_lengths_list[i]):
- headings += '{}_{}\t'.format(edp, comp + 1)
+ headings += f'{edp}_{comp + 1}\t'
headings += '\n'
-
- return headings
-
-def get_prediction_from_workdirs(i, working_directory):
- workdir_string = ("workdir." + str(i + 1))
- prediction = np.atleast_2d(np.genfromtxt(os.path.join(working_directory, workdir_string,
- 'results.out'))).reshape((1, -1))
- return prediction
+ return headings
-def write_data_to_tab_files(logfile, working_directory, model_number, model_parameters,
- edp_names_list, edp_lengths_list, number_of_samples, dataToWrite,
- tab_file_name, predictions):
-
- tab_file_full_path = os.path.join(working_directory, tab_file_name)
+def get_prediction_from_workdirs(i, working_directory): # noqa: D103
+ workdir_string = 'workdir.' + str(i + 1)
+ prediction = np.atleast_2d(
+ np.genfromtxt(os.path.join(working_directory, workdir_string, 'results.out')) # noqa: PTH118
+ ).reshape((1, -1))
+ return prediction # noqa: RET504
+
+
+def write_data_to_tab_files( # noqa: D103
+ logfile,
+ working_directory,
+ model_number,
+ model_parameters,
+ edp_names_list,
+ edp_lengths_list,
+ number_of_samples,
+ dataToWrite, # noqa: N803
+ tab_file_name,
+ predictions,
+):
+ tab_file_full_path = os.path.join(working_directory, tab_file_name) # noqa: PTH118
write_outputs = True
- headings = create_headings(logfile, model_number, model_parameters, edp_names_list, edp_lengths_list, write_outputs)
-
- logfile.write("\n\t\t\tWriting to file {}".format(tab_file_full_path))
- with open(tab_file_full_path, "a+") as f:
+ headings = create_headings(
+ logfile,
+ model_number,
+ model_parameters,
+ edp_names_list,
+ edp_lengths_list,
+ write_outputs,
+ )
+
+ logfile.write(f'\n\t\t\tWriting to file {tab_file_full_path}')
+ with open(tab_file_full_path, 'a+') as f: # noqa: PLW1514, PTH123
if model_number == 0:
f.write(headings)
for i in range(number_of_samples):
- row_string = f"{i + 1 + number_of_samples*model_number}\t{model_number+1}\t"
+ row_string = (
+ f'{i + 1 + number_of_samples * model_number}\t{model_number + 1}\t'
+ )
for j in range(len(model_parameters['names'])):
- row_string += f"{dataToWrite[i, j]}\t"
+ row_string += f'{dataToWrite[i, j]}\t'
if write_outputs: # write the output data
prediction = predictions[i, :]
for pred in prediction:
- row_string += f"{pred}\t"
- row_string += "\n"
+ row_string += f'{pred}\t'
+ row_string += '\n'
f.write(row_string)
logfile.write('\n\t\t==========================')
@@ -108,61 +154,125 @@ def write_data_to_tab_files(logfile, working_directory, model_number, model_para
os.fsync(logfile.fileno())
-def write_data_to_csvfile(logfile, total_number_of_models_in_ensemble, stage_number, model_number,
- working_directory, data_to_write):
- logfile.write("\n\n\t\tWriting samples from stage {} to csv file".format(stage_number - 1))
+def write_data_to_csvfile( # noqa: D103
+ logfile,
+ total_number_of_models_in_ensemble,
+ stage_number,
+ model_number,
+ working_directory,
+ data_to_write,
+):
+ logfile.write(
+ f'\n\n\t\tWriting samples from stage {stage_number - 1} to csv file'
+ )
if total_number_of_models_in_ensemble > 1:
- string_to_append = f'resultsStage{stage_number - 1}_Model_{model_number+1}.csv'
+ string_to_append = (
+ f'resultsStage{stage_number - 1}_Model_{model_number + 1}.csv'
+ )
else:
string_to_append = f'resultsStage{stage_number - 1}.csv'
- resultsFilePath = os.path.join(os.path.abspath(working_directory), string_to_append)
+ resultsFilePath = os.path.join( # noqa: PTH118, N806
+ os.path.abspath(working_directory), # noqa: PTH100
+ string_to_append,
+ )
- with open(resultsFilePath, 'w', newline='') as csvfile:
- csvWriter = csv.writer(csvfile)
+ with open(resultsFilePath, 'w', newline='') as csvfile: # noqa: PLW1514, PTH123
+ csvWriter = csv.writer(csvfile) # noqa: N806
csvWriter.writerows(data_to_write)
- logfile.write("\n\t\t\tWrote to file {}".format(resultsFilePath))
+ logfile.write(f'\n\t\t\tWrote to file {resultsFilePath}')
# Finished writing data
-def run_TMCMC(number_of_samples, number_of_chains, all_distributions_list, number_of_MCMC_steps, max_number_of_MCMC_steps,
- log_likelihood_function, model_parameters, working_directory, seed,
- calibration_data, number_of_experiments, covariance_matrix_list, edp_names_list, edp_lengths_list, scale_factors,
- shift_factors, run_type, logfile, MPI_size, driver_file, parallelize_MCMC=True,
- model_number=0, total_number_of_models_in_ensemble=1):
- """ Runs TMCMC Algorithm """
-
+def run_TMCMC( # noqa: N802, PLR0913, PLR0917
+ number_of_samples,
+ number_of_chains,
+ all_distributions_list,
+ number_of_MCMC_steps, # noqa: N803
+ max_number_of_MCMC_steps, # noqa: N803
+ log_likelihood_function,
+ model_parameters,
+ working_directory,
+ seed,
+ calibration_data,
+ number_of_experiments,
+ covariance_matrix_list,
+ edp_names_list,
+ edp_lengths_list,
+ scale_factors,
+ shift_factors,
+ run_type,
+ logfile,
+ MPI_size, # noqa: N803
+ driver_file,
+ parallelize_MCMC=True, # noqa: FBT002, N803
+ model_number=0,
+ total_number_of_models_in_ensemble=1,
+):
+ """Runs TMCMC Algorithm""" # noqa: D400, D401
# Initialize (beta, effective sample size)
beta = 0
effective_sample_size = number_of_samples
mytrace = []
# Initialize other TMCMC variables
- number_of_MCMC_steps = number_of_MCMC_steps
- adaptively_calculate_num_MCMC_steps = True
- adaptively_scale_proposal_covariance = True
+ number_of_MCMC_steps = number_of_MCMC_steps # noqa: N806, PLW0127
+ adaptively_calculate_num_MCMC_steps = True # noqa: N806
+ adaptively_scale_proposal_covariance = True
scale_factor_for_proposal_covariance = 1 # cov scale factor
# model_evidence = 1 # model evidence
stage_number = 0 # stage number of TMCMC
log_evidence = 0
- write_stage_start_info_to_logfile(logfile, stage_number, beta, effective_sample_size,
- scale_factor_for_proposal_covariance, log_evidence, number_of_samples)
+ write_stage_start_info_to_logfile(
+ logfile,
+ stage_number,
+ beta,
+ effective_sample_size,
+ scale_factor_for_proposal_covariance,
+ log_evidence,
+ number_of_samples,
+ )
# initial samples
- sample_values = tmcmcFunctions.initial_population(number_of_samples, all_distributions_list)
+ sample_values = tmcmcFunctions.initial_population(
+ number_of_samples, all_distributions_list
+ )
# Evaluate posterior at Sm
- prior_pdf_values = np.array([tmcmcFunctions.log_prior(s, all_distributions_list) for s in sample_values]).squeeze()
+ prior_pdf_values = np.array(
+ [tmcmcFunctions.log_prior(s, all_distributions_list) for s in sample_values]
+ ).squeeze()
unnormalized_posterior_pdf_values = prior_pdf_values # prior = post for beta = 0
- iterables = [(ind, sample_values[ind], model_parameters, working_directory, log_likelihood_function, calibration_data,
- number_of_experiments, covariance_matrix_list, edp_names_list, edp_lengths_list,
- scale_factors, shift_factors, driver_file) for ind in range(number_of_samples)]
+ iterables = [
+ (
+ ind,
+ sample_values[ind],
+ model_parameters,
+ working_directory,
+ log_likelihood_function,
+ calibration_data,
+ number_of_experiments,
+ covariance_matrix_list,
+ edp_names_list,
+ edp_lengths_list,
+ scale_factors,
+ shift_factors,
+ driver_file,
+ )
+ for ind in range(number_of_samples)
+ ]
# Evaluate log-likelihood at current samples Sm
- if run_type == "runningLocal":
+ if run_type == 'runningLocal':
processor_count = mp.cpu_count()
pool = Pool(processes=processor_count)
- write_eval_data_to_logfile(logfile, parallelize_MCMC, run_type, proc_count=processor_count, stage_num=stage_number)
+ write_eval_data_to_logfile(
+ logfile,
+ parallelize_MCMC,
+ run_type,
+ proc_count=processor_count,
+ stage_num=stage_number,
+ )
outputs = pool.starmap(runFEM, iterables)
log_likelihoods_list = []
predictions_list = []
@@ -170,9 +280,16 @@ def run_TMCMC(number_of_samples, number_of_chains, all_distributions_list, numbe
log_likelihoods_list.append(output[0])
predictions_list.append(output[1])
else:
- from mpi4py.futures import MPIPoolExecutor
+ from mpi4py.futures import MPIPoolExecutor # noqa: PLC0415
+
executor = MPIPoolExecutor(max_workers=MPI_size)
- write_eval_data_to_logfile(logfile, parallelize_MCMC, run_type, MPI_size=MPI_size, stage_num=stage_number)
+ write_eval_data_to_logfile(
+ logfile,
+ parallelize_MCMC,
+ run_type,
+ MPI_size=MPI_size,
+ stage_num=stage_number,
+ )
outputs = list(executor.starmap(runFEM, iterables))
log_likelihoods_list = []
predictions_list = []
@@ -183,13 +300,26 @@ def run_TMCMC(number_of_samples, number_of_chains, all_distributions_list, numbe
prediction_values = np.array(predictions_list).reshape((number_of_samples, -1))
total_number_of_model_evaluations = number_of_samples
- logfile.write("\n\n\t\tTotal number of model evaluations so far: {}".format(total_number_of_model_evaluations))
+ logfile.write(
+ f'\n\n\t\tTotal number of model evaluations so far: {total_number_of_model_evaluations}'
+ )
# Write the results of the first stage to a file named dakotaTabPrior.out for quoFEM to be able to read the results
- logfile.write("\n\n\t\tWriting prior samples to 'dakotaTabPrior.out' for quoFEM to read the results")
- write_data_to_tab_files(logfile, working_directory, model_number, model_parameters,
- edp_names_list, edp_lengths_list, number_of_samples, dataToWrite=sample_values,
- tab_file_name="dakotaTabPrior.out", predictions=prediction_values)
+ logfile.write(
+ "\n\n\t\tWriting prior samples to 'dakotaTabPrior.out' for quoFEM to read the results"
+ )
+ write_data_to_tab_files(
+ logfile,
+ working_directory,
+ model_number,
+ model_parameters,
+ edp_names_list,
+ edp_lengths_list,
+ number_of_samples,
+ dataToWrite=sample_values,
+ tab_file_name='dakotaTabPrior.out',
+ predictions=prediction_values,
+ )
total_log_evidence = 0
@@ -199,10 +329,14 @@ def run_TMCMC(number_of_samples, number_of_chains, all_distributions_list, numbe
# plausible weights of Sm corresponding to new beta
# beta, Wm, ESS = tmcmcFunctions.compute_beta(beta, Lm, ESS, threshold=0.95)
# beta, Wm, ESS = tmcmcFunctions.compute_beta(beta, Lm, ESS, threshold=0.5)
- beta, log_evidence, weights, effective_sample_size = tmcmcFunctions.compute_beta_evidence(beta, log_likelihood_values, logfile, threshold=1.0)
+ beta, log_evidence, weights, effective_sample_size = (
+ tmcmcFunctions.compute_beta_evidence(
+ beta, log_likelihood_values, logfile, threshold=1.0
+ )
+ )
# beta, log_evidence, weights, effective_sample_size = tmcmcFunctions.compute_beta_evidence_old(beta, log_likelihood_values, logfile, int(effective_sample_size/2), threshold=1.0)
- total_log_evidence = total_log_evidence + log_evidence
+ total_log_evidence = total_log_evidence + log_evidence # noqa: PLR6104
# seed to reproduce results
ss = SeedSequence(seed)
@@ -212,55 +346,128 @@ def run_TMCMC(number_of_samples, number_of_chains, all_distributions_list, numbe
# model_evidence = model_evidence * (sum(weights) / number_of_samples)
# Calculate covariance matrix using Wm_n
- weighted_sample_covariance_matrix = np.cov(sample_values, aweights=weights, rowvar=False)
+ weighted_sample_covariance_matrix = np.cov(
+ sample_values, aweights=weights, rowvar=False
+ )
# logFile.write("\nCovariance matrix: {}".format(Cm))
# Resample ###################################################
# Resampling using plausible weights
# SmcapIDs = np.random.choice(range(N), N, p=Wm / sum(Wm))
rng = default_rng(child_seeds[-1])
- resample_ids = rng.choice(range(number_of_samples), number_of_samples, p=weights)
+ resample_ids = rng.choice(
+ range(number_of_samples), number_of_samples, p=weights
+ )
resampled_values = sample_values[resample_ids]
resampled_log_likelihood_values = log_likelihood_values[resample_ids]
- resampled_unnormalized_posterior_pdf_values = unnormalized_posterior_pdf_values[resample_ids]
- resampled_prediction_values = np.atleast_2d(prediction_values[resample_ids, :])
+ resampled_unnormalized_posterior_pdf_values = (
+ unnormalized_posterior_pdf_values[resample_ids]
+ )
+ resampled_prediction_values = np.atleast_2d(
+ prediction_values[resample_ids, :]
+ )
# save to trace
# stage m: samples, likelihood, weights, next stage ESS, next stage beta, resampled samples
- mytrace.append([sample_values, log_likelihood_values, weights, effective_sample_size, beta, resampled_values])
+ mytrace.append(
+ [
+ sample_values,
+ log_likelihood_values,
+ weights,
+ effective_sample_size,
+ beta,
+ resampled_values,
+ ]
+ )
# Write Data to '.csv' files
data_to_write = np.hstack((sample_values, prediction_values))
- write_data_to_csvfile(logfile, total_number_of_models_in_ensemble, stage_number, model_number,
- working_directory, data_to_write)
+ write_data_to_csvfile(
+ logfile,
+ total_number_of_models_in_ensemble,
+ stage_number,
+ model_number,
+ working_directory,
+ data_to_write,
+ )
# Perturb ###################################################
# perform MCMC starting at each Smcap (total: N) for Nm_steps
- scaled_proposal_covariance_matrix = (scale_factor_for_proposal_covariance ** 2) * weighted_sample_covariance_matrix # Proposal dist covariance matrix
-
- number_of_model_evaluations_in_this_stage = number_of_chains * number_of_MCMC_steps
- write_stage_start_info_to_logfile(logfile, stage_number, beta, effective_sample_size,
- scale_factor_for_proposal_covariance, log_evidence, number_of_model_evaluations_in_this_stage)
+ scaled_proposal_covariance_matrix = (
+ scale_factor_for_proposal_covariance**2
+ ) * weighted_sample_covariance_matrix # Proposal dist covariance matrix
+
+ number_of_model_evaluations_in_this_stage = (
+ number_of_chains * number_of_MCMC_steps
+ )
+ write_stage_start_info_to_logfile(
+ logfile,
+ stage_number,
+ beta,
+ effective_sample_size,
+ scale_factor_for_proposal_covariance,
+ log_evidence,
+ number_of_model_evaluations_in_this_stage,
+ )
number_of_accepted_states_in_this_stage = 0
- iterables = [(sample_num, scaled_proposal_covariance_matrix, number_of_MCMC_steps, resampled_values[sample_num],
- resampled_log_likelihood_values[sample_num], resampled_unnormalized_posterior_pdf_values[sample_num], beta,
- number_of_accepted_states_in_this_stage, all_distributions_list, log_likelihood_function, model_parameters,
- working_directory, default_rng(child_seeds[sample_num]),
- calibration_data, number_of_experiments, covariance_matrix_list,
- edp_names_list, edp_lengths_list, scale_factors,
- shift_factors, driver_file, resampled_prediction_values[sample_num, :].reshape((1, -1)))
- for sample_num in range(number_of_samples)]
-
- if run_type == "runningLocal":
- write_eval_data_to_logfile(logfile, parallelize_MCMC, run_type, proc_count=processor_count, stage_num=stage_number)
+ iterables = [
+ (
+ sample_num,
+ scaled_proposal_covariance_matrix,
+ number_of_MCMC_steps,
+ resampled_values[sample_num],
+ resampled_log_likelihood_values[sample_num],
+ resampled_unnormalized_posterior_pdf_values[sample_num],
+ beta,
+ number_of_accepted_states_in_this_stage,
+ all_distributions_list,
+ log_likelihood_function,
+ model_parameters,
+ working_directory,
+ default_rng(child_seeds[sample_num]),
+ calibration_data,
+ number_of_experiments,
+ covariance_matrix_list,
+ edp_names_list,
+ edp_lengths_list,
+ scale_factors,
+ shift_factors,
+ driver_file,
+ resampled_prediction_values[sample_num, :].reshape((1, -1)),
+ )
+ for sample_num in range(number_of_samples)
+ ]
+
+ if run_type == 'runningLocal':
+ write_eval_data_to_logfile(
+ logfile,
+ parallelize_MCMC,
+ run_type,
+ proc_count=processor_count,
+ stage_num=stage_number,
+ )
results = pool.starmap(tmcmcFunctions.MCMC_MH, iterables)
else:
- write_eval_data_to_logfile(logfile, parallelize_MCMC, run_type, MPI_size=MPI_size, stage_num=stage_number)
+ write_eval_data_to_logfile(
+ logfile,
+ parallelize_MCMC,
+ run_type,
+ MPI_size=MPI_size,
+ stage_num=stage_number,
+ )
results = list(executor.starmap(tmcmcFunctions.MCMC_MH, iterables))
- samples_list, loglikes_list, posterior_pdf_vals_list, num_accepts, all_proposals, all_PLP, preds_list = zip(*results)
+ (
+ samples_list,
+ loglikes_list,
+ posterior_pdf_vals_list,
+ num_accepts,
+ all_proposals,
+ all_PLP, # noqa: N806
+ preds_list,
+ ) = zip(*results)
# for next beta
sample_values = np.asarray(samples_list)
log_likelihood_values = np.asarray(loglikes_list)
@@ -270,46 +477,89 @@ def run_TMCMC(number_of_samples, number_of_chains, all_distributions_list, numbe
num_accepts = np.asarray(num_accepts)
number_of_accepted_states_in_this_stage = sum(num_accepts)
all_proposals = np.asarray(all_proposals)
- all_PLP = np.asarray(all_PLP)
+ all_PLP = np.asarray(all_PLP) # noqa: N806
- total_number_of_model_evaluations += number_of_model_evaluations_in_this_stage
- logfile.write("\n\n\t\tTotal number of model evaluations so far: {}".format(total_number_of_model_evaluations))
+ total_number_of_model_evaluations += (
+ number_of_model_evaluations_in_this_stage
+ )
+ logfile.write(
+ f'\n\n\t\tTotal number of model evaluations so far: {total_number_of_model_evaluations}'
+ )
# total observed acceptance rate
- R = number_of_accepted_states_in_this_stage / number_of_model_evaluations_in_this_stage
- logfile.write(f"\n\n\t\tacceptance rate = {R:<9.6g}")
- if adaptively_scale_proposal_covariance: # scale factor based on observed acceptance ratio
+ R = ( # noqa: N806
+ number_of_accepted_states_in_this_stage
+ / number_of_model_evaluations_in_this_stage
+ )
+ logfile.write(f'\n\n\t\tacceptance rate = {R:<9.6g}')
+ if (
+ adaptively_scale_proposal_covariance
+ ): # scale factor based on observed acceptance ratio
scale_factor_for_proposal_covariance = (1 / 9) + ((8 / 9) * R)
- if adaptively_calculate_num_MCMC_steps: # Calculate Nm_steps based on observed acceptance rate
+ if (
+ adaptively_calculate_num_MCMC_steps
+ ): # Calculate Nm_steps based on observed acceptance rate
# increase max Nmcmc with stage number
- number_of_MCMC_steps = min(number_of_MCMC_steps + 1, max_number_of_MCMC_steps)
- logfile.write("\n\t\tadapted max MCMC steps = %d" % number_of_MCMC_steps)
-
- acc_rate = max(1. / number_of_model_evaluations_in_this_stage, R)
- number_of_MCMC_steps = min(number_of_MCMC_steps, 1 + int(np.log(1 - 0.99) / np.log(1 - acc_rate)))
- logfile.write("\n\t\tnext MCMC Nsteps = %d" % number_of_MCMC_steps)
+ number_of_MCMC_steps = min( # noqa: N806
+ number_of_MCMC_steps + 1, max_number_of_MCMC_steps
+ )
+ logfile.write('\n\t\tadapted max MCMC steps = %d' % number_of_MCMC_steps)
+
+ acc_rate = max(1.0 / number_of_model_evaluations_in_this_stage, R)
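+ # choose the smallest n with 1 - (1 - acc_rate)**n >= 0.99, i.e. ~99% chance of at least one acceptance per chain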
+ number_of_MCMC_steps = min( # noqa: N806
+ number_of_MCMC_steps,
+ 1 + int(np.log(1 - 0.99) / np.log(1 - acc_rate)),
+ )
+ logfile.write('\n\t\tnext MCMC Nsteps = %d' % number_of_MCMC_steps)
logfile.write('\n\t\t==========================')
# save to trace
- mytrace.append([sample_values, log_likelihood_values, np.ones(len(weights)), 'notValid', 1, 'notValid'])
+ mytrace.append(
+ [
+ sample_values,
+ log_likelihood_values,
+ np.ones(len(weights)),
+ 'notValid',
+ 1,
+ 'notValid',
+ ]
+ )
# Write last stage data to '.csv' file
data_to_write = np.hstack((sample_values, prediction_values))
- write_data_to_csvfile(logfile, total_number_of_models_in_ensemble, stage_number, model_number,
- working_directory, data_to_write)
-
- write_data_to_tab_files(logfile, working_directory, model_number, model_parameters,
- edp_names_list, edp_lengths_list, number_of_samples, dataToWrite=sample_values,
- tab_file_name="dakotaTab.out", predictions=prediction_values)
+ write_data_to_csvfile(
+ logfile,
+ total_number_of_models_in_ensemble,
+ stage_number,
+ model_number,
+ working_directory,
+ data_to_write,
+ )
+
+ write_data_to_tab_files(
+ logfile,
+ working_directory,
+ model_number,
+ model_parameters,
+ edp_names_list,
+ edp_lengths_list,
+ number_of_samples,
+ dataToWrite=sample_values,
+ tab_file_name='dakotaTab.out',
+ predictions=prediction_values,
+ )
if parallelize_MCMC == 'yes':
- if run_type == "runningLocal":
+ if run_type == 'runningLocal':
pool.close()
- logfile.write("\n\tClosed multiprocessing pool for runType: {}".format(run_type))
+ logfile.write(f'\n\tClosed multiprocessing pool for runType: {run_type}')
else:
executor.shutdown()
- logfile.write("\n\tShutdown mpi4py executor pool for runType: {}".format(run_type))
+ logfile.write(
+ f'\n\tShutdown mpi4py executor pool for runType: {run_type}'
+ )
return mytrace, total_log_evidence
diff --git a/modules/performUQ/UCSD_UQ/tmcmcFunctions.py b/modules/performUQ/UCSD_UQ/tmcmcFunctions.py
index fb5c8c8b9..e2ab26de0 100644
--- a/modules/performUQ/UCSD_UQ/tmcmcFunctions.py
+++ b/modules/performUQ/UCSD_UQ/tmcmcFunctions.py
@@ -1,95 +1,98 @@
-"""
-authors: Mukesh Kumar Ramancha, Maitreya Manoj Kurumbhati, and Prof. J.P. Conte
+"""authors: Mukesh Kumar Ramancha, Maitreya Manoj Kurumbhati, and Prof. J.P. Conte
affiliation: University of California, San Diego
-"""
+""" # noqa: CPY001, D205, D400, INP001
import numpy as np
from runFEM import runFEM
from scipy.special import logsumexp
-def initial_population(N, p):
- IniPop = np.zeros((N, len(p)))
+def initial_population(N, p): # noqa: N803, D103
+ IniPop = np.zeros((N, len(p))) # noqa: N806
for i in range(len(p)):
IniPop[:, i] = p[i].generate_rns(N)
return IniPop
-def log_prior(s, p):
- logP = 0
+def log_prior(s, p): # noqa: D103
+ logP = 0 # noqa: N806
for i in range(len(s)):
- logP = logP + p[i].log_pdf_eval(s[i])
+ logP = logP + p[i].log_pdf_eval(s[i]) # noqa: N806, PLR6104
return logP
-def propose(current, covariance, n):
+def propose(current, covariance, n): # noqa: D103
return np.random.multivariate_normal(current, covariance, n)
-def compute_beta(beta, likelihoods, prev_ESS, threshold):
+def compute_beta(beta, likelihoods, prev_ESS, threshold): # noqa: N803, D103
old_beta = beta
min_beta = beta
max_beta = 2.0
# rN = int(len(likelihoods) * 0.95) #pymc3 uses 0.5
- rN = threshold * prev_ESS # purdue prof uses 0.95
+ rN = threshold * prev_ESS # purdue prof uses 0.95 # noqa: N806
new_beta = beta
- while max_beta - min_beta > 1e-3:
+ while max_beta - min_beta > 1e-3: # noqa: PLR2004
new_beta = 0.5 * (max_beta + min_beta)
# plausible weights of Sm corresponding to new beta
inc_beta = new_beta - old_beta
- Wm = np.exp(inc_beta * (likelihoods - likelihoods.max()))
- ESS = int(1 / np.sum((Wm / sum(Wm)) ** 2))
- if ESS == rN:
+ Wm = np.exp(inc_beta * (likelihoods - likelihoods.max())) # noqa: N806
+ ESS = int(1 / np.sum((Wm / sum(Wm)) ** 2)) # noqa: N806
+ if rN == ESS:
break
- elif ESS < rN:
+ elif rN > ESS: # noqa: RET508
max_beta = new_beta
else:
min_beta = new_beta
- if new_beta < 1e-3:
+ if new_beta < 1e-3: # noqa: PLR2004
new_beta = 1e-3
inc_beta = new_beta - old_beta
- Wm = np.exp(inc_beta * (likelihoods - likelihoods.max()))
+ Wm = np.exp(inc_beta * (likelihoods - likelihoods.max())) # noqa: N806
- if new_beta >= 0.95:
+ if new_beta >= 0.95: # noqa: PLR2004
new_beta = 1
# plausible weights of Sm corresponding to new beta
inc_beta = new_beta - old_beta
- Wm = np.exp(inc_beta * (likelihoods - likelihoods.max()))
+ Wm = np.exp(inc_beta * (likelihoods - likelihoods.max())) # noqa: N806
return new_beta, Wm, ESS
-def compute_beta_evidence_old(
- beta, log_likelihoods, log_evidence, prev_ESS, threshold
+def compute_beta_evidence_old( # noqa: D103
+ beta,
+ log_likelihoods,
+ log_evidence,
+ prev_ESS, # noqa: N803
+ threshold,
):
old_beta = beta
min_beta = beta
max_beta = 2.0
- N = len(log_likelihoods)
- min_ESS = np.ceil(0.1 * N)
- rN = max(threshold * prev_ESS, min_ESS)
+ N = len(log_likelihoods) # noqa: N806
+ min_ESS = np.ceil(0.1 * N) # noqa: N806
+ rN = max(threshold * prev_ESS, min_ESS) # noqa: N806
new_beta = 0.5 * (max_beta + min_beta)
inc_beta = new_beta - old_beta
- log_Wm = inc_beta * log_likelihoods
- log_Wm_n = log_Wm - logsumexp(log_Wm)
- ESS = int(np.exp(-logsumexp(log_Wm_n * 2)))
+ log_Wm = inc_beta * log_likelihoods # noqa: N806
+ log_Wm_n = log_Wm - logsumexp(log_Wm) # noqa: N806
+ ESS = int(np.exp(-logsumexp(log_Wm_n * 2))) # noqa: N806
- while max_beta - min_beta > 1e-6: # min step size
+ while max_beta - min_beta > 1e-6: # min step size # noqa: PLR2004
new_beta = 0.5 * (max_beta + min_beta)
# plausible weights of Sm corresponding to new beta
inc_beta = new_beta - old_beta
- log_Wm = inc_beta * log_likelihoods
- log_Wm_n = log_Wm - logsumexp(log_Wm)
- ESS = int(np.exp(-logsumexp(log_Wm_n * 2)))
+ log_Wm = inc_beta * log_likelihoods # noqa: N806
+ log_Wm_n = log_Wm - logsumexp(log_Wm) # noqa: N806
+ ESS = int(np.exp(-logsumexp(log_Wm_n * 2))) # noqa: N806
- if ESS == rN:
+ if rN == ESS:
break
- elif ESS < rN:
+ elif rN > ESS: # noqa: RET508
max_beta = new_beta
else:
min_beta = new_beta
@@ -99,11 +102,11 @@ def compute_beta_evidence_old(
# plausible weights of Sm corresponding to new beta
inc_beta = new_beta - old_beta
- log_Wm = inc_beta * log_likelihoods
- log_Wm_n = log_Wm - logsumexp(log_Wm)
+ log_Wm = inc_beta * log_likelihoods # noqa: N806
+ log_Wm_n = log_Wm - logsumexp(log_Wm) # noqa: N806
- Wm = np.exp(log_Wm)
- Wm_n = np.exp(log_Wm_n)
+ Wm = np.exp(log_Wm) # noqa: N806, F841
+ Wm_n = np.exp(log_Wm_n) # noqa: N806
# update model evidence
# evidence = evidence * (sum(Wm)/N)
@@ -114,32 +117,32 @@ def compute_beta_evidence_old(
# MCMC
-def MCMC_MH_old(
- ParticleNum,
- Em,
- Nm_steps,
+def MCMC_MH_old( # noqa: D103, N802, PLR0913, PLR0917
+ ParticleNum, # noqa: N803
+ Em, # noqa: N803
+ Nm_steps, # noqa: N803
current,
likelihood_current,
posterior_current,
beta,
- numAccepts,
- AllPars,
+ numAccepts, # noqa: N803
+ AllPars, # noqa: N803
log_likelihood,
variables,
- resultsLocation,
+ resultsLocation, # noqa: N803
rng,
- calibrationData,
- numExperiments,
- covarianceMatrixList,
- edpNamesList,
- edpLengthsList,
- normalizingFactors,
- locShiftList,
- workflowDriver,
+ calibrationData, # noqa: N803
+ numExperiments, # noqa: N803
+ covarianceMatrixList, # noqa: N803
+ edpNamesList, # noqa: N803
+ edpLengthsList, # noqa: N803
+ normalizingFactors, # noqa: N803
+ locShiftList, # noqa: N803
+ workflowDriver, # noqa: N803
prediction_current,
):
all_proposals = []
- all_PLP = []
+ all_PLP = [] # noqa: N806
# deltas = propose(np.zeros(len(current)), Em, Nm_steps)
deltas = rng.multivariate_normal(np.zeros(len(current)), Em, Nm_steps)
@@ -149,9 +152,7 @@ def MCMC_MH_old(
proposal = current + delta
prior_proposal = log_prior(proposal, AllPars)
- if np.isfinite(
- prior_proposal
- ): # proposal satisfies the prior constraints
+ if np.isfinite(prior_proposal): # proposal satisfies the prior constraints
# likelihood_proposal = log_likelihood(ParticleNum, proposal, variables, resultsLocation)
likelihood_proposal, prediction_proposal = runFEM(
ParticleNum,
@@ -170,28 +171,24 @@ def MCMC_MH_old(
)
if np.isnan(likelihood_proposal):
- likelihood_proposal = -np.Inf
+ likelihood_proposal = -np.inf
posterior_proposal = prior_proposal + likelihood_proposal * beta
else:
- likelihood_proposal = -np.Inf # dont run the FE model
- posterior_proposal = -np.Inf
- prediction_proposal = -np.Inf*np.ones_like(prediction_current)
+ likelihood_proposal = -np.inf # don't run the FE model
+ posterior_proposal = -np.inf
+ prediction_proposal = -np.inf * np.ones_like(prediction_current)
log_acceptance = posterior_proposal - posterior_current
all_proposals.append(proposal)
- all_PLP.append(
- [prior_proposal, likelihood_proposal, posterior_proposal]
- )
+ all_PLP.append([prior_proposal, likelihood_proposal, posterior_proposal])
# if np.isfinite(log_acceptance) and (np.log(np.random.uniform()) < log_acceptance):
- if np.isfinite(log_acceptance) and (
- np.log(rng.uniform()) < log_acceptance
- ):
+ if np.isfinite(log_acceptance) and (np.log(rng.uniform()) < log_acceptance):
# accept
current = proposal
posterior_current = posterior_proposal
likelihood_current = likelihood_proposal
- numAccepts += 1
+ numAccepts += 1 # noqa: N806
prediction_current = prediction_proposal
# gather all last samples
@@ -207,32 +204,32 @@ def MCMC_MH_old(
# MCMC
-def MCMC_MH(
- ParticleNum,
- Em,
- Nm_steps,
+def MCMC_MH( # noqa: D103, N802, PLR0913, PLR0917
+ ParticleNum, # noqa: N803
+ Em, # noqa: N803
+ Nm_steps, # noqa: N803
current,
likelihood_current,
posterior_current,
beta,
- numAccepts,
- AllPars,
+ numAccepts, # noqa: N803
+ AllPars, # noqa: N803
log_likelihood,
variables,
- resultsLocation,
+ resultsLocation, # noqa: N803
rng,
- calibrationData,
- numExperiments,
- covarianceMatrixList,
- edpNamesList,
- edpLengthsList,
- normalizingFactors,
- locShiftList,
- workflowDriver,
+ calibrationData, # noqa: N803
+ numExperiments, # noqa: N803
+ covarianceMatrixList, # noqa: N803
+ edpNamesList, # noqa: N803
+ edpLengthsList, # noqa: N803
+ normalizingFactors, # noqa: N803
+ locShiftList, # noqa: N803
+ workflowDriver, # noqa: N803
prediction_current,
):
all_proposals = []
- all_PLP = []
+ all_PLP = [] # noqa: N806
# deltas = propose(np.zeros(len(current)), Em, Nm_steps)
deltas = rng.multivariate_normal(np.zeros(len(current)), Em, Nm_steps)
@@ -242,9 +239,7 @@ def MCMC_MH(
proposal = current + delta
prior_proposal = log_prior(proposal, AllPars)
- if np.isfinite(
- prior_proposal
- ): # proposal satisfies the prior constraints
+ if np.isfinite(prior_proposal): # proposal satisfies the prior constraints
# likelihood_proposal = log_likelihood(ParticleNum, proposal, variables, resultsLocation)
likelihood_proposal, prediction_proposal = runFEM(
ParticleNum,
@@ -263,28 +258,24 @@ def MCMC_MH(
)
if np.isnan(likelihood_proposal):
- likelihood_proposal = -np.Inf
+ likelihood_proposal = -np.inf
posterior_proposal = prior_proposal + likelihood_proposal * beta
else:
- likelihood_proposal = -np.Inf # dont run the FE model
- posterior_proposal = -np.Inf
- prediction_proposal = -np.Inf*np.ones_like(prediction_current)
+ likelihood_proposal = -np.inf # don't run the FE model
+ posterior_proposal = -np.inf
+ prediction_proposal = -np.inf * np.ones_like(prediction_current)
log_acceptance = posterior_proposal - posterior_current
all_proposals.append(proposal)
- all_PLP.append(
- [prior_proposal, likelihood_proposal, posterior_proposal]
- )
+ all_PLP.append([prior_proposal, likelihood_proposal, posterior_proposal])
# if np.isfinite(log_acceptance) and (np.log(np.random.uniform()) < log_acceptance):
- if np.isfinite(log_acceptance) and (
- np.log(rng.uniform()) < log_acceptance
- ):
+ if np.isfinite(log_acceptance) and (np.log(rng.uniform()) < log_acceptance):
# accept
current = proposal
posterior_current = posterior_proposal
likelihood_current = likelihood_proposal
- numAccepts += 1
+ numAccepts += 1 # noqa: N806
prediction_current = prediction_proposal
# gather all last samples
@@ -340,7 +331,7 @@ def MCMC_MH(
# return new_beta, log_evidence, Wm_n, ESS
-def get_weights(dBeta, log_likelihoods):
+def get_weights(dBeta, log_likelihoods): # noqa: N803, D103
log_weights = dBeta * log_likelihoods
log_sum_weights = logsumexp(log_weights)
log_weights_normalized = log_weights - log_sum_weights
@@ -351,16 +342,14 @@ def get_weights(dBeta, log_likelihoods):
return weights_normalized, cov_weights, std_weights_normalized
-def compute_beta_evidence(beta, log_likelihoods, logFile, threshold=1.0):
+def compute_beta_evidence(beta, log_likelihoods, logFile, threshold=1.0): # noqa: N803, D103
max_beta = 1.0
- dBeta = min(max_beta, 1.0 - beta)
+ dBeta = min(max_beta, 1.0 - beta) # noqa: N806
- weights, cov_weights, std_weights = get_weights(
- dBeta, log_likelihoods
- )
+ weights, cov_weights, std_weights = get_weights(dBeta, log_likelihoods)
while cov_weights > (threshold) or (std_weights == 0):
- dBeta = dBeta * 0.99
+ dBeta = dBeta * 0.99 # noqa: N806, PLR6104
# while (cov_weights > (threshold+0.00000005) or (std_weights == 0)):
# if ((cov_weights > (threshold+1.0)) or (std_weights == 0)):
@@ -382,25 +371,21 @@ def compute_beta_evidence(beta, log_likelihoods, logFile, threshold=1.0):
# if ((cov_weights > (threshold+0.00000005)) or (std_weights == 0)):
# dBeta = dBeta*0.99999999
- if dBeta < 1e-3:
- dBeta = 1e-3
- weights, cov_weights, std_weights = get_weights(
- dBeta, log_likelihoods
- )
+ if dBeta < 1e-3: # noqa: PLR2004
+ dBeta = 1e-3 # noqa: N806
+ weights, cov_weights, std_weights = get_weights(dBeta, log_likelihoods)
break
- weights, cov_weights, std_weights = get_weights(
- dBeta, log_likelihoods
- )
+ weights, cov_weights, std_weights = get_weights(dBeta, log_likelihoods)
- beta = beta + dBeta
- if beta > 0.95:
+ beta = beta + dBeta # noqa: PLR6104
+ if beta > 0.95: # noqa: PLR2004
beta = 1
log_evidence = logsumexp(dBeta * log_likelihoods) - np.log(len(log_likelihoods))
try:
- ESS = int(1 / np.sum((weights / np.sum(weights)) ** 2))
+ ESS = int(1 / np.sum((weights / np.sum(weights)) ** 2)) # noqa: N806
except OverflowError as err:
- ESS = 0
+ ESS = 0 # noqa: N806
logFile.write(str(err))
return beta, log_evidence, weights, ESS
diff --git a/modules/performUQ/UQpy/UQpyEngine.py b/modules/performUQ/UQpy/UQpyEngine.py
index d80818336..261199ab2 100644
--- a/modules/performUQ/UQpy/UQpyEngine.py
+++ b/modules/performUQ/UQpy/UQpyEngine.py
@@ -1,70 +1,90 @@
-# written: UQ team @ SimCenter
+# written: UQ team @ SimCenter # noqa: CPY001, D100, INP001
# import functions for Python 2.X support
-from __future__ import division, print_function
import sys
-if sys.version.startswith('2'):
- range=xrange
- string_types = basestring
+
+if sys.version.startswith('2'):
+ range = xrange # noqa: A001, F821
+ string_types = basestring # noqa: F821
else:
string_types = str
import os
+import platform
import stat
+import subprocess # noqa: S404
import sys
-import platform
-import subprocess
-import argparse
-import click
-@click.command()
-@click.option("--workflowInput", required=True, help="Path to JSON file containing the details of FEM and UQ tools.")
-@click.option("--workflowOutput", required=True, help="Path to JSON file containing the details for post-processing.")
-@click.option("--driverFile", required=True, help="ASCII file containing the details on how to run the FEM application.")
-@click.option("--runType", required=True, type=click.Choice(['runningLocal','runningRemote']))
+import click
-def main(workflowinput, workflowoutput, driverfile, runtype):
+@click.command()
+@click.option(
+ '--workflowInput',
+ required=True,
+ help='Path to JSON file containing the details of FEM and UQ tools.',
+)
+@click.option(
+ '--workflowOutput',
+ required=True,
+ help='Path to JSON file containing the details for post-processing.',
+)
+@click.option(
+ '--driverFile',
+ required=True,
+ help='ASCII file containing the details on how to run the FEM application.',
+)
+@click.option(
+ '--runType', required=True, type=click.Choice(['runningLocal', 'runningRemote'])
+)
+def main(workflowinput, workflowoutput, driverfile, runtype): # noqa: ARG001, D103
python = sys.executable
# get os type
- osType = platform.system()
- if runtype in ['runningLocal',]:
- if (sys.platform == 'darwin' or sys.platform == "linux" or sys.platform == "linux2"):
- osType = 'Linux'
+ osType = platform.system() # noqa: N806
+ if runtype == 'runningLocal':
+ if (
+ sys.platform == 'darwin'
+ or sys.platform == 'linux'
+ or sys.platform == 'linux2'
+ ):
+ osType = 'Linux' # noqa: N806
else:
- driverfile = driverfile + ".bat"
- osType = 'Windows'
- elif runtype in ['runningRemote',]:
- osType = 'Linux'
-
+ driverfile = driverfile + '.bat' # noqa: PLR6104
+ osType = 'Windows' # noqa: N806
+ elif runtype == 'runningRemote':
+ osType = 'Linux' # noqa: N806
+
+ thisScriptDir = os.path.dirname(os.path.realpath(__file__)) # noqa: PTH120, N806
- thisScriptDir = os.path.dirname(os.path.realpath(__file__))
+ os.chmod( # noqa: PTH101
+ f'{thisScriptDir}/preprocessUQpy.py',
+ stat.S_IWUSR | stat.S_IXUSR | stat.S_IRUSR | stat.S_IXOTH,
+ )
- os.chmod("{}/preprocessUQpy.py".format(thisScriptDir), stat.S_IWUSR | stat.S_IXUSR | stat.S_IRUSR | stat.S_IXOTH)
-
# 1. Create the UQy analysis python script
- preprocessorCommand = "'{}' '{}/preprocessUQpy.py' --workflowInput {} --driverFile {} --runType {} --osType {}".format(python, thisScriptDir,
- workflowinput,
- driverfile,
- runtype,
- osType)
+ preprocessorCommand = f"'{python}' '{thisScriptDir}/preprocessUQpy.py' --workflowInput {workflowinput} --driverFile {driverfile} --runType {runtype} --osType {osType}" # noqa: N806
+
+ subprocess.run(preprocessorCommand, shell=True, check=False) # noqa: S602
- subprocess.run(preprocessorCommand, shell=True)
+ if runtype == 'runningLocal':
+ os.chmod( # noqa: PTH101
+ driverfile, stat.S_IWUSR | stat.S_IXUSR | stat.S_IRUSR | stat.S_IXOTH
+ )
- if runtype in ['runningLocal']:
- os.chmod(driverfile, stat.S_IWUSR | stat.S_IXUSR | stat.S_IRUSR | stat.S_IXOTH)
-
# 2. Run the python script
- UQpycommand = python + " UQpyAnalysis.py" + " 1> uqpy.log 2>&1 "
-
- #Change permission of workflow driver
- st = os.stat(driverfile)
- os.chmod(driverfile, st.st_mode | stat.S_IEXEC)
-
- if runtype in ['runningLocal']:
- print('running UQpy: ', UQpycommand)
- subprocess.run(UQpycommand, stderr=subprocess.STDOUT, shell=True)
+ UQpycommand = python + ' UQpyAnalysis.py' + ' 1> uqpy.log 2>&1 ' # noqa: N806
+
+ # Change permission of workflow driver
+ st = os.stat(driverfile) # noqa: PTH116
+ os.chmod(driverfile, st.st_mode | stat.S_IEXEC) # noqa: PTH101
+
+ if runtype == 'runningLocal':
+ print('running UQpy: ', UQpycommand) # noqa: T201
+ subprocess.run( # noqa: S602
+ UQpycommand, stderr=subprocess.STDOUT, shell=True, check=False
+ )
+
if __name__ == '__main__':
- main()
+ main()
diff --git a/modules/performUQ/UQpy/preprocessUQpy.py b/modules/performUQ/UQpy/preprocessUQpy.py
index 9a957c910..d0c00a484 100755
--- a/modules/performUQ/UQpy/preprocessUQpy.py
+++ b/modules/performUQ/UQpy/preprocessUQpy.py
@@ -1,56 +1,67 @@
-import click
-import os
-
+import click # noqa: CPY001, D100, EXE002, INP001
from src.quofemDTOs import Model
from src.runmodel.RunModelDTOs import RunModelDTO
@click.command()
-@click.option('--workflowInput', type=click.Path(exists=True, readable=True), required=True,
- help="Path to JSON file containing the details of FEM and UQ tools.")
-@click.option('--driverFile', type=click.Path(exists=True, readable=True),
- help="ASCII file containing the details on how to run the FEM application.")
-@click.option('--runType', type=click.Choice(['runningLocal', 'runningRemote']),
- default='runningLocal', help="Choose between local or cluster execution of workflow.")
-@click.option('--osType', type=click.Choice(['Linux', 'Windows']),
- help="Type of operating system the workflow will run on.")
-
-def preprocess(workflowinput, driverfile, runtype, ostype):
-
+@click.option(
+ '--workflowInput',
+ type=click.Path(exists=True, readable=True),
+ required=True,
+ help='Path to JSON file containing the details of FEM and UQ tools.',
+)
+@click.option(
+ '--driverFile',
+ type=click.Path(exists=True, readable=True),
+ help='ASCII file containing the details on how to run the FEM application.',
+)
+@click.option(
+ '--runType',
+ type=click.Choice(['runningLocal', 'runningRemote']),
+ default='runningLocal',
+ help='Choose between local or cluster execution of workflow.',
+)
+@click.option(
+ '--osType',
+ type=click.Choice(['Linux', 'Windows']),
+ help='Type of operating system the workflow will run on.',
+)
+def preprocess(workflowinput, driverfile, runtype, ostype): # noqa: ARG001, D103
# 1. Parse the input JSON file
model = Model.parse_file(workflowinput)
# 2. Generate code
code = []
- code.append("import time\n")
- code.append("t1 = time.time()\n")
+ code.append('import time\n') # noqa: FURB113
+ code.append('t1 = time.time()\n')
# Create commands for defining distributions
- code.append("#\n# Creating the random variable distributions\n#")
+ code.append('#\n# Creating the random variable distributions\n#')
marginals_code = 'marginals = JointIndependent(['
for distribution in model.randomVariables:
- (distribution_code, input) = distribution.init_to_text()
+ (distribution_code, input) = distribution.init_to_text() # noqa: A001
code.append(distribution_code)
marginals_code += input + ', '
marginals_code += '])'
- code.append(marginals_code)
- code.append(f"numRV = {len(model.randomVariables)}\n")
+ code.append(marginals_code) # noqa: FURB113
+ code.append(f'numRV = {len(model.randomVariables)}\n')
# Create files and commands for runmodel
- runmodel_code = RunModelDTO.create_runmodel_with_variables_driver(variables=model.randomVariables,
- driver_filename=driverfile)
- code.append("#\n# Creating the model\n#")
+ runmodel_code = RunModelDTO.create_runmodel_with_variables_driver(
+ variables=model.randomVariables, driver_filename=driverfile
+ )
+ code.append('#\n# Creating the model\n#') # noqa: FURB113
code.append(runmodel_code)
# Create commands for the UQ method
(uqmethod_code, _) = model.UQ.methodData.generate_code()
- code.append("#\n# Defining and running the UQ analysis\n#")
+ code.append('#\n# Defining and running the UQ analysis\n#') # noqa: FURB113
code.append(uqmethod_code)
# 3. Write code to analysis script
- with open("UQpyAnalysis.py", 'w') as outfile:
- outfile.write("\n".join(code))
+ with open('UQpyAnalysis.py', 'w') as outfile: # noqa: FURB103, PLW1514, PTH123
+ outfile.write('\n'.join(code))
-if __name__ == "__main__":
+if __name__ == '__main__':
preprocess()
diff --git a/modules/performUQ/UQpy/src/UQpyDTO.py b/modules/performUQ/UQpy/src/UQpyDTO.py
index f902cf0dc..dbb1d129d 100644
--- a/modules/performUQ/UQpy/src/UQpyDTO.py
+++ b/modules/performUQ/UQpy/src/UQpyDTO.py
@@ -1,24 +1,23 @@
-from py_linq import Enumerable
+from py_linq import Enumerable # noqa: CPY001, D100, INP001
from pydantic import BaseModel
-class UQpyDTO(BaseModel):
-
+class UQpyDTO(BaseModel): # noqa: D101
@staticmethod
- def is_primitive(obj):
+ def is_primitive(obj): # noqa: D102
return not hasattr(obj, '__dict__')
# def init_to_text(self) -> (str, str):
# pass
- def generate_code(self):
- prerequisite_list = ""
+ def generate_code(self): # noqa: D102
+ prerequisite_list = ''
fields = Enumerable(self.__dict__.items())
objects = fields.where(lambda x: not UQpyDTO.is_primitive(x[1]))
- for (key, value) in objects:
+ for key, value in objects:
(prerequisite_str, input_str) = value.generate_code()
- prerequisite_list += prerequisite_str + "\n"
+ prerequisite_list += prerequisite_str + '\n'
self.__dict__[key] = input_str
(prerequisite_str, input_str) = self.init_to_text()
- prerequisite_list += prerequisite_str + "\n"
+ prerequisite_list += prerequisite_str + '\n'
return prerequisite_list, input_str
diff --git a/modules/performUQ/UQpy/src/distributions/UniformDTOs.py b/modules/performUQ/UQpy/src/distributions/UniformDTOs.py
index 3304323d2..d4cfddb2c 100644
--- a/modules/performUQ/UQpy/src/distributions/UniformDTOs.py
+++ b/modules/performUQ/UQpy/src/distributions/UniformDTOs.py
@@ -1,37 +1,39 @@
-from typing import Literal, Union
+from typing import Literal, Union # noqa: CPY001, D100, INP001
import numpy as np
-from pydantic import BaseModel, validator, PositiveFloat, Field
+from pydantic import BaseModel, Field, PositiveFloat, validator
from typing_extensions import Annotated
-class RVCommonData(BaseModel):
+class RVCommonData(BaseModel): # noqa: D101
name: str
value: str
- refCount: int
+ refCount: int # noqa: N815
-class UniformParameters(RVCommonData):
- variableClass: Literal["Uncertain"]
- distribution: Literal["Uniform"]
- inputType: Literal["Parameters"]
+class UniformParameters(RVCommonData): # noqa: D101
+ variableClass: Literal['Uncertain'] # noqa: N815
+ distribution: Literal['Uniform']
+ inputType: Literal['Parameters'] # noqa: N815
lowerbound: float = 0.0
upperbound: float = 1.0
@validator('upperbound')
- def upper_bound_not_bigger_than_lower_bound(v, values):
+ def upper_bound_not_bigger_than_lower_bound(v, values): # noqa: N805, D102
if 'lowerbound' in values and v <= values['lowerbound']:
- raise ValueError(
- f"The upper bound must be bigger than the lower bound {values['lowerbound']}. Got a value of {v}.")
+ raise ValueError( # noqa: TRY003
+ f"The upper bound must be bigger than the lower bound {values['lowerbound']}. Got a value of {v}." # noqa: EM102
+ )
return v
- def init_to_text(self):
- from UQpy.distributions.collection.Uniform import Uniform
+ def init_to_text(self): # noqa: D102
+ from UQpy.distributions.collection.Uniform import Uniform # noqa: PLC0415
+
c = Uniform
- class_name = c.__module__.split(".")[-1]
- import_statement = "from " + c.__module__ + " import " + class_name + "\n"
- import_statement_2 = "from UQpy.distributions import JointIndependent \n"
+ class_name = c.__module__.split('.')[-1]
+ import_statement = 'from ' + c.__module__ + ' import ' + class_name + '\n'
+ import_statement_2 = 'from UQpy.distributions import JointIndependent \n'
scipy_inputs = self._to_scipy()
input_str = self.name
initializer = f"{self.name} = {class_name}(loc={scipy_inputs['loc']}, scale={scipy_inputs['scale']})"
@@ -41,38 +43,41 @@ def init_to_text(self):
def _to_scipy(self):
loc = self.lowerbound
scale = self.upperbound - self.lowerbound
- return {"loc": loc, "scale": scale}
+ return {'loc': loc, 'scale': scale}
-class UniformMoments(RVCommonData):
- variableClass: Literal["Uncertain"]
- distribution: Literal["Uniform"]
- inputType: Literal["Moments"]
+class UniformMoments(RVCommonData): # noqa: D101
+ variableClass: Literal['Uncertain'] # noqa: N815
+ distribution: Literal['Uniform']
+ inputType: Literal['Moments'] # noqa: N815
mean: float
- standardDev: PositiveFloat
+ standardDev: PositiveFloat # noqa: N815
def _to_scipy(self):
loc = self.mean - np.sqrt(12) * self.standardDev / 2
scale = np.sqrt(12) * self.standardDev
- return {"loc": loc, "scale": scale}
+ return {'loc': loc, 'scale': scale}
-class UniformDataset(RVCommonData):
- variableClass: Literal["Uncertain"]
- distribution: Literal["Uniform"]
- inputType: Literal["Dataset"]
- dataDir: str
+class UniformDataset(RVCommonData): # noqa: D101
+ variableClass: Literal['Uncertain'] # noqa: N815
+ distribution: Literal['Uniform']
+ inputType: Literal['Dataset'] # noqa: N815
+ dataDir: str # noqa: N815
def _to_scipy(self):
data = readFile(self.dataDir)
low = np.min(data)
high = np.max(data)
- return {"loc": low, "scale": high - low}
+ return {'loc': low, 'scale': high - low}
-def readFile(path):
- with open(path, "r") as f:
+def readFile(path): # noqa: N802, D103
+ with open(path) as f: # noqa: PLW1514, PTH123
return np.genfromtxt(f)
-DistributionDTO = Annotated[Union[UniformParameters, UniformMoments, UniformDataset], Field(discriminator='inputType')]
+DistributionDTO = Annotated[
+ Union[UniformParameters, UniformMoments, UniformDataset],
+ Field(discriminator='inputType'),
+]
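As a quick, hedged sanity check of the UniformDTOs module above (not something this patch adds), the sketch below constructs the Parameters variant and shows the upperbound validator rejecting an inverted range. It assumes pydantic v1 semantics, which the @validator decorator implies, and that the classes above are importable.

from pydantic import ValidationError

rv = UniformParameters(
    name='E', value='RV.E', refCount=1,
    variableClass='Uncertain', distribution='Uniform', inputType='Parameters',
    lowerbound=0.0, upperbound=2.0,
)
print(rv._to_scipy())  # {'loc': 0.0, 'scale': 2.0}

try:
    UniformParameters(
        name='E', value='RV.E', refCount=1,
        variableClass='Uncertain', distribution='Uniform', inputType='Parameters',
        lowerbound=1.0, upperbound=0.5,
    )
except ValidationError as exc:
    print(exc)  # the upper bound must be bigger than the lower bound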
diff --git a/modules/performUQ/UQpy/src/modules/ModuleDTOs.py b/modules/performUQ/UQpy/src/modules/ModuleDTOs.py
index 9129b4e64..d814db5fa 100644
--- a/modules/performUQ/UQpy/src/modules/ModuleDTOs.py
+++ b/modules/performUQ/UQpy/src/modules/ModuleDTOs.py
@@ -1,25 +1,30 @@
+from typing import Literal, Union # noqa: CPY001, D100, INP001
+
from pydantic import BaseModel, Field
-from typing import Literal, Union
-from typing_extensions import Annotated
from src.reliability.ReliabilityMethodsDTOs import ReliabilityMethod
+from typing_extensions import Annotated
-class ModuleBaseDTO(BaseModel):
+class ModuleBaseDTO(BaseModel): # noqa: D101
pass
-class SamplingDTO(ModuleBaseDTO):
- uqType: Literal['Sampling'] = 'Sampling'
+class SamplingDTO(ModuleBaseDTO): # noqa: D101
+ uqType: Literal['Sampling'] = 'Sampling' # noqa: N815
- def generate_code(self):
+ def generate_code(self): # noqa: D102
pass
-class SurrogatesDTO(ModuleBaseDTO):
- uqType: Literal['Surrogates'] = 'Surrogates'
-class ReliabilityDTO(ModuleBaseDTO):
- uqType: Literal['Reliability Analysis'] = 'Reliability Analysis'
- methodData: ReliabilityMethod
+class SurrogatesDTO(ModuleBaseDTO): # noqa: D101
+ uqType: Literal['Surrogates'] = 'Surrogates' # noqa: N815
+
+
+class ReliabilityDTO(ModuleBaseDTO): # noqa: D101
+ uqType: Literal['Reliability Analysis'] = 'Reliability Analysis' # noqa: N815
+ methodData: ReliabilityMethod # noqa: N815
-ModuleDTO = Annotated[Union[ReliabilityDTO, SamplingDTO], Field(discriminator='uqType')]
+ModuleDTO = Annotated[
+ Union[ReliabilityDTO, SamplingDTO], Field(discriminator='uqType')
+]
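The Annotated discriminated union reformatted above is easiest to see in action through pydantic's parse_obj_as. The snippet below is illustrative only and assumes pydantic 1.9 or newer, where Field(discriminator=...) on Annotated unions is supported.

from pydantic import parse_obj_as

# 'uqType' picks the concrete DTO class out of the union.
module = parse_obj_as(ModuleDTO, {'uqType': 'Sampling'})
print(type(module).__name__)  # SamplingDTO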
diff --git a/modules/performUQ/UQpy/src/quofemDTOs.py b/modules/performUQ/UQpy/src/quofemDTOs.py
index 5c0643b1a..206983f4a 100644
--- a/modules/performUQ/UQpy/src/quofemDTOs.py
+++ b/modules/performUQ/UQpy/src/quofemDTOs.py
@@ -1,74 +1,72 @@
-from __future__ import annotations
+from __future__ import annotations # noqa: CPY001, D100, INP001
from typing import Any, Dict, List
from pydantic import BaseModel
-from .distributions.UniformDTOs import DistributionDTO
-from .modules.ModuleDTOs import ModuleDTO
-from .sampling.mcmc.StretchDto import StretchDto
+from .distributions.UniformDTOs import DistributionDTO # noqa: TCH001
+from .modules.ModuleDTOs import ModuleDTO # noqa: TCH001
+from .sampling.mcmc.StretchDto import StretchDto # noqa: TCH001
-class ApplicationData(BaseModel):
+class ApplicationData(BaseModel): # noqa: D101
MS_Path: str
- mainScript: str
- postprocessScript: str
+ mainScript: str # noqa: N815
+ postprocessScript: str # noqa: N815
-class FEM(BaseModel):
+class FEM(BaseModel): # noqa: D101
Application: str
ApplicationData: ApplicationData
-class UQ(BaseModel):
+class UQ(BaseModel): # noqa: D101
Application: str
- ApplicationData: Dict[str, Any]
+ ApplicationData: Dict[str, Any] # noqa: UP006
-class Applications(BaseModel):
+class Applications(BaseModel): # noqa: D101
FEM: FEM
UQ: UQ
-class EDPItem(BaseModel):
+class EDPItem(BaseModel): # noqa: D101
length: int
name: str
type: str
+class SubsetSimulationData(BaseModel): # noqa: D101
+ conditionalProbability: float # noqa: N815
+ failureThreshold: int # noqa: N815
+ maxLevels: int # noqa: N815
+ mcmcMethodData: StretchDto # noqa: N815
-class SubsetSimulationData(BaseModel):
- conditionalProbability: float
- failureThreshold: int
- maxLevels: int
- mcmcMethodData: StretchDto
-
-class ReliabilityMethodData(BaseModel):
+class ReliabilityMethodData(BaseModel): # noqa: D101
method: str
- subsetSimulationData: SubsetSimulationData
-
+ subsetSimulationData: SubsetSimulationData # noqa: N815
-class RandomVariable(BaseModel):
+class RandomVariable(BaseModel): # noqa: D101
distribution: str
- inputType: str
+ inputType: str # noqa: N815
lowerbound: int
name: str
- refCount: int
+ refCount: int # noqa: N815
upperbound: int
value: str
- variableClass: str
+ variableClass: str # noqa: N815
-class Model(BaseModel):
+class Model(BaseModel): # noqa: D101
Applications: Applications
- EDP: List[EDPItem]
- FEM: Dict[str, Any]
+ EDP: List[EDPItem] # noqa: UP006
+ FEM: Dict[str, Any] # noqa: UP006
UQ: ModuleDTO
# correlationMatrix: List[int]
- localAppDir: str
- randomVariables: List[DistributionDTO]
- remoteAppDir: str
- runType: str
- workingDir: str
+ localAppDir: str # noqa: N815
+ randomVariables: List[DistributionDTO] # noqa: N815, UP006
+ remoteAppDir: str # noqa: N815
+ runType: str # noqa: N815
+ workingDir: str # noqa: N815
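In practice the Model DTO above is filled straight from the quoFEM input JSON. A hedged sketch follows; the file name 'scInput.json' is only a placeholder for whatever input deck the workflow passes in, and the attribute access assumes the randomVariables entries validate as the DTOs declared above.

model = Model.parse_file('scInput.json')
print(model.Applications.UQ.Application)
print([rv.name for rv in model.randomVariables])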
diff --git a/modules/performUQ/UQpy/src/reliability/ReliabilityMethodsDTOs.py b/modules/performUQ/UQpy/src/reliability/ReliabilityMethodsDTOs.py
index 5ae8520f8..a789bb9f0 100644
--- a/modules/performUQ/UQpy/src/reliability/ReliabilityMethodsDTOs.py
+++ b/modules/performUQ/UQpy/src/reliability/ReliabilityMethodsDTOs.py
@@ -1,73 +1,97 @@
-from pathlib import Path
+from typing import Literal, Union # noqa: CPY001, D100, INP001
-from pydantic import BaseModel, Field
-from typing import Literal, Union
-from typing_extensions import Annotated
-
-from src.UQpyDTO import UQpyDTO
+from pydantic import Field
from src.sampling.mcmc.StretchDto import SamplingMethod
+from src.UQpyDTO import UQpyDTO
+from typing_extensions import Annotated
-class ReliabilityMethodBaseDTO(UQpyDTO):
+class ReliabilityMethodBaseDTO(UQpyDTO): # noqa: D101
pass
-class SubsetSimulationDTO(ReliabilityMethodBaseDTO):
+class SubsetSimulationDTO(ReliabilityMethodBaseDTO): # noqa: D101
method: Literal['Subset Simulation'] = 'Subset Simulation'
- conditionalProbability: float
- failure_threshold: float = Field(..., alias="failureThreshold")
- maxLevels: int
+ conditionalProbability: float # noqa: N815
+ failure_threshold: float = Field(..., alias='failureThreshold')
+ maxLevels: int # noqa: N815
samples_per_subset: int
- samplingMethod: SamplingMethod
+ samplingMethod: SamplingMethod # noqa: N815
# def __post_init__(self):
- # self.samplingMethod.n_chains=int(self.samples_per_subset*self.conditionalProbability)
+ # self.samplingMethod.n_chains=int(self.samples_per_subset*self.conditionalProbability)
+
+ def init_to_text(self): # noqa: D102
+ from UQpy.reliability.SubsetSimulation import ( # noqa: PLC0415
+ SubsetSimulation,
+ )
+ from UQpy.sampling.MonteCarloSampling import ( # noqa: PLC0415
+ MonteCarloSampling,
+ )
- def init_to_text(self):
- from UQpy.reliability.SubsetSimulation import SubsetSimulation
- from UQpy.sampling.MonteCarloSampling import MonteCarloSampling
c = SubsetSimulation
self.__create_postprocess_script()
# output_script = Path('postprocess_script.py')
- initial_sampler = "from " + MonteCarloSampling.__module__ + " import " + \
- MonteCarloSampling.__module__.split(".")[-1] + "\n"
+ initial_sampler = (
+ 'from '
+ + MonteCarloSampling.__module__
+ + ' import '
+ + MonteCarloSampling.__module__.split('.')[-1]
+ + '\n'
+ )
initial_sampler += f"monte_carlo = {MonteCarloSampling.__module__.split('.')[-1]}(distributions=marginals, nsamples={self.samples_per_subset}, random_state=sampling.random_state)\n"
-
- class_name = c.__module__.split(".")[-1]
- import_statement = "from " + c.__module__ + " import " + class_name
-
- input_str = "subset"
- initializer = f'{input_str} = {class_name}(sampling={self.samplingMethod}, ' \
- f'conditional_probability={self.conditionalProbability}, ' \
- f'max_level={self.maxLevels}, runmodel_object=run_model, ' \
- f'nsamples_per_subset={self.samples_per_subset}, '\
- f'samples_init=monte_carlo.samples)\n'
-
- results_script = "#\n# Creating the results\n#\n"
- results_script += "samples_list = []\n"\
- "for s in subset.samples:\n"\
- "\tsamples_list.append(s.tolist())\n\n"\
- "performance_function_list = []\n"\
- "for p in subset.performance_function_per_level:\n"\
- "\tperformance_function_list.append(p.tolist())\n\n"
- results_script += "output_data = {\n\t'failure_probability': subset.failure_probability, "\
- "\n\t'time_to_completion_in_minutes': f'{(time.time() - t1)/60}', "\
- "\n\t'number_of_model_evaluations': len(run_model.qoi_list), "\
- "\n\t'num_levels': f'{len(subset.samples)}', "\
- "\n\t'performance_threshold_per_level': subset.performance_threshold_per_level, "\
- "\n\t'sample_values_per_level': samples_list, "\
- "\n\t'performance_function_per_level': performance_function_list, "\
- "\n\t'independent_chains_CoV': f'{subset.independent_chains_CoV}', "\
- "\n\t'dependent_chains_CoV': f'{subset.dependent_chains_CoV}'"\
- "\n}\n"
- save_script = "#\n# Writing the UQ analysis results\n#\n"
- save_script += "import json \n"
- save_script+="with open('uqpy_results.json', 'w') as file:\n"\
- "\tfile.write(json.dumps(output_data))\n"
-
- prerequisite_str = "\n".join([initial_sampler, import_statement, initializer, results_script, save_script])
+
+ class_name = c.__module__.split('.')[-1]
+ import_statement = 'from ' + c.__module__ + ' import ' + class_name
+
+ input_str = 'subset'
+ initializer = (
+ f'{input_str} = {class_name}(sampling={self.samplingMethod}, '
+ f'conditional_probability={self.conditionalProbability}, '
+ f'max_level={self.maxLevels}, runmodel_object=run_model, '
+ f'nsamples_per_subset={self.samples_per_subset}, '
+ f'samples_init=monte_carlo.samples)\n'
+ )
+
+ results_script = '#\n# Creating the results\n#\n'
+ results_script += (
+ 'samples_list = []\n'
+ 'for s in subset.samples:\n'
+ '\tsamples_list.append(s.tolist())\n\n'
+ 'performance_function_list = []\n'
+ 'for p in subset.performance_function_per_level:\n'
+ '\tperformance_function_list.append(p.tolist())\n\n'
+ )
+ results_script += (
+ "output_data = {\n\t'failure_probability': subset.failure_probability, "
+ "\n\t'time_to_completion_in_minutes': f'{(time.time() - t1)/60}', "
+ "\n\t'number_of_model_evaluations': len(run_model.qoi_list), "
+ "\n\t'num_levels': f'{len(subset.samples)}', "
+ "\n\t'performance_threshold_per_level': subset.performance_threshold_per_level, "
+ "\n\t'sample_values_per_level': samples_list, "
+ "\n\t'performance_function_per_level': performance_function_list, "
+ "\n\t'independent_chains_CoV': f'{subset.independent_chains_CoV}', "
+ "\n\t'dependent_chains_CoV': f'{subset.dependent_chains_CoV}'"
+ '\n}\n'
+ )
+ save_script = '#\n# Writing the UQ analysis results\n#\n'
+ save_script += 'import json \n'
+ save_script += (
+ "with open('uqpy_results.json', 'w') as file:\n"
+ '\tfile.write(json.dumps(output_data))\n'
+ )
+
+ prerequisite_str = '\n'.join( # noqa: FLY002
+ [
+ initial_sampler,
+ import_statement,
+ initializer,
+ results_script,
+ save_script,
+ ]
+ )
return prerequisite_str, input_str
def __create_postprocess_script(self, results_filename: str = 'results.out'):
@@ -83,18 +107,20 @@ def __create_postprocess_script(self, results_filename: str = 'results.out'):
'\t\texcept Exception:',
'\t\t\traise',
'\t\telse:',
- f"\t\t\treturn {self.failure_threshold} - res",
+ f'\t\t\treturn {self.failure_threshold} - res',
'\telse:',
- "\t\traise ValueError(f'Result not found in results.out file for sample evaluation "
+ "\t\traise ValueError(f'Result not found in results.out file for sample evaluation " # noqa: ISC003
+ "{index}')",
]
- with open("postprocess_script.py", "w") as f:
- f.write("\n".join(postprocess_script_code))
+ with open('postprocess_script.py', 'w') as f: # noqa: FURB103, PLW1514, PTH123
+ f.write('\n'.join(postprocess_script_code))
-class FormDTO(ReliabilityMethodBaseDTO):
+class FormDTO(ReliabilityMethodBaseDTO): # noqa: D101
method: Literal['FORM'] = 'FORM'
-ReliabilityMethod = Annotated[Union[SubsetSimulationDTO, FormDTO], Field(discriminator='method')]
+ReliabilityMethod = Annotated[
+ Union[SubsetSimulationDTO, FormDTO], Field(discriminator='method')
+]
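One detail worth keeping in mind with SubsetSimulationDTO above is the failureThreshold alias: the JSON key stays camelCase while the Python attribute is snake_case. The standalone sketch below (a throwaway AliasDemo model, not the DTO itself) just demonstrates that pydantic behaviour.

from pydantic import BaseModel, Field

class AliasDemo(BaseModel):
    failure_threshold: float = Field(..., alias='failureThreshold')

demo = AliasDemo(failureThreshold=0.0)   # input must use the alias
print(demo.failure_threshold)            # attribute uses the snake_case name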
diff --git a/modules/performUQ/UQpy/src/runmodel/RunModelDTOs.py b/modules/performUQ/UQpy/src/runmodel/RunModelDTOs.py
index 3db06792e..d53550fd5 100644
--- a/modules/performUQ/UQpy/src/runmodel/RunModelDTOs.py
+++ b/modules/performUQ/UQpy/src/runmodel/RunModelDTOs.py
@@ -1,21 +1,23 @@
+from pathlib import Path # noqa: CPY001, D100, INP001
from typing import List
-from pathlib import Path
from src.quofemDTOs import RandomVariable
-class RunModelDTO:
+class RunModelDTO: # noqa: D101
@staticmethod
- def create_runmodel_with_variables_driver(variables: List[RandomVariable],
- driver_filename: str = 'driver'):
+ def create_runmodel_with_variables_driver( # noqa: D102
+ variables: List[RandomVariable], # noqa: FA100
+ driver_filename: str = 'driver',
+ ):
RunModelDTO.__create_runmodel_input_teplate(variables)
RunModelDTO.__create_model_script(driver_filename)
RunModelDTO.__create_postprocess_script()
- #Validate file paths
- input_template = Path('params_template.in')
- model_script = Path('model_script.py')
- output_script = Path('postprocess_script.py')
+ # Validate file paths
+ input_template = Path('params_template.in') # noqa: F841
+ model_script = Path('model_script.py') # noqa: F841
+ output_script = Path('postprocess_script.py') # noqa: F841
var_names = [f'{rv.name}' for rv in variables]
run_model_code = [
@@ -25,28 +27,28 @@ def create_runmodel_with_variables_driver(variables: List[RandomVariable],
'run_model = RunModel(model=third_party_model)\n',
]
- return "\n".join(run_model_code)
+ return '\n'.join(run_model_code)
@staticmethod
- def __create_runmodel_input_teplate(variables: List[RandomVariable]):
- template_code = [f"{len(variables)}"]
+ def __create_runmodel_input_teplate(variables: List[RandomVariable]): # noqa: FA100
+ template_code = [f'{len(variables)}']
for rv in variables:
- template_code.append(f"{rv.name} <{rv.name}>")
+ template_code.append(f'{rv.name} <{rv.name}>') # noqa: PERF401
- with open("params_template.in", "w") as f:
- f.write("\n".join(template_code))
+ with open('params_template.in', 'w') as f: # noqa: FURB103, PLW1514, PTH123
+ f.write('\n'.join(template_code))
@staticmethod
def __create_model_script(driver_filename):
- template_filepath = Path("params_template.in")
+ template_filepath = Path('params_template.in')
template_file_base = template_filepath.stem
template_file_suffix = template_filepath.suffix
model_script_code = [
'import subprocess',
'import fire\n',
'def model(sample_index: int) -> None:',
- f"\tcommand1 = f'mv ./InputFiles/{template_file_base}_"
- + "{sample_index}"
+ f"\tcommand1 = f'mv ./InputFiles/{template_file_base}_" # noqa: ISC003
+ + '{sample_index}'
+ f"{template_file_suffix} ./params.in'",
f"\tcommand2 = './{driver_filename}'\n",
'\tsubprocess.run(command1, stderr=subprocess.STDOUT, shell=True)',
@@ -55,8 +57,8 @@ def __create_model_script(driver_filename):
'\tfire.Fire(model)',
]
- with open('model_script.py', "w") as f:
- f.write("\n".join(model_script_code))
+ with open('model_script.py', 'w') as f: # noqa: FURB103, PLW1514, PTH123
+ f.write('\n'.join(model_script_code))
@staticmethod
def __create_postprocess_script(results_filename: str = 'results.out'):
@@ -72,11 +74,11 @@ def __create_postprocess_script(results_filename: str = 'results.out'):
'\t\texcept Exception:',
'\t\t\traise',
'\t\telse:',
- "\t\t\treturn res",
+ '\t\t\treturn res',
'\telse:',
- "\t\traise ValueError(f'Result not found in results.out file for sample evaluation "
+ "\t\traise ValueError(f'Result not found in results.out file for sample evaluation " # noqa: ISC003
+ "{index}')",
]
- with open("postprocess_script.py", "w") as f:
- f.write("\n".join(postprocess_script_code))
+ with open('postprocess_script.py', 'w') as f: # noqa: FURB103, PLW1514, PTH123
+ f.write('\n'.join(postprocess_script_code))
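For readers tracing the RunModelDTO helpers above, the template writer produces one count line followed by one 'name <name>' placeholder line per variable. The expected file content below is a hedged illustration; the variable names 'E' and 'P' are invented for the example.

# params_template.in written for two variables named 'E' and 'P':
#
#   2
#   E <E>
#   P <P>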
diff --git a/modules/performUQ/UQpy/src/sampling/mcmc/ModifiedMetropolisHastingsDto.py b/modules/performUQ/UQpy/src/sampling/mcmc/ModifiedMetropolisHastingsDto.py
index 28ef91f86..6e5dcacb7 100644
--- a/modules/performUQ/UQpy/src/sampling/mcmc/ModifiedMetropolisHastingsDto.py
+++ b/modules/performUQ/UQpy/src/sampling/mcmc/ModifiedMetropolisHastingsDto.py
@@ -1,9 +1,11 @@
-from src.UQpyDTO import UQpyDTO
-from typing import Literal, Annotated, Union
+from typing import Literal # noqa: CPY001, D100, INP001
+
from pydantic import Field
+from src.UQpyDTO import UQpyDTO
-class ModifiedMetropolisHastingsDto(UQpyDTO):
- method: Literal["Modified Metropolis Hastings"] = "Modified Metropolis Hastings"
+
+class ModifiedMetropolisHastingsDto(UQpyDTO): # noqa: D101
+ method: Literal['Modified Metropolis Hastings'] = 'Modified Metropolis Hastings'
burn_length: int = Field(default=0, alias='burn-in', ge=0)
jump: int = Field(default=1, ge=0)
# dimension: int = Field(..., gt=0)
@@ -13,25 +15,29 @@ class ModifiedMetropolisHastingsDto(UQpyDTO):
concatenate_chains = True
proposal_is_symmetric = False
- def init_to_text(self):
- from UQpy.sampling.mcmc.ModifiedMetropolisHastings import ModifiedMetropolisHastings
- c= ModifiedMetropolisHastings
+ def init_to_text(self): # noqa: D102
+ from UQpy.sampling.mcmc.ModifiedMetropolisHastings import ( # noqa: PLC0415
+ ModifiedMetropolisHastings,
+ )
+
+ c = ModifiedMetropolisHastings
- class_name = c.__module__.split(".")[-1]
- import_statement = "from " + c.__module__ + " import " + class_name + "\n"
+ class_name = c.__module__.split('.')[-1]
+ import_statement = 'from ' + c.__module__ + ' import ' + class_name + '\n'
stretch_parameters = self.dict()
- stretch_parameters.pop("method")
- stretch_parameters["log_pdf_target"] = f"marginals.log_pdf"
- stretch_parameters["seed"] = f"list(marginals.rvs(numRV,))"
+ stretch_parameters.pop('method')
+ stretch_parameters['log_pdf_target'] = 'marginals.log_pdf'
+ stretch_parameters['seed'] = 'list(marginals.rvs(numRV,))'
# stretch_parameters["seed"] = f"list(marginals.rvs({self.n_chains},))"
- str_parameters = str()
+ str_parameters = ''
for key in stretch_parameters:
- if stretch_parameters[key] is None: continue
- str_parameters += key + "=" + str(stretch_parameters[key]) + ", "
+ if stretch_parameters[key] is None:
+ continue
+ str_parameters += key + '=' + str(stretch_parameters[key]) + ', '
prerequisite_str = import_statement
- prerequisite_str += "sampling = " + class_name + "(" + str_parameters + ")"
- sampling_str = "sampling"
+ prerequisite_str += 'sampling = ' + class_name + '(' + str_parameters + ')'
+ sampling_str = 'sampling'
- return (prerequisite_str, sampling_str)
\ No newline at end of file
+ return (prerequisite_str, sampling_str)
diff --git a/modules/performUQ/UQpy/src/sampling/mcmc/StretchDto.py b/modules/performUQ/UQpy/src/sampling/mcmc/StretchDto.py
index 4d893724e..bc448c80a 100644
--- a/modules/performUQ/UQpy/src/sampling/mcmc/StretchDto.py
+++ b/modules/performUQ/UQpy/src/sampling/mcmc/StretchDto.py
@@ -1,14 +1,15 @@
-from __future__ import annotations
+from __future__ import annotations # noqa: CPY001, D100, INP001
-from typing import Literal, Annotated, Union
+from typing import Literal, Union
from pydantic import Field
-
+from src.sampling.mcmc.ModifiedMetropolisHastingsDto import (
+ ModifiedMetropolisHastingsDto,
+)
from src.UQpyDTO import UQpyDTO
-from src.sampling.mcmc.ModifiedMetropolisHastingsDto import ModifiedMetropolisHastingsDto
-class StretchDto(UQpyDTO):
+class StretchDto(UQpyDTO): # noqa: D101
method: Literal['Stretch'] = 'Stretch'
burn_length: int = Field(default=0, alias='burn-in', ge=0)
jump: int = Field(default=1, ge=0)
@@ -17,30 +18,32 @@ class StretchDto(UQpyDTO):
random_state: int = Field(..., alias='randomState')
scale: float = Field(..., gt=0)
- def init_to_text(self):
- from UQpy.sampling.mcmc.Stretch import Stretch
+ def init_to_text(self): # noqa: D102
+ from UQpy.sampling.mcmc.Stretch import Stretch # noqa: PLC0415
+
c = Stretch
- class_name = c.__module__.split(".")[-1]
- import_statement = "from " + c.__module__ + " import " + class_name + "\n"
+ class_name = c.__module__.split('.')[-1]
+ import_statement = 'from ' + c.__module__ + ' import ' + class_name + '\n'
stretch_parameters = self.dict()
- stretch_parameters.pop("method")
- stretch_parameters["log_pdf_target"] = f"marginals.log_pdf"
+ stretch_parameters.pop('method')
+ stretch_parameters['log_pdf_target'] = 'marginals.log_pdf'
# stretch_parameters["seed"] = f"list(marginals.rvs({self.n_chains},))"
- stretch_parameters["seed"] = f"list(marginals.rvs(numRV,))"
- str_parameters = str()
+ stretch_parameters['seed'] = 'list(marginals.rvs(numRV,))'
+ str_parameters = ''
for key in stretch_parameters:
- if stretch_parameters[key] is None: continue
- str_parameters += key + "=" + str(stretch_parameters[key]) + ", "
+ if stretch_parameters[key] is None:
+ continue
+ str_parameters += key + '=' + str(stretch_parameters[key]) + ', '
# prerequisite_str = import_statement + import_likehood_statement
prerequisite_str = import_statement
- prerequisite_str += "sampling = " + class_name + "(" + str_parameters + ")"
- sampling_str = "sampling"
+ prerequisite_str += 'sampling = ' + class_name + '(' + str_parameters + ')'
+ sampling_str = 'sampling'
return (prerequisite_str, sampling_str)
# SamplingMethod = Annotated[Union[StretchDto, ModifiedMetropolisHastingsDto], Field(discriminator='method')]
-SamplingMethod = Union[StretchDto, ModifiedMetropolisHastingsDto]
\ No newline at end of file
+SamplingMethod = Union[StretchDto, ModifiedMetropolisHastingsDto]
diff --git a/modules/performUQ/common/ERAClasses/ERACond.py b/modules/performUQ/common/ERAClasses/ERACond.py
index 5a8973f48..2f47d13e4 100644
--- a/modules/performUQ/common/ERAClasses/ERACond.py
+++ b/modules/performUQ/common/ERAClasses/ERACond.py
@@ -1,10 +1,10 @@
-# import of modules
-import numpy as np
-from scipy import optimize, stats, special, integrate
+# import of modules # noqa: CPY001, D100, INP001
import types
-import inspect
-'''
+import numpy as np
+from scipy import integrate, optimize, special, stats
+
+"""
---------------------------------------------------------------------------
Generation of conditional distribution objects for the use within the
ERARosen class.
@@ -45,38 +45,39 @@
References:
1. Documentation of the ERA Distribution Classes
---------------------------------------------------------------------------
-'''
-#%%
-class ERACond(object):
- """
- Generation of conditional distribution objects for the use within the
+""" # noqa: W291
+
+
+# %%
+class ERACond:
+ """Generation of conditional distribution objects for the use within the
ERARosen class.
-
+
Construction of the conditional distribution object with
-
- Obj = ERACond(name,opt,param)
+
+ Obj = ERACond(name,opt,param)
or Obj = ERACond(name,opt,param,id)
-
- The available distributions, represented by the input variable 'name',
- are the same as in the ERADist class (see below). They can be described
+
+ The available distributions, represented by the input variable 'name',
+ are the same as in the ERADist class (see below). They can be described
either by parameters (opt='PAR') or by the first and second moment
- (opt='MOM').
-
+ (opt='MOM').
+
The parameters or moments must be given as a lambda function. Examples
for lambda functions given by the input 'param' of a two parametric
- distribution depending on two other random variables could be:
-
+ distribution depending on two other random variables could be:
+
param = lambda x,y: [x+y,0.2*x^2], param = lambda a,b: [3*a-2*b,4]
-
+
The input 'id' can be used to better identify the different variables
(nodes) when plotting the graph describing the dependency between the
different variables in the ERARosen class (method plotGraph). The input
'id' is however not mandatory.
-
-
+
+
The following distribution types are available:
-
- opt = "PAR", if you want to specify the distibution by its parameters:
+
+ opt = "PAR", if you want to specify the distribution by its parameters:
Beta: Obj = ERADist('beta','PAR',lambda ... :[r,s,a,b])
Binomial: Obj = ERADist('binomial','PAR',lambda ... :[n,p])
Chi-squared: Obj = ERADist('chisquare','PAR',lambda ... :[k])
@@ -97,10 +98,10 @@ class ERACond(object):
Rayleigh: Obj = ERADist('rayleigh','PAR',lambda ... :[alpha])
Truncated normal: Obj = ERADist('truncatednormal','PAR',lambda ... :[mu_N,sig_N,a,b])
Uniform: Obj = ERADist('uniform','PAR',lambda ... :[lower,upper])
- Weibull: Obj = ERADist('weibull','PAR',lambda ... :[a_n,k])
-
-
- opt = "MOM", if you want to specify the distibution by its moments:
+ Weibull: Obj = ERADist('weibull','PAR',lambda ... :[a_n,k])
+
+
+ opt = "MOM", if you want to specify the distribution by its moments:
Beta: Obj = ERADist('beta','MOM',lambda ... :[mean,std,a,b])
Binomial: Obj = ERADist('binomial','MOM',lambda ... :[mean,std])
Chi-squared: Obj = ERADist('chisquare','MOM',lambda ... :[mean])
@@ -122,436 +123,483 @@ class ERACond(object):
Truncated normal: Obj = ERADist('truncatednormal','MOM',lambda ... :[mean,std,a,b])
Uniform: Obj = ERADist('uniform','MOM',lambda ... :[mean,std])
Weibull: Obj = ERADist('weibull','MOM',lambda ... :[mean,std])
-
- """
-
- def __init__(self, name, opt, param, ID=False):
- """
- Constructor method, for more details have a look at the
+
+ """ # noqa: D205
+
+ def __init__(self, name, opt, param, ID=False): # noqa: FBT002, N803
+ """Constructor method, for more details have a look at the
class description.
- """
-
+ """ # noqa: D205, D401
self.Name = name.lower()
-
- if opt.upper() == "PAR" or opt.upper() == "MOM":
+
+ if opt.upper() == 'PAR' or opt.upper() == 'MOM':
self.Opt = opt.upper()
else:
- raise RuntimeError("Conditional distributions can only be defined "
- "by moments (opt = 'MOM') or by parameters (opt = 'PAR').")
-
+ raise RuntimeError( # noqa: DOC501, TRY003
+ 'Conditional distributions can only be defined ' # noqa: EM101
+ "by moments (opt = 'MOM') or by parameters (opt = 'PAR')."
+ )
+
self.ID = ID
-
+
# check if param is a lambda function
- if type(param) == types.LambdaType:
+ if type(param) == types.LambdaType: # noqa: E721
self.Param = param
else:
- raise RuntimeError("The input param must be a lambda function.")
-
- self.modParam = param
-
-#%%
- def condParam(self,cond):
- """
- Evaluates the parameters of the distribution for the
+ raise RuntimeError('The input param must be a lambda function.') # noqa: DOC501, EM101, TRY003
+
+ self.modParam = param
+
+ # %%
+ def condParam(self, cond): # noqa: C901, N802, PLR0912, PLR0915
+ """Evaluates the parameters of the distribution for the
different given conditions.
- In case that the distribution is described by its moments,
- the evaluated moments are used to obtain the distribution
- parameters.
+ In case that the distribution is described by its moments,
+ the evaluated moments are used to obtain the distribution
+ parameters.
This method is used by the ERACond methods condCDF, condPDF,
condiCDF and condRandom.
- """
-
+ """ # noqa: D205, D401
cond = np.array(cond, ndmin=2, dtype=float).T
par = self.modParam(cond)
n_cond = np.shape(cond)[0]
-
- #----------------------------------------------------------------------------
+
+ # ----------------------------------------------------------------------------
# for the case of Opt == PAR
- if self.Opt == "PAR":
- if self.Name == "beta":
- Par = [par[0], par[1], par[2], par[3]-par[2]]
- elif self.Name == "binomial":
- Par = [par[0].astype(int), par[1]]
- elif self.Name == "chisquare":
- Par = np.around(par,0)
- elif self.Name == "exponential":
- Par = 1/par
- elif self.Name == "frechet":
- Par = [-1/par[1], par[0]/par[1], par[0]]
- elif self.Name == "gamma":
- Par = [par[1], 1/par[0]]
- elif self.Name == "geometric":
- Par = par
- elif self.Name == "gev":
- Par = [-par[0], par[1], par[2]]
- elif self.Name == "gevmin":
- Par = [-par[0], par[1], -par[2]]
- elif self.Name == "gumbel":
- Par = par
- elif self.Name == "gumbelmin":
- Par = par
- elif self.Name == "lognormal":
- Par = [par[1],np.exp(par[0])]
- elif self.Name == "negativebinomial":
- Par = par
- elif self.Name == "normal":
- Par = par
- elif self.Name == "pareto":
- Par = [1/par[1], par[0]/par[1], par[0]]
- elif self.Name == "poisson":
+ if self.Opt == 'PAR':
+ if self.Name == 'beta':
+ Par = [par[0], par[1], par[2], par[3] - par[2]] # noqa: N806
+ elif self.Name == 'binomial':
+ Par = [par[0].astype(int), par[1]] # noqa: N806
+ elif self.Name == 'chisquare':
+ Par = np.around(par, 0) # noqa: N806
+ elif self.Name == 'exponential':
+ Par = 1 / par # noqa: N806
+ elif self.Name == 'frechet':
+ Par = [-1 / par[1], par[0] / par[1], par[0]] # noqa: N806
+ elif self.Name == 'gamma':
+ Par = [par[1], 1 / par[0]] # noqa: N806
+ elif self.Name == 'geometric':
+ Par = par # noqa: N806
+ elif self.Name == 'gev':
+ Par = [-par[0], par[1], par[2]] # noqa: N806
+ elif self.Name == 'gevmin':
+ Par = [-par[0], par[1], -par[2]] # noqa: N806
+ elif self.Name == 'gumbel' or self.Name == 'gumbelmin': # noqa: PLR1714
+ Par = par # noqa: N806
+ elif self.Name == 'lognormal':
+ Par = [par[1], np.exp(par[0])] # noqa: N806
+ elif self.Name == 'negativebinomial' or self.Name == 'normal': # noqa: PLR1714
+ Par = par # noqa: N806
+ elif self.Name == 'pareto':
+ Par = [1 / par[1], par[0] / par[1], par[0]] # noqa: N806
+ elif self.Name == 'poisson':
if isinstance(par, list):
- Par = par[0]*par[1]
+ Par = par[0] * par[1] # noqa: N806
else:
- Par = par
- elif self.Name == "rayleigh":
- Par = par
- elif self.Name == "truncatednormal":
- a = (par[2]-par[0])/par[1]
- b = (par[3]-par[0])/par[1]
- Par = [par[0], par[1], a, b]
- elif self.Name == "uniform":
- Par = [par[0], par[1]-par[0]]
- elif self.Name == "weibull":
- Par = par
-
- #----------------------------------------------------------------------------
+ Par = par # noqa: N806
+ elif self.Name == 'rayleigh':
+ Par = par # noqa: N806
+ elif self.Name == 'truncatednormal':
+ a = (par[2] - par[0]) / par[1]
+ b = (par[3] - par[0]) / par[1]
+ Par = [par[0], par[1], a, b] # noqa: N806
+ elif self.Name == 'uniform':
+ Par = [par[0], par[1] - par[0]] # noqa: N806
+ elif self.Name == 'weibull':
+ Par = par # noqa: N806
+
+ # ----------------------------------------------------------------------------
# for the case of Opt == MOM
- else:
- if self.Name == "beta":
- r = ((par[3]-par[0])*(par[0]-par[2])/par[1]**2-1)*(par[0]-par[2])/(par[3]-par[2])
- s = r*(par[3]-par[0])/(par[0]-par[2])
- Par = [r, s, par[2], par[3]-par[2]]
- elif self.Name == "binomial":
- p = 1 - (par[1]) ** 2 / par[0]
- n = par[0] / p
- Par = [n.astype(int), p]
- elif self.Name == "chisquare":
- Par = np.around(par,0)
- elif self.Name == "exponential":
- Par = par
- elif self.Name == "frechet":
- c = np.zeros(n_cond)
- scale = np.zeros(n_cond)
- loc = np.zeros(n_cond)
- for i in range(n_cond):
- param0 = 2.0001
- def equation(param):
- return (np.sqrt(special.gamma(1 - 2 / param)- special.gamma(1 - 1 / param) ** 2)
- / special.gamma(1 - 1 / param)- par[1][i] / par[0][i])
- sol = optimize.fsolve(equation, x0=param0, full_output=True)
- if sol[2] == 1:
- k = sol[0][0]
- a_n = par[0][i] / special.gamma(1 - 1 / k)
- c[i] = -1/k
- scale[i] = a_n/k
- loc[i] = a_n
- else:
- c[i] = np.nan
- scale[i] = np.nan
- loc[i] = np.nan
- Par = [c, scale, loc]
- elif self.Name == "gamma":
- Par = [(par[0]/par[1])**2, par[1]**2/par[0]]
- elif self.Name == "geometric":
- Par = 1/par
- elif self.Name == "gev":
- beta = par[2]
- alpha = abs(beta)*par[1]/np.sqrt(special.gamma(1-2*beta)-special.gamma(1-beta)**2)
- epsilon = par[0]-(alpha/beta*(special.gamma(1-beta)-1))
- Par = [-beta, alpha, epsilon]
- elif self.Name == "gevmin":
- beta = par[2]
- alpha = abs(beta)*par[1]/np.sqrt(special.gamma(1-2*beta)-special.gamma(1-beta)**2)
- epsilon = par[0]+(alpha/beta*(special.gamma(1-beta)-1))
- Par = [-beta, alpha, -epsilon]
- elif self.Name == "gumbel":
- a_n = par[1] * np.sqrt(6)/np.pi
- b_n = par[0] - np.euler_gamma * a_n
- Par = [a_n, b_n]
- elif self.Name == "gumbelmin":
- a_n = par[1] * np.sqrt(6) / np.pi
- b_n = par[0] + np.euler_gamma * a_n
- Par = [a_n, b_n]
- elif self.Name == "lognormal":
- mu_lnx = np.log(par[0] ** 2 / np.sqrt(par[1] ** 2 + par[0] ** 2))
- sig_lnx = np.sqrt(np.log(1 + (par[1] / par[0]) ** 2))
- Par = [sig_lnx, np.exp(mu_lnx)]
- elif self.Name == "negativebinomial":
- p = par[0] / (par[0] + par[1] ** 2)
- k = par[0] * p
- Par = [k, p]
- elif self.Name == "normal":
- Par = par
- elif self.Name == "pareto":
- alpha = 1 + np.sqrt(1 + (par[0] / par[1]) ** 2)
- x_m = par[0] * (alpha - 1) / alpha
- Par = [1/alpha, x_m/alpha, x_m]
- elif self.Name == "poisson":
- if isinstance(par, list):
- Par = par[0]
+ elif self.Name == 'beta':
+ r = (
+ ((par[3] - par[0]) * (par[0] - par[2]) / par[1] ** 2 - 1)
+ * (par[0] - par[2])
+ / (par[3] - par[2])
+ )
+ s = r * (par[3] - par[0]) / (par[0] - par[2])
+ Par = [r, s, par[2], par[3] - par[2]] # noqa: N806
+ elif self.Name == 'binomial':
+ p = 1 - (par[1]) ** 2 / par[0]
+ n = par[0] / p
+ Par = [n.astype(int), p] # noqa: N806
+ elif self.Name == 'chisquare':
+ Par = np.around(par, 0) # noqa: N806
+ elif self.Name == 'exponential':
+ Par = par # noqa: N806
+ elif self.Name == 'frechet':
+ c = np.zeros(n_cond)
+ scale = np.zeros(n_cond)
+ loc = np.zeros(n_cond)
+ for i in range(n_cond):
+ param0 = 2.0001
+
+ def equation(param):
+ return (
+ np.sqrt(
+ special.gamma(1 - 2 / param)
+ - special.gamma(1 - 1 / param) ** 2
+ )
+ / special.gamma(1 - 1 / param)
+ - par[1][i] / par[0][i] # noqa: B023
+ )
+
+ sol = optimize.fsolve(equation, x0=param0, full_output=True)
+ if sol[2] == 1:
+ k = sol[0][0]
+ a_n = par[0][i] / special.gamma(1 - 1 / k)
+ c[i] = -1 / k
+ scale[i] = a_n / k
+ loc[i] = a_n
+ else:
+ c[i] = np.nan
+ scale[i] = np.nan
+ loc[i] = np.nan
+ Par = [c, scale, loc] # noqa: N806
+ elif self.Name == 'gamma':
+ Par = [(par[0] / par[1]) ** 2, par[1] ** 2 / par[0]] # noqa: N806
+ elif self.Name == 'geometric':
+ Par = 1 / par # noqa: N806
+ elif self.Name == 'gev':
+ beta = par[2]
+ alpha = (
+ abs(beta)
+ * par[1]
+ / np.sqrt(special.gamma(1 - 2 * beta) - special.gamma(1 - beta) ** 2)
+ )
+ epsilon = par[0] - (alpha / beta * (special.gamma(1 - beta) - 1))
+ Par = [-beta, alpha, epsilon] # noqa: N806
+ elif self.Name == 'gevmin':
+ beta = par[2]
+ alpha = (
+ abs(beta)
+ * par[1]
+ / np.sqrt(special.gamma(1 - 2 * beta) - special.gamma(1 - beta) ** 2)
+ )
+ epsilon = par[0] + (alpha / beta * (special.gamma(1 - beta) - 1))
+ Par = [-beta, alpha, -epsilon] # noqa: N806
+ elif self.Name == 'gumbel':
+ a_n = par[1] * np.sqrt(6) / np.pi
+ b_n = par[0] - np.euler_gamma * a_n
+ Par = [a_n, b_n] # noqa: N806
+ elif self.Name == 'gumbelmin':
+ a_n = par[1] * np.sqrt(6) / np.pi
+ b_n = par[0] + np.euler_gamma * a_n
+ Par = [a_n, b_n] # noqa: N806
+ elif self.Name == 'lognormal':
+ mu_lnx = np.log(par[0] ** 2 / np.sqrt(par[1] ** 2 + par[0] ** 2))
+ sig_lnx = np.sqrt(np.log(1 + (par[1] / par[0]) ** 2))
+ Par = [sig_lnx, np.exp(mu_lnx)] # noqa: N806
+ elif self.Name == 'negativebinomial':
+ p = par[0] / (par[0] + par[1] ** 2)
+ k = par[0] * p
+ Par = [k, p] # noqa: N806
+ elif self.Name == 'normal':
+ Par = par # noqa: N806
+ elif self.Name == 'pareto':
+ alpha = 1 + np.sqrt(1 + (par[0] / par[1]) ** 2)
+ x_m = par[0] * (alpha - 1) / alpha
+ Par = [1 / alpha, x_m / alpha, x_m] # noqa: N806
+ elif self.Name == 'poisson':
+ if isinstance(par, list):
+ Par = par[0] # noqa: N806
+ else:
+ Par = par # noqa: N806
+ elif self.Name == 'rayleigh':
+ Par = par / np.sqrt(np.pi / 2) # noqa: N806
+ elif self.Name == 'truncatednormal':
+ mu = np.zeros(n_cond)
+ sig = np.zeros(n_cond)
+ a = par[2]
+ b = par[3]
+ for i in range(n_cond):
+ mean = par[0][i]
+ std = par[1][i]
+ if a[i] >= b[i] or mean <= a[i] or mean >= b[i]:
+ a[i] = np.nan
+ b[i] = np.nan
+ mu[i] = np.nan
+ sig[i] = np.nan
+ continue
+
+ def equation(param):
+ f = lambda x: stats.norm.pdf(x, param[0], param[1]) / ( # noqa: E731
+ stats.norm.cdf(b[i], param[0], param[1]) # noqa: B023
+ - stats.norm.cdf(a[i], param[0], param[1]) # noqa: B023
+ )
+ expec_eq = (
+ integrate.quadrature(lambda x: x * f(x), a[i], b[i])[0] # noqa: B023
+ - mean # noqa: B023
+ )
+ std_eq = (
+ np.sqrt(
+ integrate.quadrature(lambda x: x**2 * f(x), a[i], b[i])[ # noqa: B023
+ 0
+ ]
+ - (integrate.quadrature(lambda x: x * f(x), a[i], b[i]))[ # noqa: B023
+ 0
+ ]
+ ** 2
+ )
+ - std # noqa: B023
+ )
+ eq = [expec_eq, std_eq]
+ return eq # noqa: RET504
+
+ x0 = [mean, std]
+ sol = optimize.fsolve(equation, x0=x0, full_output=True)
+ if sol[2] == 1:
+ mu[i] = sol[0][0]
+ sig[i] = sol[0][1]
+ else:
+ a[i] = np.nan
+ b[i] = np.nan
+ mu[i] = np.nan
+ sig[i] = np.nan
+ Par = [mu, sig, (a - mu) / sig, (b - mu) / sig] # noqa: N806
+ elif self.Name == 'uniform':
+ lower = par[0] - np.sqrt(12) * par[1] / 2
+ upper = par[0] + np.sqrt(12) * par[1] / 2
+ Par = [lower, upper - lower] # noqa: N806
+ elif self.Name == 'weibull':
+ a_n = np.zeros(n_cond)
+ k = np.zeros(n_cond)
+ for i in range(n_cond):
+
+ def equation(param):
+ return (
+ np.sqrt(
+ special.gamma(1 + 2 / param)
+ - (special.gamma(1 + 1 / param)) ** 2
+ )
+ / special.gamma(1 + 1 / param)
+ - par[1][i] / par[0][i] # noqa: B023
+ )
+
+ sol = optimize.fsolve(equation, x0=0.02, full_output=True)
+ if sol[2] == 1:
+ k[i] = sol[0][0]
+ a_n[i] = par[0][i] / special.gamma(1 + 1 / k[i])
else:
- Par = par
- elif self.Name == "rayleigh":
- Par = par / np.sqrt(np.pi / 2)
- elif self.Name == "truncatednormal":
- mu = np.zeros(n_cond)
- sig = np.zeros(n_cond)
- a = par[2]
- b = par[3]
- for i in range(n_cond):
- mean = par[0][i]; std = par[1][i];
- if a[i] >= b[i] or mean <= a[i] or mean >= b[i]:
- a[i] = np.nan; b[i] = np.nan; mu[i] = np.nan; sig[i] = np.nan;
- continue
- def equation(param):
- f = lambda x: stats.norm.pdf(x,param[0],param[1])/(stats.norm.cdf(b[i],param[0],param[1])-stats.norm.cdf(a[i],param[0],param[1]))
- expec_eq = integrate.quadrature(lambda x: x*f(x),a[i],b[i])[0]-mean
- std_eq = np.sqrt(integrate.quadrature(lambda x: x**2*f(x),a[i],b[i])[0]-(integrate.quadrature(lambda x: x*f(x),a[i],b[i]))[0]**2)-std
- eq = [expec_eq, std_eq]
- return(eq)
- x0 = [mean, std]
- sol = optimize.fsolve(equation, x0=x0, full_output=True)
- if sol[2] == 1:
- mu[i] = sol[0][0]; sig[i] = sol[0][1];
- else:
- a[i] = np.nan; b[i] = np.nan; mu[i] = np.nan; sig[i] = np.nan;
- Par = [mu, sig, (a-mu)/sig, (b-mu)/sig]
- elif self.Name == "uniform":
- lower = par[0] - np.sqrt(12) * par[1] / 2
- upper = par[0] + np.sqrt(12) * par[1] / 2
- Par = [lower, upper-lower]
- elif self.Name == "weibull":
- a_n = np.zeros(n_cond)
- k = np.zeros(n_cond)
- for i in range(n_cond):
- def equation(param):
- return (np.sqrt(special.gamma(1 + 2 / param) - (special.gamma(1 + 1 / param)) ** 2)
- / special.gamma(1 + 1 / param) - par[1][i] / par[0][i])
-
- sol = optimize.fsolve(equation, x0=0.02, full_output=True)
- if sol[2] == 1:
- k[i] = sol[0][0]
- a_n[i] = par[0][i] / special.gamma(1 + 1 / k[i])
- else:
- k[i] = np.nan; a_n[i] = np.nan;
- Par = [a_n, k]
-
- for i in range(0,len(Par)):
+ k[i] = np.nan
+ a_n[i] = np.nan
+ Par = [a_n, k] # noqa: N806
+
+ for i in range(len(Par)):
Par[i] = np.squeeze(Par[i])
-
- return Par
-
-#%%
- def condCDF(self,x,cond):
- """
- Evaluates the CDF of the conditional distribution at x for
+
+ return Par
+
+ # %%
+ def condCDF(self, x, cond): # noqa: C901, N802
+ """Evaluates the CDF of the conditional distribution at x for
the given conditions.
This method is used by the ERARosen method X2U.
- """
-
- par = self.condParam(cond) #computation of the conditional parameters
+ """ # noqa: D205, D401
+ par = self.condParam(cond) # computation of the conditional parameters
x = np.array(x, ndmin=1, dtype=float)
-
- if self.Name == "beta":
- CDF = stats.beta.cdf(x, a=par[0], b=par[1], loc=par[2], scale=par[3])
- elif self.Name == "binomial":
- CDF = stats.binom.cdf(x, n=par[0], p=par[1])
- elif self.Name == "chisquare":
- CDF = stats.chi2.cdf(x,df=par)
- elif self.Name == "exponential":
- CDF = stats.expon.cdf(x, scale=par)
- elif self.Name == "frechet":
- CDF = stats.genextreme.cdf(x, c=par[0], scale=par[1], loc=par[2])
- elif self.Name == "gamma":
- CDF = stats.gamma.cdf(x, a=par[0], scale=par[1])
- elif self.Name == "geometric":
- CDF = stats.geom.cdf(x, p=par)
- elif self.Name == "gev":
- CDF = stats.genextreme.cdf(x, c=par[0], scale=par[1], loc=par[2])
- elif self.Name == "gevmin":
- CDF = 1-stats.genextreme.cdf(-x, c=par[0], scale=par[1], loc=par[2])
- elif self.Name == "gumbel":
- CDF = stats.gumbel_r.cdf(x, scale=par[0], loc=par[1])
- elif self.Name == "gumbelmin":
- CDF = stats.gumbel_l.cdf(x, scale=par[0], loc=par[1])
- elif self.Name == "lognormal":
- CDF = stats.lognorm.cdf(x, s=par[0], scale=par[1])
- elif self.Name == "negativebinomial":
- CDF = stats.nbinom.cdf(x-par[0], n=par[0], p=par[1])
- elif self.Name == "normal":
- CDF = stats.norm.cdf(x, loc=par[0], scale=par[1])
- elif self.Name == "pareto":
- CDF = stats.genpareto.cdf(x, c=par[0], scale=par[1], loc=par[2])
- elif self.Name == "poisson":
- CDF = stats.poisson.cdf(x, mu=par)
- elif self.Name == "rayleigh":
- CDF = stats.rayleigh.cdf(x, scale=par)
- elif self.Name == "truncatednormal":
- CDF = stats.truncnorm.cdf(x, loc=par[0], scale=par[1], a=par[2], b=par[3])
- elif self.Name == "uniform":
- CDF = stats.uniform.cdf(x, loc=par[0], scale=par[1])
- elif self.Name == "weibull":
- CDF = stats.weibull_min.cdf(x, c=par[1], scale=par[0])
-
+
+ if self.Name == 'beta':
+ CDF = stats.beta.cdf(x, a=par[0], b=par[1], loc=par[2], scale=par[3]) # noqa: N806
+ elif self.Name == 'binomial':
+ CDF = stats.binom.cdf(x, n=par[0], p=par[1]) # noqa: N806
+ elif self.Name == 'chisquare':
+ CDF = stats.chi2.cdf(x, df=par) # noqa: N806
+ elif self.Name == 'exponential':
+ CDF = stats.expon.cdf(x, scale=par) # noqa: N806
+ elif self.Name == 'frechet':
+ CDF = stats.genextreme.cdf(x, c=par[0], scale=par[1], loc=par[2]) # noqa: N806
+ elif self.Name == 'gamma':
+ CDF = stats.gamma.cdf(x, a=par[0], scale=par[1]) # noqa: N806
+ elif self.Name == 'geometric':
+ CDF = stats.geom.cdf(x, p=par) # noqa: N806
+ elif self.Name == 'gev':
+ CDF = stats.genextreme.cdf(x, c=par[0], scale=par[1], loc=par[2]) # noqa: N806
+ elif self.Name == 'gevmin':
+ CDF = 1 - stats.genextreme.cdf(-x, c=par[0], scale=par[1], loc=par[2]) # noqa: N806
+ elif self.Name == 'gumbel':
+ CDF = stats.gumbel_r.cdf(x, scale=par[0], loc=par[1]) # noqa: N806
+ elif self.Name == 'gumbelmin':
+ CDF = stats.gumbel_l.cdf(x, scale=par[0], loc=par[1]) # noqa: N806
+ elif self.Name == 'lognormal':
+ CDF = stats.lognorm.cdf(x, s=par[0], scale=par[1]) # noqa: N806
+ elif self.Name == 'negativebinomial':
+ CDF = stats.nbinom.cdf(x - par[0], n=par[0], p=par[1]) # noqa: N806
+ elif self.Name == 'normal':
+ CDF = stats.norm.cdf(x, loc=par[0], scale=par[1]) # noqa: N806
+ elif self.Name == 'pareto':
+ CDF = stats.genpareto.cdf(x, c=par[0], scale=par[1], loc=par[2]) # noqa: N806
+ elif self.Name == 'poisson':
+ CDF = stats.poisson.cdf(x, mu=par) # noqa: N806
+ elif self.Name == 'rayleigh':
+ CDF = stats.rayleigh.cdf(x, scale=par) # noqa: N806
+ elif self.Name == 'truncatednormal':
+ CDF = stats.truncnorm.cdf( # noqa: N806
+ x, loc=par[0], scale=par[1], a=par[2], b=par[3]
+ )
+ elif self.Name == 'uniform':
+ CDF = stats.uniform.cdf(x, loc=par[0], scale=par[1]) # noqa: N806
+ elif self.Name == 'weibull':
+ CDF = stats.weibull_min.cdf(x, c=par[1], scale=par[0]) # noqa: N806
+
return CDF
-
-#%%
- def condiCDF(self,y,cond):
- """
- Evaluates the inverse CDF of the conditional distribution at
+
+ # %%
+ def condiCDF(self, y, cond): # noqa: C901, N802
+ """Evaluates the inverse CDF of the conditional distribution at
y for the given conditions.
This method is used by the ERARosen method U2X.
- """
-
- par = self.condParam(cond) #computation of the conditional parameters
+ """ # noqa: D205, D401
+ par = self.condParam(cond) # computation of the conditional parameters
y = np.array(y, ndmin=1, dtype=float)
-
- if self.Name == "beta":
- iCDF = stats.beta.ppf(y, a=par[0], b=par[1], loc=par[2], scale=par[3])
- elif self.Name == "binomial":
- iCDF = stats.binom.ppf(y, n=par[0], p=par[1])
- elif self.Name == "chisquare":
- iCDF = stats.chi2.ppf(y,df=par)
- elif self.Name == "exponential":
- iCDF = stats.expon.ppf(y, scale=par)
- elif self.Name == "frechet":
- iCDF = stats.genextreme.ppf(y, c=par[0], scale=par[1], loc=par[2])
- elif self.Name == "gamma":
- iCDF = stats.gamma.ppf(y, a=par[0], scale=par[1])
- elif self.Name == "geometric":
- iCDF = stats.geom.ppf(y, p=par)
- elif self.Name == "gev":
- iCDF = stats.genextreme.ppf(y, c=par[0], scale=par[1], loc=par[2])
- elif self.Name == "gevmin":
- iCDF = -stats.genextreme.ppf(1-y, c=par[0], scale=par[1], loc=par[2])
- elif self.Name == "gumbel":
- iCDF = stats.gumbel_r.ppf(y, scale=par[0], loc=par[1])
- elif self.Name == "gumbelmin":
- iCDF = stats.gumbel_l.ppf(y, scale=par[0], loc=par[1])
- elif self.Name == "lognormal":
- iCDF = stats.lognorm.ppf(y, s=par[0], scale=par[1])
- elif self.Name == "negativebinomial":
- iCDF = stats.nbinom.ppf(y, n=par[0], p=par[1])+par[0]
- elif self.Name == "normal":
- iCDF = stats.norm.ppf(y, loc=par[0], scale=par[1])
- elif self.Name == "pareto":
- iCDF = stats.genpareto.ppf(y, c=par[0], scale=par[1], loc=par[2])
- elif self.Name == "poisson":
- iCDF = stats.poisson.ppf(y, mu=par)
- elif self.Name == "rayleigh":
- iCDF = stats.rayleigh.ppf(y, scale=par)
- elif self.Name == "truncatednormal":
- iCDF = stats.truncnorm.ppf(y, loc=par[0], scale=par[1], a=par[2], b=par[3])
- elif self.Name == "uniform":
- iCDF = stats.uniform.ppf(y, loc=par[0], scale=par[1])
- elif self.Name == "weibull":
- iCDF = stats.weibull_min.ppf(y, c=par[1], scale=par[0])
+
+ if self.Name == 'beta':
+ iCDF = stats.beta.ppf(y, a=par[0], b=par[1], loc=par[2], scale=par[3]) # noqa: N806
+ elif self.Name == 'binomial':
+ iCDF = stats.binom.ppf(y, n=par[0], p=par[1]) # noqa: N806
+ elif self.Name == 'chisquare':
+ iCDF = stats.chi2.ppf(y, df=par) # noqa: N806
+ elif self.Name == 'exponential':
+ iCDF = stats.expon.ppf(y, scale=par) # noqa: N806
+ elif self.Name == 'frechet':
+ iCDF = stats.genextreme.ppf(y, c=par[0], scale=par[1], loc=par[2]) # noqa: N806
+ elif self.Name == 'gamma':
+ iCDF = stats.gamma.ppf(y, a=par[0], scale=par[1]) # noqa: N806
+ elif self.Name == 'geometric':
+ iCDF = stats.geom.ppf(y, p=par) # noqa: N806
+ elif self.Name == 'gev':
+ iCDF = stats.genextreme.ppf(y, c=par[0], scale=par[1], loc=par[2]) # noqa: N806
+ elif self.Name == 'gevmin':
+ iCDF = -stats.genextreme.ppf(1 - y, c=par[0], scale=par[1], loc=par[2]) # noqa: N806
+ elif self.Name == 'gumbel':
+ iCDF = stats.gumbel_r.ppf(y, scale=par[0], loc=par[1]) # noqa: N806
+ elif self.Name == 'gumbelmin':
+ iCDF = stats.gumbel_l.ppf(y, scale=par[0], loc=par[1]) # noqa: N806
+ elif self.Name == 'lognormal':
+ iCDF = stats.lognorm.ppf(y, s=par[0], scale=par[1]) # noqa: N806
+ elif self.Name == 'negativebinomial':
+ iCDF = stats.nbinom.ppf(y, n=par[0], p=par[1]) + par[0] # noqa: N806
+ elif self.Name == 'normal':
+ iCDF = stats.norm.ppf(y, loc=par[0], scale=par[1]) # noqa: N806
+ elif self.Name == 'pareto':
+ iCDF = stats.genpareto.ppf(y, c=par[0], scale=par[1], loc=par[2]) # noqa: N806
+ elif self.Name == 'poisson':
+ iCDF = stats.poisson.ppf(y, mu=par) # noqa: N806
+ elif self.Name == 'rayleigh':
+ iCDF = stats.rayleigh.ppf(y, scale=par) # noqa: N806
+ elif self.Name == 'truncatednormal':
+ iCDF = stats.truncnorm.ppf( # noqa: N806
+ y, loc=par[0], scale=par[1], a=par[2], b=par[3]
+ )
+ elif self.Name == 'uniform':
+ iCDF = stats.uniform.ppf(y, loc=par[0], scale=par[1]) # noqa: N806
+ elif self.Name == 'weibull':
+ iCDF = stats.weibull_min.ppf(y, c=par[1], scale=par[0]) # noqa: N806
return iCDF
-
-#%%
- def condPDF(self,x,cond):
- """
- Evaluates the PDF of the conditional distribution at x for
+
+ # %%
+ def condPDF(self, x, cond): # noqa: C901, N802
+ """Evaluates the PDF of the conditional distribution at x for
the given conditions.
This method is used by the ERARosen method pdf.
- """
-
- par = self.condParam(cond) #computation of the conditional parameters
+ """ # noqa: D205, D401
+ par = self.condParam(cond) # computation of the conditional parameters
x = np.array(x, ndmin=1, dtype=float)
-
- if self.Name == "beta":
- PDF = stats.beta.pdf(x, a=par[0], b=par[1], loc=par[2], scale=par[3])
- elif self.Name == "binomial":
- PDF = stats.binom.pmf(x, n=par[0], p=par[1])
- elif self.Name == "chisquare":
- PDF = stats.chi2.pdf(x,df=par)
- elif self.Name == "exponential":
- PDF = stats.expon.pdf(x, scale=par)
- elif self.Name == "frechet":
- PDF = stats.genextreme.pdf(x, c=par[0], scale=par[1], loc=par[2])
- elif self.Name == "gamma":
- PDF = stats.gamma.pdf(x, a=par[0], scale=par[1])
- elif self.Name == "geometric":
- PDF = stats.geom.pmf(x, p=par)
- elif self.Name == "gev":
- PDF = stats.genextreme.pdf(x, c=par[0], scale=par[1], loc=par[2])
- elif self.Name == "gevmin":
- PDF = stats.genextreme.pdf(-x, c=par[0], scale=par[1], loc=par[2])
- elif self.Name == "gumbel":
- PDF = stats.gumbel_r.pdf(x, scale=par[0], loc=par[1])
- elif self.Name == "gumbelmin":
- PDF = stats.gumbel_l.pdf(x, scale=par[0], loc=par[1])
- elif self.Name == "lognormal":
- PDF = stats.lognorm.pdf(x, s=par[0], scale=par[1])
- elif self.Name == "negativebinomial":
- PDF = stats.nbinom.pmf(x-par[0], n=par[0], p=par[1])
- elif self.Name == "normal":
- PDF = stats.norm.pdf(x, loc=par[0], scale=par[1])
- elif self.Name == "pareto":
- PDF = stats.genpareto.pdf(x, c=par[0], scale=par[1], loc=par[2])
- elif self.Name == "poisson":
- PDF = stats.poisson.pmf(x, mu=par)
- elif self.Name == "rayleigh":
- PDF = stats.rayleigh.pdf(x, scale=par)
- elif self.Name == "truncatednormal":
- PDF = stats.truncnorm.pdf(x, loc=par[0], scale=par[1], a=par[2], b=par[3])
- elif self.Name == "uniform":
- PDF = stats.uniform.pdf(x, loc=par[0], scale=par[1])
- elif self.Name == "weibull":
- PDF = stats.weibull_min.pdf(x, c=par[1], scale=par[0])
-
+
+ if self.Name == 'beta':
+ PDF = stats.beta.pdf(x, a=par[0], b=par[1], loc=par[2], scale=par[3]) # noqa: N806
+ elif self.Name == 'binomial':
+ PDF = stats.binom.pmf(x, n=par[0], p=par[1]) # noqa: N806
+ elif self.Name == 'chisquare':
+ PDF = stats.chi2.pdf(x, df=par) # noqa: N806
+ elif self.Name == 'exponential':
+ PDF = stats.expon.pdf(x, scale=par) # noqa: N806
+ elif self.Name == 'frechet':
+ PDF = stats.genextreme.pdf(x, c=par[0], scale=par[1], loc=par[2]) # noqa: N806
+ elif self.Name == 'gamma':
+ PDF = stats.gamma.pdf(x, a=par[0], scale=par[1]) # noqa: N806
+ elif self.Name == 'geometric':
+ PDF = stats.geom.pmf(x, p=par) # noqa: N806
+ elif self.Name == 'gev':
+ PDF = stats.genextreme.pdf(x, c=par[0], scale=par[1], loc=par[2]) # noqa: N806
+ elif self.Name == 'gevmin':
+ PDF = stats.genextreme.pdf(-x, c=par[0], scale=par[1], loc=par[2]) # noqa: N806
+ elif self.Name == 'gumbel':
+ PDF = stats.gumbel_r.pdf(x, scale=par[0], loc=par[1]) # noqa: N806
+ elif self.Name == 'gumbelmin':
+ PDF = stats.gumbel_l.pdf(x, scale=par[0], loc=par[1]) # noqa: N806
+ elif self.Name == 'lognormal':
+ PDF = stats.lognorm.pdf(x, s=par[0], scale=par[1]) # noqa: N806
+ elif self.Name == 'negativebinomial':
+ PDF = stats.nbinom.pmf(x - par[0], n=par[0], p=par[1]) # noqa: N806
+ elif self.Name == 'normal':
+ PDF = stats.norm.pdf(x, loc=par[0], scale=par[1]) # noqa: N806
+ elif self.Name == 'pareto':
+ PDF = stats.genpareto.pdf(x, c=par[0], scale=par[1], loc=par[2]) # noqa: N806
+ elif self.Name == 'poisson':
+ PDF = stats.poisson.pmf(x, mu=par) # noqa: N806
+ elif self.Name == 'rayleigh':
+ PDF = stats.rayleigh.pdf(x, scale=par) # noqa: N806
+ elif self.Name == 'truncatednormal':
+ PDF = stats.truncnorm.pdf( # noqa: N806
+ x, loc=par[0], scale=par[1], a=par[2], b=par[3]
+ )
+ elif self.Name == 'uniform':
+ PDF = stats.uniform.pdf(x, loc=par[0], scale=par[1]) # noqa: N806
+ elif self.Name == 'weibull':
+ PDF = stats.weibull_min.pdf(x, c=par[1], scale=par[0]) # noqa: N806
+
return PDF
-
-#%%
- def condRandom(self,cond):
- """
- Creates one random sample for each given condition.
+
+ # %%
+ def condRandom(self, cond): # noqa: C901, N802
+ """Creates one random sample for each given condition.
This method is used by the ERARosen method random.
- """
-
- par = self.condParam(cond) #computation of the conditional parameters
-
- if self.Name == "beta":
- Random = stats.beta.rvs(a=par[0], b=par[1], loc=par[2], scale=par[3])
- elif self.Name == "binomial":
- Random = stats.binom.rvs(n=par[0], p=par[1])
- elif self.Name == "chisquare":
- Random = stats.chi2.rvs(df=par)
- elif self.Name == "exponential":
- Random = stats.expon.rvs(scale=par)
- elif self.Name == "frechet":
- Random = stats.genextreme.rvs(c=par[0], scale=par[1], loc=par[2])
- elif self.Name == "gamma":
- Random = stats.gamma.rvs(a=par[0], scale=par[1])
- elif self.Name == "geometric":
- Random = stats.geom.rvs(p=par)
- elif self.Name == "gev":
- Random = stats.genextreme.rvs(c=par[0], scale=par[1], loc=par[2])
- elif self.Name == "gevmin":
- Random = -stats.genextreme.rvs(c=par[0], scale=par[1], loc=par[2])
- elif self.Name == "gumbel":
- Random = stats.gumbel_r.rvs(scale=par[0], loc=par[1])
- elif self.Name == "gumbelmin":
- Random = stats.gumbel_l.rvs(scale=par[0], loc=par[1])
- elif self.Name == "lognormal":
- Random = stats.lognorm.rvs(s=par[0], scale=par[1])
- elif self.Name == "negativebinomial":
- Random = stats.nbinom.rvs(n=par[0], p=par[1])+par[0]
- elif self.Name == "normal":
- Random = stats.norm.rvs(loc=par[0], scale=par[1])
- elif self.Name == "pareto":
- Random = stats.genpareto.rvs(c=par[0], scale=par[1], loc=par[2])
- elif self.Name == "poisson":
- Random = stats.poisson.rvs(mu=par)
- elif self.Name == "rayleigh":
- Random = stats.rayleigh.rvs(scale=par)
- elif self.Name == "truncatednormal":
- Random = stats.truncnorm.rvs(loc=par[0], scale=par[1], a=par[2], b=par[3])
- elif self.Name == "uniform":
- Random = stats.uniform.rvs(loc=par[0], scale=par[1])
- elif self.Name == "weibull":
- Random = stats.weibull_min.rvs(c=par[1], scale=par[0])
-
+ """ # noqa: D205, D401
+ par = self.condParam(cond) # computation of the conditional parameters
+
+ if self.Name == 'beta':
+ Random = stats.beta.rvs(a=par[0], b=par[1], loc=par[2], scale=par[3]) # noqa: N806
+ elif self.Name == 'binomial':
+ Random = stats.binom.rvs(n=par[0], p=par[1]) # noqa: N806
+ elif self.Name == 'chisquare':
+ Random = stats.chi2.rvs(df=par) # noqa: N806
+ elif self.Name == 'exponential':
+ Random = stats.expon.rvs(scale=par) # noqa: N806
+ elif self.Name == 'frechet':
+ Random = stats.genextreme.rvs(c=par[0], scale=par[1], loc=par[2]) # noqa: N806
+ elif self.Name == 'gamma':
+ Random = stats.gamma.rvs(a=par[0], scale=par[1]) # noqa: N806
+ elif self.Name == 'geometric':
+ Random = stats.geom.rvs(p=par) # noqa: N806
+ elif self.Name == 'gev':
+ Random = stats.genextreme.rvs(c=par[0], scale=par[1], loc=par[2]) # noqa: N806
+ elif self.Name == 'gevmin':
+ Random = -stats.genextreme.rvs(c=par[0], scale=par[1], loc=par[2]) # noqa: N806
+ elif self.Name == 'gumbel':
+ Random = stats.gumbel_r.rvs(scale=par[0], loc=par[1]) # noqa: N806
+ elif self.Name == 'gumbelmin':
+ Random = stats.gumbel_l.rvs(scale=par[0], loc=par[1]) # noqa: N806
+ elif self.Name == 'lognormal':
+ Random = stats.lognorm.rvs(s=par[0], scale=par[1]) # noqa: N806
+ elif self.Name == 'negativebinomial':
+ Random = stats.nbinom.rvs(n=par[0], p=par[1]) + par[0] # noqa: N806
+ elif self.Name == 'normal':
+ Random = stats.norm.rvs(loc=par[0], scale=par[1]) # noqa: N806
+ elif self.Name == 'pareto':
+ Random = stats.genpareto.rvs(c=par[0], scale=par[1], loc=par[2]) # noqa: N806
+ elif self.Name == 'poisson':
+ Random = stats.poisson.rvs(mu=par) # noqa: N806
+ elif self.Name == 'rayleigh':
+ Random = stats.rayleigh.rvs(scale=par) # noqa: N806
+ elif self.Name == 'truncatednormal':
+ Random = stats.truncnorm.rvs( # noqa: N806
+ loc=par[0], scale=par[1], a=par[2], b=par[3]
+ )
+ elif self.Name == 'uniform':
+ Random = stats.uniform.rvs(loc=par[0], scale=par[1]) # noqa: N806
+ elif self.Name == 'weibull':
+ Random = stats.weibull_min.rvs(c=par[1], scale=par[0]) # noqa: N806
+
return Random
-
\ No newline at end of file
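To make the refactored ERACond interface concrete, here is a hedged usage sketch (not introduced by this patch): a conditional normal whose mean and standard deviation are given as a lambda of one conditioning variable, as the class docstring above requires. It assumes ERACond is importable from this module.

import numpy as np

# The mean tracks the conditioning value x; the spread grows with |x|.
cond_norm = ERACond('normal', 'PAR', lambda x: [x, 0.1 * np.abs(x) + 1.0])
print(cond_norm.condCDF(0.0, cond=[0.0, 2.0]))  # one CDF value per condition
print(cond_norm.condRandom(cond=[0.0, 2.0]))    # one random sample per condition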
diff --git a/modules/performUQ/common/ERAClasses/ERADist.py b/modules/performUQ/common/ERAClasses/ERADist.py
index 7176fb569..96d78b23d 100644
--- a/modules/performUQ/common/ERAClasses/ERADist.py
+++ b/modules/performUQ/common/ERAClasses/ERADist.py
@@ -1,8 +1,9 @@
-# import of modules
+# import of modules # noqa: CPY001, D100, INP001
+import warnings
+
import numpy as np
import scipy as sp
-from scipy import optimize, stats, special
-import warnings
+from scipy import optimize, special, stats
"""
---------------------------------------------------------------------------
@@ -46,21 +47,20 @@
They can be defined either by their parameters, the first and second
moment or by data, given as a vector.
---------------------------------------------------------------------------
-"""
+""" # noqa: W291
-class ERADist(object):
- """
- Generation of marginal distribution objects.
- Construction of the distribution object with
-
+class ERADist:
+ """Generation of marginal distribution objects.
+ Construction of the distribution object with
+
Obj = ERADist(name,opt,val)
or Obj = ERADist(name,opt,val,ID)
-
+
The second option is only useful when using the ERADist object within
the scope of an ERARosen object.
-
-
+
+
The following distribution types are available:
opt = "PAR", specification of the distribution by its parameters:
@@ -86,8 +86,8 @@ class ERADist(object):
Truncated normal: Obj = ERADist('truncatednormal','PAR',[mu_n,sigma_n,a,b])
Uniform: Obj = ERADist('uniform','PAR',[lower,upper])
Weibull: Obj = ERADist('weibull','PAR',[a_n,k])
-
-
+
+
opt = "MOM", specification of the distribution by its moments:
Beta: Obj = ERADist('beta','MOM',[mean,std,a,b])
Binomial: Obj = ERADist('binomial','MOM',[mean,std])
@@ -111,8 +111,8 @@ class ERADist(object):
Truncated normal: Obj = ERADist('truncatednormal','MOM',[mean,std,a,b])
Uniform: Obj = ERADist('uniform','MOM',[mean,std])
Weibull: Obj = ERADist('weibull','MOM',[mean,std])
-
-
+
+
opt = "DATA", specification of the distribution by data given as a vector:
Beta: Obj = ERADist('beta','DATA',[[X],[a,b]])
Binomial: Obj = ERADist('binomial','DATA',[[X],n])
@@ -135,258 +135,308 @@ class ERADist(object):
Truncated normal: Obj = ERADist('truncatednormal','DATA',[[X],[a,b]])
Uniform: Obj = ERADist('uniform','DATA',[X])
Weibull: Obj = ERADist('weibull','DATA',[X])
-
- """
-#%%
- def __init__(self, name, opt, val=[0, 1], ID=False):
- """
- Constructor method, for more details have a look at the
+
+ """ # noqa: D205, D400
+
+ # %%
+ def __init__(self, name, opt, val=[0, 1], ID=False): # noqa: FBT002, B006, C901, N803, PLR0912, PLR0915
+ """Constructor method, for more details have a look at the
class description.
- """
-
+ """ # noqa: D205, D401
self.Name = name.lower()
self.ID = ID
-
- #----------------------------------------------------------------------------
+
+ # ----------------------------------------------------------------------------
# definition of the distribution by its parameters
- if opt.upper() == "PAR":
+ if opt.upper() == 'PAR':
val = np.array(val, ndmin=1, dtype=float)
-
-
- if name.lower() == "beta":
+
+ if name.lower() == 'beta':
"""
beta distribution in lecture notes can be shifted in order to
account for ranges [a,b] -> this is not implemented yet
"""
if (val[0] > 0) and (val[1] > 0) and (val[2] < val[3]):
- self.Par = {'r':val[0],'s':val[1],'a':val[2],'b':val[3]}
- self.Dist = stats.beta(a=self.Par['r'], b=self.Par['s'],
- loc=self.Par['a'],scale = self.Par['b']-self.Par['a'])
+ self.Par = {'r': val[0], 's': val[1], 'a': val[2], 'b': val[3]}
+ self.Dist = stats.beta(
+ a=self.Par['r'],
+ b=self.Par['s'],
+ loc=self.Par['a'],
+ scale=self.Par['b'] - self.Par['a'],
+ )
else:
- raise RuntimeError("The Beta distribution is not defined for your parameters.")
-
-
- elif name.lower() == "binomial":
+ raise RuntimeError( # noqa: DOC501, TRY003
+ 'The Beta distribution is not defined for your parameters.' # noqa: EM101
+ )
+
+ elif name.lower() == 'binomial':
if (val[1] >= 0) and (val[1] <= 1) and (val[0] % 1 == 0):
- self.Par = {'n':int(val[0]), 'p':val[1]}
+ self.Par = {'n': int(val[0]), 'p': val[1]}
self.Dist = stats.binom(n=self.Par['n'], p=self.Par['p'])
else:
- raise RuntimeError(
- "The Binomial distribution is not defined for your parameters.")
-
-
- elif name.lower() == "chisquare":
+ raise RuntimeError( # noqa: DOC501, TRY003
+ 'The Binomial distribution is not defined for your parameters.' # noqa: EM101
+ )
+
+ elif name.lower() == 'chisquare':
if val[0] > 0 and val[0] < np.inf and val[0] % 1 <= 10 ** (-4):
- self.Par = {'k':np.around(val[0],0)}
+ self.Par = {'k': np.around(val[0], 0)}
self.Dist = stats.chi2(df=self.Par['k'])
else:
- raise RuntimeError(
- "The Chi-Squared distribution is not defined for your parameters.")
-
-
- elif name.lower() == "exponential":
+ raise RuntimeError( # noqa: DOC501, TRY003
+ 'The Chi-Squared distribution is not defined for your parameters.' # noqa: EM101
+ )
+
+ elif name.lower() == 'exponential':
if val[0] > 0:
- self.Par = {'lambda':val[0]}
+ self.Par = {'lambda': val[0]}
self.Dist = stats.expon(scale=1 / self.Par['lambda'])
else:
- raise RuntimeError(
- "The Exponential distribution is not defined for your parameters.")
-
-
- elif name.lower() == "frechet":
+ raise RuntimeError( # noqa: DOC501, TRY003
+ 'The Exponential distribution is not defined for your parameters.' # noqa: EM101
+ )
+
+ elif name.lower() == 'frechet':
if (val[0] > 0) and (val[1] > 0):
- self.Par = {'a_n':val[0],'k':val[1]}
+ self.Par = {'a_n': val[0], 'k': val[1]}
self.Dist = stats.genextreme(
- c=-1 / self.Par['k'], scale=self.Par['a_n'] / self.Par['k'], loc=self.Par['a_n'])
+ c=-1 / self.Par['k'],
+ scale=self.Par['a_n'] / self.Par['k'],
+ loc=self.Par['a_n'],
+ )
else:
- raise RuntimeError(
- "The Frechet distribution is not defined for your parameters.")
-
-
- elif name.lower() == "gamma":
+ raise RuntimeError( # noqa: DOC501, TRY003
+ 'The Frechet distribution is not defined for your parameters.' # noqa: EM101
+ )
+
+ elif name.lower() == 'gamma':
if val[0] > 0 and val[1] > 0:
- self.Par = {'lambda':val[0], 'k':val[1]}
- self.Dist = stats.gamma(a=self.Par['k'], scale=1/self.Par['lambda'])
+ self.Par = {'lambda': val[0], 'k': val[1]}
+ self.Dist = stats.gamma(
+ a=self.Par['k'], scale=1 / self.Par['lambda']
+ )
else:
- raise RuntimeError(
- "The Gamma distribution is not defined for your parameters.")
+ raise RuntimeError( # noqa: DOC501, TRY003
+ 'The Gamma distribution is not defined for your parameters.' # noqa: EM101
+ )
-
- elif name.lower() == "geometric":
+ elif name.lower() == 'geometric':
val = val[0]
if val > 0 and val <= 1:
- self.Par = {'p':val}
+ self.Par = {'p': val}
self.Dist = stats.geom(p=self.Par['p'])
else:
- raise RuntimeError(
- "The Geometric distribution is not defined for your parameters.")
-
-
- elif name.lower() == "gev":
+ raise RuntimeError( # noqa: DOC501, TRY003
+ 'The Geometric distribution is not defined for your parameters.' # noqa: EM101
+ )
+
+ elif name.lower() == 'gev':
if val[1] > 0:
- self.Par = {'beta':val[0], 'alpha':val[1], 'epsilon':val[2]}
- self.Dist = stats.genextreme(c=-self.Par['beta'], scale=self.Par['alpha'], loc=self.Par['epsilon'])
+ self.Par = {'beta': val[0], 'alpha': val[1], 'epsilon': val[2]}
+ self.Dist = stats.genextreme(
+ c=-self.Par['beta'],
+ scale=self.Par['alpha'],
+ loc=self.Par['epsilon'],
+ )
else:
- raise RuntimeError(
- "The Generalized Extreme Value gistribution is not defined for your parameters.")
+ raise RuntimeError( # noqa: DOC501, TRY003
+ 'The Generalized Extreme Value distribution is not defined for your parameters.' # noqa: EM101
+ )
-
- elif name.lower() == "gevmin":
+ elif name.lower() == 'gevmin':
if val[1] > 0:
- self.Par = {'beta':val[0], 'alpha':val[1], 'epsilon':val[2]}
- self.Dist = stats.genextreme(c=-self.Par['beta'], scale=self.Par['alpha'], loc=-self.Par['epsilon'])
+ self.Par = {'beta': val[0], 'alpha': val[1], 'epsilon': val[2]}
+ self.Dist = stats.genextreme(
+ c=-self.Par['beta'],
+ scale=self.Par['alpha'],
+ loc=-self.Par['epsilon'],
+ )
else:
- raise RuntimeError(
- "The Generalized Extreme Value distribution is not defined for your parameters.")
-
-
- elif name.lower() == "gumbel":
+ raise RuntimeError( # noqa: DOC501, TRY003
+ 'The Generalized Extreme Value distribution is not defined for your parameters.' # noqa: EM101
+ )
+
+ elif name.lower() == 'gumbel':
if val[0] > 0:
- self.Par = {'a_n':val[0], 'b_n':val[1]}
- self.Dist = stats.gumbel_r(scale=self.Par['a_n'], loc=self.Par['b_n'])
+ self.Par = {'a_n': val[0], 'b_n': val[1]}
+ self.Dist = stats.gumbel_r(
+ scale=self.Par['a_n'], loc=self.Par['b_n']
+ )
else:
- raise RuntimeError(
- "The Gumbel distribution is not defined for your parameters.")
-
-
- elif name.lower() == "gumbelmin":
+ raise RuntimeError( # noqa: DOC501, TRY003
+ 'The Gumbel distribution is not defined for your parameters.' # noqa: EM101
+ )
+
+ elif name.lower() == 'gumbelmin':
if val[0] > 0:
- self.Par = {'a_n':val[0], 'b_n':val[1]}
- self.Dist = stats.gumbel_l(scale=self.Par['a_n'], loc=self.Par['b_n'])
+ self.Par = {'a_n': val[0], 'b_n': val[1]}
+ self.Dist = stats.gumbel_l(
+ scale=self.Par['a_n'], loc=self.Par['b_n']
+ )
else:
- raise RuntimeError(
- "The Gumbel distribution is not defined for your parameters.")
-
-
- elif name.lower() == "lognormal":
+ raise RuntimeError( # noqa: DOC501, TRY003
+ 'The Gumbel distribution is not defined for your parameters.' # noqa: EM101
+ )
+
+ elif name.lower() == 'lognormal':
if val[1] > 0:
- self.Par = {'mu_lnx':val[0],'sig_lnx':val[1]}
- self.Dist = stats.lognorm(s=self.Par['sig_lnx'], scale=np.exp(self.Par['mu_lnx']))
+ self.Par = {'mu_lnx': val[0], 'sig_lnx': val[1]}
+ self.Dist = stats.lognorm(
+ s=self.Par['sig_lnx'], scale=np.exp(self.Par['mu_lnx'])
+ )
else:
- raise RuntimeError(
- "The Lognormal distribution is not defined for your parameters.")
+ raise RuntimeError( # noqa: DOC501, TRY003
+ 'The Lognormal distribution is not defined for your parameters.' # noqa: EM101
+ )
-
- elif name.lower() == "negativebinomial":
+ elif name.lower() == 'negativebinomial':
if (
(val[1] > 0)
and (val[1] <= 1)
and (val[0] > 0)
and (val[0] % 1 == 0)
):
- self.Par = {'k':val[0], 'p':val[1]}
+ self.Par = {'k': val[0], 'p': val[1]}
self.Dist = stats.nbinom(n=self.Par['k'], p=self.Par['p'])
else:
- raise RuntimeError(
- "The Negative Binomial distribution is not defined for your parameters.")
-
-
- elif name.lower() == "normal" or name.lower() == "gaussian":
+ raise RuntimeError( # noqa: DOC501, TRY003
+ 'The Negative Binomial distribution is not defined for your parameters.' # noqa: EM101
+ )
+
+ elif name.lower() == 'normal' or name.lower() == 'gaussian':
if val[1] > 0:
- self.Par = {'mu':val[0], 'sigma':val[1]}
- self.Dist = stats.norm(loc=self.Par['mu'], scale=self.Par['sigma'])
+ self.Par = {'mu': val[0], 'sigma': val[1]}
+ self.Dist = stats.norm(
+ loc=self.Par['mu'], scale=self.Par['sigma']
+ )
else:
- raise RuntimeError(
- "The Normal distribution is not defined for your parameters.")
-
-
- elif name.lower() == "pareto":
+ raise RuntimeError( # noqa: DOC501, TRY003
+ 'The Normal distribution is not defined for your parameters.' # noqa: EM101
+ )
+
+ elif name.lower() == 'pareto':
if val[0] > 0 and val[1] > 0:
- self.Par = {'x_m':val[0],'alpha':val[1]}
- self.Dist = stats.genpareto(c=1 / self.Par['alpha'],
- scale=self.Par['x_m']/self.Par['alpha'], loc=self.Par['x_m'])
+ self.Par = {'x_m': val[0], 'alpha': val[1]}
+ self.Dist = stats.genpareto(
+ c=1 / self.Par['alpha'],
+ scale=self.Par['x_m'] / self.Par['alpha'],
+ loc=self.Par['x_m'],
+ )
else:
- raise RuntimeError(
- "The Pareto distribution is not defined for your parameters.")
+ raise RuntimeError( # noqa: DOC501, TRY003
+ 'The Pareto distribution is not defined for your parameters.' # noqa: EM101
+ )
-
- elif name.lower() == "poisson":
+ elif name.lower() == 'poisson':
n = len(val)
if n == 1:
if val > 0:
- self.Par = {'lambda':val[0]}
+ self.Par = {'lambda': val[0]}
self.Dist = stats.poisson(mu=self.Par['lambda'])
else:
- raise RuntimeError(
- "The Poisson distribution is not defined for your parameters.")
-
- if n == 2:
+ raise RuntimeError( # noqa: DOC501, TRY003
+ 'The Poisson distribution is not defined for your parameters.' # noqa: EM101
+ )
+
+ if n == 2: # noqa: PLR2004
if val[0] > 0 and val[1] > 0:
- self.Par = {'v':val[0],'t':val[1]}
+ self.Par = {'v': val[0], 't': val[1]}
self.Dist = stats.poisson(mu=self.Par['v'] * self.Par['t'])
else:
- raise RuntimeError(
- "The Poisson distribution is not defined for your parameters.")
-
-
- elif name.lower() == "rayleigh":
+ raise RuntimeError( # noqa: DOC501, TRY003
+ 'The Poisson distribution is not defined for your parameters.' # noqa: EM101
+ )
+
+ elif name.lower() == 'rayleigh':
alpha = val[0]
if alpha > 0:
- self.Par = {'alpha':alpha}
+ self.Par = {'alpha': alpha}
self.Dist = stats.rayleigh(scale=self.Par['alpha'])
else:
- raise RuntimeError(
- "The Rayleigh distribution is not defined for your parameters.")
-
-
- elif (name.lower() == "standardnormal") or (name.lower() == "standardgaussian"):
- self.Par = {'mu':0, 'sigma':1}
+ raise RuntimeError( # noqa: DOC501, TRY003
+ 'The Rayleigh distribution is not defined for your parameters.' # noqa: EM101
+ )
+
+ elif (name.lower() == 'standardnormal') or (
+ name.lower() == 'standardgaussian'
+ ):
+ self.Par = {'mu': 0, 'sigma': 1}
self.Dist = stats.norm(loc=0, scale=1)
-
-
- elif name.lower() == "truncatednormal":
+
+ elif name.lower() == 'truncatednormal':
if val[2] >= val[3]:
- raise RuntimeError("The upper bound a must be larger than the lower bound b.")
+ raise RuntimeError( # noqa: DOC501, TRY003
+ 'The upper bound b must be larger than the lower bound a.' # noqa: EM101
+ )
if val[1] < 0:
- raise RuntimeError("sigma must be larger than 0.")
- self.Par = {'mu_n':val[0], 'sig_n':val[1], 'a':val[2], 'b':val[3]}
- a_mod = (self.Par['a']-self.Par['mu_n'])/self.Par['sig_n']
- b_mod = (self.Par['b']-self.Par['mu_n'])/self.Par['sig_n']
- self.Dist = stats.truncnorm(loc=self.Par['mu_n'],scale=self.Par['sig_n'], a=a_mod, b=b_mod)
-
-
- elif name.lower() == "uniform":
+ raise RuntimeError('sigma must be larger than 0.') # noqa: DOC501, EM101, TRY003
+ self.Par = {
+ 'mu_n': val[0],
+ 'sig_n': val[1],
+ 'a': val[2],
+ 'b': val[3],
+ }
+ a_mod = (self.Par['a'] - self.Par['mu_n']) / self.Par['sig_n']
+ b_mod = (self.Par['b'] - self.Par['mu_n']) / self.Par['sig_n']
+ self.Dist = stats.truncnorm(
+ loc=self.Par['mu_n'], scale=self.Par['sig_n'], a=a_mod, b=b_mod
+ )
+
+ elif name.lower() == 'uniform':
if val[0] < val[1]:
- self.Par = {'lower':val[0], 'upper': val[1]}
- self.Dist = stats.uniform(loc=self.Par['lower'], scale=self.Par['upper'] - self.Par['lower'])
+ self.Par = {'lower': val[0], 'upper': val[1]}
+ self.Dist = stats.uniform(
+ loc=self.Par['lower'],
+ scale=self.Par['upper'] - self.Par['lower'],
+ )
else:
- raise RuntimeError(
- "The Uniform distribution is not defined for your parameters.")
-
+ raise RuntimeError( # noqa: DOC501, TRY003
+ 'The Uniform distribution is not defined for your parameters.' # noqa: EM101
+ )
- elif name.lower() == "weibull":
+ elif name.lower() == 'weibull':
if (val[0] > 0) and (val[1] > 0):
- self.Par = {'a_n':val[0], 'k':val[1]}
- self.Dist = stats.weibull_min(c=self.Par['k'], scale=self.Par['a_n'])
+ self.Par = {'a_n': val[0], 'k': val[1]}
+ self.Dist = stats.weibull_min(
+ c=self.Par['k'], scale=self.Par['a_n']
+ )
else:
- raise RuntimeError(
- "The Weibull distribution is not defined for your parameters.")
+ raise RuntimeError( # noqa: DOC501, TRY003
+ 'The Weibull distribution is not defined for your parameters.' # noqa: EM101
+ )
else:
- raise RuntimeError("Distribution type '" + name + "' not available.")
-
- #----------------------------------------------------------------------------
+ raise RuntimeError("Distribution type '" + name + "' not available.") # noqa: DOC501
+
+ # ----------------------------------------------------------------------------
# if the distribution is defined by its moments
- elif opt.upper() == "MOM":
+ elif opt.upper() == 'MOM':
val = np.array(val, ndmin=1, dtype=float)
-
+
if val.size > 1 and val[1] < 0:
- raise RuntimeError("The standard deviation must be non-negative.")
+ raise RuntimeError('The standard deviation must be non-negative.') # noqa: DOC501, EM101, TRY003
if name.lower() == 'beta':
if val[3] <= val[2]:
- raise RuntimeError("Please select an other support [a,b].")
- r = ((val[3]-val[0])*(val[0]-val[2])/val[1]**2-1)*(val[0]-val[2])/(val[3]-val[2])
- s = r*(val[3]-val[0])/(val[0]-val[2])
+ raise RuntimeError('Please select another support [a,b].') # noqa: DOC501, EM101, TRY003
+ r = (
+ ((val[3] - val[0]) * (val[0] - val[2]) / val[1] ** 2 - 1)
+ * (val[0] - val[2])
+ / (val[3] - val[2])
+ )
+ s = r * (val[3] - val[0]) / (val[0] - val[2])
# Evaluate if distribution can be defined on the parameters
if r <= 0 and s <= 0:
- raise RuntimeError("Please select other moments.")
- self.Par = {'r':r,'s':s,'a':val[2],'b':val[3]}
- self.Dist = stats.beta(a=self.Par['r'], b=self.Par['s'],
- loc=self.Par['a'],scale = self.Par['b']-self.Par['a'])
-
+ raise RuntimeError('Please select other moments.') # noqa: DOC501, EM101, TRY003
+ self.Par = {'r': r, 's': s, 'a': val[2], 'b': val[3]}
+ self.Dist = stats.beta(
+ a=self.Par['r'],
+ b=self.Par['s'],
+ loc=self.Par['a'],
+ scale=self.Par['b'] - self.Par['a'],
+ )
- elif name.lower() == "binomial":
+ elif name.lower() == 'binomial':
# Solve system of two equations for the parameters
p = 1 - (val[1]) ** 2 / val[0]
n = val[0] / p
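For reference, the two lines above invert the binomial moment relations E[X] = n*p and Var[X] = n*p*(1 - p), which give p = 1 - sigma^2/mu and n = mu/p. A quick numeric check with assumed moments (the values are made up):

# sanity check of the moment inversion used in the binomial 'MOM' branch
mean, std = 8.0, 2.0                  # assumed target mean and standard deviation
p = 1 - std**2 / mean                 # from Var = n*p*(1-p) and E = n*p
n = mean / p
assert abs(n * p - mean) < 1e-12
assert abs(n * p * (1 - p) - std**2) < 1e-12
print(n, p)                           # 16.0 0.5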
@@ -394,641 +444,777 @@ class description.
if n % 1 <= 10 ** (-4):
n = int(n)
else:
- raise RuntimeError("Please select other moments.")
- if 0 <= p and p <= 1 and 0 < n:
- self.Par = {'n':n, 'p':p}
- self.Dist = stats.binom(n=self.Par['n'], p=self.Par['p'])
+ raise RuntimeError('Please select other moments.') # noqa: DOC501, EM101, TRY003
+ if p >= 0 and p <= 1 and n > 0:
+ self.Par = {'n': n, 'p': p}
+ self.Dist = stats.binom(n=self.Par['n'], p=self.Par['p'])
else:
- raise RuntimeError("Please select other moments.")
-
-
- elif name.lower() == "chisquare":
+ raise RuntimeError('Please select other moments.') # noqa: DOC501, EM101, TRY003
+
+ elif name.lower() == 'chisquare':
if val[0] > 0 and val[0] < np.inf and val[0] % 1 <= 10 ** (-4):
- self.Par = {'k':np.around(val[0],0)}
+ self.Par = {'k': np.around(val[0], 0)}
self.Dist = stats.chi2(df=self.Par['k'])
else:
- raise RuntimeError("Please select other moments.")
-
-
- elif name.lower() == "exponential":
+ raise RuntimeError('Please select other moments.') # noqa: DOC501, EM101, TRY003
+
+ elif name.lower() == 'exponential':
try:
lam = 1 / val[0]
except ZeroDivisionError:
- raise RuntimeError("The first moment cannot be zero!")
- if 0 <= lam:
- self.Par = {'lambda':lam}
+ raise RuntimeError('The first moment cannot be zero!') # noqa: B904, DOC501, EM101, TRY003
+ if lam >= 0:
+ self.Par = {'lambda': lam}
self.Dist = stats.expon(scale=1 / self.Par['lambda'])
else:
- raise RuntimeError("Please select other moments.")
+ raise RuntimeError('Please select other moments.') # noqa: DOC501, EM101, TRY003
-
- elif name.lower() == "frechet":
+ elif name.lower() == 'frechet':
par0 = 2.0001
def equation(par):
- return (np.sqrt(special.gamma(1 - 2 / par)- special.gamma(1 - 1 / par) ** 2)
- / special.gamma(1 - 1 / par)- val[1] / val[0])
+ return (
+ np.sqrt(
+ special.gamma(1 - 2 / par)
+ - special.gamma(1 - 1 / par) ** 2
+ )
+ / special.gamma(1 - 1 / par)
+ - val[1] / val[0]
+ )
sol = optimize.fsolve(equation, x0=par0, full_output=True)
if sol[2] == 1:
k = sol[0][0]
a_n = val[0] / special.gamma(1 - 1 / k)
else:
- raise RuntimeError(
- "fsolve could not converge to a solution, therefore"
- "the parameters of the Frechet distribution could not be determined.")
+ raise RuntimeError( # noqa: DOC501, TRY003
+ 'fsolve could not converge to a solution, therefore' # noqa: EM101
+ 'the parameters of the Frechet distribution could not be determined.'
+ )
if a_n > 0 and k > 0:
- self.Par = {'a_n':a_n,'k':k}
+ self.Par = {'a_n': a_n, 'k': k}
self.Dist = stats.genextreme(
- c=-1 / self.Par['k'], scale=self.Par['a_n'] / self.Par['k'], loc=self.Par['a_n'])
+ c=-1 / self.Par['k'],
+ scale=self.Par['a_n'] / self.Par['k'],
+ loc=self.Par['a_n'],
+ )
else:
- raise RuntimeError("Please select other moments.")
-
-
- elif name.lower() == "gamma":
+ raise RuntimeError('Please select other moments.') # noqa: DOC501, EM101, TRY003
+
+ elif name.lower() == 'gamma':
# Solve system of equations for the parameters
lam = val[0] / (val[1] ** 2)
k = lam * val[0]
# Evaluate if distribution can be defined on the parameters
if lam > 0 and k > 0:
- self.Par = {'lambda':lam, 'k':k}
- self.Dist = stats.gamma(a=self.Par['k'], scale=1/self.Par['lambda'])
+ self.Par = {'lambda': lam, 'k': k}
+ self.Dist = stats.gamma(
+ a=self.Par['k'], scale=1 / self.Par['lambda']
+ )
else:
- raise RuntimeError("Please select other moments.")
-
-
- elif name.lower() == "geometric":
+ raise RuntimeError('Please select other moments.') # noqa: DOC501, EM101, TRY003
+
+ elif name.lower() == 'geometric':
# Solve Equation for the parameter based on the first moment
p = 1 / val[0]
- if 0 <= p and p <= 1:
- self.Par = {'p':p}
+ if p >= 0 and p <= 1:
+ self.Par = {'p': p}
self.Dist = stats.geom(p=self.Par['p'])
else:
- raise RuntimeError("Please select other moments.")
-
-
- elif name.lower() == "gev":
+ raise RuntimeError('Please select other moments.') # noqa: DOC501, EM101, TRY003
+
+ elif name.lower() == 'gev':
beta = val[2]
- if beta == 0: # corresponds to Gumbel distribution
+ if beta == 0: # corresponds to Gumbel distribution
# Solve two equations for the parameters of the distribution
- alpha = val[1]*np.sqrt(6)/np.pi # scale parameter
- epsilon = val[2] - np.euler_gamma*alpha # location parameter
- elif beta >= 0.5:
- raise RuntimeError("MOM can only be used for beta < 0.5 .")
+ alpha = val[1] * np.sqrt(6) / np.pi # scale parameter
+ epsilon = val[0] - np.euler_gamma * alpha # location parameter
+ elif beta >= 0.5: # noqa: PLR2004
+ raise RuntimeError('MOM can only be used for beta < 0.5 .') # noqa: DOC501, EM101, TRY003
else:
- alpha = abs(beta)*val[1]/np.sqrt(special.gamma(1-2*beta)-special.gamma(1-beta)**2)
- epsilon = val[0]-(alpha/beta*(special.gamma(1-beta)-1))
- self.Par = {'beta':beta, 'alpha':alpha, 'epsilon':epsilon}
- self.Dist = stats.genextreme(c=-self.Par['beta'], scale=self.Par['alpha'], loc=self.Par['epsilon'])
-
+ alpha = (
+ abs(beta)
+ * val[1]
+ / np.sqrt(
+ special.gamma(1 - 2 * beta)
+ - special.gamma(1 - beta) ** 2
+ )
+ )
+ epsilon = val[0] - (alpha / beta * (special.gamma(1 - beta) - 1))
+ self.Par = {'beta': beta, 'alpha': alpha, 'epsilon': epsilon}
+ self.Dist = stats.genextreme(
+ c=-self.Par['beta'],
+ scale=self.Par['alpha'],
+ loc=self.Par['epsilon'],
+ )
- elif name.lower() == "gevmin":
+ elif name.lower() == 'gevmin':
beta = val[2]
- if beta == 0: # corresponds to Gumbel distribution
+ if beta == 0: # corresponds to Gumbel distribution
# Solve two equations for the parameters of the distribution
- alpha = val[1]*np.sqrt(6)/np.pi # scale parameter
- epsilon = val[2] + np.euler_gamma*alpha # location parameter
- elif beta >= 0.5:
- raise RuntimeError("MOM can only be used for beta < 0.5 .")
+ alpha = val[1] * np.sqrt(6) / np.pi # scale parameter
+ epsilon = val[0] + np.euler_gamma * alpha # location parameter
+ elif beta >= 0.5: # noqa: PLR2004
+ raise RuntimeError('MOM can only be used for beta < 0.5 .') # noqa: DOC501, EM101, TRY003
else:
- alpha = abs(beta)*val[1]/np.sqrt(special.gamma(1-2*beta)-special.gamma(1-beta)**2)
- epsilon = val[0]+(alpha/beta*(special.gamma(1-beta)-1))
- self.Par = {'beta':beta, 'alpha':alpha, 'epsilon':epsilon}
- self.Dist = stats.genextreme(c=-self.Par['beta'], scale=self.Par['alpha'], loc=-self.Par['epsilon'])
-
-
- elif name.lower() == "gumbel":
+ alpha = (
+ abs(beta)
+ * val[1]
+ / np.sqrt(
+ special.gamma(1 - 2 * beta)
+ - special.gamma(1 - beta) ** 2
+ )
+ )
+ epsilon = val[0] + (alpha / beta * (special.gamma(1 - beta) - 1))
+ self.Par = {'beta': beta, 'alpha': alpha, 'epsilon': epsilon}
+ self.Dist = stats.genextreme(
+ c=-self.Par['beta'],
+ scale=self.Par['alpha'],
+ loc=-self.Par['epsilon'],
+ )
+
+ elif name.lower() == 'gumbel':
# solve two equations for the parameters of the distribution
a_n = val[1] * np.sqrt(6) / np.pi # scale parameter
b_n = val[0] - np.euler_gamma * a_n # location parameter
if a_n > 0:
- self.Par = {'a_n':a_n, 'b_n':b_n}
- self.Dist = stats.gumbel_r(scale=self.Par['a_n'], loc=self.Par['b_n'])
+ self.Par = {'a_n': a_n, 'b_n': b_n}
+ self.Dist = stats.gumbel_r(
+ scale=self.Par['a_n'], loc=self.Par['b_n']
+ )
else:
- raise RuntimeError("Please select other moments.")
+ raise RuntimeError('Please select other moments.') # noqa: DOC501, EM101, TRY003
-
- elif name.lower() == "gumbelmin":
+ elif name.lower() == 'gumbelmin':
# solve two equations for the parameters of the distribution
a_n = val[1] * np.sqrt(6) / np.pi # scale parameter
b_n = val[0] + np.euler_gamma * a_n # location parameter
if a_n > 0:
- self.Par = {'a_n':a_n, 'b_n':b_n}
- self.Dist = stats.gumbel_l(scale=self.Par['a_n'], loc=self.Par['b_n'])
+ self.Par = {'a_n': a_n, 'b_n': b_n}
+ self.Dist = stats.gumbel_l(
+ scale=self.Par['a_n'], loc=self.Par['b_n']
+ )
else:
- raise RuntimeError("Please select other moments.")
-
-
- elif name.lower() == "lognormal":
+ raise RuntimeError('Please select other moments.') # noqa: DOC501, EM101, TRY003
+
+ elif name.lower() == 'lognormal':
if val[0] <= 0:
- raise RuntimeError(
- "Please select other moments, the first moment must be greater than zero.")
+ raise RuntimeError( # noqa: DOC501, TRY003
+ 'Please select other moments, the first moment must be greater than zero.' # noqa: EM101
+ )
# solve two equations for the parameters of the distribution
mu_lnx = np.log(val[0] ** 2 / np.sqrt(val[1] ** 2 + val[0] ** 2))
sig_lnx = np.sqrt(np.log(1 + (val[1] / val[0]) ** 2))
- self.Par = {'mu_lnx':mu_lnx,'sig_lnx':sig_lnx}
- self.Dist = stats.lognorm(s=self.Par['sig_lnx'], scale=np.exp(self.Par['mu_lnx']))
-
-
- elif name.lower() == "negativebinomial":
+ self.Par = {'mu_lnx': mu_lnx, 'sig_lnx': sig_lnx}
+ self.Dist = stats.lognorm(
+ s=self.Par['sig_lnx'], scale=np.exp(self.Par['mu_lnx'])
+ )
+
+ elif name.lower() == 'negativebinomial':
# Solve System of two equations for the parameters
p = val[0] / (val[0] + val[1] ** 2)
k = val[0] * p
# Evaluate if distribution can be defined on the parameters
if k % 1 <= 10 ** (-4):
k = round(k, 0)
- if 0 <= p and p <= 1:
- self.Par = {'k':k, 'p':p}
+ if p >= 0 and p <= 1:
+ self.Par = {'k': k, 'p': p}
self.Dist = stats.nbinom(n=self.Par['k'], p=self.Par['p'])
else:
- raise RuntimeError("Please select other moments.")
+ raise RuntimeError('Please select other moments.') # noqa: DOC501, EM101, TRY003
else:
- raise RuntimeError("Please select other moments.")
+ raise RuntimeError('Please select other moments.') # noqa: DOC501, EM101, TRY003
-
- elif (name.lower() == "normal") or (name.lower() == "gaussian"):
- self.Par = {'mu':val[0], 'sigma':val[1]}
+ elif (name.lower() == 'normal') or (name.lower() == 'gaussian'):
+ self.Par = {'mu': val[0], 'sigma': val[1]}
self.Dist = stats.norm(loc=self.Par['mu'], scale=self.Par['sigma'])
-
-
- elif name.lower() == "pareto":
+
+ elif name.lower() == 'pareto':
alpha = 1 + np.sqrt(1 + (val[0] / val[1]) ** 2)
x_m = val[0] * (alpha - 1) / alpha
if x_m > 0 and alpha > 0:
- self.Par = {'x_m':x_m,'alpha':alpha}
- self.Dist = stats.genpareto(c=1 / self.Par['alpha'],
- scale=self.Par['x_m']/self.Par['alpha'], loc=self.Par['x_m'])
+ self.Par = {'x_m': x_m, 'alpha': alpha}
+ self.Dist = stats.genpareto(
+ c=1 / self.Par['alpha'],
+ scale=self.Par['x_m'] / self.Par['alpha'],
+ loc=self.Par['x_m'],
+ )
else:
- raise RuntimeError("Please select other moments.")
-
-
- elif name.lower() == "poisson":
+ raise RuntimeError('Please select other moments.') # noqa: DOC501, EM101, TRY003
+
+ elif name.lower() == 'poisson':
n = len(val)
if n == 1:
if val > 0:
- self.Par = {'lambda':val[0]}
+ self.Par = {'lambda': val[0]}
self.Dist = stats.poisson(mu=self.Par['lambda'])
else:
- raise RuntimeError("Please select other moments.")
-
- if n == 2:
+ raise RuntimeError('Please select other moments.') # noqa: DOC501, EM101, TRY003
+
+ if n == 2: # noqa: PLR2004
if val[0] > 0 and val[1] > 0:
- v = val[0]/val[1]
- if val[1] <= 0:
- raise RuntimeError("t must be positive.")
- self.Par = {'v':v,'t':val[1]}
+ v = val[0] / val[1]
+ if val[1] <= 0:
+ raise RuntimeError('t must be positive.') # noqa: DOC501, EM101, TRY003
+ self.Par = {'v': v, 't': val[1]}
self.Dist = stats.poisson(mu=self.Par['v'] * self.Par['t'])
else:
- raise RuntimeError("Please select other moments.")
-
-
- elif name.lower() == "rayleigh":
+ raise RuntimeError('Please select other moments.') # noqa: DOC501, EM101, TRY003
+
+ elif name.lower() == 'rayleigh':
alpha = val[0] / np.sqrt(np.pi / 2)
if alpha > 0:
- self.Par = {'alpha':alpha}
+ self.Par = {'alpha': alpha}
self.Dist = stats.rayleigh(scale=self.Par['alpha'])
else:
- raise RuntimeError("Please select other moments.")
-
-
- elif (name.lower() == "standardnormal") or (name.lower() == "standardgaussian"):
- self.Par = {'mu':0, 'sigma':1}
+ raise RuntimeError('Please select other moments.') # noqa: DOC501, EM101, TRY003
+
+ elif (name.lower() == 'standardnormal') or (
+ name.lower() == 'standardgaussian'
+ ):
+ self.Par = {'mu': 0, 'sigma': 1}
self.Dist = stats.norm(loc=0, scale=1)
-
-
- elif name.lower() == "truncatednormal":
+
+ elif name.lower() == 'truncatednormal':
if val[2] >= val[3]:
- raise RuntimeError("The upper bound a must be larger than the lower bound b.")
+ raise RuntimeError( # noqa: DOC501, TRY003
+ 'The upper bound b must be larger than the lower bound a.' # noqa: EM101
+ )
if val[0] <= val[2] or val[0] >= val[3]:
- raise RuntimeError('The mean of the distribution must be within the interval [a,b].')
+ raise RuntimeError( # noqa: DOC501, TRY003
+ 'The mean of the distribution must be within the interval [a,b].' # noqa: EM101
+ )
+
def equation(par):
- f = lambda x: stats.norm.pdf(x,par[0],par[1])/(stats.norm.cdf(val[3],par[0],par[1])-stats.norm.cdf(val[2],par[0],par[1]))
- expec_eq = sp.integrate.quad(lambda x: x*f(x),val[2],val[3])[0]-val[0]
- std_eq = np.sqrt(sp.integrate.quad(lambda x: x**2*f(x),val[2],val[3])[0]-(sp.integrate.quad(lambda x: x*f(x),val[2],val[3]))[0]**2)-val[1]
+ f = lambda x: stats.norm.pdf(x, par[0], par[1]) / ( # noqa: E731
+ stats.norm.cdf(val[3], par[0], par[1])
+ - stats.norm.cdf(val[2], par[0], par[1])
+ )
+ expec_eq = (
+ sp.integrate.quad(lambda x: x * f(x), val[2], val[3])[0]
+ - val[0]
+ )
+ std_eq = (
+ np.sqrt(
+ sp.integrate.quad(lambda x: x**2 * f(x), val[2], val[3])[
+ 0
+ ]
+ - (
+ sp.integrate.quad(lambda x: x * f(x), val[2], val[3])
+ )[0]
+ ** 2
+ )
+ - val[1]
+ )
eq = [expec_eq, std_eq]
- return(eq)
-
- x0 = [val[0],val[1]]
+ return eq # noqa: RET504
+
+ x0 = [val[0], val[1]]
sol = optimize.fsolve(equation, x0=x0, full_output=True)
if sol[2] == 1:
- self.Par = {'mu_n':sol[0][0], 'sig_n':sol[0][1], 'a':val[2], 'b':val[3]}
- a_mod = (self.Par['a']-self.Par['mu_n'])/self.Par['sig_n']
- b_mod = (self.Par['b']-self.Par['mu_n'])/self.Par['sig_n']
- self.Dist = stats.truncnorm(loc=self.Par['mu_n'],scale=self.Par['sig_n'], a=a_mod, b=b_mod)
+ self.Par = {
+ 'mu_n': sol[0][0],
+ 'sig_n': sol[0][1],
+ 'a': val[2],
+ 'b': val[3],
+ }
+ a_mod = (self.Par['a'] - self.Par['mu_n']) / self.Par['sig_n']
+ b_mod = (self.Par['b'] - self.Par['mu_n']) / self.Par['sig_n']
+ self.Dist = stats.truncnorm(
+ loc=self.Par['mu_n'],
+ scale=self.Par['sig_n'],
+ a=a_mod,
+ b=b_mod,
+ )
else:
- raise RuntimeError("fsolve did not converge.")
-
-
- elif name.lower() == "uniform":
+ raise RuntimeError('fsolve did not converge.') # noqa: DOC501, EM101, TRY003
+
+ elif name.lower() == 'uniform':
# compute parameters
lower = val[0] - np.sqrt(12) * val[1] / 2
upper = val[0] + np.sqrt(12) * val[1] / 2
- self.Par = {'lower':lower, 'upper': upper}
- self.Dist = stats.uniform(loc=self.Par['lower'], scale=self.Par['upper'] - self.Par['lower'])
-
-
- elif name.lower() == "weibull":
+ self.Par = {'lower': lower, 'upper': upper}
+ self.Dist = stats.uniform(
+ loc=self.Par['lower'],
+ scale=self.Par['upper'] - self.Par['lower'],
+ )
+
+ elif name.lower() == 'weibull':
+
def equation(par):
- return (np.sqrt(special.gamma(1 + 2 / par) - (special.gamma(1 + 1 / par)) ** 2)
- / special.gamma(1 + 1 / par) - val[1] / val[0])
+ return (
+ np.sqrt(
+ special.gamma(1 + 2 / par)
+ - (special.gamma(1 + 1 / par)) ** 2
+ )
+ / special.gamma(1 + 1 / par)
+ - val[1] / val[0]
+ )
sol = optimize.fsolve(equation, x0=0.02, full_output=True)
if sol[2] == 1:
k = sol[0][0]
a_n = val[0] / special.gamma(1 + 1 / k)
else:
- raise RuntimeError(
- "fsolve could not converge to a solution, therefore"
- "the parameters of the Weibull distribution could not be determined.")
+ raise RuntimeError( # noqa: DOC501, TRY003
+ 'fsolve could not converge to a solution, therefore' # noqa: EM101
+ 'the parameters of the Weibull distribution could not be determined.'
+ )
if a_n > 0 and k > 0:
- self.Par = {'a_n':a_n, 'k':k}
- self.Dist = stats.weibull_min(c=self.Par['k'], scale=self.Par['a_n'])
+ self.Par = {'a_n': a_n, 'k': k}
+ self.Dist = stats.weibull_min(
+ c=self.Par['k'], scale=self.Par['a_n']
+ )
else:
- raise RuntimeError("Please select other moments.")
-
-
+ raise RuntimeError('Please select other moments.') # noqa: DOC501, EM101, TRY003
+
else:
- raise RuntimeError("Distribution type '" + name + "' not available.")
+ raise RuntimeError("Distribution type '" + name + "' not available.") # noqa: DOC501
- #----------------------------------------------------------------------------
+ # ----------------------------------------------------------------------------
# if the distribution is to be fitted to a data vector
- elif opt.upper() == "DATA":
-
- if name.lower() == "beta":
+ elif opt.upper() == 'DATA':
+ if name.lower() == 'beta':
if val[2] <= val[1]:
- raise RuntimeError("Please select a different support [a,b].")
+ raise RuntimeError('Please select a different support [a,b].') # noqa: DOC501, EM101, TRY003
if min(val[0]) >= val[1] and max(val[0]) <= val[2]:
- pars = stats.beta.fit(val[0], floc=val[1], fscale=val[2]-val[1])
- self.Par = {'r':pars[0],'s':pars[1],'a':val[1],'b':val[2]}
- self.Dist = stats.beta(a=self.Par['r'], b=self.Par['s'],
- loc=self.Par['a'],scale = self.Par['b']-self.Par['a'])
+ pars = stats.beta.fit(
+ val[0], floc=val[1], fscale=val[2] - val[1]
+ )
+ self.Par = {'r': pars[0], 's': pars[1], 'a': val[1], 'b': val[2]}
+ self.Dist = stats.beta(
+ a=self.Par['r'],
+ b=self.Par['s'],
+ loc=self.Par['a'],
+ scale=self.Par['b'] - self.Par['a'],
+ )
else:
- raise RuntimeError("The given samples must be in the support range [a,b].")
-
-
- elif name.lower() == "binomial":
+ raise RuntimeError( # noqa: DOC501, TRY003
+ 'The given samples must be in the support range [a,b].' # noqa: EM101
+ )
+
+ elif name.lower() == 'binomial':
# Evaluate if distribution can be defined on the parameters
if val[1] % 1 <= 10 ** (-4) and val[1] > 0:
val[1] = int(val[1])
else:
- raise RuntimeError("n must be a positive integer.")
- X = np.array(val[0])
- if all((X) % 1 <= 10 ** (-4)) and all(X >= 0) and all(X <= val[1]):
- X = np.around(X, 0)
+ raise RuntimeError('n must be a positive integer.') # noqa: DOC501, EM101, TRY003
+ X = np.array(val[0]) # noqa: N806
+ if all((X) % 1 <= 10 ** (-4)) and all(X >= 0) and all(val[1] >= X):
+ X = np.around(X, 0) # noqa: N806
else:
- raise RuntimeError("The given samples must be integers in the range [0,n].")
- val[0] = np.mean(val[0])/val[1]
- self.Par = {'n':val[1], 'p':val[0]}
+ raise RuntimeError( # noqa: DOC501, TRY003
+ 'The given samples must be integers in the range [0,n].' # noqa: EM101
+ )
+ val[0] = np.mean(val[0]) / val[1]
+ self.Par = {'n': val[1], 'p': val[0]}
self.Dist = stats.binom(n=self.Par['n'], p=self.Par['p'])
-
-
- elif name.lower() == "chisquare":
+
+ elif name.lower() == 'chisquare':
if min(val) >= 0:
- pars = stats.chi2.fit(val, floc=0, fscale=1)
- self.Par = {'k':np.around(pars[0],0)}
+ pars = stats.chi2.fit(val, floc=0, fscale=1)
+ self.Par = {'k': np.around(pars[0], 0)}
self.Dist = stats.chi2(df=self.Par['k'])
else:
- raise RuntimeError("The given samples must be non-negative.")
-
-
- elif name.lower() == "exponential":
+ raise RuntimeError('The given samples must be non-negative.') # noqa: DOC501, EM101, TRY003
+
+ elif name.lower() == 'exponential':
if min(val) >= 0:
pars = stats.expon.fit(val, floc=0)
- self.Par = {'lambda':1 / pars[1]}
+ self.Par = {'lambda': 1 / pars[1]}
self.Dist = stats.expon(scale=1 / self.Par['lambda'])
else:
- raise RuntimeError("The given samples must be non-negative.")
-
+ raise RuntimeError('The given samples must be non-negative.') # noqa: DOC501, EM101, TRY003
- elif name.lower() == "frechet":
+ elif name.lower() == 'frechet':
if min(val) < 0:
- raise RuntimeError("The given samples must be non-negative.")
+ raise RuntimeError('The given samples must be non-negative.') # noqa: DOC501, EM101, TRY003
+
def equation(par):
- return (-np.sum(np.log(stats.genextreme.pdf(val,c=-1 / par[1], scale=par[0] / par[1], loc=par[0]))))
-
+ return -np.sum(
+ np.log(
+ stats.genextreme.pdf(
+ val, c=-1 / par[1], scale=par[0] / par[1], loc=par[0]
+ )
+ )
+ )
+
par1 = 2.0001
par0 = par1 / special.gamma(1 - 1 / np.mean(val))
- x0 = np.array([par0,par1])
- bnds = optimize.Bounds(lb=[0,0],ub=[np.inf,np.inf])
+ x0 = np.array([par0, par1])
+ bnds = optimize.Bounds(lb=[0, 0], ub=[np.inf, np.inf])
sol = optimize.minimize(equation, x0, bounds=bnds)
- if sol.success == True:
- self.Par = {'a_n':sol.x[0],'k':sol.x[1]}
+ if sol.success == True: # noqa: E712
+ self.Par = {'a_n': sol.x[0], 'k': sol.x[1]}
self.Dist = stats.genextreme(
- c=-1 / self.Par['k'], scale=self.Par['a_n'] / self.Par['k'], loc=self.Par['a_n'])
+ c=-1 / self.Par['k'],
+ scale=self.Par['a_n'] / self.Par['k'],
+ loc=self.Par['a_n'],
+ )
else:
- raise RuntimeError("Maximum likelihood estimation did not converge.")
-
-
- elif name.lower() == "gamma":
+ raise RuntimeError( # noqa: DOC501, TRY003
+ 'Maximum likelihood estimation did not converge.' # noqa: EM101
+ )
+
+ elif name.lower() == 'gamma':
pars = stats.gamma.fit(val, floc=0)
- self.Par = {'lambda':1 / pars[2], 'k':pars[0]}
- self.Dist = stats.gamma(a=self.Par['k'], scale=1/self.Par['lambda'])
-
-
- elif name.lower() == "geometric":
- if all(val > 0) and all(val %1 == 0):
- self.Par = {'p':1/np.mean(val)}
+ self.Par = {'lambda': 1 / pars[2], 'k': pars[0]}
+ self.Dist = stats.gamma(
+ a=self.Par['k'], scale=1 / self.Par['lambda']
+ )
+
+ elif name.lower() == 'geometric':
+ if all(val > 0) and all(val % 1 == 0):
+ self.Par = {'p': 1 / np.mean(val)}
self.Dist = stats.geom(p=self.Par['p'])
else:
- raise RuntimeError("The given samples must be integers larger than 0.")
-
-
- elif name.lower() == "gev":
+ raise RuntimeError( # noqa: DOC501, TRY003
+ 'The given samples must be integers larger than 0.' # noqa: EM101
+ )
+
+ elif name.lower() == 'gev':
pars = gevfit_alt(np.squeeze(val))
- self.Par = {'beta':pars[0], 'alpha':pars[1], 'epsilon':pars[2]}
- self.Dist = stats.genextreme(c=-self.Par['beta'], scale=self.Par['alpha'], loc=self.Par['epsilon'])
+ self.Par = {'beta': pars[0], 'alpha': pars[1], 'epsilon': pars[2]}
+ self.Dist = stats.genextreme(
+ c=-self.Par['beta'],
+ scale=self.Par['alpha'],
+ loc=self.Par['epsilon'],
+ )
-
- elif name.lower() == "gevmin":
+ elif name.lower() == 'gevmin':
pars = gevfit_alt(np.squeeze(-val))
- self.Par = {'beta':pars[0], 'alpha':pars[1], 'epsilon':-pars[2]}
- self.Dist = stats.genextreme(c=-self.Par['beta'], scale=self.Par['alpha'], loc=-self.Par['epsilon'])
-
-
- elif name.lower() == "gumbel":
+ self.Par = {'beta': pars[0], 'alpha': pars[1], 'epsilon': -pars[2]}
+ self.Dist = stats.genextreme(
+ c=-self.Par['beta'],
+ scale=self.Par['alpha'],
+ loc=-self.Par['epsilon'],
+ )
+
+ elif name.lower() == 'gumbel':
pars = stats.gumbel_r.fit(val)
- self.Par = {'a_n':pars[1], 'b_n':pars[0]}
- self.Dist = stats.gumbel_r(scale=self.Par['a_n'], loc=self.Par['b_n'])
+ self.Par = {'a_n': pars[1], 'b_n': pars[0]}
+ self.Dist = stats.gumbel_r(
+ scale=self.Par['a_n'], loc=self.Par['b_n']
+ )
-
- elif name.lower() == "gumbelmin":
+ elif name.lower() == 'gumbelmin':
pars = stats.gumbel_l.fit(val)
- self.Par = {'a_n':pars[1], 'b_n':pars[0]}
- self.Dist = stats.gumbel_l(scale=self.Par['a_n'], loc=self.Par['b_n'])
-
-
- elif name.lower() == "lognormal":
+ self.Par = {'a_n': pars[1], 'b_n': pars[0]}
+ self.Dist = stats.gumbel_l(
+ scale=self.Par['a_n'], loc=self.Par['b_n']
+ )
+
+ elif name.lower() == 'lognormal':
pars = stats.lognorm.fit(val, floc=0)
- self.Par = {'mu_lnx':np.log(pars[2]),'sig_lnx':pars[0]}
- self.Dist = stats.lognorm(s=self.Par['sig_lnx'], scale=np.exp(self.Par['mu_lnx']))
-
-
- elif name.lower() == "negativebinomial":
+ self.Par = {'mu_lnx': np.log(pars[2]), 'sig_lnx': pars[0]}
+ self.Dist = stats.lognorm(
+ s=self.Par['sig_lnx'], scale=np.exp(self.Par['mu_lnx'])
+ )
+
+ elif name.lower() == 'negativebinomial':
# first estimation of k,p with method of moments
- p = np.mean(val)/(np.mean(val)+np.var(val))
- k = np.mean(val)*p
- if k==0:
- raise RuntimeError("No suitable parameters can be estimated from the given data.")
- k = round(k, 0) # rounding of k, since k must be a positive integer according to ERADist definition
- p = k/np.mean(val); # estimation of p for rounded k (mle)
- self.Par = {'k':k, 'p':p}
+ p = np.mean(val) / (np.mean(val) + np.var(val))
+ k = np.mean(val) * p
+ if k == 0:
+ raise RuntimeError( # noqa: DOC501, TRY003
+ 'No suitable parameters can be estimated from the given data.' # noqa: EM101
+ )
+ k = round(
+ k, 0
+ ) # rounding of k, since k must be a positive integer according to ERADist definition
+ p = k / np.mean(val) # estimation of p for rounded k (mle)
+ self.Par = {'k': k, 'p': p}
self.Dist = stats.nbinom(n=self.Par['k'], p=self.Par['p'])
-
- elif name.lower() == "normal" or name.lower() == "gaussian":
+ elif name.lower() == 'normal' or name.lower() == 'gaussian':
pars = stats.norm.fit(val)
- self.Par = {'mu':pars[0], 'sigma':pars[1]}
+ self.Par = {'mu': pars[0], 'sigma': pars[1]}
self.Dist = stats.norm(loc=self.Par['mu'], scale=self.Par['sigma'])
-
-
- elif name.lower() == "pareto":
+
+ elif name.lower() == 'pareto':
x_m = min(val)
if x_m > 0:
+
def equation(par):
- return (-np.sum(np.log(stats.genpareto.pdf(val,c = 1 / par, scale = x_m / par, loc = x_m))))
+ return -np.sum(
+ np.log(
+ stats.genpareto.pdf(
+ val, c=1 / par, scale=x_m / par, loc=x_m
+ )
+ )
+ )
x0 = x_m
sol = optimize.minimize(equation, x0)
- if sol.success == True:
- self.Par = {'x_m':x_m,'alpha':float(sol.x)}
- self.Dist = stats.genpareto(c=1 / self.Par['alpha'],
- scale=self.Par['x_m']/self.Par['alpha'], loc=self.Par['x_m'])
+ if sol.success == True: # noqa: E712
+ self.Par = {'x_m': x_m, 'alpha': float(sol.x)}
+ self.Dist = stats.genpareto(
+ c=1 / self.Par['alpha'],
+ scale=self.Par['x_m'] / self.Par['alpha'],
+ loc=self.Par['x_m'],
+ )
else:
- raise RuntimeError("Maximum likelihood estimation did not converge.")
+ raise RuntimeError( # noqa: DOC501, TRY003
+ 'Maximum likelihood estimation did not converge.' # noqa: EM101
+ )
else:
- raise RuntimeError("The given data must be positive.")
-
-
- elif name.lower() == "poisson":
+ raise RuntimeError('The given data must be positive.') # noqa: DOC501, EM101, TRY003
+
+ elif name.lower() == 'poisson':
n = len(val)
- if n == 2:
- X = val[0]
+ if n == 2: # noqa: PLR2004
+ X = val[0] # noqa: N806
t = val[1]
if t <= 0:
- raise RuntimeError("t must be positive.")
- if all(X >=0) and all(X %1 == 0):
- v = np.mean(X)/t
- self.Par = {'v':v,'t':t}
+ raise RuntimeError('t must be positive.') # noqa: DOC501, EM101, TRY003
+ if all(X >= 0) and all(X % 1 == 0):
+ v = np.mean(X) / t
+ self.Par = {'v': v, 't': t}
self.Dist = stats.poisson(mu=self.Par['v'] * self.Par['t'])
else:
- raise RuntimeError("The given samples must be non-negative integers.")
+ raise RuntimeError( # noqa: DOC501, TRY003
+ 'The given samples must be non-negative integers.' # noqa: EM101
+ )
+ elif all(val >= 0) and all(val % 1 == 0):
+ self.Par = {'lambda': np.mean(val)}
+ self.Dist = stats.poisson(mu=self.Par['lambda'])
else:
- if all(val >= 0) and all(val %1 == 0):
- self.Par = {'lambda':np.mean(val)}
- self.Dist = stats.poisson(mu=self.Par['lambda'])
- else:
- raise RuntimeError("The given samples must be non-negative integers.")
-
-
- elif name.lower() == "rayleigh":
+ raise RuntimeError( # noqa: DOC501, TRY003
+ 'The given samples must be non-negative integers.' # noqa: EM101
+ )
+
+ elif name.lower() == 'rayleigh':
pars = stats.rayleigh.fit(val, floc=0)
- self.Par = {'alpha':pars[1]}
+ self.Par = {'alpha': pars[1]}
self.Dist = stats.rayleigh(scale=self.Par['alpha'])
-
-
- elif name.lower() == "truncatednormal":
- X = val[0]
+
+ elif name.lower() == 'truncatednormal':
+ X = val[0] # noqa: N806
if val[1] >= val[2]:
- raise RuntimeError("The upper bound a must be larger than the lower bound b.")
- if not (all(X >= val[1]) and all(X <= val[2])):
- raise RuntimeError("The given samples must be in the range [a,b].")
+ raise RuntimeError( # noqa: DOC501, TRY003
+ 'The upper bound b must be larger than the lower bound a.' # noqa: EM101
+ )
+ if not (all(val[1] <= X) and all(val[2] >= X)):
+ raise RuntimeError( # noqa: DOC501, TRY003
+ 'The given samples must be in the range [a,b].' # noqa: EM101
+ )
+
def equation(par):
- return (-np.sum(np.log(stats.norm.pdf(X,loc=par[0], scale=par[1])/
- (stats.norm.cdf(val[2],par[0],par[1])-stats.norm.cdf(val[1],par[0],par[1])))))
-
- x0 = np.array([np.mean(X),np.std(X)])
- bnds = optimize.Bounds(lb=[-np.inf,0],ub=[np.inf,np.inf])
+ return -np.sum(
+ np.log(
+ stats.norm.pdf(X, loc=par[0], scale=par[1])
+ / (
+ stats.norm.cdf(val[2], par[0], par[1])
+ - stats.norm.cdf(val[1], par[0], par[1])
+ )
+ )
+ )
+
+ x0 = np.array([np.mean(X), np.std(X)])
+ bnds = optimize.Bounds(lb=[-np.inf, 0], ub=[np.inf, np.inf])
sol = optimize.minimize(equation, x0, bounds=bnds)
- if sol.success == True:
- self.Par = {'mu_n':float(sol.x[0]), 'sig_n':float(sol.x[1]), 'a':val[1], 'b':val[2]}
- a_mod = (self.Par['a']-self.Par['mu_n'])/self.Par['sig_n']
- b_mod = (self.Par['b']-self.Par['mu_n'])/self.Par['sig_n']
- self.Dist = stats.truncnorm(loc=self.Par['mu_n'],scale=self.Par['sig_n'], a=a_mod, b=b_mod)
+ if sol.success == True: # noqa: E712
+ self.Par = {
+ 'mu_n': float(sol.x[0]),
+ 'sig_n': float(sol.x[1]),
+ 'a': val[1],
+ 'b': val[2],
+ }
+ a_mod = (self.Par['a'] - self.Par['mu_n']) / self.Par['sig_n']
+ b_mod = (self.Par['b'] - self.Par['mu_n']) / self.Par['sig_n']
+ self.Dist = stats.truncnorm(
+ loc=self.Par['mu_n'],
+ scale=self.Par['sig_n'],
+ a=a_mod,
+ b=b_mod,
+ )
else:
- raise RuntimeError("Maximum likelihood estimation did not converge.")
-
-
- elif name.lower() == "uniform":
- self.Par = {'lower':min(val), 'upper': max(val)}
- self.Dist = stats.uniform(loc=self.Par['lower'], scale=self.Par['upper'] - self.Par['lower'])
-
- elif name.lower() == "weibull":
+ raise RuntimeError( # noqa: DOC501, TRY003
+ 'Maximum likelihood estimation did not converge.' # noqa: EM101
+ )
+
+ elif name.lower() == 'uniform':
+ self.Par = {'lower': min(val), 'upper': max(val)}
+ self.Dist = stats.uniform(
+ loc=self.Par['lower'],
+ scale=self.Par['upper'] - self.Par['lower'],
+ )
+
+ elif name.lower() == 'weibull':
pars = stats.weibull_min.fit(val, floc=0)
- self.Par = {'a_n':pars[2], 'k':pars[0]}
+ self.Par = {'a_n': pars[2], 'k': pars[0]}
self.Dist = stats.weibull_min(c=self.Par['k'], scale=self.Par['a_n'])
-
else:
- raise RuntimeError("Distribution type '" + name + "' not available.")
+ raise RuntimeError("Distribution type '" + name + "' not available.") # noqa: DOC501
else:
- raise RuntimeError("Unknown option :" + opt)
-
-#%%
+ raise RuntimeError('Unknown option: ' + opt) # noqa: DOC501
+
+ # %%
def mean(self):
- """
- Returns the mean of the distribution.
- """
-
- if self.Name == "gevmin":
+ """Returns the mean of the distribution.""" # noqa: D401
+ if self.Name == 'gevmin':
return -self.Dist.mean()
-
- elif self.Name == "negativebinomial":
+
+ elif self.Name == 'negativebinomial': # noqa: RET505
return self.Dist.mean() + self.Par['k']
else:
return self.Dist.mean()
-#%%
+ # %%
def std(self):
- """
- Returns the standard deviation of the distribution.
- """
-
+ """Returns the standard deviation of the distribution.""" # noqa: D401
return self.Dist.std()
-#%%
+ # %%
def pdf(self, x):
- """
- Returns the PDF value.
- """
-
- if self.Name == "binomial":
+ """Returns the PDF value.""" # noqa: D401
+ if self.Name == 'binomial' or self.Name == 'geometric': # noqa: PLR1714
return self.Dist.pmf(x)
- elif self.Name == "geometric":
- return self.Dist.pmf(x)
-
- elif self.Name == "gevmin":
+ elif self.Name == 'gevmin': # noqa: RET505
return self.Dist.pdf(-x)
- elif self.Name == "negativebinomial":
+ elif self.Name == 'negativebinomial':
return self.Dist.pmf(x - self.Par['k'])
- elif self.Name == "poisson":
+ elif self.Name == 'poisson':
return self.Dist.pmf(x)
else:
return self.Dist.pdf(x)
-
-#%%
+
+ # %%
def cdf(self, x):
- """
- Returns the CDF value.
- """
-
- if self.Name == "gevmin":
- return 1-self.Dist.cdf(-x) # <-- this is not a proper cdf !
-
- elif self.Name == "negativebinomial":
+ """Returns the CDF value.""" # noqa: D401
+ if self.Name == 'gevmin':
+ return 1 - self.Dist.cdf(-x) # <-- this is not a proper cdf !
+
+ elif self.Name == 'negativebinomial': # noqa: RET505
return self.Dist.cdf(x - self.Par['k'])
else:
return self.Dist.cdf(x)
-#%%
+ # %%
def random(self, size=None):
- """
- Generates random samples according to the distribution of the
+ """Generates random samples according to the distribution of the
object.
- """
-
- if self.Name == "gevmin":
+ """ # noqa: D205, D401
+ if self.Name == 'gevmin':
return self.Dist.rvs(size=size) * (-1)
-
- elif self.Name == "negativebinomial":
+
+ elif self.Name == 'negativebinomial': # noqa: RET505
samples = self.Dist.rvs(size=size) + self.Par['k']
- return samples
+ return samples # noqa: RET504
else:
samples = self.Dist.rvs(size=size)
- return samples
+ return samples # noqa: RET504
-#%%
+ # %%
def icdf(self, y):
- """
- Returns the value of the inverse CDF.
- """
-
- if self.Name == "gevmin":
- return -self.Dist.ppf(1-y)
-
- elif self.Name == "negativebinomial":
+ """Returns the value of the inverse CDF.""" # noqa: D401
+ if self.Name == 'gevmin':
+ return -self.Dist.ppf(1 - y)
+
+ elif self.Name == 'negativebinomial': # noqa: RET505
return self.Dist.ppf(y) + self.Par['k']
else:
return self.Dist.ppf(y)
-
-#%% Nested functions: for GEV-parameter fitting
-
+
+
+# %% Nested functions: for GEV-parameter fitting
+
+
def gevfit_alt(y):
- '''Author: Iason Papaioannou
+ """Author: Iason Papaioannou
The function gevfit_alt evaluates the parameters of the generalized
extreme value distribution with the method of Probability Weighted
- Moments (PWM) and Maximum Likelihood Estimation (MLE).'''
-
+ Moments (PWM) and Maximum Likelihood Estimation (MLE).
+ """ # noqa: D205, D401
# compute PWM estimates
x01 = gevpwm(y)
-
+
if x01[0] > 0:
# Compute mle estimates using PWM estimates as starting points
- x02 = stats.genextreme.fit(y,scale=x01[1], loc=x01[2])
- x02 = np.array([-x02[0],x02[2],x02[1]])
+ x02 = stats.genextreme.fit(y, scale=x01[1], loc=x01[2])
+ x02 = np.array([-x02[0], x02[2], x02[1]])
# if alpha reasonable
- if x02[1] >= 1.e-6:
+ if x02[1] >= 1.0e-6: # noqa: PLR2004
# set parameters
par = x02
if par[0] < -1:
par = x01
- warnings.warn('The MLE estimate of the shape parameter of the GEV is not in the range where the MLE estimator is valid. PWM estimation is used.')
- if par[0] > 0.4:
- warnings.warn('The shape parameter of the GEV is not in the range where PWM asymptotic results are valid.')
- else:
+ warnings.warn( # noqa: B028
+ 'The MLE estimate of the shape parameter of the GEV is not in the range where the MLE estimator is valid. PWM estimation is used.'
+ )
+ if par[0] > 0.4: # noqa: PLR2004
+ warnings.warn( # noqa: B028
+ 'The shape parameter of the GEV is not in the range where PWM asymptotic results are valid.'
+ )
+ else:
# set parameters obtained by PWM
- par = x01
- if par[0] > 0.4:
- warnings.warn('The shape parameter of the GEV is not in the range where PWM asymptotic results are valid.')
- else:
+ par = x01
+ if par[0] > 0.4: # noqa: PLR2004
+ warnings.warn( # noqa: B028
+ 'The shape parameter of the GEV is not in the range where PWM asymptotic results are valid.'
+ )
+ else:
# set parameters obtained by PWM
par = x01
- if par[0] < -0.4:
- warnings.warn('The shape parameter of the GEV is not in the range where PWM asymptotic results are valid.')
-
+ if par[0] < -0.4: # noqa: PLR2004
+ warnings.warn( # noqa: B028
+ 'The shape parameter of the GEV is not in the range where PWM asymptotic results are valid.'
+ )
+
return par
-
-#------------------------------------------------------------------------------
-
+
+# ------------------------------------------------------------------------------
+
+
def gevpwm(y):
- '''Author: Iason Papaioannou
+ """Author: Iason Papaioannou
The function gevpwm evaluates the parameters of the generalized
extreme value distribution applying the method of Probability Weighted
- Moments.'''
-
+ Moments.
+ """ # noqa: D205, D401
# compute PWM estimates
y2 = np.sort(y)
beta0 = np.mean(y)
-
- p1 = np.arange(len(y))/(len(y)-1)
- p2 = p1*(np.arange(len(y))-1)/(len(y)-2)
- beta1 = p1@y2
- beta2 = p2@y2
-
- beta1 = beta1/len(y)
- beta2 = beta2/len(y)
-
- c = (2*beta1-beta0)/(3*beta2-beta0)-np.log(2)/np.log(3)
- par0 = -7.8590*c -2.9554*c**2
- equation = lambda x: (3*beta2-beta0)/(2*beta1-beta0)-(1-3**x)/(1-2**x)
+
+ p1 = np.arange(len(y)) / (len(y) - 1)
+ p2 = p1 * (np.arange(len(y)) - 1) / (len(y) - 2)
+ beta1 = p1 @ y2
+ beta2 = p2 @ y2
+
+ beta1 = beta1 / len(y) # noqa: PLR6104
+ beta2 = beta2 / len(y) # noqa: PLR6104
+
+ c = (2 * beta1 - beta0) / (3 * beta2 - beta0) - np.log(2) / np.log(3)
+ par0 = -7.8590 * c - 2.9554 * c**2
+ equation = lambda x: (3 * beta2 - beta0) / (2 * beta1 - beta0) - (1 - 3**x) / ( # noqa: E731
+ 1 - 2**x
+ )
sol = optimize.fsolve(equation, x0=par0, full_output=True)
sol = optimize.fsolve(equation, x0=0.02, full_output=True)
if sol[2] == 1:
par = np.zeros(3)
par[0] = sol[0][0]
- par[1] = -(2*beta1-beta0)*par[0]/special.gamma(1-par[0])/(1-2**par[0])
- par[2] = beta0 - par[1]/par[0]*(special.gamma(1-par[0])-1)
+ par[1] = (
+ -(2 * beta1 - beta0)
+ * par[0]
+ / special.gamma(1 - par[0])
+ / (1 - 2 ** par[0])
+ )
+ par[2] = beta0 - par[1] / par[0] * (special.gamma(1 - par[0]) - 1)
else:
- warnings.warn("fsolve could not converge to a solution for the PWM estimate.")
-
+ warnings.warn( # noqa: B028
+ 'fsolve could not converge to a solution for the PWM estimate.'
+ )
- return par
\ No newline at end of file
+ return par
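Taken together with the class docstring above, a minimal usage sketch of the three construction modes and of the query methods defined in this file (distribution choices and sample values are made up; the import path depends on where ERADist.py lives in your setup):

import numpy as np
from ERADist import ERADist  # assumed import path

d_par = ERADist('normal', 'PAR', [0.0, 1.0])      # by parameters [mu, sigma]
d_mom = ERADist('lognormal', 'MOM', [1.0, 0.5])   # by mean and standard deviation
data = np.random.default_rng(1).normal(2.0, 0.3, size=500)
d_dat = ERADist('normal', 'DATA', data)           # fitted to a data vector

print(d_mom.mean(), d_mom.std())     # recovers the prescribed moments
print(d_par.cdf(d_par.icdf(0.9)))    # ~0.9, icdf inverts cdf
samples = d_dat.random(size=3)       # draws from the fitted marginal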
diff --git a/modules/performUQ/common/ERAClasses/ERANataf.py b/modules/performUQ/common/ERAClasses/ERANataf.py
index 6ac22eead..16f163331 100644
--- a/modules/performUQ/common/ERAClasses/ERANataf.py
+++ b/modules/performUQ/common/ERAClasses/ERANataf.py
@@ -1,9 +1,10 @@
-# import of modules
+# import of modules # noqa: CPY001, D100, INP001
import numpy as np
from scipy import optimize, stats
+
realmin = np.finfo(np.double).tiny
-'''
+"""
---------------------------------------------------------------------------
Nataf Transformation of random variables
---------------------------------------------------------------------------
@@ -57,34 +58,32 @@
models with prescribed marginals and covariances.
Probabilistic Engineering Mechanics 1(2), 105-112
---------------------------------------------------------------------------
-'''
+""" # noqa: W291
-#%%
-class ERANataf(object):
- """
- Generation of joint distribution objects.
+
+# %%
+class ERANataf:
+ """Generation of joint distribution objects.
Construction of the joint distribution object with
-
+
Obj = ERANataf(M,Correlation)
-
- 'M' must be an list or array of ERADist objects defining the marginal
+
+ 'M' must be a list or array of ERADist objects defining the marginal
distributions that together define the joint distribution.
-
+
'Correlation' must be a correlation matrix with shape [d,d], where d is
the number of marginal distributions (dimensions) of the joint
distribution. The matrix describes the dependency between the different
- marginal distributions. According to the general definition of a
+ marginal distributions. According to the general definition of a
correlation matrix, the input matrix must be symmetric, the matrix entries
on the diagonal must be equal to one, all other entries (correlation
coefficients) can have values between -1 and 1.
- """
-
- def __init__(self, M, Correlation):
- """
- Constructor method, for more details have a look at the
+ """ # noqa: D205, D400
+
+ def __init__(self, M, Correlation): # noqa: C901, N803
+ """Constructor method, for more details have a look at the
class description.
- """
-
+ """ # noqa: D205, D401
self.Marginals = np.array(M, ndmin=1)
self.Marginals = self.Marginals.ravel()
self.Rho_X = np.array(Correlation, ndmin=2)
@@ -92,35 +91,45 @@ class description.
# check if all distributions have finite moments
for i in range(n_dist):
- if (not((np.isfinite(self.Marginals[i].mean()) and
- np.isfinite(self.Marginals[i].std())))):
- raise RuntimeError("The marginal distributions need to have "
- "finite mean and variance")
+ if not (
+ np.isfinite(self.Marginals[i].mean())
+ and np.isfinite(self.Marginals[i].std())
+ ):
+ raise RuntimeError( # noqa: DOC501, TRY003
+ 'The marginal distributions need to have ' # noqa: EM101
+ 'finite mean and variance'
+ )
# Check if input for correlation matrix fulfills requirements
try:
np.linalg.cholesky(self.Rho_X)
except np.linalg.LinAlgError:
- raise RuntimeError("The given correlation matrix is not positive definite"
- "--> Nataf transformation is not applicable.")
- if not np.all(self.Rho_X-self.Rho_X.T == 0):
- raise RuntimeError("The given correlation matrix is not symmetric "
- "--> Nataf transformation is not applicable.")
+ raise RuntimeError( # noqa: B904, DOC501, TRY003
+ 'The given correlation matrix is not positive definite ' # noqa: EM101
+ '--> Nataf transformation is not applicable.'
+ )
+ if not np.all(self.Rho_X - self.Rho_X.T == 0):
+ raise RuntimeError( # noqa: DOC501, TRY003
+ 'The given correlation matrix is not symmetric ' # noqa: EM101
+ '--> Nataf transformation is not applicable.'
+ )
if not np.all(np.diag(self.Rho_X) == 1):
- raise RuntimeError("Not all diagonal entries of the given correlation matrix are equal to one "
- "--> Nataf transformation is not applicable.")
-
- '''
+ raise RuntimeError( # noqa: DOC501, TRY003
+ 'Not all diagonal entries of the given correlation matrix are equal to one ' # noqa: EM101
+ '--> Nataf transformation is not applicable.'
+ )
+
+ """
Calculation of the transformed correlation matrix. This is achieved
by a quadratic two-dimensional Gauss-Legendre integration
- '''
+ """
n = 1024
zmax = 8
zmin = -zmax
points, weights = np.polynomial.legendre.leggauss(n)
- points = - (0.5 * (points + 1) * (zmax - zmin) + zmin)
- weights = weights * (0.5 * (zmax - zmin))
+ points = -(0.5 * (points + 1) * (zmax - zmin) + zmin)
+ weights = weights * (0.5 * (zmax - zmin)) # noqa: PLR6104
xi = np.tile(points, [n, 1])
xi = xi.flatten(order='F')
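The next hunk uses this quadrature grid to solve the Nataf integral equation pairwise, i.e. to find the Gaussian-copula correlation Rho_Z[i, j] that reproduces the target correlation Rho_X[i, j] between two non-normal marginals. The condensed sketch below does that for a single pair; scipy.stats frozen distributions stand in for ERADist marginals, the construction of the second grid axis and the 2-D weights is reconstructed from context (those lines fall outside this excerpt), and solve_nataf_rho is a hypothetical name.

    import numpy as np
    from scipy import optimize, stats


    def solve_nataf_rho(marg_i, marg_j, rho_x, n=128, zmax=8.0):
        """Gaussian-copula correlation that reproduces rho_x between two marginals."""
        zmin = -zmax
        points, weights = np.polynomial.legendre.leggauss(n)
        points = 0.5 * (points + 1) * (zmax - zmin) + zmin   # map nodes to [zmin, zmax]
        weights = weights * 0.5 * (zmax - zmin)

        xi = np.tile(points, [n, 1]).flatten(order='F')      # first grid axis
        eta = np.tile(points, n)                             # second grid axis
        w2d = np.tile(weights, [n, 1]).flatten(order='F') * np.tile(weights, n)

        # standardized inverse-CDF transforms of the two marginals
        g_i = (marg_i.ppf(stats.norm.cdf(xi)) - marg_i.mean()) / marg_i.std()
        g_j = (marg_j.ppf(stats.norm.cdf(eta)) - marg_j.mean()) / marg_j.std()
        coef = g_i * g_j * w2d

        def bivariate_normal_pdf(x1, x2, rho):
            return (1 / (2 * np.pi * np.sqrt(1 - rho**2))
                    * np.exp(-(x1**2 - 2 * rho * x1 * x2 + x2**2) / (2 * (1 - rho**2))))

        def fun(rho0):
            return (coef * bivariate_normal_pdf(xi, eta, rho0)).sum() - rho_x

        eps = np.finfo(float).eps
        return optimize.brentq(fun, a=-1 + eps, b=1 - eps)


    # example: lognormal and gamma marginals with target correlation 0.5
    print(solve_nataf_rho(stats.lognorm(s=0.4), stats.gamma(a=3.0), rho_x=0.5))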
@@ -133,309 +142,345 @@ class description.
weights2d = first * second
w2d = weights2d.flatten()
- # check is X the identiy
+ # check if Rho_X is the identity matrix
self.Rho_Z = np.identity(n=n_dist)
- if (np.linalg.norm(self.Rho_X - np.identity(n=n_dist)) > 10**(-5)):
+ if np.linalg.norm(self.Rho_X - np.identity(n=n_dist)) > 10 ** (-5): # noqa: PLR1702
for i in range(n_dist):
- for j in range(i+1, n_dist):
- if (self.Rho_X[i, j] == 0):
- continue
-
- elif ((self.Marginals[i].Name == 'standardnormal') and
- (self.Marginals[j].Name == 'standardnormal')):
- self.Rho_Z[i, j] = self.Rho_X[i, j]
- self.Rho_Z[j, i] = self.Rho_X[j, i]
+ for j in range(i + 1, n_dist):
+ if self.Rho_X[i, j] == 0:
continue
- elif ((self.Marginals[i].Name == 'normal') and
- (self.Marginals[j].Name == 'normal')):
+ elif ( # noqa: RET507
+ (
+ (self.Marginals[i].Name == 'standardnormal')
+ and (self.Marginals[j].Name == 'standardnormal')
+ )
+ or (
+ (self.Marginals[i].Name == 'normal')
+ and (self.Marginals[j].Name == 'normal')
+ )
+ ):
self.Rho_Z[i, j] = self.Rho_X[i, j]
self.Rho_Z[j, i] = self.Rho_X[j, i]
continue
- elif ((self.Marginals[i].Name == 'normal') and
- (self.Marginals[j].Name == 'lognormal')):
- Vj = self.Marginals[j].std()/self.Marginals[j].mean()
- self.Rho_Z[i, j] = (self.Rho_X[i, j] *
- Vj/np.sqrt(np.log(1 + Vj**2)))
+ elif (self.Marginals[i].Name == 'normal') and (
+ self.Marginals[j].Name == 'lognormal'
+ ):
+ Vj = self.Marginals[j].std() / self.Marginals[j].mean() # noqa: N806
+ self.Rho_Z[i, j] = (
+ self.Rho_X[i, j] * Vj / np.sqrt(np.log(1 + Vj**2))
+ )
self.Rho_Z[j, i] = self.Rho_Z[i, j]
continue
- elif ((self.Marginals[i].Name == 'lognormal') and
- (self.Marginals[j].Name == 'normal')):
- Vi = self.Marginals[i].std()/self.Marginals[i].mean()
- self.Rho_Z[i, j] = (self.Rho_X[i, j] *
- Vi/np.sqrt(np.log(1 + Vi**2)))
+ elif (self.Marginals[i].Name == 'lognormal') and (
+ self.Marginals[j].Name == 'normal'
+ ):
+ Vi = self.Marginals[i].std() / self.Marginals[i].mean() # noqa: N806
+ self.Rho_Z[i, j] = (
+ self.Rho_X[i, j] * Vi / np.sqrt(np.log(1 + Vi**2))
+ )
self.Rho_Z[j, i] = self.Rho_Z[i, j]
continue
- elif ((self.Marginals[i].Name == 'lognormal') and
- (self.Marginals[j].Name == 'lognormal')):
- Vi = self.Marginals[i].std()/self.Marginals[i].mean()
- Vj = self.Marginals[j].std()/self.Marginals[j].mean()
- self.Rho_Z[i, j] = (np.log(1 + self.Rho_X[i, j]*Vi*Vj)
- / np.sqrt(np.log(1 + Vi**2) *
- np.log(1+Vj**2)))
+ elif (self.Marginals[i].Name == 'lognormal') and (
+ self.Marginals[j].Name == 'lognormal'
+ ):
+ Vi = self.Marginals[i].std() / self.Marginals[i].mean() # noqa: N806
+ Vj = self.Marginals[j].std() / self.Marginals[j].mean() # noqa: N806
+ self.Rho_Z[i, j] = np.log(
+ 1 + self.Rho_X[i, j] * Vi * Vj
+ ) / np.sqrt(np.log(1 + Vi**2) * np.log(1 + Vj**2))
self.Rho_Z[j, i] = self.Rho_Z[i, j]
continue
# solving Nataf
- tmp_f_xi = ((self.Marginals[j].icdf(stats.norm.cdf(eta)) -
- self.Marginals[j].mean()) /
- self.Marginals[j].std())
- tmp_f_eta = ((self.Marginals[i].icdf(stats.norm.cdf(xi)) -
- self.Marginals[i].mean()) /
- self.Marginals[i].std())
+ tmp_f_xi = (
+ self.Marginals[j].icdf(stats.norm.cdf(eta))
+ - self.Marginals[j].mean()
+ ) / self.Marginals[j].std()
+ tmp_f_eta = (
+ self.Marginals[i].icdf(stats.norm.cdf(xi))
+ - self.Marginals[i].mean()
+ ) / self.Marginals[i].std()
coef = tmp_f_xi * tmp_f_eta * w2d
def fun(rho0):
- return ((coef *
- self.bivariateNormalPdf(xi, eta, rho0)).sum()
- - self.Rho_X[i, j])
-
- x0, r = optimize.brentq(f=fun,
- a=-1 + np.finfo(float).eps,
- b=1 - np.finfo(float).eps,
- full_output=True)
- if (r.converged == 1):
+ return (
+ coef * self.bivariateNormalPdf(xi, eta, rho0) # noqa: B023
+ ).sum() - self.Rho_X[i, j] # noqa: B023
+
+ x0, r = optimize.brentq(
+ f=fun,
+ a=-1 + np.finfo(float).eps,
+ b=1 - np.finfo(float).eps,
+ full_output=True,
+ )
+ if r.converged == 1:
self.Rho_Z[i, j] = x0
self.Rho_Z[j, i] = self.Rho_Z[i, j]
else:
- sol = optimize.fsolve(func=fun,
- x0=self.Rho_X[i, j],
- full_output=True)
- if (sol[2] == 1):
+ sol = optimize.fsolve(
+ func=fun, x0=self.Rho_X[i, j], full_output=True
+ )
+ if sol[2] == 1:
self.Rho_Z[i, j] = sol[0]
self.Rho_Z[j, i] = self.Rho_Z[i, j]
else:
- sol = optimize.fsolve(func=fun,
- x0=-self.Rho_X[i, j],
- full_output=True)
- if (sol[2] == 1):
+ sol = optimize.fsolve(
+ func=fun, x0=-self.Rho_X[i, j], full_output=True
+ )
+ if sol[2] == 1:
self.Rho_Z[i, j] = sol[0]
self.Rho_Z[j, i] = self.Rho_Z[i, j]
else:
- for i in range(10):
+ for _ in range(10):
init = 2 * np.random.rand() - 1
- sol = optimize.fsolve(func=fun,
- x0=init,
- full_output=True)
- if (sol[2] == 1):
+ sol = optimize.fsolve(
+ func=fun, x0=init, full_output=True
+ )
+ if sol[2] == 1:
break
- if (sol[2] == 1):
+ if sol[2] == 1:
self.Rho_Z[i, j] = sol[0]
self.Rho_Z[j, i] = self.Rho_Z[i, j]
else:
- raise RuntimeError("brentq and fsolve coul"
- "d not converge to a "
- "solution of the Nataf "
- "integral equation")
+ raise RuntimeError( # noqa: DOC501, TRY003
+ 'brentq and fsolve could not converge ' # noqa: EM101
+ 'to a solution of the Nataf '
+ 'integral equation'
+ )
try:
self.A = np.linalg.cholesky(self.Rho_Z)
except np.linalg.LinAlgError:
- raise RuntimeError("Transformed correlation matrix is not positive"
- " definite --> Nataf transformation is not "
- "applicable.")
+ raise RuntimeError( # noqa: B904, DOC501, TRY003
+ 'Transformed correlation matrix is not positive' # noqa: EM101
+ ' definite --> Nataf transformation is not '
+ 'applicable.'
+ )
-#%%
- '''
+ # %%
+ """
This function performs the transformation from X to U by taking
the inverse standard normal cdf of the cdf of every value. Then it
performs the transformation from Z to U. A is the lower triangular
matrix of the cholesky decomposition of Rho_Z and U is the resulting
independent standard normal vector. Afterwards it calculates the
Jacobian of this Transformation if it is needed.
- '''
- def X2U(self, X, Jacobian=False):
- """
- Carries out the transformation from physical space X to
+ """
+
+ def X2U(self, X, Jacobian=False): # noqa: FBT002, N802, N803
+ """Carries out the transformation from physical space X to
standard normal space U.
X must be a [n,d]-shaped array (n = number of data points,
d = dimensions).
The Jacobian of the transformation of the first given data
point is only given as an output in case that the input
argument Jacobian=True .
- """
-
+ """ # noqa: D205
n_dim = len(self.Marginals)
- X = np.array(X, ndmin=2)
-
+ X = np.array(X, ndmin=2) # noqa: N806
+
# check if all marginal distributions are continuous
for i in range(n_dim):
- if self.Marginals[i].Name in ['binomial','geometric','negativebinomial','poisson']:
- raise RuntimeError("At least one of the marginal distributions is a discrete distribution,"
- "the transformation X2U is therefore not possible.")
-
- # check of the dimensions of input X
- if X.ndim > 2:
- raise RuntimeError("X must have not more than two dimensions. ")
+ if self.Marginals[i].Name in [ # noqa: PLR6201
+ 'binomial',
+ 'geometric',
+ 'negativebinomial',
+ 'poisson',
+ ]:
+ raise RuntimeError( # noqa: DOC501, TRY003
+ 'At least one of the marginal distributions is a discrete distribution, ' # noqa: EM101
+ 'the transformation X2U is therefore not possible.'
+ )
+
+ # check of the dimensions of input X
+ if X.ndim > 2: # noqa: PLR2004
+ raise RuntimeError('X must have not more than two dimensions. ') # noqa: DOC501, EM101, TRY003
if np.shape(X)[1] == 1 and n_dim != 1:
# in case that only one point X is given, he can be defined either as row or column vector
- X = X.T
+ X = X.T # noqa: N806
if np.shape(X)[1] != n_dim:
- raise RuntimeError("X must be an array of size [n,d], where d is the"
- " number of dimensions of the joint distribution.")
-
- Z = np.zeros(np.flip(X.shape))
+ raise RuntimeError( # noqa: DOC501, TRY003
+ 'X must be an array of size [n,d], where d is the' # noqa: EM101
+ ' number of dimensions of the joint distribution.'
+ )
+
+ Z = np.zeros(np.flip(X.shape)) # noqa: N806
for i in range(n_dim):
- Z[i,:] = stats.norm.ppf(self.Marginals[i].cdf(X[:, i]))
- U = np.linalg.solve(self.A, Z.squeeze()).T
-
+ Z[i, :] = stats.norm.ppf(self.Marginals[i].cdf(X[:, i]))
+ U = np.linalg.solve(self.A, Z.squeeze()).T # noqa: N806
+
if Jacobian:
diag = np.zeros([n_dim, n_dim])
for i in range(n_dim):
- diag[i, i] = self.Marginals[i].pdf(X[0,i])/stats.norm.pdf(Z[i,0])
- Jac = np.linalg.solve(self.A, diag)
+ diag[i, i] = self.Marginals[i].pdf(X[0, i]) / stats.norm.pdf(Z[i, 0])
+ Jac = np.linalg.solve(self.A, diag) # noqa: N806
return np.squeeze(U), Jac
- else:
+ else: # noqa: RET505
return np.squeeze(U)
-
-#%%
- def U2X(self, U, Jacobian=False):
- """
- Carries out the transformation from standard normal space U
+ # %%
+ def U2X(self, U, Jacobian=False): # noqa: FBT002, N802, N803
+ """Carries out the transformation from standard normal space U
to physical space X.
U must be a [n,d]-shaped array (n = number of data points,
d = dimensions).
The Jacobian of the transformation of the first given data
point is only given as an output in case that the input
argument Jacobian=True .
- """
-
+ """ # noqa: D205
n_dim = len(self.Marginals)
- U = np.array(U, ndmin=2)
-
- # check of the dimensions of input U
- if U.ndim > 2:
- raise RuntimeError("U must have not more than two dimensions. ")
+ U = np.array(U, ndmin=2) # noqa: N806
+
+ # check of the dimensions of input U
+ if U.ndim > 2: # noqa: PLR2004
+ raise RuntimeError('U must have not more than two dimensions. ') # noqa: DOC501, EM101, TRY003
if np.shape(U)[1] == 1 and n_dim != 1:
# in case that only one point U is given, he can be defined either as row or column vector
- U = U.T
+ U = U.T # noqa: N806
if np.shape(U)[1] != n_dim:
- raise RuntimeError("U must be an array of size [n,d], where d is the"
- " number of dimensions of the joint distribution.")
- else:
- U = U.T
- Z = self.A @ U
-
- X = np.zeros(np.flip(U.shape))
+ raise RuntimeError( # noqa: DOC501, TRY003
+ 'U must be an array of size [n,d], where d is the' # noqa: EM101
+ ' number of dimensions of the joint distribution.'
+ )
+ else: # noqa: RET506
+ U = U.T # noqa: N806
+ Z = self.A @ U # noqa: N806
+
+ X = np.zeros(np.flip(U.shape)) # noqa: N806
for i in range(n_dim):
X[:, i] = self.Marginals[i].icdf(stats.norm.cdf(Z[i, :]))
if Jacobian:
diag = np.zeros([n_dim, n_dim])
for i in range(n_dim):
- diag[i, i] = stats.norm.pdf(Z[i,0])/self.Marginals[i].pdf(X[0,i])
- Jac = np.dot(diag, self.A)
+ diag[i, i] = stats.norm.pdf(Z[i, 0]) / self.Marginals[i].pdf(X[0, i])
+ Jac = np.dot(diag, self.A) # noqa: N806
return np.squeeze(X), Jac
- else:
+ else: # noqa: RET505
return np.squeeze(X)
-# %%
+ # %%
def random(self, n=1):
- """
- Creates n samples of the joint distribution.
+ """Creates n samples of the joint distribution.
Every row in the output array corresponds to one sample.
- """
+ """ # noqa: D205, D401
n = int(n)
n_dim = np.size(self.Marginals)
- U = np.random.randn(n_dim, n)
- Z = np.dot(self.A, U)
- jr = np.zeros([n,n_dim])
+ U = np.random.randn(n_dim, n) # noqa: N806
+ Z = np.dot(self.A, U) # noqa: N806
+ jr = np.zeros([n, n_dim])
for i in range(n_dim):
jr[:, i] = self.Marginals[i].icdf(stats.norm.cdf(Z[i, :]))
-
+
return np.squeeze(jr)
-# %%
- def pdf(self, X):
- """
- Computes the joint PDF.
+ # %%
+ def pdf(self, X): # noqa: C901, N803
+ """Computes the joint PDF.
X must be a [n,d]-shaped array (n = number of data points,
- d = dimensions).
- """
-
+ d = dimensions).
+ """ # noqa: D205, D401
n_dim = len(self.Marginals)
- X = np.array(X, ndmin=2)
-
+ X = np.array(X, ndmin=2) # noqa: N806
+
# check if all marginal distributions are continuous
for i in range(n_dim):
- if self.Marginals[i].Name in ['binomial','geometric','negativebinomial','poisson']:
- raise RuntimeError("At least one of the marginal distributions is a discrete distribution,"
- "the transformation X2U is therefore not possible.")
-
- # check of the dimensions of input X
- if X.ndim > 2:
- raise RuntimeError("X must have not more than two dimensions.")
+ if self.Marginals[i].Name in [ # noqa: PLR6201
+ 'binomial',
+ 'geometric',
+ 'negativebinomial',
+ 'poisson',
+ ]:
+ raise RuntimeError( # noqa: DOC501, TRY003
+ 'At least one of the marginal distributions is a discrete distribution, ' # noqa: EM101
+ 'the evaluation of the joint PDF is therefore not possible.'
+ )
+
+ # check of the dimensions of input X
+ if X.ndim > 2: # noqa: PLR2004
+ raise RuntimeError('X must have not more than two dimensions.') # noqa: DOC501, EM101, TRY003
if np.shape(X)[1] == 1 and n_dim != 1:
# in case that only one point X is given, he can be defined either as row or column vector
- X = X.T
+ X = X.T # noqa: N806
if np.shape(X)[1] != n_dim:
- raise RuntimeError("X must be an array of size [n,d], where d is the"
- " number of dimensions of the joint distribution.")
-
- n_X = np.shape(X)[0]
- U = np.zeros([n_X, n_dim])
+ raise RuntimeError( # noqa: DOC501, TRY003
+ 'X must be an array of size [n,d], where d is the' # noqa: EM101
+ ' number of dimensions of the joint distribution.'
+ )
+
+ n_X = np.shape(X)[0] # noqa: N806
+ U = np.zeros([n_X, n_dim]) # noqa: N806
phi = np.zeros([n_dim, n_X])
f = np.zeros([n_dim, n_X])
- mu = np.zeros(n_dim)
+ mu = np.zeros(n_dim)
for i in range(n_dim):
U[:, i] = stats.norm.ppf(self.Marginals[i].cdf(X[:, i]))
phi[i, :] = stats.norm.pdf(U[:, i])
- f[i, :] = self.Marginals[i].pdf(X[:, i])
+ f[i, :] = self.Marginals[i].pdf(X[:, i])
phi_n = stats.multivariate_normal.pdf(U, mu, self.Rho_Z)
jointpdf = np.zeros(n_X)
for i in range(n_X):
try:
- jointpdf[i] = ((np.prod(f[:, i])/(np.prod(phi[:, i])+realmin)) * phi_n[i])
- except IndexError:
+ jointpdf[i] = (
+ np.prod(f[:, i]) / (np.prod(phi[:, i]) + realmin)
+ ) * phi_n[i]
+ except IndexError: # noqa: PERF203
# In case of n=1, phi_n is a scalar.
- jointpdf[i] = ((np.prod(f[:, i])/(np.prod(phi[:, i])+realmin)) * phi_n)
+ jointpdf[i] = (
+ np.prod(f[:, i]) / (np.prod(phi[:, i]) + realmin)
+ ) * phi_n
except ZeroDivisionError:
jointpdf[i] = 0
-
+
if np.size(jointpdf) == 1:
return jointpdf[0]
- else:
+ else: # noqa: RET505
return jointpdf
-#%%
- def cdf(self, X):
- """
- Computes the joint CDF.
+ # %%
+ def cdf(self, X): # noqa: N803
+ """Computes the joint CDF.
X must be a [n,d]-shaped array (n = number of data points,
d = dimensions).
The CDF computation is based on the multivariate normal cdf.
In scipy the multivariate normal cdf is computed by Monte Carlo
sampling, the output of this method is therefore also a
stochastic quantity.
- """
-
+ """ # noqa: D205, D401
n_dim = len(self.Marginals)
- X = np.array(X, ndmin=2)
-
- # check of the dimensions of input X
- if X.ndim > 2:
- raise RuntimeError("X must have not more than two dimensions. ")
+ X = np.array(X, ndmin=2) # noqa: N806
+
+ # check of the dimensions of input X
+ if X.ndim > 2: # noqa: PLR2004
+ raise RuntimeError('X must have not more than two dimensions. ') # noqa: DOC501, EM101, TRY003
if np.shape(X)[1] == 1 and n_dim != 1:
# in case that only one point X is given, he can be defined either as row or column vector
- X = X.T
+ X = X.T # noqa: N806
if np.shape(X)[1] != n_dim:
- raise RuntimeError("X must be an array of size [n,d], where d is the"
- " number of dimensions of the joint distribution.")
- n_X = np.shape(X)[0]
- U = np.zeros([n_X, n_dim])
+ raise RuntimeError( # noqa: DOC501, TRY003
+ 'X must be an array of size [n,d], where d is the' # noqa: EM101
+ ' number of dimensions of the joint distribution.'
+ )
+ n_X = np.shape(X)[0] # noqa: N806
+ U = np.zeros([n_X, n_dim]) # noqa: N806
for i in range(n_dim):
- U[:, i] = stats.norm.ppf(self.Marginals[i].cdf(X[:, i]))
- mu = np.zeros(n_dim)
- jointcdf = stats.multivariate_normal.cdf(U,mean=mu,cov=np.matrix(self.Rho_Z))
-
- return jointcdf
+ U[:, i] = stats.norm.ppf(self.Marginals[i].cdf(X[:, i]))
+ mu = np.zeros(n_dim)
+ jointcdf = stats.multivariate_normal.cdf(
+ U, mean=mu, cov=np.matrix(self.Rho_Z)
+ )
+
+ return jointcdf # noqa: RET504
-#%%
+ # %%
@staticmethod
- def bivariateNormalPdf(x1, x2, rho):
- return (1 / (2 * np.pi * np.sqrt(1-rho**2)) *
- np.exp(-1/(2*(1-rho**2)) *
- (x1**2 - 2 * rho * x1 * x2 + x2**2)))
+ def bivariateNormalPdf(x1, x2, rho): # noqa: N802, D102
+ return (
+ 1
+ / (2 * np.pi * np.sqrt(1 - rho**2))
+ * np.exp(-1 / (2 * (1 - rho**2)) * (x1**2 - 2 * rho * x1 * x2 + x2**2))
+ )
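Beyond the reformatting and noqa annotations, the ERANataf refactor above keeps the public interface unchanged: the constructor solves for the transformed correlation matrix Rho_Z, and X2U, U2X, random, pdf and cdf operate on [n, d]-shaped arrays. A minimal usage sketch follows; the ERADist(name, opt, val) calling convention is an assumption inferred from the ERAName/ERAOpt/ERAVal fields used elsewhere in this patch, and the parameter values are only illustrative.

    import numpy as np
    from ERAClasses.ERADist import ERADist
    from ERAClasses.ERANataf import ERANataf

    # two correlated marginals (assumed ERADist signature: name, option, values)
    M = [
        ERADist('normal', 'PAR', [0.0, 1.0]),
        ERADist('lognormal', 'PAR', [0.5, 0.3]),
    ]
    Rho_X = np.array([[1.0, 0.5],
                      [0.5, 1.0]])

    joint = ERANataf(M, Rho_X)      # constructor solves for Rho_Z

    X = joint.random(n=5)           # 5 samples in physical space, shape [5, 2]
    U = joint.X2U(X)                # map to independent standard normal space
    X_back = joint.U2X(U)           # round trip back to physical space
    densities = joint.pdf(X)        # joint PDF at the sampled points

    print(np.allclose(X, X_back))   # True up to numerical tolerance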
diff --git a/modules/performUQ/common/ERAClasses/ERARosen.py b/modules/performUQ/common/ERAClasses/ERARosen.py
index 717fce26f..5625df024 100644
--- a/modules/performUQ/common/ERAClasses/ERARosen.py
+++ b/modules/performUQ/common/ERAClasses/ERARosen.py
@@ -1,12 +1,12 @@
-# import of modules
+# import of modules # noqa: CPY001, D100, INP001
+import matplotlib.pyplot as plt
+import networkx as nx
import numpy as np
-from scipy import stats
-from ERADist import ERADist
from ERACond import ERACond
-import networkx as nx
-import matplotlib.pyplot as plt
+from ERADist import ERADist
+from scipy import stats
-'''
+"""
---------------------------------------------------------------------------
Generation of joint distribution objects based on marginal and conditional
distribution objects.
@@ -59,300 +59,320 @@
3. Documentation of the ERA Distribution Classes
---------------------------------------------------------------------------
-'''
+""" # noqa: W291
+
# %%
-class ERARosen(object):
- """
- Generation of joint distribution objects.
+class ERARosen:
+ """Generation of joint distribution objects.
Construction of the joint distribution object with
-
+
Obj = ERARosen(dist,depend,opt)
-
+
'dist' must be a list or array which contains all the
marginal distributions (ERADist objects) and conditional distributions
(ERACond objects) that define the joint distribution.
-
+
'depend' describes the dependency between the different marginal and
- conditional distributions. The dependency is defined by a list of arrays
+ conditional distributions. The dependency is defined by a list of arrays
which contain the indices of the parents of the respective distributions.
- The arrays within the list must be ordered according to the place of the
+ The arrays within the list must be ordered according to the place of the
corresponding distribution in the input 'dist'. If a distribution is
defined as a marginal, and therefore has no parents, an empty array([])
- must be given for that distribution in 'depend'. For conditional
+ must be given for that distribution in 'depend'. For conditional
distributions the order of the indices within one of the arrays
corresponds to the order of the variables of the respective function
handle of the respective ERACond object.
- """
-
+ """ # noqa: D205, D400
+
def __init__(self, dist, depend):
- """
- Constructor method, for more details have a look at the
+ """Constructor method, for more details have a look at the
class description.
- """
+ """ # noqa: D205, D401
self.Dist = dist
self.Parents = depend
-
+
n_dist = len(dist)
n_dist_dep = len(depend)
if n_dist != n_dist_dep:
- raise RuntimeError("The number of distributions according to the inputs"
- " dist and depend doesn't match.")
-
+ raise RuntimeError( # noqa: DOC501, TRY003
+ 'The number of distributions according to the inputs' # noqa: EM101
+ " dist and depend doesn't match."
+ )
+
n_parents = np.zeros(n_dist)
for i in range(n_dist):
- if isinstance(dist[i],ERACond):
+ if isinstance(dist[i], ERACond):
n_parents[i] = dist[i].Param.__code__.co_argcount
- elif not isinstance(dist[i],ERADist):
- raise RuntimeError("The objects in dist must be either ERADist or ERACond objects.")
-
+ elif not isinstance(dist[i], ERADist):
+ raise RuntimeError( # noqa: DOC501, TRY003, TRY004
+ 'The objects in dist must be either ERADist or ERACond objects.' # noqa: EM101
+ )
+
# build adjacency matrix
- adj_mat = np.zeros([n_dist,n_dist])
+ adj_mat = np.zeros([n_dist, n_dist])
for i in range(n_dist):
- adj_mat[i,depend[i]] = 1
- # check if obtained network represents a directed acyclical graph
+ adj_mat[i, depend[i]] = 1
+ # check if obtained network represents a directed acyclical graph
adj_prod = np.identity(n_dist)
- for i in range(n_dist+1):
+ for i in range(n_dist + 1): # noqa: B007
adj_prod = np.matmul(adj_prod, adj_mat)
if sum(np.diag(adj_prod)) != 0:
- raise RuntimeError("The graph defining the dependence between the different "
- "distributions must be directed and acyclical.")
-
+ raise RuntimeError( # noqa: DOC501, TRY003
+ 'The graph defining the dependence between the different ' # noqa: EM101
+ 'distributions must be directed and acyclical.'
+ )
+
self.Adjacency = np.matrix(adj_mat)
-
+
# sort distributions according to dependencies
- layers = list()
- rem_dist = np.arange(0,n_dist)
+ layers = list() # noqa: C408
+ rem_dist = np.arange(0, n_dist)
while len(rem_dist) > 0:
- n_dep_rem = np.sum(adj_mat,1)
- curr_d = n_dep_rem == 0 # distributions on current layer
+ n_dep_rem = np.sum(adj_mat, 1)
+ curr_d = n_dep_rem == 0 # distributions on current layer
curr_dist = rem_dist[curr_d]
- layers.append(curr_dist) # save distributions on current layer
- adj_mat[:,curr_dist] = 0
- adj_mat = adj_mat[np.logical_not(curr_d),:]
+ layers.append(curr_dist) # save distributions on current layer
+ adj_mat[:, curr_dist] = 0
+ adj_mat = adj_mat[np.logical_not(curr_d), :]
rem_dist = rem_dist[np.logical_not(curr_d)]
-
+
if len(layers) > 1:
self.Order = [layers[0], np.concatenate(layers[1:])]
self.Layers = layers
else:
- raise RuntimeError("The defined joint distribution consists only of independent distributions."
- "This type of joint distribution is not supported by ERARosen.")
-
-# %%
- def X2U(self, X, error=True):
- """
- Carries out the transformation from physical space X to
- standard normal space U.
+ raise RuntimeError( # noqa: DOC501, TRY003
+ 'The defined joint distribution consists only of independent distributions. ' # noqa: EM101
+ 'This type of joint distribution is not supported by ERARosen.'
+ )
+
+ # %%
+ def X2U(self, X, error=True): # noqa: FBT002, N802, N803
+ """Carries out the transformation from physical space X to
+ standard normal space U.
X must be a [n,d]-shaped array (n = number of data points,
d = dimensions).
If no error message should be given in case of the detection
of an improper distribution, give error=False as second input.
The output for the improper data points is then given as nan.
- """
-
+ """ # noqa: D205
n_dim = len(self.Dist)
- X = np.array(X, ndmin=2, dtype=float)
-
+ X = np.array(X, ndmin=2, dtype=float) # noqa: N806
+
# check if all marginal and conditional distributions are continuous
for i in range(n_dim):
- if self.Dist[i].Name in ['binomial','geometric','negativebinomial','poisson']:
- raise RuntimeError("At least one of the marginal distributions or conditional distributions "
- "is a discrete distribution, the transformation X2U is therefore not possible.")
-
- # check of the dimensions of input X
- if X.ndim > 2:
- raise RuntimeError("X must have not more than two dimensions. ")
+ if self.Dist[i].Name in [ # noqa: PLR6201
+ 'binomial',
+ 'geometric',
+ 'negativebinomial',
+ 'poisson',
+ ]:
+ raise RuntimeError( # noqa: DOC501, TRY003
+ 'At least one of the marginal distributions or conditional distributions ' # noqa: EM101
+ 'is a discrete distribution, the transformation X2U is therefore not possible.'
+ )
+
+ # check of the dimensions of input X
+ if X.ndim > 2: # noqa: PLR2004
+ raise RuntimeError('X must have not more than two dimensions. ') # noqa: DOC501, EM101, TRY003
if np.shape(X)[1] == 1 and n_dim != 1:
# in case that only one point X is given, he can be defined either as row or column vector
- X = X.T
+ X = X.T # noqa: N806
if np.shape(X)[1] != n_dim:
- raise RuntimeError("X must be an array of size [n,d], where d is the"
- " number of dimensions of the joint distribution.")
-
- n_X = np.shape(X)[0]
- U = np.zeros([n_X,n_dim])
-
+ raise RuntimeError( # noqa: DOC501, TRY003
+ 'X must be an array of size [n,d], where d is the' # noqa: EM101
+ ' number of dimensions of the joint distribution.'
+ )
+
+ n_X = np.shape(X)[0] # noqa: N806
+ U = np.zeros([n_X, n_dim]) # noqa: N806
+
for i in self.Order[0]:
- U[:,i] = stats.norm.ppf(self.Dist[i].cdf(X[:,i]))
-
+ U[:, i] = stats.norm.ppf(self.Dist[i].cdf(X[:, i]))
+
for i in self.Order[1]:
- U[:,i] = stats.norm.ppf(self.Dist[i].condCDF(X[:,i],X[:,self.Parents[i]]))
-
+ U[:, i] = stats.norm.ppf(
+ self.Dist[i].condCDF(X[:, i], X[:, self.Parents[i]])
+ )
+
# find rows with nan
- lin_ind = np.any(np.isnan(U),1)
-
+ lin_ind = np.any(np.isnan(U), 1)
+
if error:
if not all(np.logical_not(lin_ind)):
- raise RuntimeError("Invalid joint distribution was created.")
+ raise RuntimeError('Invalid joint distribution was created.') # noqa: DOC501, EM101, TRY003
else:
- U[lin_ind,:] = np.nan
-
+ U[lin_ind, :] = np.nan
+
return np.squeeze(U)
-
-# %%
- def U2X(self, U, error=True):
- """
- Carries out the transformation from standard normal space U
- to physical space X .
+
+ # %%
+ def U2X(self, U, error=True): # noqa: FBT002, N802, N803
+ """Carries out the transformation from standard normal space U
+ to physical space X.
U must be a [n,d]-shaped array (n = number of data points,
d = dimensions).
If no error message should be given in case of the detection
of an improper distribution, give error=False as second input.
The output for the improper data points is then given as nan.
- """
-
+ """ # noqa: D205
n_dim = len(self.Dist)
- U = np.array(U, ndmin=2, dtype=float)
-
- # check of the dimensions of input U
- if U.ndim > 2:
- raise RuntimeError("U must have not more than two dimensions. ")
+ U = np.array(U, ndmin=2, dtype=float) # noqa: N806
+
+ # check of the dimensions of input U
+ if U.ndim > 2: # noqa: PLR2004
+ raise RuntimeError('U must have not more than two dimensions. ') # noqa: DOC501, EM101, TRY003
if np.shape(U)[1] == 1 and n_dim != 1:
# in case that only one point X is given, he can be defined either as row or column vector
- U = U.T
+ U = U.T # noqa: N806
if np.shape(U)[1] != n_dim:
- raise RuntimeError("U must be an array of size [n,d], where d is the"
- " number of dimensions of the joint distribution.")
-
- n_U = np.shape(U)[0]
- X = np.zeros([n_U,n_dim])
- CDF_values = stats.norm.cdf(U)
-
+ raise RuntimeError( # noqa: DOC501, TRY003
+ 'U must be an array of size [n,d], where d is the' # noqa: EM101
+ ' number of dimensions of the joint distribution.'
+ )
+
+ n_U = np.shape(U)[0] # noqa: N806
+ X = np.zeros([n_U, n_dim]) # noqa: N806
+ CDF_values = stats.norm.cdf(U) # noqa: N806
+
for i in self.Order[0]:
- X[:,i] = self.Dist[i].icdf(CDF_values[:,i])
-
+ X[:, i] = self.Dist[i].icdf(CDF_values[:, i])
+
for i in self.Order[1]:
- X[:,i] = self.Dist[i].condiCDF(CDF_values[:,i],X[:,self.Parents[i]])
-
+ X[:, i] = self.Dist[i].condiCDF(CDF_values[:, i], X[:, self.Parents[i]])
+
# find rows with nan
- lin_ind = np.any(np.isnan(X),1)
-
+ lin_ind = np.any(np.isnan(X), 1)
+
if error:
if not np.all(np.logical_not(lin_ind)):
- raise RuntimeError("Invalid joint distribution was created.")
+ raise RuntimeError('Invalid joint distribution was created.') # noqa: DOC501, EM101, TRY003
else:
- X[lin_ind,:] = np.nan
-
- return np.squeeze(X)
-
-# %%
- def pdf(self, X, error=True):
- """
- Computes the joint PDF.
+ X[lin_ind, :] = np.nan
+
+ return np.squeeze(X)
+
+ # %%
+ def pdf(self, X, error=True): # noqa: FBT002, N803
+ """Computes the joint PDF.
X must be a [n,d]-shaped array (n = number of data points,
d = dimensions).
If no error message should be given in case of the detection
of an improper distribution, give error=False as second input.
The output for the improper data points is then given as nan.
- """
-
+ """ # noqa: D205, D401
n_dim = len(self.Dist)
- X = np.array(X, ndmin=2, dtype=float)
-
- # check of the dimensions of input X
- if X.ndim > 2:
- raise RuntimeError("X must have not more than two dimensions. ")
+ X = np.array(X, ndmin=2, dtype=float) # noqa: N806
+
+ # check of the dimensions of input X
+ if X.ndim > 2: # noqa: PLR2004
+ raise RuntimeError('X must have not more than two dimensions. ') # noqa: DOC501, EM101, TRY003
if np.shape(X)[1] == 1 and n_dim != 1:
# in case that only one point X is given, he can be defined either as row or column vector
- X = X.T
+ X = X.T # noqa: N806
if np.shape(X)[1] != n_dim:
- raise RuntimeError("X must be an array of size [n,d], where d is the"
- " number of dimensions of the joint distribution.")
-
- n_X = np.shape(X)[0]
- pdf_values = np.zeros([n_X,n_dim])
-
+ raise RuntimeError( # noqa: DOC501, TRY003
+ 'X must be an array of size [n,d], where d is the' # noqa: EM101
+ ' number of dimensions of the joint distribution.'
+ )
+
+ n_X = np.shape(X)[0] # noqa: N806
+ pdf_values = np.zeros([n_X, n_dim])
+
for i in self.Order[0]:
- pdf_values[:,i] = self.Dist[i].pdf(X[:,i])
-
+ pdf_values[:, i] = self.Dist[i].pdf(X[:, i])
+
for i in self.Order[1]:
- pdf_values[:,i] = self.Dist[i].condPDF(X[:,i],X[:,self.Parents[i]])
-
+ pdf_values[:, i] = self.Dist[i].condPDF(X[:, i], X[:, self.Parents[i]])
+
jointpdf = np.prod(pdf_values, 1)
nan_ind = np.isnan(jointpdf)
-
+
if error:
if not np.all(np.logical_not(nan_ind)):
- raise RuntimeError("Invalid joint distribution was created.")
-
+ raise RuntimeError('Invalid joint distribution was created.') # noqa: DOC501, EM101, TRY003
+
if np.size(jointpdf) == 1:
return jointpdf[0]
- else:
- return jointpdf
-
-# %%
+ else: # noqa: RET505
+ return jointpdf
+
+ # %%
def random(self, n=1):
- """
- Creates n samples of the joint distribution.
+ """Creates n samples of the joint distribution.
Every row in the output array corresponds to one sample.
- """
-
+ """ # noqa: D205, D401
n_dim = len(self.Dist)
- X = np.zeros([n,n_dim])
-
+ X = np.zeros([n, n_dim]) # noqa: N806
+
for i in self.Order[0]:
- X[:,i] = self.Dist[i].random(n)
-
+ X[:, i] = self.Dist[i].random(n)
+
for i in self.Order[1]:
try:
- X[:,i] = self.Dist[i].condRandom(X[:,self.Parents[i]])
- except ValueError:
- raise RuntimeError("Invalid joint distribution was created.")
-
+ X[:, i] = self.Dist[i].condRandom(X[:, self.Parents[i]])
+ except ValueError: # noqa: PERF203
+ raise RuntimeError('Invalid joint distribution was created.') # noqa: B904, DOC501, EM101, TRY003
+
return np.squeeze(X)
-# %%
- def plotGraph(self,opt=False):
- """
- Plots the Bayesian network which defines the dependency
+ # %%
+ def plotGraph(self, opt=False): # noqa: FBT002, C901, N802
+ """Plots the Bayesian network which defines the dependency
between the different distributions.
If opt is given as 'numbering' the nodes are named according
to their order of input in dist(e.g., the first distribution
- is named #0, etc.). If no ID was given to a certain
+ is named #0, etc.). If no ID was given to a certain
distribution, the distribution is also named according to its
position in dist, otherwise the property ID is taken as the
name of the distribution.
- """
-
+ """ # noqa: D205, D401
n_layer = len(self.Layers)
- vert = np.flip(np.linspace(0,1,n_layer))
- pos_n = dict()
- pos_l = dict()
+ vert = np.flip(np.linspace(0, 1, n_layer))
+ pos_n = dict() # noqa: C408
+ pos_l = dict() # noqa: C408
for i in range(n_layer):
cur_l = self.Layers[i]
n_cur = len(cur_l)
- horiz = np.linspace(0,1,n_cur+2)
+ horiz = np.linspace(0, 1, n_cur + 2)
for j in range(n_cur):
- pos_n[str(cur_l[j])] = (horiz[j+1],vert[i])
- pos_l[cur_l[j]] = (horiz[j+1]+0.06,vert[i])
-
+ pos_n[str(cur_l[j])] = (horiz[j + 1], vert[i])
+ pos_l[cur_l[j]] = (horiz[j + 1] + 0.06, vert[i])
+
n_dim = len(self.Dist)
- labels = dict()
+ labels = dict() # noqa: C408
if not opt:
for i in range(n_dim):
if self.Dist[i].ID:
labels[i] = self.Dist[i].ID
else:
- labels[i] = '#'+str(i)
- elif not opt.lower == 'numbering':
+ labels[i] = '#' + str(i)
+ elif opt.lower() == 'numbering':
for i in range(n_dim):
- labels[i] = '#'+str(i)
+ labels[i] = '#' + str(i)
else:
- raise RuntimeError("opt must be given as 'numbering'.")
-
- G_Adj = nx.from_numpy_matrix(self.Adjacency)
- G = nx.DiGraph()
- for i in range(1,n_layer):
+ raise RuntimeError("opt must be given as 'numbering'.") # noqa: DOC501, EM101, TRY003
+
+ G_Adj = nx.from_numpy_matrix(self.Adjacency) # noqa: N806
+ G = nx.DiGraph() # noqa: N806
+ for i in range(1, n_layer):
cur_l = self.Layers[i]
n_cur = len(cur_l)
for j in range(n_cur):
- s_n = np.array(self.Parents[cur_l[j]],ndmin=1)
+ s_n = np.array(self.Parents[cur_l[j]], ndmin=1)
for k in range(np.size(s_n)):
- G.add_edge(str(s_n[k]),str(cur_l[j]))
-
- nx.draw(G, pos_n,node_color='k',alpha = 0.3,node_size=100,arrowsize=20,arrows=True)
- nx.draw_networkx_labels(G_Adj,pos_l,labels,colors='r',font_size=15)
- plt.xlim([-0.05,1.05])
- plt.ylim([-0.1,1.1])
+ G.add_edge(str(s_n[k]), str(cur_l[j]))
+
+ nx.draw(
+ G,
+ pos_n,
+ node_color='k',
+ alpha=0.3,
+ node_size=100,
+ arrowsize=20,
+ arrows=True,
+ )
+ nx.draw_networkx_labels(G_Adj, pos_l, labels, font_color='r', font_size=15)
+ plt.xlim([-0.05, 1.05])
+ plt.ylim([-0.1, 1.1])
plt.show()
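The ERARosen constructor above encodes the parent lists from 'depend' in an adjacency matrix and rejects any dependency structure that contains a cycle, since the layer-by-layer ordering used for the Rosenblatt-style transformation only exists for a directed acyclic graph. The self-contained sketch below isolates that check; is_acyclic is a hypothetical helper, not part of ERARosen, and it tests the trace after every matrix power because a cycle of length c makes the diagonal of the c-th power nonzero.

    import numpy as np


    def is_acyclic(depend):
        """True if the parent lists in `depend` describe a directed acyclic graph."""
        n_dist = len(depend)
        adj_mat = np.zeros([n_dist, n_dist])
        for i in range(n_dist):
            adj_mat[i, depend[i]] = 1          # edge from distribution i to its parents
        adj_prod = np.identity(n_dist)
        for _ in range(n_dist + 1):
            adj_prod = adj_prod @ adj_mat
            if np.trace(adj_prod) != 0:        # nonzero diagonal entries signal a cycle
                return False
        return True


    print(is_acyclic([[], [], [0, 1]]))        # marginals X0, X1; X2 | (X0, X1) -> True
    print(is_acyclic([[1], [0]]))              # X0 | X1 and X1 | X0 -> False (cycle)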
diff --git a/modules/performUQ/common/createStandardUQ_Input.cpp b/modules/performUQ/common/createStandardUQ_Input.cpp
index 15bdbbd4d..3845fbb7e 100755
--- a/modules/performUQ/common/createStandardUQ_Input.cpp
+++ b/modules/performUQ/common/createStandardUQ_Input.cpp
@@ -133,7 +133,7 @@ gatherRV(json_t *rootINPUT, std::set &rvFiles){
}
- // KZ: commented for fixing RVs not in BIM.json which is not passed to inidividual json files (i.e., EVENT.json, SIM.json, SAM.json)
+ // KZ: commented out to fix RVs that are not in BIM.json, since BIM.json is not passed to the individual json files (i.e., EVENT.json, SIM.json, SAM.json)
//if (numRVs != 0) {
rvFiles.insert(std::string(fName));
//}
@@ -354,7 +354,7 @@ int main(int argc, char **argv) {
std::string localDir(json_string_value(json_object_get(rootINPUT, "localAppDir")));
std::string remoteDir(json_string_value(json_object_get(rootINPUT, "remoteAppDir")));
- // KZ: for different os sytem
+ // KZ: adjust for the operating system
std::string workflowNew_os = std::string(workflowNew);
if ((strcmp(runType, "runningLocal")==0) && strcmp(osType,"Windows") == 0)
{
diff --git a/modules/performUQ/common/parallel_runner_mpi4py.py b/modules/performUQ/common/parallel_runner_mpi4py.py
index 4728e817f..4ec87fea8 100644
--- a/modules/performUQ/common/parallel_runner_mpi4py.py
+++ b/modules/performUQ/common/parallel_runner_mpi4py.py
@@ -1,27 +1,27 @@
-from mpi4py import MPI
+from mpi4py import MPI # noqa: CPY001, D100, INP001
from mpi4py.futures import MPIPoolExecutor
-class ParallelRunnerMPI4PY:
- def __init__(self, run_type: str = "runningRemote") -> None:
+class ParallelRunnerMPI4PY: # noqa: D101
+ def __init__(self, run_type: str = 'runningRemote') -> None:
self.run_type = run_type
self.comm = MPI.COMM_WORLD
self.num_processors = self.get_num_processors()
self.pool = self.get_pool()
- def get_num_processors(self) -> int:
+ def get_num_processors(self) -> int: # noqa: D102
num_processors = self.comm.Get_size()
if num_processors is None:
num_processors = 1
if num_processors < 1:
- raise ValueError(
- "Number of processes must be at least 1. Got {num_processors}"
+ raise ValueError( # noqa: TRY003
+ f'Number of processes must be at least 1. Got {num_processors}' # noqa: EM102
)
return num_processors
- def get_pool(self) -> MPIPoolExecutor:
+ def get_pool(self) -> MPIPoolExecutor: # noqa: D102
self.pool = MPIPoolExecutor(max_workers=self.num_processors)
return self.pool
- def close_pool(self) -> None:
+ def close_pool(self) -> None: # noqa: D102
self.pool.shutdown()
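ParallelRunnerMPI4PY wraps an mpi4py.futures.MPIPoolExecutor sized to the MPI world, and close_pool shuts the executor down. A minimal usage sketch follows; the launch command is an assumption (mpi4py.futures workers are typically spawned via something like `mpiexec -n 4 python -m mpi4py.futures run_jobs.py`), and evaluate is a placeholder for one forward-model run.

    from parallel_runner_mpi4py import ParallelRunnerMPI4PY


    def evaluate(sample_index):
        # placeholder for a single model evaluation
        return sample_index**2


    if __name__ == '__main__':
        runner = ParallelRunnerMPI4PY(run_type='runningRemote')
        results = list(runner.pool.map(evaluate, range(8)))   # MPIPoolExecutor.map
        runner.close_pool()                                    # shuts the executor down
        print(results)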
diff --git a/modules/performUQ/common/quoFEM_RV_models.py b/modules/performUQ/common/quoFEM_RV_models.py
index 08117cce6..f8020e610 100644
--- a/modules/performUQ/common/quoFEM_RV_models.py
+++ b/modules/performUQ/common/quoFEM_RV_models.py
@@ -1,281 +1,291 @@
+import typing # noqa: CPY001, D100, INP001
from typing import Any
+
+import numpy as np
import pydantic
-import typing
from pydantic import Field
from typing_extensions import Annotated
-import numpy as np
-
-_supported_distributions = typing.Literal["Beta", "ChiSquared",
- "Exponential", "Gamma",
- "Gumbel", "Lognormal",
- "Normal", "Uniform",
- "Weibull"]
-_supported_input_types = typing.Literal["Parameters", "Moments",
- "Dataset"]
-_supported_variable_classes = typing.Literal["Uncertain", "Design",
- "Uniform", "NA"]
-
-
-def _get_ERADistObjectName(name_from_quoFEM: str) -> str:
- _ERADistNames = {}
- _ERADistNames["ChiSquared"] = "chisquare"
+_supported_distributions = typing.Literal[
+ 'Beta',
+ 'ChiSquared',
+ 'Exponential',
+ 'Gamma',
+ 'Gumbel',
+ 'Lognormal',
+ 'Normal',
+ 'Uniform',
+ 'Weibull',
+]
+_supported_input_types = typing.Literal['Parameters', 'Moments', 'Dataset']
+_supported_variable_classes = typing.Literal['Uncertain', 'Design', 'Uniform', 'NA']
+
+
+def _get_ERADistObjectName(name_from_quoFEM: str) -> str: # noqa: N802, N803
+ _ERADistNames = {} # noqa: N806
+ _ERADistNames['ChiSquared'] = 'chisquare'
try:
nm = _ERADistNames[name_from_quoFEM].value
- except:
+ except: # noqa: E722
nm = name_from_quoFEM.lower()
return nm
-def _get_ERADistOpt(input_type_from_quoFEM: str) -> str:
- _ERADistOpts = {}
- _ERADistOpts["Parameters"] = "PAR"
- _ERADistOpts["Moments"] = "MOM"
- _ERADistOpts["Dataset"] = "DATA"
+def _get_ERADistOpt(input_type_from_quoFEM: str) -> str: # noqa: N802, N803
+ _ERADistOpts = {} # noqa: N806
+ _ERADistOpts['Parameters'] = 'PAR'
+ _ERADistOpts['Moments'] = 'MOM'
+ _ERADistOpts['Dataset'] = 'DATA'
try:
opt = _ERADistOpts[input_type_from_quoFEM].value
- except:
- opt = "PAR"
+ except: # noqa: E722
+ opt = 'PAR'
return opt
-class RVData(pydantic.BaseModel):
+class RVData(pydantic.BaseModel): # noqa: D101
distribution: _supported_distributions
name: str
- inputType: _supported_input_types = "Parameters"
- refCount: int
+ inputType: _supported_input_types = 'Parameters' # noqa: N815
+ refCount: int # noqa: N815
value: str
- variableClass: _supported_variable_classes
- ERAName: str = ""
- ERAOpt: str = ""
+ variableClass: _supported_variable_classes # noqa: N815
+ ERAName: str = ''
+ ERAOpt: str = ''
ERAVal: list = []
- def model_post_init(self, __context: Any) -> None:
+ def model_post_init(self, __context: Any) -> None: # noqa: D102, PYI063
self.ERAName = _get_ERADistObjectName(self.distribution)
self.ERAOpt = _get_ERADistOpt(self.inputType)
return super().model_post_init(__context)
-############################################
-class BetaUncertainData(RVData):
+############################################
+class BetaUncertainData(RVData): # noqa: D101
lowerbound: float = 0.0
upperbound: float = 1.0
+
@pydantic.validator('upperbound')
- def upper_bound_not_bigger_than_lower_bound(v, values):
+ def upper_bound_bigger_than_lower_bound(v, values): # noqa: N805, D102
if 'lowerbound' in values and v <= values['lowerbound']:
raise ValueError(f"The upper bound must be bigger than the \
lower bound {values['lowerbound']}. \
- Got a value of {v}.")
+ Got a value of {v}.") # noqa: EM102, TRY003
return v
-class BetaParameters(BetaUncertainData):
+class BetaParameters(BetaUncertainData): # noqa: D101
alphas: pydantic.PositiveFloat
betas: pydantic.PositiveFloat
- def model_post_init(self, __context: Any) -> None:
- self.ERAVal = [self.alphas, self.betas, self.lowerbound,
- self.upperbound]
+ def model_post_init(self, __context: Any) -> None: # noqa: D102, PYI063
+ self.ERAVal = [self.alphas, self.betas, self.lowerbound, self.upperbound]
return super().model_post_init(__context)
-class BetaMoments(BetaUncertainData):
+class BetaMoments(BetaUncertainData): # noqa: D101
mean: float
- standardDev: pydantic.PositiveFloat
+ standardDev: pydantic.PositiveFloat # noqa: N815
- def model_post_init(self, __context: Any) -> None:
- self.ERAVal = [self.mean, self.standardDev, self.lowerbound,
- self.upperbound]
+ def model_post_init(self, __context: Any) -> None: # noqa: D102, PYI063
+ self.ERAVal = [self.mean, self.standardDev, self.lowerbound, self.upperbound]
return super().model_post_init(__context)
-class BetaDataset(BetaUncertainData):
- dataDir: str
+class BetaDataset(BetaUncertainData): # noqa: D101
+ dataDir: str # noqa: N815
- def model_post_init(self, __context: Any) -> None:
- self.ERAVal = [np.genfromtxt(self.dataDir).tolist(),
- [self.lowerbound, self.upperbound]]
+ def model_post_init(self, __context: Any) -> None: # noqa: D102, PYI063
+ self.ERAVal = [
+ np.genfromtxt(self.dataDir).tolist(),
+ [self.lowerbound, self.upperbound],
+ ]
return super().model_post_init(__context)
+
############################################
-class ChiSquaredUncertainData(RVData):
+class ChiSquaredUncertainData(RVData): # noqa: D101
pass
-class ChiSquaredParameters(ChiSquaredUncertainData):
+class ChiSquaredParameters(ChiSquaredUncertainData): # noqa: D101
k: Annotated[int, Field(ge=1)]
- def model_post_init(self, __context: Any) -> None:
+ def model_post_init(self, __context: Any) -> None: # noqa: D102, PYI063
self.ERAVal = [self.k]
return super().model_post_init(__context)
-class ChiSquaredMoments(ChiSquaredUncertainData):
+class ChiSquaredMoments(ChiSquaredUncertainData): # noqa: D101
mean: pydantic.PositiveFloat
- def model_post_init(self, __context: Any) -> None:
+ def model_post_init(self, __context: Any) -> None: # noqa: D102, PYI063
self.ERAVal = [self.mean]
return super().model_post_init(__context)
-class ChiSquaredDataset(ChiSquaredUncertainData):
- dataDir: str
+class ChiSquaredDataset(ChiSquaredUncertainData): # noqa: D101
+ dataDir: str # noqa: N815
- def model_post_init(self, __context: Any) -> None:
+ def model_post_init(self, __context: Any) -> None: # noqa: D102, PYI063
self.ERAVal = [np.genfromtxt(self.dataDir).tolist()]
return super().model_post_init(__context)
+
############################################
-class ExponentialUncertainData(RVData):
+class ExponentialUncertainData(RVData): # noqa: D101
pass
-class ExponentialParameters(ExponentialUncertainData):
- lamda: pydantic.PositiveFloat = pydantic.Field(alias="lambda")
+class ExponentialParameters(ExponentialUncertainData): # noqa: D101
+ lamda: pydantic.PositiveFloat = pydantic.Field(alias='lambda')
- def model_post_init(self, __context: Any) -> None:
+ def model_post_init(self, __context: Any) -> None: # noqa: D102, PYI063
self.ERAVal = [self.lamda]
return super().model_post_init(__context)
-
-class ExponentialMoments(ExponentialUncertainData):
+
+class ExponentialMoments(ExponentialUncertainData): # noqa: D101
mean: pydantic.PositiveFloat
- def model_post_init(self, __context: Any) -> None:
+ def model_post_init(self, __context: Any) -> None: # noqa: D102, PYI063
self.ERAVal = [self.mean]
return super().model_post_init(__context)
-class ExponentialDataset(ExponentialUncertainData):
- dataDir: str
+class ExponentialDataset(ExponentialUncertainData): # noqa: D101
+ dataDir: str # noqa: N815
- def model_post_init(self, __context: Any) -> None:
+ def model_post_init(self, __context: Any) -> None: # noqa: D102, PYI063
self.ERAVal = [np.genfromtxt(self.dataDir).tolist()]
return super().model_post_init(__context)
-
+
+
############################################
-class GammaUncertainData(RVData):
+class GammaUncertainData(RVData): # noqa: D101
pass
-class GammaParameters(GammaUncertainData):
+class GammaParameters(GammaUncertainData): # noqa: D101
k: pydantic.PositiveFloat
- lamda: pydantic.PositiveFloat = pydantic.Field(alias="lambda")
+ lamda: pydantic.PositiveFloat = pydantic.Field(alias='lambda')
- def model_post_init(self, __context: Any) -> None:
+ def model_post_init(self, __context: Any) -> None: # noqa: D102, PYI063
self.ERAVal = [self.lamda, self.k]
return super().model_post_init(__context)
-
-class GammaMoments(GammaUncertainData):
+
+class GammaMoments(GammaUncertainData): # noqa: D101
mean: pydantic.PositiveFloat
- standardDev: pydantic.PositiveFloat
+ standardDev: pydantic.PositiveFloat # noqa: N815
- def model_post_init(self, __context: Any) -> None:
+ def model_post_init(self, __context: Any) -> None: # noqa: D102, PYI063
self.ERAVal = [self.mean, self.standardDev]
return super().model_post_init(__context)
-class GammaDataset(GammaUncertainData):
- dataDir: str
+class GammaDataset(GammaUncertainData): # noqa: D101
+ dataDir: str # noqa: N815
- def model_post_init(self, __context: Any) -> None:
+ def model_post_init(self, __context: Any) -> None: # noqa: D102, PYI063
self.ERAVal = [np.genfromtxt(self.dataDir).tolist()]
return super().model_post_init(__context)
-
+
+
############################################
-class GumbelUncertainData(RVData):
+class GumbelUncertainData(RVData): # noqa: D101
pass
-class GumbelParameters(GumbelUncertainData):
+class GumbelParameters(GumbelUncertainData): # noqa: D101
alphaparam: pydantic.PositiveFloat
betaparam: float
- def model_post_init(self, __context: Any) -> None:
+ def model_post_init(self, __context: Any) -> None: # noqa: D102, PYI063
self.ERAVal = [self.alphaparam, self.betaparam]
return super().model_post_init(__context)
-
-class GumbelMoments(GumbelUncertainData):
+
+class GumbelMoments(GumbelUncertainData): # noqa: D101
mean: float
- standardDev: pydantic.PositiveFloat
+ standardDev: pydantic.PositiveFloat # noqa: N815
- def model_post_init(self, __context: Any) -> None:
+ def model_post_init(self, __context: Any) -> None: # noqa: D102, PYI063
self.ERAVal = [self.mean, self.standardDev]
return super().model_post_init(__context)
-class GumbelDataset(GumbelUncertainData):
- dataDir: str
+class GumbelDataset(GumbelUncertainData): # noqa: D101
+ dataDir: str # noqa: N815
- def model_post_init(self, __context: Any) -> None:
+ def model_post_init(self, __context: Any) -> None: # noqa: D102, PYI063
self.ERAVal = [np.genfromtxt(self.dataDir).tolist()]
return super().model_post_init(__context)
-
+
############################################
-class LognormalUncertainData(RVData):
+class LognormalUncertainData(RVData): # noqa: D101
pass
-class LognormalParameters(LognormalUncertainData):
- lamda: float = pydantic.Field(alias="lambda")
+class LognormalParameters(LognormalUncertainData): # noqa: D101
+ lamda: float = pydantic.Field(alias='lambda')
zeta: pydantic.PositiveFloat
- def model_post_init(self, __context: Any) -> None:
+ def model_post_init(self, __context: Any) -> None: # noqa: D102, PYI063
self.ERAVal = [self.lamda, self.zeta]
return super().model_post_init(__context)
-
-class LognormalMoments(LognormalUncertainData):
+
+class LognormalMoments(LognormalUncertainData): # noqa: D101
mean: pydantic.PositiveFloat
- stdDev: pydantic.PositiveFloat
+ stdDev: pydantic.PositiveFloat # noqa: N815
- def model_post_init(self, __context: Any) -> None:
+ def model_post_init(self, __context: Any) -> None: # noqa: D102, PYI063
self.ERAVal = [self.mean, self.stdDev]
return super().model_post_init(__context)
-class LognormalDataset(LognormalUncertainData):
- dataDir: str
+class LognormalDataset(LognormalUncertainData): # noqa: D101
+ dataDir: str # noqa: N815
- def model_post_init(self, __context: Any) -> None:
+ def model_post_init(self, __context: Any) -> None: # noqa: D102, PYI063
self.ERAVal = [np.genfromtxt(self.dataDir).tolist()]
return super().model_post_init(__context)
-
+
############################################
-class NormalUncertainData(RVData):
+class NormalUncertainData(RVData): # noqa: D101
pass
-class NormalParameters(NormalUncertainData):
+class NormalParameters(NormalUncertainData): # noqa: D101
mean: float
- stdDev: pydantic.PositiveFloat
+ stdDev: pydantic.PositiveFloat # noqa: N815
- def model_post_init(self, __context: Any) -> None:
+ def model_post_init(self, __context: Any) -> None: # noqa: D102, PYI063
self.ERAVal = [self.mean, self.stdDev]
return super().model_post_init(__context)
-
-class NormalMoments(NormalUncertainData):
+
+class NormalMoments(NormalUncertainData): # noqa: D101
mean: float
- stdDev: pydantic.PositiveFloat
+ stdDev: pydantic.PositiveFloat # noqa: N815
- def model_post_init(self, __context: Any) -> None:
+ def model_post_init(self, __context: Any) -> None: # noqa: D102, PYI063
self.ERAVal = [self.mean, self.stdDev]
return super().model_post_init(__context)
-class NormalDataset(NormalUncertainData):
- dataDir: str
+class NormalDataset(NormalUncertainData): # noqa: D101
+ dataDir: str # noqa: N815
- def model_post_init(self, __context: Any) -> None:
+ def model_post_init(self, __context: Any) -> None: # noqa: D102, PYI063
self.ERAVal = [np.genfromtxt(self.dataDir).tolist()]
return super().model_post_init(__context)
-
-############################################
+
+
+############################################
# class TruncatedExponentialUncertainData(RVData):
# a: float
# b: float
@@ -299,72 +309,75 @@ def model_post_init(self, __context: Any) -> None:
# inputType: typing.Literal["Dataset"]
# dataDir: str
+
############################################
-class UniformUncertainData(RVData):
+class UniformUncertainData(RVData): # noqa: D101
pass
-class UniformParameters(UniformUncertainData):
+class UniformParameters(UniformUncertainData): # noqa: D101
lowerbound: float = 0.0
upperbound: float = 1.0
+
@pydantic.validator('upperbound')
- def upper_bound_not_bigger_than_lower_bound(v, values):
+ def upper_bound_bigger_than_lower_bound(v, values): # noqa: N805, D102
if 'lowerbound' in values and v <= values['lowerbound']:
raise ValueError(f"The upper bound must be bigger than the \
lower bound {values['lowerbound']}. \
- Got a value of {v}.")
+ Got a value of {v}.") # noqa: EM102, TRY003
return v
-
- def model_post_init(self, __context: Any) -> None:
+
+ def model_post_init(self, __context: Any) -> None: # noqa: D102, PYI063
self.ERAVal = [self.lowerbound, self.upperbound]
return super().model_post_init(__context)
-class UniformMoments(UniformUncertainData):
+class UniformMoments(UniformUncertainData): # noqa: D101
mean: float
- standardDev: pydantic.PositiveFloat
+ standardDev: pydantic.PositiveFloat # noqa: N815
- def model_post_init(self, __context: Any) -> None:
+ def model_post_init(self, __context: Any) -> None: # noqa: D102, PYI063
self.ERAVal = [self.mean, self.standardDev]
return super().model_post_init(__context)
-class UniformDataset(UniformUncertainData):
- dataDir: str
+class UniformDataset(UniformUncertainData): # noqa: D101
+ dataDir: str # noqa: N815
- def model_post_init(self, __context: Any) -> None:
+ def model_post_init(self, __context: Any) -> None: # noqa: D102, PYI063
self.ERAVal = [np.genfromtxt(self.dataDir).tolist()]
return super().model_post_init(__context)
-
+
############################################
-class WeibullUncertainData(RVData):
+class WeibullUncertainData(RVData): # noqa: D101
pass
-class WeibullParameters(WeibullUncertainData):
+class WeibullParameters(WeibullUncertainData): # noqa: D101
scaleparam: pydantic.PositiveFloat
shapeparam: pydantic.PositiveFloat
- def model_post_init(self, __context: Any) -> None:
+ def model_post_init(self, __context: Any) -> None: # noqa: D102, PYI063
self.ERAVal = [self.scaleparam, self.shapeparam]
return super().model_post_init(__context)
-
-class WeibullMoments(WeibullUncertainData):
+
+class WeibullMoments(WeibullUncertainData): # noqa: D101
mean: pydantic.PositiveFloat
- standardDev: pydantic.PositiveFloat
+ standardDev: pydantic.PositiveFloat # noqa: N815
- def model_post_init(self, __context: Any) -> None:
+ def model_post_init(self, __context: Any) -> None: # noqa: D102, PYI063
self.ERAVal = [self.mean, self.standardDev]
return super().model_post_init(__context)
-class WeibullDataset(WeibullUncertainData):
- dataDir: str
+class WeibullDataset(WeibullUncertainData): # noqa: D101
+ dataDir: str # noqa: N815
- def model_post_init(self, __context: Any) -> None:
+ def model_post_init(self, __context: Any) -> None: # noqa: D102, PYI063
self.ERAVal = [np.genfromtxt(self.dataDir).tolist()]
return super().model_post_init(__context)
-
-############################################
\ No newline at end of file
+
+
+############################################
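Each of the pydantic models above validates one random-variable record from the quoFEM input and, in model_post_init, fills the ERAName/ERAOpt/ERAVal fields that the ERA distribution classes expect. A minimal sketch of that flow is below; the record contents are illustrative, and the ERADist(name, opt, val) call at the end assumes that signature, which is only inferred from the field names used here.

    from quoFEM_RV_models import NormalParameters

    rv_record = {                       # the kind of entry quoFEM writes for one RV
        'distribution': 'Normal',
        'name': 'E',
        'inputType': 'Parameters',
        'refCount': 1,
        'value': 'RV.E',
        'variableClass': 'Uncertain',
        'mean': 205000.0,
        'stdDev': 15000.0,
    }

    rv = NormalParameters(**rv_record)  # validation + model_post_init fill ERA fields
    print(rv.ERAName, rv.ERAOpt, rv.ERAVal)   # 'normal' 'PAR' [205000.0, 15000.0]

    # assumed downstream use (signature inferred from ERAName/ERAOpt/ERAVal):
    # from ERAClasses.ERADist import ERADist
    # dist = ERADist(rv.ERAName, rv.ERAOpt, rv.ERAVal)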
diff --git a/modules/performUQ/common/uq_utilities.py b/modules/performUQ/common/uq_utilities.py
index 622b1c37b..7368094f1 100644
--- a/modules/performUQ/common/uq_utilities.py
+++ b/modules/performUQ/common/uq_utilities.py
@@ -1,84 +1,78 @@
-import glob
+import glob # noqa: CPY001, D100, INP001
import os
import shutil
-import subprocess
+import subprocess # noqa: S404
import sys
import traceback
+from dataclasses import dataclass
from multiprocessing.pool import Pool
-from typing import Any, Optional, Union
+from typing import Any, Union
import numpy as np
-import quoFEM_RV_models
+import numpy.typing as npt
+import scipy.stats
from ERAClasses.ERADist import ERADist
from ERAClasses.ERANataf import ERANataf
from numpy.typing import NDArray
-import scipy.stats
-import numpy.typing as npt
-from dataclasses import dataclass
-
-def _copytree(src, dst, symlinks=False, ignore=None):
- if not os.path.exists(dst):
- os.makedirs(dst)
+def _copytree(src, dst, symlinks=False, ignore=None): # noqa: FBT002
+ if not os.path.exists(dst): # noqa: PTH110
+ os.makedirs(dst) # noqa: PTH103
for item in os.listdir(src):
- s = os.path.join(src, item)
- d = os.path.join(dst, item)
- if os.path.isdir(s):
+ s = os.path.join(src, item) # noqa: PTH118
+ d = os.path.join(dst, item) # noqa: PTH118
+ if os.path.isdir(s): # noqa: PTH112
_copytree(s, d, symlinks, ignore)
else:
try:
if (
- not os.path.exists(d)
- or os.stat(s).st_mtime - os.stat(d).st_mtime > 1
+ not os.path.exists(d) # noqa: PTH110
+ or os.stat(s).st_mtime - os.stat(d).st_mtime > 1 # noqa: PTH116
):
shutil.copy2(s, d)
- except Exception as ex:
- msg = (
- f"Could not copy {s}. The following error occurred: \n{ex}"
- )
- return msg
- return "0"
+ except Exception as ex: # noqa: BLE001
+ msg = f'Could not copy {s}. The following error occurred: \n{ex}'
+ return msg # noqa: RET504
+ return '0'
-def _append_msg_in_out_file(msg, out_file_name: str = "ops.out"):
- if glob.glob(out_file_name):
- with open(out_file_name, "r") as text_file:
- error_FEM = text_file.read()
+def _append_msg_in_out_file(msg, out_file_name: str = 'ops.out'):
+ if glob.glob(out_file_name): # noqa: PTH207
+ with open(out_file_name) as text_file: # noqa: FURB101, PLW1514, PTH123
+ error_FEM = text_file.read() # noqa: N806
- startingCharId = error_FEM.lower().find("error")
+ startingCharId = error_FEM.lower().find('error') # noqa: N806
if startingCharId > 0:
- startingCharId = max(0, startingCharId - 20)
- endingID = max(len(error_FEM), startingCharId + 200)
+ startingCharId = max(0, startingCharId - 20) # noqa: N806
+ endingID = max(len(error_FEM), startingCharId + 200) # noqa: N806
errmsg = error_FEM[startingCharId:endingID]
- errmsg = errmsg.split(" ", 1)[1]
- errmsg = errmsg[0 : errmsg.rfind(" ")]
- msg += "\n"
- msg += "your model says...\n"
- msg += "........\n" + errmsg + "\n........ \n"
- msg += "to read more, see " + os.path.join(
- os.getcwd(), out_file_name
- )
+ errmsg = errmsg.split(' ', 1)[1]
+ errmsg = errmsg[0 : errmsg.rfind(' ')]
+ msg += '\n'
+ msg += 'your model says...\n'
+ msg += '........\n' + errmsg + '\n........ \n'
+ msg += 'to read more, see ' + os.path.join(os.getcwd(), out_file_name) # noqa: PTH109, PTH118
return msg
-class ModelEvaluationError(Exception):
+class ModelEvaluationError(Exception): # noqa: D101
def __init__(self, msg: str) -> None:
super().__init__(msg)
-class SimCenterWorkflowDriver:
+class SimCenterWorkflowDriver: # noqa: D101
def __init__(
self,
- full_path_of_tmpSimCenter_dir: str,
- list_of_dir_names_to_copy_files_from: list[str],
- list_of_rv_names: list[str],
+ full_path_of_tmpSimCenter_dir: str, # noqa: N803
+ list_of_dir_names_to_copy_files_from: list[str], # noqa: FA102
+ list_of_rv_names: list[str], # noqa: FA102
driver_filename: str,
length_of_results: int,
- workdir_prefix: str = "workdir",
- ignore_nans: bool = True,
+ workdir_prefix: str = 'workdir',
+ ignore_nans: bool = True, # noqa: FBT001, FBT002
) -> None:
self.full_path_of_tmpSimCenter_dir = full_path_of_tmpSimCenter_dir
self.list_of_dir_names_to_copy_files_from = (
@@ -96,9 +90,9 @@ def _check_size_of_sample(self, sample_values: NDArray) -> None:
num_samples = len(sample_values)
if num_samples > 1:
msg = (
- f"Do one simulation at a time. There were {num_samples} "
- " samples provided in the sample value"
- f" {sample_values}."
+ f'Do one simulation at a time. There were {num_samples} '
+ ' samples provided in the sample value'
+ f' {sample_values}.'
)
raise ModelEvaluationError(msg)
@@ -106,111 +100,109 @@ def _check_size_of_sample(self, sample_values: NDArray) -> None:
num_values_in_each_sample = len(sample_values[i])
if num_values_in_each_sample != self.num_rv:
msg = (
- f"Expected {self.num_rv} values in each sample, found "
- f" {num_values_in_each_sample} in"
- f" {sample_values}."
+ f'Expected {self.num_rv} values in each sample, found '
+ f' {num_values_in_each_sample} in'
+ f' {sample_values}.'
)
raise ModelEvaluationError(msg)
def _create_workdir(self, simulation_number: int) -> str:
- workdir = os.path.join(
+ workdir = os.path.join( # noqa: PTH118
self.full_path_of_tmpSimCenter_dir,
- f"{self.workdir_prefix}.{simulation_number + 1}",
+ f'{self.workdir_prefix}.{simulation_number + 1}',
)
- if os.path.exists(workdir):
+ if os.path.exists(workdir): # noqa: PTH110
for root, dirs, files in os.walk(workdir):
for file in files:
try:
- os.chmod(os.path.join(root, file), 0o777)
- os.unlink(os.path.join(root, file))
- except:
- msg = f"Could not remove file {file} from {workdir}."
- raise ModelEvaluationError(msg)
- for dir in dirs:
+ os.chmod(os.path.join(root, file), 0o777) # noqa: S103, PTH101, PTH118
+ os.unlink(os.path.join(root, file)) # noqa: PTH108, PTH118
+ except: # noqa: PERF203, E722
+ msg = f'Could not remove file {file} from {workdir}.'
+ raise ModelEvaluationError(msg) # noqa: B904
+ for dir in dirs: # noqa: A001
try:
- shutil.rmtree(os.path.join(root, dir))
- except:
+ shutil.rmtree(os.path.join(root, dir)) # noqa: PTH118
+ except: # noqa: PERF203, E722
msg = (
- f"Could not remove directory {dir} "
- f" from {workdir}."
+ f'Could not remove directory {dir} '
+ f' from {workdir}.'
)
- raise ModelEvaluationError(msg)
+ raise ModelEvaluationError(msg) # noqa: B904
for src_dir in self.list_of_dir_names_to_copy_files_from:
- src = os.path.join(self.full_path_of_tmpSimCenter_dir, src_dir)
+ src = os.path.join(self.full_path_of_tmpSimCenter_dir, src_dir) # noqa: PTH118
msg = _copytree(src, workdir)
- if msg != "0":
+ if msg != '0':
raise ModelEvaluationError(msg)
return workdir
- def _create_params_file(
- self, sample_values: NDArray, workdir: str
- ) -> None:
+ def _create_params_file(self, sample_values: NDArray, workdir: str) -> None:
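+        # params.in layout: first line is the number of RVs, then one 'name value' pair per line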
list_of_strings_to_write = []
- list_of_strings_to_write.append(f"{self.num_rv}")
+ list_of_strings_to_write.append(f'{self.num_rv}')
for i, rv in enumerate(self.list_of_rv_names):
- list_of_strings_to_write.append(f"{rv} {sample_values[0][i]}")
+ list_of_strings_to_write.append(f'{rv} {sample_values[0][i]}')
try:
- with open(os.path.join(workdir, "params.in"), "w") as f:
- f.write("\n".join(list_of_strings_to_write))
- except Exception as ex:
- raise ModelEvaluationError(
- "Failed to create params.in file in "
- f" {workdir}. The following error occurred: \n{ex}"
+ with open(os.path.join(workdir, 'params.in'), 'w') as f: # noqa: FURB103, PLW1514, PTH118, PTH123
+ f.write('\n'.join(list_of_strings_to_write))
+ except Exception as ex: # noqa: BLE001
+ raise ModelEvaluationError( # noqa: B904, TRY003
+ 'Failed to create params.in file in ' # noqa: EM102
+ f' {workdir}. The following error occurred: \n{ex}'
)
def _execute_driver_file(self, workdir: str) -> None:
command = (
- f"{os.path.join(workdir, self.driver_filename)} "
- " 1> model_eval.log 2>&1"
+ f'{os.path.join(workdir, self.driver_filename)} ' # noqa: PTH118
+ ' 1> model_eval.log 2>&1'
)
os.chdir(workdir)
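+        # run the driver from inside workdir; the command above redirects its stdout/stderr to model_eval.log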
- completed_process = subprocess.run(command, shell=True)
+ completed_process = subprocess.run(command, shell=True, check=False) # noqa: S602
try:
completed_process.check_returncode()
except subprocess.CalledProcessError as ex:
- returnStringList = ["Failed to run the model."]
- returnStringList.append(
- "The command to run the model was "
- f" {ex.cmd}"
+ returnStringList = ['Failed to run the model.'] # noqa: N806
+ returnStringList.append( # noqa: FURB113
+ 'The command to run the model was '
+ f' {ex.cmd}'
)
- returnStringList.append(f"The return code was {ex.returncode}")
- returnStringList.append(f"The following error occurred: \n{ex}")
- raise ModelEvaluationError(f"\n\n".join(returnStringList))
+ returnStringList.append(f'The return code was {ex.returncode}')
+ returnStringList.append(f'The following error occurred: \n{ex}')
+ raise ModelEvaluationError('\n\n'.join(returnStringList)) # noqa: B904
def _read_outputs_from_results_file(self, workdir: str) -> NDArray:
- if glob.glob("results.out"):
- outputs = np.loadtxt("results.out", dtype=float).flatten()
+ if glob.glob('results.out'): # noqa: PTH207
+ outputs = np.loadtxt('results.out', dtype=float).flatten()
else:
msg = f"Error running FEM: 'results.out' missing at {workdir}\n"
- msg = _append_msg_in_out_file(msg, out_file_name="ops.out")
+ msg = _append_msg_in_out_file(msg, out_file_name='ops.out')
raise ModelEvaluationError(msg)
if outputs.shape[0] == 0:
msg = "Error running FEM: 'results.out' is empty\n"
- msg = _append_msg_in_out_file(msg, out_file_name="ops.out")
+ msg = _append_msg_in_out_file(msg, out_file_name='ops.out')
raise ModelEvaluationError(msg)
if outputs.shape[0] != self.length_of_results:
msg = (
"Error running FEM: 'results.out' contains "
- f" {outputs.shape[0]} values, expected to get "
- f" {self.length_of_results} values\n"
+ f' {outputs.shape[0]} values, expected to get '
+ f' {self.length_of_results} values\n'
)
- msg = _append_msg_in_out_file(msg, out_file_name="ops.out")
+ msg = _append_msg_in_out_file(msg, out_file_name='ops.out')
raise ModelEvaluationError(msg)
if not self.ignore_nans:
if np.isnan(np.sum(outputs)):
- msg = f"Error running FEM: Response value in {workdir} is NaN"
+ msg = f'Error running FEM: Response value in {workdir} is NaN'
raise ModelEvaluationError(msg)
return outputs
- def evaluate_model_once(
+ def evaluate_model_once( # noqa: D102
self, simulation_number: int, sample_values: NDArray
- ) -> Union[str, NDArray]:
- outputs = ""
+ ) -> Union[str, NDArray]: # noqa: FA100
+ outputs = ''
try:
sample_values = np.atleast_2d(sample_values)
self._check_size_of_sample(sample_values)
@@ -218,13 +210,13 @@ def evaluate_model_once(
self._create_params_file(sample_values, workdir)
self._execute_driver_file(workdir)
outputs = self._read_outputs_from_results_file(workdir)
- except Exception:
+ except Exception: # noqa: BLE001
exc_type, exc_value, exc_traceback = sys.exc_info()
outputs = (
- f"\nSimulation number: {simulation_number}\n"
- + f"Samples values: {sample_values}\n"
+ f'\nSimulation number: {simulation_number}\n' # noqa: ISC003
+ + f'Samples values: {sample_values}\n'
)
- outputs += "".join(
+ outputs += ''.join(
traceback.format_exception(exc_type, exc_value, exc_traceback)
)
finally:
@@ -232,64 +224,64 @@ def evaluate_model_once(
return outputs
-class ParallelRunnerMultiprocessing:
- def __init__(self, run_type: str = "runningLocal") -> None:
+class ParallelRunnerMultiprocessing: # noqa: D101
+ def __init__(self, run_type: str = 'runningLocal') -> None:
self.run_type = run_type
self.num_processors = self.get_num_processors()
self.pool = self.get_pool()
- def get_num_processors(self) -> int:
+ def get_num_processors(self) -> int: # noqa: D102, PLR6301
num_processors = os.cpu_count()
if num_processors is None:
num_processors = 1
if num_processors < 1:
- raise ValueError(
- "Number of processes must be at least 1. "
- f" Got {num_processors}"
+ raise ValueError( # noqa: TRY003
+ 'Number of processes must be at least 1. ' # noqa: EM102
+ f' Got {num_processors}'
)
return num_processors
- def get_pool(self) -> Pool:
+ def get_pool(self) -> Pool: # noqa: D102
self.pool = Pool(processes=self.num_processors)
return self.pool
- def close_pool(self) -> None:
+ def close_pool(self) -> None: # noqa: D102
self.pool.close()
-def make_ERADist_object(name, opt, val) -> ERADist:
+def make_ERADist_object(name, opt, val) -> ERADist: # noqa: N802, D103
return ERADist(name=name, opt=opt, val=val)
-def create_one_marginal_distribution(rv_data) -> ERADist:
+def create_one_marginal_distribution(rv_data) -> ERADist: # noqa: D103
string = (
- f'quoFEM_RV_models.{rv_data["distribution"]}'
+ f'quoFEM_RV_models.{rv_data["distribution"]}' # noqa: ISC003
+ f'{rv_data["inputType"]}.model_validate({rv_data})'
)
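+    # class name is distribution + inputType, e.g. quoFEM_RV_models.WeibullDataset; the RV dict is validated into that pydantic model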
- rv = eval(string)
+ rv = eval(string) # noqa: S307
return make_ERADist_object(name=rv.ERAName, opt=rv.ERAOpt, val=rv.ERAVal)
-def make_list_of_marginal_distributions(
+def make_list_of_marginal_distributions( # noqa: D103
list_of_random_variables_data,
-) -> list[ERADist]:
- marginal_ERAdistribution_objects_list = []
+) -> list[ERADist]: # noqa: FA102
+ marginal_ERAdistribution_objects_list = [] # noqa: N806
for rv_data in list_of_random_variables_data:
- marginal_ERAdistribution_objects_list.append(
+ marginal_ERAdistribution_objects_list.append( # noqa: PERF401
create_one_marginal_distribution(rv_data)
)
return marginal_ERAdistribution_objects_list
-def make_correlation_matrix(correlation_matrix_data, num_rvs) -> NDArray:
+def make_correlation_matrix(correlation_matrix_data, num_rvs) -> NDArray: # noqa: D103
return np.atleast_2d(correlation_matrix_data).reshape((num_rvs, num_rvs))
-def make_ERANataf_object(list_of_ERADist, correlation_matrix) -> ERANataf:
+def make_ERANataf_object(list_of_ERADist, correlation_matrix) -> ERANataf: # noqa: N802, N803, D103
return ERANataf(M=list_of_ERADist, Correlation=correlation_matrix)
-class ERANatafJointDistribution:
+class ERANatafJointDistribution: # noqa: D101
def __init__(
self,
list_of_random_variables_data: list,
@@ -303,37 +295,43 @@ def __init__(
self.correlation_matrix_data, self.num_rvs
)
self.marginal_ERAdistribution_objects_list = (
- make_list_of_marginal_distributions(
- self.list_of_random_variables_data
- )
+ make_list_of_marginal_distributions(self.list_of_random_variables_data)
)
self.ERANataf_object = make_ERANataf_object(
self.marginal_ERAdistribution_objects_list, self.correlation_matrix
)
- def u_to_x(
- self, u: NDArray, jacobian: bool = False
- ) -> Union[tuple[NDArray[np.float64], Any], NDArray[np.float64]]:
+ def u_to_x( # noqa: D102
+ self,
+ u: NDArray,
+ jacobian: bool = False, # noqa: FBT001, FBT002
+ ) -> Union[tuple[NDArray[np.float64], Any], NDArray[np.float64]]: # noqa: FA100, FA102
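+        # Nataf transformation: maps u from standard-normal space to x in the physical random-variable space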
return self.ERANataf_object.U2X(U=u, Jacobian=jacobian)
- def x_to_u(self, x: NDArray, jacobian: bool = False) -> Union[
- tuple[NDArray[np.floating[Any]], NDArray[np.floating[Any]]],
+ def x_to_u( # noqa: D102
+ self,
+ x: NDArray,
+ jacobian: bool = False, # noqa: FBT001, FBT002
+ ) -> Union[ # noqa: FA100
+ tuple[NDArray[np.floating[Any]], NDArray[np.floating[Any]]], # noqa: FA102
NDArray[np.floating[Any]],
]:
return self.ERANataf_object.X2U(X=x, Jacobian=jacobian)
- def pdf(self, x: NDArray) -> Union[Any, NDArray[np.float64]]:
+ def pdf(self, x: NDArray) -> Union[Any, NDArray[np.float64]]: # noqa: FA100, D102
return self.ERANataf_object.pdf(X=x)
- def logpdf(self, x: NDArray) -> NDArray[np.float64]:
+ def logpdf(self, x: NDArray) -> NDArray[np.float64]: # noqa: D102
return np.log(self.pdf(x))
- def cdf(self, x: NDArray) -> float:
+ def cdf(self, x: NDArray) -> float: # noqa: D102
return self.ERANataf_object.cdf(X=x)
- def random(
- self, list_of_rngs: list[np.random.Generator] = [], n: int = 1
- ) -> Union[tuple[NDArray[np.float64], Any], NDArray[np.float64]]:
+ def random( # noqa: D102
+ self,
+ list_of_rngs: list[np.random.Generator] = [], # noqa: B006, FA102
+ n: int = 1,
+ ) -> Union[tuple[NDArray[np.float64], Any], NDArray[np.float64]]: # noqa: FA100, FA102
if list_of_rngs == []:
list_of_rngs = [
np.random.default_rng(seed=i)
@@ -345,38 +343,36 @@ def random(
return self.u_to_x(u)
-def get_list_of_pseudo_random_number_generators(entropy, num_spawn):
+def get_list_of_pseudo_random_number_generators(entropy, num_spawn): # noqa: D103
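+    # SeedSequence.spawn yields statistically independent child seeds, one PCG64DXSM generator per spawn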
seed_sequence = np.random.SeedSequence(entropy=entropy).spawn(num_spawn)
- prngs = [
- np.random.Generator(np.random.PCG64DXSM(s)) for s in seed_sequence
- ]
- return prngs
+ prngs = [np.random.Generator(np.random.PCG64DXSM(s)) for s in seed_sequence]
+ return prngs # noqa: RET504
-def get_parallel_pool_instance(run_type: str):
- if run_type == "runningRemote":
- from parallel_runner_mpi4py import ParallelRunnerMPI4PY
+def get_parallel_pool_instance(run_type: str): # noqa: D103
+ if run_type == 'runningRemote':
+ from parallel_runner_mpi4py import ParallelRunnerMPI4PY # noqa: PLC0415
return ParallelRunnerMPI4PY(run_type)
- else:
+ else: # noqa: RET505
return ParallelRunnerMultiprocessing(run_type)
-def make_list_of_rv_names(all_rv_data):
+def make_list_of_rv_names(all_rv_data): # noqa: D103
list_of_rv_names = []
for rv_data in all_rv_data:
- list_of_rv_names.append(rv_data["name"])
+ list_of_rv_names.append(rv_data['name']) # noqa: PERF401
return list_of_rv_names
-def get_length_of_results(edp_data):
+def get_length_of_results(edp_data): # noqa: D103
length_of_results = 0
for edp in edp_data:
- length_of_results += int(float(edp["length"]))
+ length_of_results += int(float(edp['length']))
return length_of_results
-def create_default_model(
+def create_default_model( # noqa: D103
run_directory,
list_of_dir_names_to_copy_files_from,
list_of_rv_names,
@@ -392,40 +388,41 @@ def create_default_model(
length_of_results=length_of_results,
workdir_prefix=workdir_prefix,
)
- return model
+ return model # noqa: RET504
-def get_default_model_evaluation_function(model):
+def get_default_model_evaluation_function(model): # noqa: D103
return model.evaluate_model_once
-def get_ERANataf_joint_distribution_instance(
- list_of_rv_data, correlation_matrix_data
+def get_ERANataf_joint_distribution_instance( # noqa: N802, D103
+ list_of_rv_data,
+ correlation_matrix_data,
):
joint_distribution = ERANatafJointDistribution(
list_of_rv_data, correlation_matrix_data
)
- return joint_distribution
+ return joint_distribution # noqa: RET504
-def get_std_normal_to_rv_transformation_function(joint_distribution):
+def get_std_normal_to_rv_transformation_function(joint_distribution): # noqa: D103
transformation_function = joint_distribution.u_to_x
- return transformation_function
+ return transformation_function # noqa: RET504
-def get_default_model(
+def get_default_model( # noqa: D103
list_of_rv_data,
edp_data,
list_of_dir_names_to_copy_files_from,
run_directory,
- driver_filename="driver",
- workdir_prefix="workdir",
+ driver_filename='driver',
+ workdir_prefix='workdir',
):
list_of_rv_names = make_list_of_rv_names(list_of_rv_data)
length_of_results = get_length_of_results(edp_data)
- list_of_dir_names_to_copy_files_from = list_of_dir_names_to_copy_files_from
- driver_filename = driver_filename
- workdir_prefix = workdir_prefix
+ list_of_dir_names_to_copy_files_from = list_of_dir_names_to_copy_files_from # noqa: PLW0127
+ driver_filename = driver_filename # noqa: PLW0127
+ workdir_prefix = workdir_prefix # noqa: PLW0127
model = create_default_model(
run_directory,
@@ -435,35 +432,33 @@ def get_default_model(
length_of_results,
workdir_prefix,
)
- return model
+ return model # noqa: RET504
-def model_evaluation_function(
+def model_evaluation_function( # noqa: D103
func,
list_of_iterables,
):
return func(*list_of_iterables)
-def get_random_number_generators(entropy, num_prngs):
+def get_random_number_generators(entropy, num_prngs): # noqa: D103
return get_list_of_pseudo_random_number_generators(entropy, num_prngs)
-def get_standard_normal_random_variates(list_of_prngs, size=1):
+def get_standard_normal_random_variates(list_of_prngs, size=1): # noqa: D103
return [prng.standard_normal(size=size) for prng in list_of_prngs]
-def get_inverse_gamma_random_variate(prng, shape, scale, size=1):
- return scipy.stats.invgamma.rvs(
- shape, scale=scale, size=size, random_state=prng
- )
+def get_inverse_gamma_random_variate(prng, shape, scale, size=1): # noqa: D103
+ return scipy.stats.invgamma.rvs(shape, scale=scale, size=size, random_state=prng)
-def multivariate_normal_logpdf(x, mean, cov):
+def multivariate_normal_logpdf(x, mean, cov): # noqa: D103
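+    # multivariate normal log-density: log|cov| and the Mahalanobis term are built from the eigendecomposition of cov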
eigenvalues, eigenvectors = np.linalg.eigh(cov)
logdet = np.sum(np.log(eigenvalues))
valsinv = 1.0 / eigenvalues
- U = eigenvectors * np.sqrt(valsinv)
+ U = eigenvectors * np.sqrt(valsinv) # noqa: N806
dim = len(eigenvalues)
dev = x - mean
maha = np.square(dev.T @ U).sum()
@@ -472,7 +467,7 @@ def multivariate_normal_logpdf(x, mean, cov):
@dataclass
-class NormalInverseWishartParameters:
+class NormalInverseWishartParameters: # noqa: D101
mu_vector: npt.NDArray
lambda_scalar: float
nu_scalar: float
@@ -480,7 +475,7 @@ class NormalInverseWishartParameters:
@dataclass
-class InverseGammaParameters:
+class InverseGammaParameters: # noqa: D101
alpha_scalar: float
beta_scalar: float
@@ -489,7 +484,8 @@ def _to_shape_and_scale(self):
def _get_tabular_results_file_name_for_dataset(
- tabular_results_file_base_name, dataset_number
+ tabular_results_file_base_name,
+ dataset_number,
):
tabular_results_parent = tabular_results_file_base_name.parent
tabular_results_stem = tabular_results_file_base_name.stem
@@ -497,11 +493,11 @@ def _get_tabular_results_file_name_for_dataset(
tabular_results_file = (
tabular_results_parent
- / f"{tabular_results_stem}_dataset_{dataset_number+1}{tabular_results_extension}"
+ / f'{tabular_results_stem}_dataset_{dataset_number + 1}{tabular_results_extension}'
)
- return tabular_results_file
+ return tabular_results_file # noqa: RET504
def _write_to_tabular_results_file(tabular_results_file, string_to_write):
- with tabular_results_file.open("a") as f:
+ with tabular_results_file.open('a') as f:
f.write(string_to_write)
diff --git a/modules/performUQ/dakota/DakotaFEM.py b/modules/performUQ/dakota/DakotaFEM.py
index 6ff3048ec..11de4469d 100644
--- a/modules/performUQ/dakota/DakotaFEM.py
+++ b/modules/performUQ/dakota/DakotaFEM.py
@@ -1,44 +1,48 @@
-import os, sys, json
+import json # noqa: CPY001, D100, INP001
+import os
+import sys
-sys.path.insert(0, os.path.dirname(os.path.realpath(__file__)))
+sys.path.insert(0, os.path.dirname(os.path.realpath(__file__))) # noqa: PTH120
-from random import randrange
+import argparse
import platform
import shutil
-import subprocess
import stat
-import argparse
+import subprocess # noqa: S404
+from random import randrange
+
from preprocessJSON import preProcessDakota
-def str2bool(v):
+
+def str2bool(v): # noqa: D103
# courtesy of Maxim @ stackoverflow
if isinstance(v, bool):
- return v
- if v.lower() in ('yes', 'true', 'True', 't', 'y', '1'):
+ return v
+    if v.lower() in ('yes', 'true', 't', 'y', '1'):  # noqa: PLR6201
return True
- elif v.lower() in ('no', 'false', 'False', 'f', 'n', '0'):
+    elif v.lower() in ('no', 'false', 'f', 'n', '0'):  # noqa: PLR6201, RET505
return False
else:
- raise argparse.ArgumentTypeError('Boolean value expected.')
+ raise argparse.ArgumentTypeError('Boolean value expected.') # noqa: EM101, TRY003
-def main(args):
- #First we need to set the path and environment
- home = os.path.expanduser('~')
+def main(args): # noqa: C901, D103
+ # First we need to set the path and environment
+ home = os.path.expanduser('~') # noqa: PTH111
env = os.environ
- if os.getenv("PEGASUS_WF_UUID") is not None:
- print("Pegasus job detected - Pegasus will set up the env")
+ if os.getenv('PEGASUS_WF_UUID') is not None:
+ print('Pegasus job detected - Pegasus will set up the env') # noqa: T201
elif platform.system() == 'Darwin':
- env["PATH"] = env["PATH"] + ':{}/bin'.format(home)
- env["PATH"] = env["PATH"] + ':{}/dakota/bin'.format(home)
+ env['PATH'] = env['PATH'] + f':{home}/bin' # noqa: PLR6104
+ env['PATH'] = env['PATH'] + f':{home}/dakota/bin' # noqa: PLR6104
elif platform.system() == 'Linux':
- env["PATH"] = env["PATH"] + ':{}/bin'.format(home)
- env["PATH"] = env["PATH"] + ':{}/dakota/dakota-6.5/bin'.format(home)
+ env['PATH'] = env['PATH'] + f':{home}/bin' # noqa: PLR6104
+ env['PATH'] = env['PATH'] + f':{home}/dakota/dakota-6.5/bin' # noqa: PLR6104
elif platform.system() == 'Windows':
pass
else:
- print("PLATFORM {} NOT RECOGNIZED".format(platform.system))
+        print(f'PLATFORM {platform.system()} NOT RECOGNIZED')  # noqa: T201
parser = argparse.ArgumentParser()
@@ -50,9 +54,9 @@ def main(args):
parser.add_argument('--driverFile')
- parser.add_argument('--method', default="LHS")
+ parser.add_argument('--method', default='LHS')
parser.add_argument('--samples', type=int, default=None)
- parser.add_argument('--seed', type=int, default=randrange(1,1000))
+ parser.add_argument('--seed', type=int, default=randrange(1, 1000))
parser.add_argument('--samples2', type=int, default=None)
parser.add_argument('--seed2', type=int, default=None)
parser.add_argument('--ismethod', default=None)
@@ -65,87 +69,92 @@ def main(args):
parser.add_argument('--detailedLog', default=False, type=str2bool)
parser.add_argument('--runType')
- args,unknowns = parser.parse_known_args()
-
- #Reading input arguments
- aimName = args.filenameAIM
- samName = args.filenameSAM
- evtName = args.filenameEVENT
- edpName = args.filenameEDP
- simName = args.filenameSIM
- driverFile = args.driverFile
-
- uqData = dict(
- method = args.method,
-
- samples = args.samples,
- samples2 = args.samples2,
- seed = args.seed,
- seed2 = args.seed2,
- ismethod = args.ismethod,
- dataMethod = args.dataMethod,
- dataMethod2 = args.dataMethod2,
-
- concurrency = args.concurrency,
- keepSamples = args.keepSamples
+ args, unknowns = parser.parse_known_args() # noqa: F841
+
+ # Reading input arguments
+ aimName = args.filenameAIM # noqa: N806
+ samName = args.filenameSAM # noqa: N806
+ evtName = args.filenameEVENT # noqa: N806
+ edpName = args.filenameEDP # noqa: N806
+ simName = args.filenameSIM # noqa: N806
+ driverFile = args.driverFile # noqa: N806
+
+ uqData = dict( # noqa: C408, N806
+ method=args.method,
+ samples=args.samples,
+ samples2=args.samples2,
+ seed=args.seed,
+ seed2=args.seed2,
+ ismethod=args.ismethod,
+ dataMethod=args.dataMethod,
+ dataMethod2=args.dataMethod2,
+ concurrency=args.concurrency,
+ keepSamples=args.keepSamples,
)
- if uqData['samples'] is None: # this happens when the uq details are stored at the wrong place in the AIM file
- with open(aimName, 'r', encoding='utf-8') as data_file:
+ if (
+ uqData['samples'] is None
+ ): # this happens when the uq details are stored at the wrong place in the AIM file
+ with open(aimName, encoding='utf-8') as data_file: # noqa: PTH123
uq_info = json.load(data_file)['UQ']
- if 'samplingMethodData' in uq_info.keys():
+ if 'samplingMethodData' in uq_info.keys(): # noqa: SIM118
uq_info = uq_info['samplingMethodData']
- for attribute in uqData.keys():
- if attribute not in ['concurrency', 'keepSamples']:
+ for attribute in uqData:
+ if attribute not in ['concurrency', 'keepSamples']: # noqa: PLR6201
uqData[attribute] = uq_info.get(attribute, None)
- runDakota = args.runType
+ runDakota = args.runType # noqa: N806
- #Run Preprocess for Dakota
- scriptDir = os.path.dirname(os.path.realpath(__file__))
- numRVs = preProcessDakota(aimName, evtName, samName, edpName, simName, driverFile, runDakota, uqData)
-
- #Setting Workflow Driver Name
- workflowDriverName = 'workflow_driver'
- if ((platform.system() == 'Windows') and (runDakota == 'run')):
- workflowDriverName = 'workflow_driver.bat'
+ # Run Preprocess for Dakota
+ scriptDir = os.path.dirname(os.path.realpath(__file__)) # noqa: PTH120, N806, F841
+ numRVs = preProcessDakota( # noqa: N806, F841
+ aimName, evtName, samName, edpName, simName, driverFile, runDakota, uqData
+ )
- #Create Template Directory and copy files
- st = os.stat(workflowDriverName)
- os.chmod(workflowDriverName, st.st_mode | stat.S_IEXEC)
- #shutil.copy(workflowDriverName, "templatedir")
- #shutil.copy("{}/dpreproSimCenter".format(scriptDir), os.getcwd())
- shutil.move(aimName, "aim.j")
- shutil.move(evtName, "evt.j")
- if os.path.isfile(samName): shutil.move(samName, "sam.j")
- shutil.move(edpName, "edp.j")
- #if os.path.isfile(simName): shutil.move(simName, "sim.j")
+ # Setting Workflow Driver Name
+ workflowDriverName = 'workflow_driver' # noqa: N806
+ if (platform.system() == 'Windows') and (runDakota == 'run'):
+ workflowDriverName = 'workflow_driver.bat' # noqa: N806
+
+ # Create Template Directory and copy files
+ st = os.stat(workflowDriverName) # noqa: PTH116
+ os.chmod(workflowDriverName, st.st_mode | stat.S_IEXEC) # noqa: PTH101
+ # shutil.copy(workflowDriverName, "templatedir")
+ # shutil.copy("{}/dpreproSimCenter".format(scriptDir), os.getcwd())
+ shutil.move(aimName, 'aim.j')
+ shutil.move(evtName, 'evt.j')
+ if os.path.isfile(samName): # noqa: PTH113
+ shutil.move(samName, 'sam.j')
+ shutil.move(edpName, 'edp.j')
+ # if os.path.isfile(simName): shutil.move(simName, "sim.j")
# copy the dakota input file to the main working dir for the structure
- shutil.move("dakota.in", "../")
+ shutil.move('dakota.in', '../')
# change dir to the main working dir for the structure
- os.chdir("../")
-
- if runDakota == "run":
+ os.chdir('../')
- dakotaCommand = "dakota -input dakota.in -output dakota.out -error dakota.err"
- print('running Dakota: ', dakotaCommand)
+ if runDakota == 'run':
+ dakotaCommand = ( # noqa: N806
+ 'dakota -input dakota.in -output dakota.out -error dakota.err'
+ )
+ print('running Dakota: ', dakotaCommand) # noqa: T201
try:
- result = subprocess.check_output(dakotaCommand, stderr=subprocess.STDOUT, shell=True)
+ result = subprocess.check_output( # noqa: S602
+ dakotaCommand, stderr=subprocess.STDOUT, shell=True
+ )
returncode = 0
except subprocess.CalledProcessError as e:
result = e.output
returncode = e.returncode
- if args.detailedLog: # print detailed output if detailed log is requested
-
+ if args.detailedLog: # print detailed output if detailed log is requested
if platform.system() == 'Windows':
result = result.decode(sys.stdout.encoding)
- print(result, returncode)
+ print(result, returncode) # noqa: T201
-if __name__ == '__main__':
+if __name__ == '__main__':
main(sys.argv[1:])
diff --git a/modules/performUQ/dakota/DakotaFEM1.py b/modules/performUQ/dakota/DakotaFEM1.py
index 9f447b71f..2aea7cf30 100644
--- a/modules/performUQ/dakota/DakotaFEM1.py
+++ b/modules/performUQ/dakota/DakotaFEM1.py
@@ -1,40 +1,41 @@
-# import functions for Python 2.X support
-from __future__ import division, print_function
-import sys, os
-if sys.version.startswith('2'):
- range=xrange
- string_types = basestring
+# import functions for Python 2.X support # noqa: CPY001, D100, INP001
+import os
+import sys
+
+if sys.version.startswith('2'):
+ range = xrange # noqa: A001, F821
+ string_types = basestring # noqa: F821
else:
string_types = str
-sys.path.insert(0, os.path.dirname(os.path.realpath(__file__)))
+sys.path.insert(0, os.path.dirname(os.path.realpath(__file__))) # noqa: PTH120
-import json
-import numpy as np
+import argparse
import platform
import shutil
-import subprocess
import stat
-import argparse
+import subprocess # noqa: S404
+
+import numpy as np
from preprocessJSON import preProcessDakota
-def main(args):
- #First we need to set the path and environment
- home = os.path.expanduser('~')
+def main(args): # noqa: D103
+ # First we need to set the path and environment
+ home = os.path.expanduser('~') # noqa: PTH111
env = os.environ
- if os.getenv("PEGASUS_WF_UUID") is not None:
- print("Pegasus job detected - Pegasus will set up the env")
+ if os.getenv('PEGASUS_WF_UUID') is not None:
+ print('Pegasus job detected - Pegasus will set up the env') # noqa: T201
elif platform.system() == 'Darwin':
- env["PATH"] = env["PATH"] + ':{}/bin'.format(home)
- env["PATH"] = env["PATH"] + ':{}/dakota/bin'.format(home)
+ env['PATH'] = env['PATH'] + f':{home}/bin' # noqa: PLR6104
+ env['PATH'] = env['PATH'] + f':{home}/dakota/bin' # noqa: PLR6104
elif platform.system() == 'Linux':
- env["PATH"] = env["PATH"] + ':{}/bin'.format(home)
- env["PATH"] = env["PATH"] + ':{}/dakota/dakota-6.5/bin'.format(home)
+ env['PATH'] = env['PATH'] + f':{home}/bin' # noqa: PLR6104
+ env['PATH'] = env['PATH'] + f':{home}/dakota/dakota-6.5/bin' # noqa: PLR6104
elif platform.system() == 'Windows':
pass
else:
- print("PLATFORM {} NOT RECOGNIZED".format(platform.system))
+        print(f'PLATFORM {platform.system()} NOT RECOGNIZED')  # noqa: T201
parser = argparse.ArgumentParser()
@@ -43,12 +44,12 @@ def main(args):
parser.add_argument('--filenameEVENT')
parser.add_argument('--filenameEDP')
parser.add_argument('--filenameSIM')
-
+
parser.add_argument('--driverFile')
-
- parser.add_argument('--method', default="LHS")
+
+ parser.add_argument('--method', default='LHS')
parser.add_argument('--samples', type=int, default=None)
- parser.add_argument('--seed', type=int, default=np.random.randint(1,1000))
+ parser.add_argument('--seed', type=int, default=np.random.randint(1, 1000))
parser.add_argument('--ismethod', default=None)
parser.add_argument('--dataMethod', default=None)
@@ -58,90 +59,101 @@ def main(args):
parser.add_argument('--samplingSamples', type=int, default=None)
parser.add_argument('--samplingSeed', type=int, default=None)
parser.add_argument('--samplingMethod', default=None)
-
+
parser.add_argument('--type')
parser.add_argument('--concurrency', type=int, default=None)
- parser.add_argument('--keepSamples', default="True")
+ parser.add_argument('--keepSamples', default='True')
parser.add_argument('--runType')
-
- args,unknowns = parser.parse_known_args()
-
- #Reading input arguments
- aimName = args.filenameBIM
- samName = args.filenameSAM
- evtName = args.filenameEVENT
- edpName = args.filenameEDP
- simName = args.filenameSIM
- driverFile = args.driverFile
-
- uqData = dict(
- method = args.method,
-
- samples = args.samples,
- seed = args.seed,
- ismethod = args.ismethod,
- dataMethod = args.dataMethod,
-
- samplingSamples = args.samplingSamples,
- samplingSeed = args.samplingSeed,
- samplingMethod = args.samplingMethod,
- trainingSamples = args.trainingSamples,
- trainingSeed = args.trainingSeed,
- trainingMethod = args.trainingMethod,
-
- concurrency = args.concurrency,
- keepSamples = args.keepSamples not in ["False", 'False', "false", 'false', False]
+
+ args, unknowns = parser.parse_known_args() # noqa: F841
+
+ # Reading input arguments
+ aimName = args.filenameBIM # noqa: N806
+ samName = args.filenameSAM # noqa: N806
+ evtName = args.filenameEVENT # noqa: N806
+ edpName = args.filenameEDP # noqa: N806
+ simName = args.filenameSIM # noqa: N806
+ driverFile = args.driverFile # noqa: N806
+
+ uqData = dict( # noqa: C408, N806
+ method=args.method,
+ samples=args.samples,
+ seed=args.seed,
+ ismethod=args.ismethod,
+ dataMethod=args.dataMethod,
+ samplingSamples=args.samplingSamples,
+ samplingSeed=args.samplingSeed,
+ samplingMethod=args.samplingMethod,
+ trainingSamples=args.trainingSamples,
+ trainingSeed=args.trainingSeed,
+ trainingMethod=args.trainingMethod,
+ concurrency=args.concurrency,
+ keepSamples=args.keepSamples
+        not in ['False', 'false', False],  # noqa: PLR6201
)
- runDakota = args.runType
+ runDakota = args.runType # noqa: N806
- myScriptDir = os.path.dirname(os.path.realpath(__file__))
+ myScriptDir = os.path.dirname(os.path.realpath(__file__)) # noqa: PTH120, N806
# desktop applications
- if uqData['samples'] is None: # this happens with new applications, workflow to change
-
- print("RUNNING PREPROCESSOR\n")
- osType = platform.system()
- preprocessorCommand = '"{}/preprocessDakota" {} {} {} {} {} {} {} {}'.format(myScriptDir, aimName, samName, evtName, edpName, simName, driverFile, runDakota, osType)
- subprocess.Popen(preprocessorCommand, shell=True).wait()
- print("DONE RUNNING PREPROCESSOR\n")
+ if (
+ uqData['samples'] is None
+ ): # this happens with new applications, workflow to change
+ print('RUNNING PREPROCESSOR\n') # noqa: T201
+ osType = platform.system() # noqa: N806
+ preprocessorCommand = f'"{myScriptDir}/preprocessDakota" {aimName} {samName} {evtName} {edpName} {simName} {driverFile} {runDakota} {osType}' # noqa: N806
+ subprocess.Popen(preprocessorCommand, shell=True).wait() # noqa: S602
+ print('DONE RUNNING PREPROCESSOR\n') # noqa: T201
else:
-
- scriptDir = os.path.dirname(os.path.realpath(__file__))
- numRVs = preProcessDakota(aimName, evtName, samName, edpName, simName, driverFile, runDakota, uqData)
-
- shutil.move(aimName, "aim.j")
- shutil.move(evtName, "evt.j")
- if os.path.isfile(samName): shutil.move(samName, "sam.j")
- shutil.move(edpName, "edp.j")
-
- #Setting Workflow Driver Name
- workflowDriverName = 'workflow_driver'
- if ((platform.system() == 'Windows') and (runDakota == 'runningLocal')):
- workflowDriverName = 'workflow_driver.bat'
-
- #Change permision of workflow driver
- st = os.stat(workflowDriverName)
- os.chmod(workflowDriverName, st.st_mode | stat.S_IEXEC)
+ scriptDir = os.path.dirname(os.path.realpath(__file__)) # noqa: PTH120, N806, F841
+ numRVs = preProcessDakota( # noqa: N806, F841
+ aimName,
+ evtName,
+ samName,
+ edpName,
+ simName,
+ driverFile,
+ runDakota,
+ uqData,
+ )
+
+ shutil.move(aimName, 'aim.j')
+ shutil.move(evtName, 'evt.j')
+ if os.path.isfile(samName): # noqa: PTH113
+ shutil.move(samName, 'sam.j')
+ shutil.move(edpName, 'edp.j')
+
+ # Setting Workflow Driver Name
+ workflowDriverName = 'workflow_driver' # noqa: N806
+ if (platform.system() == 'Windows') and (runDakota == 'runningLocal'):
+ workflowDriverName = 'workflow_driver.bat' # noqa: N806
+
+ # Change permission of workflow driver
+ st = os.stat(workflowDriverName) # noqa: PTH116
+ os.chmod(workflowDriverName, st.st_mode | stat.S_IEXEC) # noqa: PTH101
# copy the dakota input file to the main working dir for the structure
- shutil.move("dakota.in", "../")
+ shutil.move('dakota.in', '../')
# change dir to the main working dir for the structure
- os.chdir("../")
-
- if runDakota == "runningLocal":
+ os.chdir('../')
- dakotaCommand = "dakota -input dakota.in -output dakota.out -error dakota.err"
- print('running Dakota: ', dakotaCommand)
+ if runDakota == 'runningLocal':
+ dakotaCommand = ( # noqa: N806
+ 'dakota -input dakota.in -output dakota.out -error dakota.err'
+ )
+ print('running Dakota: ', dakotaCommand) # noqa: T201
try:
- result = subprocess.check_output(dakotaCommand, stderr=subprocess.STDOUT, shell=True)
+ result = subprocess.check_output( # noqa: S602
+ dakotaCommand, stderr=subprocess.STDOUT, shell=True
+ )
returncode = 0
except subprocess.CalledProcessError as e:
- result = e.output
- returncode = e.returncode
+ result = e.output # noqa: F841
+ returncode = e.returncode # noqa: F841
-if __name__ == '__main__':
+if __name__ == '__main__':
main(sys.argv[1:])
diff --git a/modules/performUQ/dakota/DakotaUQ.py b/modules/performUQ/dakota/DakotaUQ.py
index 96b358794..92362948d 100644
--- a/modules/performUQ/dakota/DakotaUQ.py
+++ b/modules/performUQ/dakota/DakotaUQ.py
@@ -1,155 +1,155 @@
-# written: UQ team @ SimCenter
+# written: UQ team @ SimCenter # noqa: CPY001, D100, INP001
# import functions for Python 2.X support
# from __future__ import division, print_function
# import sys
-# if sys.version.startswith('2'):
+# if sys.version.startswith('2'):
# range=xrange
# string_types = basestring
# else:
# string_types = str
-import shutil
+import argparse
import json
import os
+import platform
+import shutil
import stat
+import subprocess # noqa: S404
import sys
-import platform
-from subprocess import Popen, PIPE
-import subprocess
-import glob
-import argparse
-def main(args):
+def main(args): # noqa: C901, D103
parser = argparse.ArgumentParser()
parser.add_argument('--workflowInput')
- parser.add_argument('--workflowOutput')
+ parser.add_argument('--workflowOutput')
parser.add_argument('--driverFile')
parser.add_argument('--runType')
- args,unknowns = parser.parse_known_args()
+ args, unknowns = parser.parse_known_args() # noqa: F841
- inputFile = args.workflowInput
- runType = args.runType
+ inputFile = args.workflowInput # noqa: N806
+ runType = args.runType # noqa: N806
workflow_driver = args.driverFile
- outputFile = args.workflowOutput
+ outputFile = args.workflowOutput # noqa: N806, F841
#
# open input file and check for any rvFiles
# - need to know in case need to modify driver file
#
-
- with open(inputFile, 'r', encoding='utf-8') as f:
+
+ with open(inputFile, encoding='utf-8') as f: # noqa: PTH123
data = json.load(f)
-
+
workflow_driver1 = 'blank'
# run on local computer
- osType = platform.system()
- if runType in ['runningLocal',]:
-
- if (sys.platform == 'darwin' or sys.platform == "linux" or sys.platform == "linux2"):
- Dakota = 'dakota'
+ osType = platform.system() # noqa: N806
+ if runType == 'runningLocal':
+ if (
+ sys.platform == 'darwin'
+ or sys.platform == 'linux'
+ or sys.platform == 'linux2'
+ ):
+ Dakota = 'dakota' # noqa: N806
workflow_driver1 = 'workflow_driver1'
- osType = 'Linux'
+ osType = 'Linux' # noqa: N806
else:
- Dakota = 'dakota'
- workflow_driver = workflow_driver + ".bat"
+ Dakota = 'dakota' # noqa: N806
+ workflow_driver = workflow_driver + '.bat' # noqa: PLR6104
workflow_driver1 = 'workflow_driver1.bat'
- osType = 'Windows'
+ osType = 'Windows' # noqa: N806
- elif runType in ['runningRemote',]:
- Dakota = 'dakota'
+ elif runType == 'runningRemote':
+ Dakota = 'dakota' # noqa: N806
workflow_driver1 = 'workflow_driver1'
- osType = 'Linux'
+ osType = 'Linux' # noqa: N806
+
+ cwd = os.getcwd() # noqa: PTH109
+ print('CWD: ' + cwd) # noqa: T201
- cwd = os.getcwd()
- print('CWD: ' + cwd)
+ thisScriptDir = os.path.dirname(os.path.realpath(__file__)) # noqa: PTH120, N806
- thisScriptDir = os.path.dirname(os.path.realpath(__file__))
-
- preprocessorCommand = '"{}/preprocessDakota" "{}" "{}" "{}" "{}" "{}" '.format(thisScriptDir,
- inputFile,
- workflow_driver,
- workflow_driver1,
- runType,
- osType)
+ preprocessorCommand = f'"{thisScriptDir}/preprocessDakota" "{inputFile}" "{workflow_driver}" "{workflow_driver1}" "{runType}" "{osType}" ' # noqa: N806
- subprocess.Popen(preprocessorCommand, shell=True).wait()
+ subprocess.Popen(preprocessorCommand, shell=True).wait() # noqa: S602
- if runType in ['runningLocal']:
- os.chmod(workflow_driver, stat.S_IWUSR | stat.S_IXUSR | stat.S_IRUSR | stat.S_IXOTH)
- os.chmod(workflow_driver1, stat.S_IWUSR | stat.S_IXUSR | stat.S_IRUSR | stat.S_IXOTH)
+ if runType == 'runningLocal':
+ os.chmod( # noqa: PTH101
+ workflow_driver,
+ stat.S_IWUSR | stat.S_IXUSR | stat.S_IRUSR | stat.S_IXOTH,
+ )
+ os.chmod( # noqa: PTH101
+ workflow_driver1,
+ stat.S_IWUSR | stat.S_IXUSR | stat.S_IRUSR | stat.S_IXOTH,
+ )
- command = Dakota + ' -input dakota.in -output dakota.out -error dakota.err'
+ command = Dakota + ' -input dakota.in -output dakota.out -error dakota.err' # noqa: F841
- #Change permission of workflow driver
- st = os.stat(workflow_driver)
- os.chmod(workflow_driver, st.st_mode | stat.S_IEXEC)
- os.chmod(workflow_driver1, st.st_mode | stat.S_IEXEC)
+ # Change permission of workflow driver
+ st = os.stat(workflow_driver) # noqa: PTH116
+ os.chmod(workflow_driver, st.st_mode | stat.S_IEXEC) # noqa: PTH101
+ os.chmod(workflow_driver1, st.st_mode | stat.S_IEXEC) # noqa: PTH101
# copy the dakota input file to the main working dir for the structure
- shutil.copy("dakota.in", "../")
+ shutil.copy('dakota.in', '../')
# If calibration data files exist, copy to the main working directory
- if os.path.isfile("calibrationDataFilesToMove.cal"):
- calDataFileList = open("calibrationDataFilesToMove.cal", 'r')
- datFileList = calDataFileList.readlines()
+ if os.path.isfile('calibrationDataFilesToMove.cal'): # noqa: PTH113
+ calDataFileList = open('calibrationDataFilesToMove.cal') # noqa: N806, PLW1514, PTH123, SIM115
+ datFileList = calDataFileList.readlines() # noqa: N806
for line in datFileList:
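+            # each listed file is copied to the parent directory; a trailing '.tmpFile' marker (8 chars) is stripped on the way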
- datFile = line.strip()
+ datFile = line.strip() # noqa: N806
if datFile.split('.')[-1] == 'tmpFile':
- shutil.copy(datFile, "../{}".format(datFile[:-8]))
+ shutil.copy(datFile, f'../{datFile[:-8]}')
else:
- shutil.copy(datFile, "../")
+ shutil.copy(datFile, '../')
# os.remove("calibrationDataFilesToMove.cal")
# change dir to the main working dir for the structure
- os.chdir("../")
+ os.chdir('../')
- cwd = os.getcwd()
+ cwd = os.getcwd() # noqa: PTH109
- if runType in ['runningLocal']:
-
+ if runType == 'runningLocal':
# p = Popen(command, stdout=PIPE, stderr=PIPE, shell=True)
# for line in p.stdout:
# print(str(line))
-
- dakotaCommand = "dakota -input dakota.in -output dakota.out -error dakota.err"
+
+ dakotaCommand = ( # noqa: N806
+ 'dakota -input dakota.in -output dakota.out -error dakota.err'
+ )
if 'parType' in data:
- print(data['parType'])
+ print(data['parType']) # noqa: T201
if data['parType'] == 'parRUN':
- dakotaCommand = data['mpiExec'] + ' -n 1 ' + dakotaCommand
+ dakotaCommand = data['mpiExec'] + ' -n 1 ' + dakotaCommand # noqa: N806
- print('running Dakota: ', dakotaCommand)
+ print('running Dakota: ', dakotaCommand) # noqa: T201
try:
- result = subprocess.check_output(dakotaCommand, stderr=subprocess.STDOUT, shell=True)
+ result = subprocess.check_output( # noqa: S602
+ dakotaCommand, stderr=subprocess.STDOUT, shell=True
+ )
returncode = 0
except subprocess.CalledProcessError as e:
result = e.output
- print('RUNNING DAKOTA ERROR: ', result)
- returncode = e.returncode
-
+ print('RUNNING DAKOTA ERROR: ', result) # noqa: T201
+ returncode = e.returncode # noqa: F841
- dakotaErrFile = os.path.join(os.getcwd(), 'dakota.err');
- dakotaOutFile = os.path.join(os.getcwd(), 'dakota.out');
+ dakotaErrFile = os.path.join(os.getcwd(), 'dakota.err') # noqa: PTH109, PTH118, N806
+ dakotaOutFile = os.path.join(os.getcwd(), 'dakota.out') # noqa: PTH109, PTH118, N806
+ checkErrFile = os.path.getsize(dakotaErrFile) # noqa: PTH202, N806
+ checkOutFile = os.path.exists(dakotaOutFile) # noqa: PTH110, N806
- checkErrFile = os.path.getsize(dakotaErrFile)
- checkOutFile = os.path.exists(dakotaOutFile)
-
- if(checkOutFile == False and checkErrFile == 0 ):
- with open(dakotaErrFile, 'a') as file:
- file.write(result.decode("utf-8"))
+ if checkOutFile == False and checkErrFile == 0: # noqa: E712
+ with open(dakotaErrFile, 'a') as file: # noqa: PLW1514, PTH123
+ file.write(result.decode('utf-8'))
else:
pass
-
if __name__ == '__main__':
-
main(sys.argv[1:])
-
diff --git a/modules/performUQ/dakota/dpreproSimCenter b/modules/performUQ/dakota/dpreproSimCenter
index c77374404..44a844c2d 100755
--- a/modules/performUQ/dakota/dpreproSimCenter
+++ b/modules/performUQ/dakota/dpreproSimCenter
@@ -62,7 +62,7 @@ open (DAKOTA_PARAMS, "<$params_file") || die "Can't open $params_file: $!";
# tag/value pairs in %values_p1. Numeric fields from DAKOTA are either integral
# or exponential notation and there is only one tag/value pair per line
# (exception: standard format up to DAKOTA v3.3 uses "# variables # functions"
-# on the first line). However, to accomodate the possibility that parameters
+# on the first line). However, to accommodate the possibility that parameters
# files may be generated from other sources, floats are also supported and
# multiple tag/value pairs are allowed per line. Compound expressions are not
# currently allowed.
@@ -355,7 +355,7 @@ sub format_value {
my $local_format = shift(@_);
# optional $local_format for the field, extracted from template,
- # takes precendence over optional $output_format defined by
+ # takes precedence over optional $output_format defined by
# command line option
if (defined $local_format) {
diff --git a/modules/performUQ/other/HeirBayesRunner.py b/modules/performUQ/other/HeirBayesRunner.py
index c616f5062..1f8631da2 100644
--- a/modules/performUQ/other/HeirBayesRunner.py
+++ b/modules/performUQ/other/HeirBayesRunner.py
@@ -1,15 +1,15 @@
-# written: Aakash Bangalore Satish @ NHERI SimCenter, UC Berkeley
+# written: Aakash Bangalore Satish @ NHERI SimCenter, UC Berkeley # noqa: CPY001, D100, INP001
+import importlib
import json
import os
import sys
import time
-import importlib
from uqRunner import UqRunner
-class HeirBayesRunner(UqRunner):
+class HeirBayesRunner(UqRunner): # noqa: D101
def __init__(self) -> None:
super().__init__()
self.n_samples = 0
@@ -17,64 +17,68 @@ def __init__(self) -> None:
self.tuning_interval = 0
self.seed = 0
- def storeUQData(self, uqData):
- for val in uqData["Parameters"]:
- if val["name"] == "File To Run":
- self.file_to_run = val["value"]
- elif val["name"] == "# Samples":
- self.n_samples = int(val["value"])
- elif val["name"] == "# Burn-in":
- self.n_burn_in = int(val["value"])
- elif val["name"] == "Tuning Interval":
- self.tuning_interval = int(val["value"])
- elif val["name"] == "Seed":
- self.seed = int(val["value"])
-
- def performHeirBayesSampling(self):
- self.dir_name = os.path.dirname(self.file_to_run)
+ def storeUQData(self, uqData): # noqa: N802, N803, D102
+ for val in uqData['Parameters']:
+ if val['name'] == 'File To Run':
+ self.file_to_run = val['value']
+ elif val['name'] == '# Samples':
+ self.n_samples = int(val['value'])
+ elif val['name'] == '# Burn-in':
+ self.n_burn_in = int(val['value'])
+ elif val['name'] == 'Tuning Interval':
+ self.tuning_interval = int(val['value'])
+ elif val['name'] == 'Seed':
+ self.seed = int(val['value'])
+
+ def performHeirBayesSampling(self): # noqa: N802, D102
+ self.dir_name = os.path.dirname(self.file_to_run) # noqa: PTH120
sys.path.append(self.dir_name)
- module_name = os.path.basename(self.file_to_run)
+ module_name = os.path.basename(self.file_to_run) # noqa: PTH119
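+        # strip the trailing '.py' and import the user-supplied sampler module (its directory was added to sys.path above)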
module = importlib.import_module(module_name[:-3])
self.heir_code = module.HeirBayesSampler()
- self.trace, self.time_taken, self.inf_object, self.num_coupons = self.heir_code.perform_sampling(
- n_samples=self.n_samples,
- n_burn_in=self.n_burn_in,
- tuning_interval=self.tuning_interval,
- seed=self.seed
+ self.trace, self.time_taken, self.inf_object, self.num_coupons = (
+ self.heir_code.perform_sampling(
+ n_samples=self.n_samples,
+ n_burn_in=self.n_burn_in,
+ tuning_interval=self.tuning_interval,
+ seed=self.seed,
+ )
)
- def saveResultsToPklFile(self):
+ def saveResultsToPklFile(self): # noqa: N802, D102
self.saved_pickle_filename = self.heir_code.save_results(
- self.trace, self.time_taken, self.inf_object, prefix="synthetic_data"
+ self.trace, self.time_taken, self.inf_object, prefix='synthetic_data'
)
- def createHeadingStringsList(self):
- self.params = ["fy", "E", "b", "cR1", "cR2", "a1", "a3"]
+ def createHeadingStringsList(self): # noqa: N802, D102
+ self.params = ['fy', 'E', 'b', 'cR1', 'cR2', 'a1', 'a3']
self.num_params = len(self.params)
- self.heading_list = ["Sample#", "interface"]
+ self.heading_list = ['Sample#', 'interface']
for i in range(self.num_coupons):
for j in range(self.num_params):
self.heading_list.append(
- "".join(["Coupon_", str(i + 1), "_", self.params[j]])
+ ''.join(['Coupon_', str(i + 1), '_', self.params[j]])
)
for row in range(self.num_params):
for col in range(row + 1):
- self.heading_list.append("".join(["Cov_", str(row + 1), str(col + 1)]))
+ self.heading_list.append(
+ ''.join(['Cov_', str(row + 1), str(col + 1)])
+ )
for par in self.params:
- self.heading_list.append("".join(["Mean_", par]))
+ self.heading_list.append(''.join(['Mean_', par])) # noqa: FLY002
for sig in range(self.num_coupons):
- self.heading_list.append("".join(["ErrorVariance_", str(sig + 1)]))
+ self.heading_list.append(''.join(['ErrorVariance_', str(sig + 1)]))
- def makeHeadingRow(self, separator="\t"):
- self.headingRow = separator.join([item for item in self.heading_list])
+ def makeHeadingRow(self, separator='\t'): # noqa: N802, D102
+ self.headingRow = separator.join([item for item in self.heading_list]) # noqa: C416
- def makeOneRowString(self, sample_num, sample, separator="\t"):
- initial_string = separator.join([str(sample_num), "1"])
+ def makeOneRowString(self, sample_num, sample, separator='\t'): # noqa: N802, D102
+ initial_string = separator.join([str(sample_num), '1'])
coupon_string = separator.join(
[
str(sample[i][j])
@@ -101,65 +105,68 @@ def makeOneRowString(self, sample_num, sample, separator="\t"):
row_string = separator.join(
[initial_string, coupon_string, cov_string, mean_string, error_string]
)
- return row_string
+ return row_string # noqa: RET504
- def makeTabularResultsFile(self, save_file_name="tabularResults.out", separator="\t"):
+ def makeTabularResultsFile( # noqa: N802, D102
+ self,
+ save_file_name='tabularResults.out',
+ separator='\t',
+ ):
self.createHeadingStringsList()
self.makeHeadingRow(separator=separator)
- cwd = os.getcwd()
- save_file_dir = os.path.dirname(cwd)
- save_file_full_path = os.path.join(save_file_dir, save_file_name)
- with open(save_file_full_path, "w") as f:
+ cwd = os.getcwd() # noqa: PTH109
+ save_file_dir = os.path.dirname(cwd) # noqa: PTH120
+ save_file_full_path = os.path.join(save_file_dir, save_file_name) # noqa: PTH118
+ with open(save_file_full_path, 'w') as f: # noqa: PLW1514, PTH123
f.write(self.headingRow)
- f.write("\n")
+ f.write('\n')
for sample_num, sample in enumerate(self.trace):
row = self.makeOneRowString(
sample_num=sample_num, sample=sample, separator=separator
)
f.write(row)
- f.write("\n")
+ f.write('\n')
- def startTimer(self):
+ def startTimer(self): # noqa: N802, D102
self.startingTime = time.time()
- def computeTimeElapsed(self):
+ def computeTimeElapsed(self): # noqa: N802, D102
self.timeElapsed = time.time() - self.startingTime
- def printTimeElapsed(self):
+ def printTimeElapsed(self): # noqa: N802, D102
self.computeTimeElapsed()
- print("Time elapsed: {:0.2f} minutes".format(self.timeElapsed / 60))
+ print(f'Time elapsed: {self.timeElapsed / 60:0.2f} minutes') # noqa: T201
- def startSectionTimer(self):
+ def startSectionTimer(self): # noqa: N802, D102
self.sectionStartingTime = time.time()
- def resetSectionTimer(self):
+ def resetSectionTimer(self): # noqa: N802, D102
self.startSectionTimer()
- def computeSectionTimeElapsed(self):
+ def computeSectionTimeElapsed(self): # noqa: N802, D102
self.sectionTimeElapsed = time.time() - self.sectionStartingTime
- def printSectionTimeElapsed(self):
+ def printSectionTimeElapsed(self): # noqa: N802, D102
self.computeSectionTimeElapsed()
- print("Time elapsed: {:0.2f} minutes".format(self.sectionTimeElapsed / 60))
+ print(f'Time elapsed: {self.sectionTimeElapsed / 60:0.2f} minutes') # noqa: T201
@staticmethod
- def printEndMessages():
- print("Heirarchical Bayesian estimation done!")
+ def printEndMessages(): # noqa: N802, D102
+        print('Hierarchical Bayesian estimation done!')  # noqa: T201
- def runUQ(
+ def runUQ( # noqa: N802
self,
- uqData,
- simulationData,
- randomVarsData,
- demandParams,
- workingDir,
- runType,
- localAppDir,
- remoteAppDir,
+ uqData, # noqa: N803
+ simulationData, # noqa: ARG002, N803
+ randomVarsData, # noqa: ARG002, N803
+ demandParams, # noqa: ARG002, N803
+ workingDir, # noqa: N803
+ runType, # noqa: ARG002, N803
+ localAppDir, # noqa: ARG002, N803
+ remoteAppDir, # noqa: ARG002, N803
):
- """
- This function configures and runs hierarchical Bayesian estimation based on the
+ """This function configures and runs hierarchical Bayesian estimation based on the
input UQ configuration, simulation configuration, random variables,
and requested demand parameters
@@ -174,7 +181,7 @@ def runUQ(
runType: Specifies whether computations are being run locally or on an HPC cluster
localAppDir: Directory containing apps for local run
remoteAppDir: Directory containing apps for remote run
- """
+ """ # noqa: D205, D400, D401, D404
self.startTimer()
self.storeUQData(uqData=uqData)
os.chdir(workingDir)
@@ -185,32 +192,34 @@ def runUQ(
self.printEndMessages()
-class testRunUQ:
+class testRunUQ: # noqa: D101
def __init__(self, json_file_path_string) -> None:
self.json_file_path_string = json_file_path_string
self.getUQData()
self.createRunner()
self.runTest()
- def getUQData(self):
- with open(os.path.abspath(self.json_file_path_string), "r") as f:
+ def getUQData(self): # noqa: N802, D102
+ with open(os.path.abspath(self.json_file_path_string)) as f: # noqa: PLW1514, PTH100, PTH123
input_data = json.load(f)
- self.ApplicationData = input_data["Applications"]
- self.uqData = input_data["UQ"]
- self.simulationData = self.ApplicationData["FEM"]
- self.randomVarsData = input_data["randomVariables"]
- self.demandParams = input_data["EDP"]
- self.localAppDir = input_data["localAppDir"]
- self.remoteAppDir = input_data["remoteAppDir"]
- self.workingDir = input_data["workingDir"]
- self.workingDir = os.path.join(self.workingDir, "tmp.SimCenter", "templateDir")
- self.runType = "runningLocal"
-
- def createRunner(self):
+ self.ApplicationData = input_data['Applications']
+ self.uqData = input_data['UQ']
+ self.simulationData = self.ApplicationData['FEM']
+ self.randomVarsData = input_data['randomVariables']
+ self.demandParams = input_data['EDP']
+ self.localAppDir = input_data['localAppDir']
+ self.remoteAppDir = input_data['remoteAppDir']
+ self.workingDir = input_data['workingDir']
+ self.workingDir = os.path.join( # noqa: PTH118
+ self.workingDir, 'tmp.SimCenter', 'templateDir'
+ )
+ self.runType = 'runningLocal'
+
+ def createRunner(self): # noqa: N802, D102
self.runner = HeirBayesRunner()
- def runTest(self):
+ def runTest(self): # noqa: N802, D102
self.runner.runUQ(
uqData=self.uqData,
simulationData=self.simulationData,
@@ -223,13 +232,18 @@ def runTest(self):
)
-def main():
- filename = os.path.abspath(os.path.join(os.path.dirname(__file__), "test_CustomUQ/HeirBayesSyntheticData/templatedir/scInput.json"))
- if os.path.exists(filename):
+def main(): # noqa: D103
+ filename = os.path.abspath( # noqa: PTH100
+ os.path.join( # noqa: PTH118
+ os.path.dirname(__file__), # noqa: PTH120
+ 'test_CustomUQ/HeirBayesSyntheticData/templatedir/scInput.json',
+ )
+ )
+ if os.path.exists(filename): # noqa: PTH110
testRunUQ(filename)
else:
- print(f'Test input json file {filename} not found. Not running the test.')
+ print(f'Test input json file {filename} not found. Not running the test.') # noqa: T201
-if __name__ == "__main__":
+if __name__ == '__main__':
main()
diff --git a/modules/performUQ/other/UQpyRunner.py b/modules/performUQ/other/UQpyRunner.py
index 3899d4d89..251765fdb 100644
--- a/modules/performUQ/other/UQpyRunner.py
+++ b/modules/performUQ/other/UQpyRunner.py
@@ -1,33 +1,38 @@
-# written: Michael Gardner @ UNR
+# written: Michael Gardner @ UNR # noqa: CPY001, D100, INP001
# updated Aakash Bangalore Satish, June 11 2024
import os
-from uqRunner import UqRunnerFactory
-from uqRunner import UqRunner
+import shutil
+import time
+
+from createTemplate import createTemplate
+from UQpy.distributions.collection.Uniform import Uniform
+from UQpy.run_model.model_execution.ThirdPartyModel import ThirdPartyModel
+from UQpy.run_model.RunModel import RunModel
# THIS IS FOR WHEN MESSING AROUND WITH UQpy SOURCE
# import sys
# sys.path.append(os.path.abspath("/home/michael/UQpy/src"))
-
-from UQpy.sampling.MonteCarloSampling import MonteCarloSampling as MCS
-from UQpy.run_model.RunModel import RunModel
-from UQpy.run_model.model_execution.ThirdPartyModel import ThirdPartyModel
-from UQpy.distributions.collection.Uniform import Uniform
-from createTemplate import createTemplate
-import time
-import csv
-import json
-import shutil
+from UQpy.sampling.MonteCarloSampling import MonteCarloSampling as MCS # noqa: N817
+from uqRunner import UqRunner
-class UQpyRunner(UqRunner):
- def runUQ(self, uqData, simulationData, randomVarsData, demandParams,
- workingDir, runType, localAppDir, remoteAppDir):
- """
- This function configures and runs a UQ simulation using UQpy based on the
+class UQpyRunner(UqRunner): # noqa: D101
+ def runUQ( # noqa: C901, N802, PLR6301
+ self,
+ uqData, # noqa: N803
+ simulationData, # noqa: ARG002, N803
+ randomVarsData, # noqa: N803
+ demandParams, # noqa: N803
+ workingDir, # noqa: N803
+ runType, # noqa: N803
+ localAppDir, # noqa: N803
+ remoteAppDir, # noqa: ARG002, N803
+ ):
+ """This function configures and runs a UQ simulation using UQpy based on the
input UQ configuration, simulation configuration, random variables,
and requested demand parameters
-
+
Input:
uqData: JsonObject that specifies UQ options as input into the quoFEM GUI
simulationData: JsonObject that contains information on the analysis package to run and its
@@ -39,127 +44,152 @@ def runUQ(self, uqData, simulationData, randomVarsData, demandParams,
runType: Specifies whether computations are being run locally or on an HPC cluster
localAppDir: Directory containing apps for local run
remoteAppDir: Directory containing apps for remote run
- """
-
+ """ # noqa: D205, D400, D401, D404
# There is still plenty of configuration that can and should be added here. This currently does MCS sampling with Uniform
# distributions only, though this is easily expanded
-
+
# Copy required python files to template directory
- shutil.copyfile(os.path.join(localAppDir, 'applications/performUQ/other/runWorkflowDriver.py'),
- os.path.join(workingDir, 'runWorkflowDriver.py'))
- shutil.copyfile(os.path.join(localAppDir, 'applications/performUQ/other/createTemplate.py'),
- os.path.join(workingDir, 'createTemplate.py'))
- shutil.copyfile(os.path.join(localAppDir, 'applications/performUQ/other/processUQpyOutput.py'),
- os.path.join(workingDir, 'processUQpyOutput.py'))
-
- # Parse configuration for UQ
- distributionNames = []
- distributionParams = []
- variableNames = []
- distributionObjects = []
+ shutil.copyfile(
+ os.path.join( # noqa: PTH118
+ localAppDir, 'applications/performUQ/other/runWorkflowDriver.py'
+ ),
+ os.path.join(workingDir, 'runWorkflowDriver.py'), # noqa: PTH118
+ )
+ shutil.copyfile(
+ os.path.join( # noqa: PTH118
+ localAppDir, 'applications/performUQ/other/createTemplate.py'
+ ),
+ os.path.join(workingDir, 'createTemplate.py'), # noqa: PTH118
+ )
+ shutil.copyfile(
+ os.path.join( # noqa: PTH118
+ localAppDir, 'applications/performUQ/other/processUQpyOutput.py'
+ ),
+ os.path.join(workingDir, 'processUQpyOutput.py'), # noqa: PTH118
+ )
+
+ # Parse configuration for UQ
+ distributionNames = [] # noqa: N806
+ distributionParams = [] # noqa: N806
+ variableNames = [] # noqa: N806
+ distributionObjects = [] # noqa: N806
samples = []
- samplingMethod = ""
- numberOfSamples = 0
- modelScript = 'runWorkflowDriver.py'
- inputTemplate = 'params.template'
+ samplingMethod = '' # noqa: N806
+ numberOfSamples = 0 # noqa: N806
+ modelScript = 'runWorkflowDriver.py' # noqa: N806
+ inputTemplate = 'params.template' # noqa: N806
# outputObjectName = 'OutputProcessor'
- outputObjectName = 'output_function'
- outputScript = 'processUQpyOutput.py'
- numberOfTasks = 1
- numberOfNodes = 1
- coresPerTask = 1
- clusterRun = False
- resumeRun = False
+ outputObjectName = 'output_function' # noqa: N806
+ outputScript = 'processUQpyOutput.py' # noqa: N806
+ numberOfTasks = 1 # noqa: N806
+ numberOfNodes = 1 # noqa: N806
+ coresPerTask = 1 # noqa: N806
+ clusterRun = False # noqa: N806
+ resumeRun = False # noqa: N806, F841
seed = 1
# If computations are being executed on HPC, enable UQpy to start computations using srun
- if runType == "runningRemote":
- clusterRun = True
+ if runType == 'runningRemote':
+ clusterRun = True # noqa: N806, F841
for val in randomVarsData:
- if val["distribution"] == "Uniform":
+ if val['distribution'] == 'Uniform':
distributionNames.append('Uniform')
- variableNames.append(val["name"])
- distributionParams.append([val["lowerbound"], val["upperbound"]])
+ variableNames.append(val['name'])
+ distributionParams.append([val['lowerbound'], val['upperbound']])
else:
- raise IOError("ERROR: You'll need to update UQpyRunner.py to run your" +\
- " specified RV distribution!")
+ raise OSError( # noqa: DOC501
+ "ERROR: You'll need to update UQpyRunner.py to run your" # noqa: ISC003
+ + ' specified RV distribution!'
+ )
- for val in uqData["Parameters"]:
- if val["name"] == "Sampling Method":
- samplingMethod = val["value"]
-
- if val["name"] == "Number of Samples":
- numberOfSamples = int(val["value"])
+ for val in uqData['Parameters']:
+ if val['name'] == 'Sampling Method':
+ samplingMethod = val['value'] # noqa: N806
- if val["name"] == "Number of Concurrent Tasks":
- numberOfTasks = val["value"]
+ if val['name'] == 'Number of Samples':
+ numberOfSamples = int(val['value']) # noqa: N806
- if val["name"] == "Number of Nodes":
- numberOfNodes = val["value"]
+ if val['name'] == 'Number of Concurrent Tasks':
+ numberOfTasks = val['value'] # noqa: N806
- if val["name"] == "Cores per Task":
- coresPerTask = val["value"]
-
- if val["name"] == "Seed":
- seed = int(val["value"])
+ if val['name'] == 'Number of Nodes':
+ numberOfNodes = val['value'] # noqa: N806, F841
+ if val['name'] == 'Cores per Task':
+ coresPerTask = val['value'] # noqa: N806, F841
+
+ if val['name'] == 'Seed':
+ seed = int(val['value'])
# Create distribution objects
- for index, val in enumerate(distributionNames, 0):
- distributionObjects.append(Uniform(distributionParams[index][0], distributionParams[index][1]-distributionParams[index][0]))
+ for index, val in enumerate(distributionNames, 0): # noqa: B007
+ distributionObjects.append(
+ Uniform(
+ distributionParams[index][0],
+ distributionParams[index][1] - distributionParams[index][0],
+ )
+ )
createTemplate(variableNames, inputTemplate)
-
+
# Generate samples
- if samplingMethod == "MCS":
- samples = MCS(distributionObjects,\
- nsamples=numberOfSamples, random_state=seed)
+ if samplingMethod == 'MCS':
+ samples = MCS(
+ distributionObjects, nsamples=numberOfSamples, random_state=seed
+ )
else:
- raise IOError("ERROR: You'll need to update UQpyRunner.py to run your specified" +\
- " sampling method!")
+ raise OSError( # noqa: DOC501
+ "ERROR: You'll need to update UQpyRunner.py to run your specified" # noqa: ISC003
+ + ' sampling method!'
+ )
# Change workdir to the template directory
- os.chdir(workingDir)
-
+ os.chdir(workingDir)
+
# Run model based on input config
- startTime = time.time()
+ startTime = time.time() # noqa: N806
# model = RunModel(samples=samples.samples, model_script=modelScript,
# input_template=inputTemplate, var_names=variableNames,
# output_script=outputScript, output_object_name=outputObjectName,
# verbose=True, ntasks=numberOfTasks,
# nodes=numberOfNodes, cores_per_task=coresPerTask,
# cluster=clusterRun, resume=resumeRun)
- model = ThirdPartyModel(model_script=modelScript, input_template=inputTemplate, var_names=variableNames,
- output_script=outputScript, output_object_name=outputObjectName)
+ model = ThirdPartyModel(
+ model_script=modelScript,
+ input_template=inputTemplate,
+ var_names=variableNames,
+ output_script=outputScript,
+ output_object_name=outputObjectName,
+ )
m = RunModel(ntasks=numberOfTasks, model=model)
m.run(samples.samples)
-
- runTime = time.time() - startTime
- print("\nTotal time for all experiments: ", runTime)
- with open(os.path.join(workingDir, '..', 'tabularResults.out'), 'w') as f:
- f.write("%eval_id\t interface\t")
+ runTime = time.time() - startTime # noqa: N806
+ print('\nTotal time for all experiments: ', runTime) # noqa: T201
+
+ with open(os.path.join(workingDir, '..', 'tabularResults.out'), 'w') as f: # noqa: PLW1514, PTH118, PTH123
+ f.write('%eval_id\t interface\t')
for val in variableNames:
- f.write("%s\t" % val)
+ f.write('%s\t' % val) # noqa: UP031
for val in demandParams:
- f.write("%s\t" % val["name"])
+ f.write('%s\t' % val['name']) # noqa: UP031
- f.write("\n")
+ f.write('\n')
for i in range(numberOfSamples):
- string = f"{i+1} \tcustom\t"
+ string = f'{i + 1} \tcustom\t'
for sample in samples.samples[i]:
- string += f"{sample}\t"
+ string += f'{sample}\t'
for qoi in m.qoi_list[i]:
for val in qoi:
- string += f"{val}\t"
- string += "\n"
+ string += f'{val}\t'
+ string += '\n'
f.write(string)
-
+
# Factory for creating UQpy runner
- class Factory:
- def create(self):
+ class Factory: # noqa: D106
+ def create(self): # noqa: D102, PLR6301
return UQpyRunner()
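
runUQ() above supports only Monte Carlo sampling of uniform random variables. Here is a minimal sketch of just that sampling step, assuming UQpy is installed and using the same imports and calls that appear in the method:

    from UQpy.distributions.collection.Uniform import Uniform
    from UQpy.sampling.MonteCarloSampling import MonteCarloSampling as MCS

    # Uniform takes (loc, scale), so the second argument is upper bound minus
    # lower bound, exactly as the distribution objects are built in runUQ().
    rvs = [Uniform(0.0, 1.0), Uniform(5.0, 10.0 - 5.0)]

    # Draw 8 samples with a fixed seed, mirroring the "MCS" branch of runUQ().
    mcs = MCS(rvs, nsamples=8, random_state=1)

    # mcs.samples is an (nsamples x n_rv) array that runUQ() hands to RunModel.
    print(mcs.samples.shape)
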
diff --git a/modules/performUQ/other/configureAndRunUQ.py b/modules/performUQ/other/configureAndRunUQ.py
index 306da090a..ac04362ac 100644
--- a/modules/performUQ/other/configureAndRunUQ.py
+++ b/modules/performUQ/other/configureAndRunUQ.py
@@ -1,14 +1,22 @@
-# written: Michael Gardner @ UNR, Aakash Bangalore Satish @ UCB
+# written: Michael Gardner @ UNR, Aakash Bangalore Satish @ UCB # noqa: CPY001, D100, INP001
# Use the UQpy driver as a starting point if you want to add other UQ capabilities
-def configureAndRunUQ(uqData, simulationData, randomVarsData, demandParams, workingDir,
- runType, localAppDir, remoteAppDir):
- """
- This function configures and runs a UQ simulation based on the input
+
+def configureAndRunUQ( # noqa: N802
+ uqData, # noqa: N803
+ simulationData, # noqa: N803
+ randomVarsData, # noqa: N803
+ demandParams, # noqa: N803
+ workingDir, # noqa: N803
+ runType, # noqa: N803
+ localAppDir, # noqa: N803
+ remoteAppDir, # noqa: N803
+):
+ """This function configures and runs a UQ simulation based on the input
UQ driver and its associated inputs, simulation configuration, random
variables, and requested demand parameters
-
+
Input:
uqData: JsonObject that specifies the UQ driver and other options as input into the quoFEM GUI
simulationData: JsonObject that contains information on the analysis package to run and its
@@ -20,24 +28,32 @@ def configureAndRunUQ(uqData, simulationData, randomVarsData, demandParams, work
runType: Specifies whether computations are being run locally or on an HPC cluster
localAppDir: Directory containing apps for local run
remoteAppDir: Directory containing apps for remote run
- """
+ """ # noqa: D205, D400, D401, D404
+ uqDriverOptions = ['UQpy', 'HeirBayes'] # noqa: N806
+
+ for val in uqData['Parameters']:
+ if val['name'] == 'UQ Driver':
+ uqDriver = val['value'] # noqa: N806
- uqDriverOptions = ["UQpy", "HeirBayes"]
-
- for val in uqData["Parameters"]:
- if val["name"] == "UQ Driver":
- uqDriver = val["value"]
-
if uqDriver not in uqDriverOptions:
- raise ValueError("ERROR: configureAndRunUQ.py: UQ driver not recognized."+\
- " Either input incorrectly or class to run UQ driver not"+\
- " implemented: ", uqDriver)
- else:
- if uqDriver in ["UQpy"]:
- from UQpyRunner import UQpyRunner
- elif uqDriver in ["HeirBayes"]:
- from HeirBayesRunner import HeirBayesRunner
+ raise ValueError( # noqa: DOC501
+ 'ERROR: configureAndRunUQ.py: UQ driver not recognized.' # noqa: ISC003
+ + ' Either input incorrectly or class to run UQ driver not'
+ + ' implemented: ',
+ uqDriver,
+ )
+ else: # noqa: RET506
+ if uqDriver == 'UQpy':
+ from UQpyRunner import UQpyRunner # noqa: PLC0415, F401
+ elif uqDriver == 'HeirBayes':
+ from HeirBayesRunner import HeirBayesRunner # noqa: PLC0415, F401
- uqDriverClass = locals()[uqDriver+"Runner"]
- uqDriverClass().runUQ(uqData, simulationData, randomVarsData, demandParams,
- workingDir, runType, localAppDir, remoteAppDir)
+ uqDriverClass = locals()[uqDriver + 'Runner'] # noqa: N806
+ uqDriverClass().runUQ(
+ uqData,
+ simulationData,
+ randomVarsData,
+ demandParams,
+ workingDir,
+ runType,
+ localAppDir,
+ remoteAppDir,
+ )
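
configureAndRunUQ() turns the 'UQ Driver' string into a class named '<driver>Runner' and calls its runUQ() with the eight arguments listed above. The sketch below shows the same dispatch with an explicit dictionary and a stand-in runner class; it is illustrative only and does not import the real UQpyRunner or HeirBayesRunner:

    class _DemoRunner:
        # Stand-in for UQpyRunner/HeirBayesRunner; only the call shape matters.
        def runUQ(self, *args):
            print(f'running UQ with {len(args)} positional arguments')


    runner_classes = {'UQpy': _DemoRunner, 'HeirBayes': _DemoRunner}

    uq_driver = 'UQpy'
    if uq_driver not in runner_classes:
        raise ValueError(f'UQ driver not recognized: {uq_driver}')
    runner_classes[uq_driver]().runUQ(
        {}, {}, [], [], '/tmp/work', 'runningLocal', '/apps', '/apps'
    )
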
diff --git a/modules/performUQ/other/createTemplate.py b/modules/performUQ/other/createTemplate.py
index d161dcec9..a509a03a5 100644
--- a/modules/performUQ/other/createTemplate.py
+++ b/modules/performUQ/other/createTemplate.py
@@ -1,12 +1,13 @@
-from pathlib import Path
+from pathlib import Path # noqa: CPY001, D100, INP001
-def createTemplate(variableNames, templateName):
- filePath = Path("./" + templateName)
- with open(filePath, 'w') as f:
- f.write('{}\n'.format(len(variableNames)))
+def createTemplate(variableNames, templateName): # noqa: N802, N803, D103
+ filePath = Path('./' + templateName) # noqa: N806
+
+ with open(filePath, 'w') as f: # noqa: PLW1514, PTH123
+ f.write(f'{len(variableNames)}\n')
for name in variableNames:
- f.write('{} <{}>\n'.format(name, name))
-
- f.close()
+ f.write(f'{name} <{name}>\n')
+
+ f.close()
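
createTemplate() writes a params.template file whose first line is the number of random variables, followed by one 'name <name>' placeholder line per variable. A short sketch that reproduces the same file contents directly:

    from pathlib import Path

    # Equivalent to createTemplate(['E', 'P'], 'params.template'): a count line
    # followed by one angle-bracket placeholder line per random variable.
    variable_names = ['E', 'P']
    lines = [f'{len(variable_names)}\n'] + [f'{n} <{n}>\n' for n in variable_names]
    Path('params.template').write_text(''.join(lines))
    print(Path('params.template').read_text())
    # 2
    # E <E>
    # P <P>
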
diff --git a/modules/performUQ/other/prepareUQ.py b/modules/performUQ/other/prepareUQ.py
index 903a7af16..a6260c6f4 100644
--- a/modules/performUQ/other/prepareUQ.py
+++ b/modules/performUQ/other/prepareUQ.py
@@ -1,61 +1,71 @@
-# written: Michael Gardner @ UNR
+# written: Michael Gardner @ UNR # noqa: CPY001, D100, INP001
# import sys
-import preProcessUQ
-def prepareUQ(paramsFile, inputFile, outputFile, rvSpecifier):
+
+def prepareUQ(paramsFile, inputFile, outputFile, rvSpecifier): # noqa: C901, N802, N803, D103
# These are the delimiter choices, which can expanded as more UQ programs are added. Remember to also
- # extend the factory in rvDelimiter to handle addtional cases
- rvDelimiterChoices=["SimCenterDelimiter", "UQpyDelimiter"]
-
+ # extend the factory in rvDelimiter to handle additional cases
+ rvDelimiterChoices = ['SimCenterDelimiter', 'UQpyDelimiter'] # noqa: N806
+
if rvSpecifier not in rvDelimiterChoices:
- except IOError:
- print("ERROR: preProcessUQ.py: Symbol identifying value as random variable not recognized : ", rvSpecifier)
-
+ # ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+ # Tue Jul 30 05:16:30 PM PDT 2024
+ # JVM
+ # Commenting out the following, since it is invalid.
+
+ # except IOError:
+ # print("ERROR: preProcessUQ.py: Symbol identifying value as random variable not recognized : ", rvSpecifier)
+
+ pass
+ # ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+
# Open parameters file and read parameter settings
- numRVs = 0
- lineCount = 0
- rvNames = []
- rvSettings = []
-
+ numRVs = 0 # noqa: N806
+ lineCount = 0 # noqa: N806
+ rvNames = [] # noqa: N806
+ rvSettings = [] # noqa: N806
+
try:
- with open(paramsFile, 'r') as params:
+ with open(paramsFile) as params: # noqa: PLW1514, PTH123
for line in params:
if lineCount == 0:
- rvNames = [i.strip() for i in line.split(',')]
- numRVs = len(rvNames)
+ rvNames = [i.strip() for i in line.split(',')] # noqa: N806
+ numRVs = len(rvNames) # noqa: N806, F841
# Replace RV names based on delimiter
for i, rv in enumerate(rvNames):
rvNames[i] = rvSpecifier.replaceRV(rv)
else:
- rvSettings = [i.strip() for i in line.split(',')]
+ rvSettings = [i.strip() for i in line.split(',')] # noqa: N806
+
+ lineCount = lineCount + 1 # noqa: N806, PLR6104
- lineCount = lineCount + 1
-
- except IOError:
- print("ERROR: preProcessUQ.py could not open parameters file: " + paramsFile)
+ except OSError:
+ print('ERROR: preProcessUQ.py could not open parameters file: ' + paramsFile) # noqa: T201
# Next, open input file and search for random variables that need to be replaced by parameter realizations
- inputTemplate = "inputTemplate"
- realizationOutput = "outputFile"
+ inputTemplate = 'inputTemplate' # noqa: N806
+ realizationOutput = 'outputFile' # noqa: N806
try:
- inputTemplate = open(inputFile, 'r')
- except IOError:
- print("ERROR: preProcessUQ.py could not open input template file: " + inputFile)
+ inputTemplate = open(inputFile) # noqa: N806, PLW1514, PTH123, SIM115
+ except OSError:
+ print( # noqa: T201
+ 'ERROR: preProcessUQ.py could not open input template file: ' + inputFile
+ )
try:
- realizationOutput = open(outputFile, 'w')
- except IOError:
- print("ERROR: preProcessUQ.py could not open output file: " + outputFile)
+ realizationOutput = open(outputFile, 'w') # noqa: N806, PLW1514, PTH123, SIM115
+ except OSError:
+ print('ERROR: preProcessUQ.py could not open output file: ' + outputFile) # noqa: T201
# Iterate over all lines in input template
for line in inputTemplate:
# Iterate over all RVs to check they need to be replaced
for i, rv in enumerate(rvNames):
- try:
- line = line.replace(rv, rvSettings[i])
- except:
+ try: # noqa: SIM105
+ line = line.replace(rv, rvSettings[i]) # noqa: PLW2901
+ except: # noqa: S110, PERF203, E722
pass
realizationOutput.write(line)
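
prepareUQ() reads a two-line parameters file (comma-separated variable names, then their sampled values), rewrites each name through the delimiter object's replaceRV(), and substitutes the results into the input template line by line. The in-memory sketch below shows only the substitution step; the angle-bracket delimiter is an assumption borrowed from createTemplate.py, since the real delimiter form comes from the rvSpecifier object:

    # Substitution step only, sketched on strings instead of files.
    rv_names = ['E', 'P']               # first line of the params file
    rv_settings = ['29000.0', '120.0']  # second line of the params file

    template_line = 'set E <E>; set P <P>;'
    for name, value in zip(rv_names, rv_settings):
        template_line = template_line.replace(f'<{name}>', value)
    print(template_line)  # set E 29000.0; set P 120.0;
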
diff --git a/modules/performUQ/other/processUQpyOutput.py b/modules/performUQ/other/processUQpyOutput.py
index 3d402c411..251583f8e 100644
--- a/modules/performUQ/other/processUQpyOutput.py
+++ b/modules/performUQ/other/processUQpyOutput.py
@@ -1,6 +1,8 @@
+from pathlib import Path # noqa: CPY001, D100, INP001
+
import numpy as np
-from pathlib import Path
-def output_function(index):
- filePath = Path('./results.out').resolve()
- return np.atleast_2d(np.genfromtxt(filePath))
\ No newline at end of file
+
+def output_function(index): # noqa: ARG001, D103
+ filePath = Path('./results.out').resolve() # noqa: N806
+ return np.atleast_2d(np.genfromtxt(filePath))
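
output_function() simply loads results.out into an at-least-2-D array so UQpy can collect one row of quantities of interest per model run. A self-contained check of that behaviour:

    from pathlib import Path

    import numpy as np

    # Write a stand-in results.out, then read it back the way output_function() does.
    Path('results.out').write_text('1.5 2.5 3.5\n')
    qoi = np.atleast_2d(np.genfromtxt(Path('results.out').resolve()))
    print(qoi.shape)  # (1, 3)
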
diff --git a/modules/performUQ/other/runOtherUQ.py b/modules/performUQ/other/runOtherUQ.py
index 02a8b597e..bf345574b 100644
--- a/modules/performUQ/other/runOtherUQ.py
+++ b/modules/performUQ/other/runOtherUQ.py
@@ -1,14 +1,13 @@
-# written: Michael Gardner @ UNR
+# written: Michael Gardner @ UNR # noqa: CPY001, D100, INP001
+import argparse
import json
import os
-import sys
-import platform
-import argparse
+
from configureAndRunUQ import configureAndRunUQ
-from pathlib import Path
-def main():
+
+def main(): # noqa: D103
# KEEP THIS FOR NOW--MAYBE BACKEND WILL BE UPDATED TO ACCEPT DIFFERENT ARGUMENTS...
# parser = argparse.ArgumentParser(description='Generate workflow driver based on input configuration')
# parser.add_argument('--mainWorkDir', '-m', required=True, help="Main work directory")
@@ -16,8 +15,8 @@ def main():
# parser.add_argument('--runType', '-r', required=True, help="Type of run")
# parser.add_argument('--inputFile', '-i', required=True, help="Input JSON file with configuration from UI")
# Options for run type
- runTypeOptions=["runningLocal", "runningRemote"]
-
+ runTypeOptions = ['runningLocal', 'runningRemote'] # noqa: N806
+
# args = parser.parse_args()
# workDirMain = args.mainWorkDir
@@ -27,45 +26,55 @@ def main():
parser = argparse.ArgumentParser()
- parser.add_argument("--workflowInput")
- parser.add_argument("--workflowOutput")
- parser.add_argument("--driverFile")
- parser.add_argument("--runType")
+ parser.add_argument('--workflowInput')
+ parser.add_argument('--workflowOutput')
+ parser.add_argument('--driverFile')
+ parser.add_argument('--runType')
- args, unknowns = parser.parse_known_args()
+ args, unknowns = parser.parse_known_args() # noqa: F841
- inputFile = args.workflowInput
- runType = args.runType
- workflowDriver = args.driverFile
- outputFile = args.workflowOutput
+ inputFile = args.workflowInput # noqa: N806
+ runType = args.runType # noqa: N806
+ workflowDriver = args.driverFile # noqa: N806, F841
+ outputFile = args.workflowOutput # noqa: N806, F841
+
+ cwd = os.getcwd() # noqa: PTH109
+ workDirTemp = cwd # noqa: N806
- cwd = os.getcwd()
- workDirTemp = cwd
-
if runType not in runTypeOptions:
- raise ValueError("ERROR: Input run type has to be either local or remote")
-
+ raise ValueError('ERROR: Input run type has to be either local or remote') # noqa: EM101, TRY003
+
# change workdir to the templatedir
# os.chdir(workDirTemp)
# cwd = os.getcwd()
-
+
# Open input file
- inputdata = {}
- with open(inputFile) as data_file:
- inputData = json.load(data_file)
-
- applicationsData = inputData["Applications"]
+ inputdata = {} # noqa: F841
+ with open(inputFile) as data_file: # noqa: PLW1514, PTH123
+ inputData = json.load(data_file) # noqa: N806
+
+ applicationsData = inputData['Applications'] # noqa: N806
# Get data to pass to UQ driver
- uqData = inputData["UQ"]
- simulationData = applicationsData["FEM"]
- randomVarsData = inputData["randomVariables"]
- demandParams = inputData["EDP"]
- localAppDir = inputData["localAppDir"]
- remoteAppDir = inputData["remoteAppDir"]
+ uqData = inputData['UQ'] # noqa: N806
+ simulationData = applicationsData['FEM'] # noqa: N806
+ randomVarsData = inputData['randomVariables'] # noqa: N806
+ demandParams = inputData['EDP'] # noqa: N806
+ localAppDir = inputData['localAppDir'] # noqa: N806
+ remoteAppDir = inputData['remoteAppDir'] # noqa: N806
# Run UQ based on data and selected UQ engine--if you need to preprocess files with custom delimiters, use preprocessUQ.py
- configureAndRunUQ(uqData, simulationData, randomVarsData, demandParams, workDirTemp, runType, localAppDir, remoteAppDir)
+ configureAndRunUQ(
+ uqData,
+ simulationData,
+ randomVarsData,
+ demandParams,
+ workDirTemp,
+ runType,
+ localAppDir,
+ remoteAppDir,
+ )
+
if __name__ == '__main__':
main()
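
main() expects four flags on the command line (--workflowInput, --workflowOutput, --driverFile, --runType) and rejects any run type other than runningLocal or runningRemote. A hypothetical local invocation, with placeholder file names, could be scripted as:

    import subprocess
    import sys

    # Placeholder file names; only the four flags are taken from runOtherUQ.py.
    subprocess.run(
        [
            sys.executable,
            'runOtherUQ.py',
            '--workflowInput', 'scInput.json',
            '--workflowOutput', 'results.out',
            '--driverFile', 'driver',
            '--runType', 'runningLocal',
        ],
        check=True,
    )
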
diff --git a/modules/performUQ/other/runWorkflowDriver.py b/modules/performUQ/other/runWorkflowDriver.py
index d900eff49..1c8486df2 100644
--- a/modules/performUQ/other/runWorkflowDriver.py
+++ b/modules/performUQ/other/runWorkflowDriver.py
@@ -1,22 +1,31 @@
-import os
-import fire
-from sys import platform
+import os # noqa: CPY001, D100, INP001
import shutil
+from sys import platform
-def runWorkflow(index):
+import fire
+
+
+def runWorkflow(index): # noqa: N802, D103
index = int(index)
- shutil.copy(os.path.join(os.getcwd(), 'InputFiles', 'params_' + str(index) + '.template'),
- os.path.join(os.getcwd(), 'params.in'))
+ shutil.copy(
+ os.path.join( # noqa: PTH118
+ os.getcwd(), # noqa: PTH109
+ 'InputFiles',
+ 'params_' + str(index) + '.template',
+ ),
+ os.path.join(os.getcwd(), 'params.in'), # noqa: PTH109, PTH118
+ )
- command2 = "blank"
- if platform == "linux" or platform == "linux2" or platform == "darwin":
- command2 = os.path.join(os.getcwd(), "driver")
- elif platform == "win32":
- command2 = os.path.join(os.getcwd(), "driver.bat")
+ command2 = 'blank'
+ if platform == 'linux' or platform == 'linux2' or platform == 'darwin':
+ command2 = os.path.join(os.getcwd(), 'driver') # noqa: PTH109, PTH118
+ elif platform == 'win32':
+ command2 = os.path.join(os.getcwd(), 'driver.bat') # noqa: PTH109, PTH118
# os.system(command1)
- os.system(command2)
+ os.system(command2) # noqa: S605
+
if __name__ == '__main__':
fire.Fire(runWorkflow)
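
fire.Fire(runWorkflow) exposes the function as a command-line tool, so running "python runWorkflowDriver.py 3" stages InputFiles/params_3.template as params.in and then executes the platform-specific driver script. A minimal sketch of that exposure pattern with a stand-in function:

    import fire


    def run_workflow(index):
        # Stand-in body; the real runWorkflow() copies params_<index>.template to
        # params.in and then runs ./driver (or driver.bat on Windows).
        print(f'would stage params_{int(index)}.template and run the driver')


    if __name__ == '__main__':
        # "python this_file.py 3" calls run_workflow('3').
        fire.Fire(run_workflow)
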
diff --git a/modules/performUQ/other/uqRunner.py b/modules/performUQ/other/uqRunner.py
index cfee4ec16..e24353bc5 100644
--- a/modules/performUQ/other/uqRunner.py
+++ b/modules/performUQ/other/uqRunner.py
@@ -1,21 +1,22 @@
-# written: Michael Gardner
+# written: Michael Gardner # noqa: CPY001, D100, INP001
# DO NOT CHANGE THE FACTORY, JUST IMPORT IT INTO ADDITIONAL DERIVED CLASSES
# Polymorphic factory for running UQ apps
-class UqRunnerFactory:
- factories = {}
- def addFactory(id, runnerFactory):
+class UqRunnerFactory: # noqa: D101
+ factories = {} # noqa: RUF012
+
+ @staticmethod
+ def addFactory(id, runnerFactory): # noqa: A002, D102, N802, N803
UqRunnerFactory.factories[id] = runnerFactory
- addFactory = staticmethod(addFactory)
+
# A Template Method:
- def createRunner(id):
+ @staticmethod
+ def createRunner(id): # noqa: A002, D102, N802
if id not in UqRunnerFactory.factories:
- UqRunnerFactory.factories[id] = \
- eval(id + '.Factory()')
+ UqRunnerFactory.factories[id] = eval(id + '.Factory()') # noqa: S307
return UqRunnerFactory.factories[id].create()
-
- createRunner = staticmethod(createRunner)
+
# Abstract base class
-class UqRunner(object):
+class UqRunner: # noqa: D101
pass
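
UqRunnerFactory keeps a class-level dictionary of Factory objects; each concrete runner (such as UQpyRunner above) exposes a nested Factory whose create() returns a runner instance. A self-contained sketch of that pattern without the eval() fallback:

    class DemoRunner:
        # Stand-in runner with the nested Factory that UqRunnerFactory expects.
        class Factory:
            def create(self):
                return DemoRunner()


    factories = {}
    factories['Demo'] = DemoRunner.Factory()  # what addFactory() is meant to do
    runner = factories['Demo'].create()       # what createRunner() hands back
    print(type(runner).__name__)              # DemoRunner
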
diff --git a/modules/performanceAssessment/REDi/REDiWrapper.py b/modules/performanceAssessment/REDi/REDiWrapper.py
index 4e9c9b334..9af341681 100644
--- a/modules/performanceAssessment/REDi/REDiWrapper.py
+++ b/modules/performanceAssessment/REDi/REDiWrapper.py
@@ -1,5 +1,4 @@
-# -*- coding: utf-8 -*-
-#
+# # noqa: INP001, D100
# Copyright (c) 2019 The Regents of the University of California
# Copyright (c) 2019 Leland Stanford Junior University
#
@@ -36,61 +35,68 @@
# Contributors:
# Stevan Gavrilovic
-import json, io, os, sys, time, math, argparse
+import argparse
+import io
+import json
+import math
+import os
+import sys
+import time
+import zipfile
+from io import StringIO
from pathlib import Path
-from typing import List, Dict, Any
+from typing import Any, Dict, List
+
import numpy as np
-from io import StringIO
-import zipfile
import pandas as pd
-
from REDi.go_redi import go_redi
-this_dir = Path(os.path.dirname(os.path.abspath(__file__))).resolve()
+this_dir = Path(os.path.dirname(os.path.abspath(__file__))).resolve() # noqa: PTH100, PTH120
main_dir = this_dir.parents[1]
sys.path.insert(0, str(main_dir / 'common'))
-from simcenter_common import get_scale_factors
+from simcenter_common import get_scale_factors # noqa: E402
-class NumpyEncoder(json.JSONEncoder) :
+
+class NumpyEncoder(json.JSONEncoder): # noqa: D101
# Encode the numpy datatypes to json
-
- def default(self, obj):
+
+ def default(self, obj): # noqa: D102
if isinstance(obj, np.ndarray):
return obj.tolist()
return json.JSONEncoder.default(self, obj)
-def get_stats(arr : np.array) -> dict :
+def get_stats(arr: np.array) -> dict: # noqa: D103
# Returns a dictionary of summary stats from the array
if np.min(arr) > 0.0:
log_std = np.std(np.log(arr))
else:
- log_std = ""
-
+ log_std = ''
+
return {
- 'mean' : np.mean(arr),
- 'std' : np.std(arr),
- 'log_std' : log_std,
- 'count' : len(arr),
- 'min' : np.min(arr),
- 'max' : np.max(arr),
- '0.10%' : np.percentile(arr, 0.1),
- '2.3%' : np.percentile(arr, 2.3),
- '10%' : np.percentile(arr, 10),
- '15.9%' : np.percentile(arr, 15.9),
- '50%' : np.percentile(arr, 50),
- '84.1%' : np.percentile(arr, 84.1),
- '90%' : np.percentile(arr, 90),
- '97.7%' : np.percentile(arr, 97.7),
- '99.9%' : np.percentile(arr, 99.9)
+ 'mean': np.mean(arr),
+ 'std': np.std(arr),
+ 'log_std': log_std,
+ 'count': len(arr),
+ 'min': np.min(arr),
+ 'max': np.max(arr),
+ '0.10%': np.percentile(arr, 0.1),
+ '2.3%': np.percentile(arr, 2.3),
+ '10%': np.percentile(arr, 10),
+ '15.9%': np.percentile(arr, 15.9),
+ '50%': np.percentile(arr, 50),
+ '84.1%': np.percentile(arr, 84.1),
+ '90%': np.percentile(arr, 90),
+ '97.7%': np.percentile(arr, 97.7),
+ '99.9%': np.percentile(arr, 99.9),
}
-def clean_up_results(res : dict, keys_to_remove : List[str]) -> dict :
+def clean_up_results(res: dict, keys_to_remove: List[str]) -> dict: # noqa: FA100, D103
# Remove extra keys not needed here
-
+
for key in keys_to_remove:
if key in res:
del res[key]
@@ -98,140 +104,148 @@ def clean_up_results(res : dict, keys_to_remove : List[str]) -> dict :
return res
-def clean_up_nistr(nistr : str) -> str :
+def clean_up_nistr(nistr: str) -> str: # noqa: D103
# helper function to convert from Pelicun tag to REDi tag
-
+
indices_to_remove = [1, 4]
for index in sorted(indices_to_remove, reverse=True):
- nistr = nistr[:index] + nistr[index+1:]
+ nistr = nistr[:index] + nistr[index + 1 :]
return nistr
-def get_replacement_response(replacement_time : float) :
-
+def get_replacement_response(replacement_time: float): # noqa: D103
return {
- 'repair_class': 'replacement',
- 'damage_by_component_all_DS': None,
- 'repair_schedule': 'replacement',
- 'component_qty': None,
- 'consequence_by_component_by_floor': None,
- 'impeding_delays': None,
- 'max_delay': 0.0,
- 'building_total_downtime': [replacement_time, replacement_time, replacement_time]
- }
-
-
-def get_first_value(val : dict, num_levels : int) -> int :
+ 'repair_class': 'replacement',
+ 'damage_by_component_all_DS': None,
+ 'repair_schedule': 'replacement',
+ 'component_qty': None,
+ 'consequence_by_component_by_floor': None,
+ 'impeding_delays': None,
+ 'max_delay': 0.0,
+ 'building_total_downtime': [
+ replacement_time,
+ replacement_time,
+ replacement_time,
+ ],
+ }
+
+
+def get_first_value(val: dict, num_levels: int) -> int: # noqa: D103
# Get the number of samples that pelicun returns
-
- next_val =next(iter(val.items()))[1]
+
+ next_val = next(iter(val.items()))[1]
if num_levels > 0:
- return get_first_value(val=next_val,num_levels=num_levels-1)
- else :
+ return get_first_value(val=next_val, num_levels=num_levels - 1)
+ else: # noqa: RET505
return next_val
-def main(args):
-
- print("***Running REDi Seismic Downtime engine***\n")
+def main(args): # noqa: C901, D103, PLR0914, PLR0915
+ print('***Running REDi Seismic Downtime engine***\n') # noqa: T201
pelicun_results_dir = Path(args.dirnameOutput)
- redi_input_dir = pelicun_results_dir/'REDi_input'
- redi_output_dir = pelicun_results_dir/'REDi_output'
-
-
+ redi_input_dir = pelicun_results_dir / 'REDi_input'
+ redi_output_dir = pelicun_results_dir / 'REDi_output'
+
# Create the directory if it doesn't exist
if not redi_input_dir.exists():
redi_input_dir.mkdir(parents=True)
-
-
+
# Create the directory if it doesn't exist
if not redi_output_dir.exists():
- redi_output_dir.mkdir(parents=True)
-
-
+ redi_output_dir.mkdir(parents=True)
+
# dictionary to hold the base input parameters that do not change with every pelicun iteration
- rediInputDict = dict()
+ rediInputDict = dict() # noqa: C408, N806
# load the risk parameters
- pathRiskParams = Path(args.riskParametersPath)
- with open(pathRiskParams, encoding="utf-8") as f:
+ pathRiskParams = Path(args.riskParametersPath) # noqa: N806
+ with open(pathRiskParams, encoding='utf-8') as f: # noqa: PTH123
risk_param_dict = json.load(f)
- rediInputDict['risk_parameters']=risk_param_dict
+ rediInputDict['risk_parameters'] = risk_param_dict
# import SimCenter's AIM.json file
- pathAim = pelicun_results_dir/'AIM.json'
- with open(pathAim, encoding="utf-8") as f:
- AIM = json.load(f)
+ pathAim = pelicun_results_dir / 'AIM.json' # noqa: N806
+ with open(pathAim, encoding='utf-8') as f: # noqa: PTH123
+ AIM = json.load(f) # noqa: N806
# Get the CMP_sample json from Pelicun
- pathComponent = pelicun_results_dir/'CMP_sample.json'
- with open(pathComponent, encoding="utf-8") as f:
- CMP = json.load(f)
+ pathComponent = pelicun_results_dir / 'CMP_sample.json' # noqa: N806
+ with open(pathComponent, encoding='utf-8') as f: # noqa: PTH123
+ CMP = json.load(f) # noqa: N806
# remove Units information - for now
- if "Units" in CMP:
+ if 'Units' in CMP:
del CMP['Units']
# Get the DMG_sample json from Pelicun
- pathComponentDmg = pelicun_results_dir/'DMG_sample.json'
- with open(pathComponentDmg, encoding="utf-8") as f:
- CMP_DMG = json.load(f)
+ pathComponentDmg = pelicun_results_dir / 'DMG_sample.json' # noqa: N806
+ with open(pathComponentDmg, encoding='utf-8') as f: # noqa: PTH123
+ CMP_DMG = json.load(f) # noqa: N806
# remove Units information - for now
- if "Units" in CMP_DMG:
+ if 'Units' in CMP_DMG:
del CMP_DMG['Units']
# Get the DV_repair_sample json from Pelicun
- pathComponentDV = pelicun_results_dir/'DV_repair_sample.json'
- with open(pathComponentDV, encoding="utf-8") as f:
- CMP_DV = json.load(f)
+ pathComponentDV = pelicun_results_dir / 'DV_repair_sample.json' # noqa: N806
+ with open(pathComponentDV, encoding='utf-8') as f: # noqa: PTH123
+ CMP_DV = json.load(f) # noqa: N806
# remove Units information - for now
- if "Units" in CMP_DV:
+ if 'Units' in CMP_DV:
del CMP_DV['Units']
# Load the csv version of the decision vars
- with zipfile.ZipFile(pelicun_results_dir/'DV_repair_sample.zip', 'r') as zip_ref:
+ with zipfile.ZipFile( # noqa: SIM117
+ pelicun_results_dir / 'DV_repair_sample.zip', 'r'
+ ) as zip_ref:
# Read the CSV file inside the zip file into memory
with zip_ref.open('DV_repair_sample.csv') as csv_file:
# Load the CSV data into a pandas DataFrame
- data = pd.read_csv(io.TextIOWrapper(csv_file, encoding='utf-8'), index_col=0)
+ data = pd.read_csv(
+ io.TextIOWrapper(csv_file, encoding='utf-8'), index_col=0
+ )
# Drop Units row for now to avoid breaking the code - would be good to use this info in the future
- data = data.drop("Units").astype(float)
-
+ data = data.drop('Units').astype(float)
+
# Get the number of samples
num_samples = data.shape[0]
# Define a list of keywords to search for in column names
keywords = ['replacement-collapse', 'replacement-irreparable']
- DVs = ['Cost','Time']
-
- DVReplacementDict = {}
- for DV in DVs :
- columns_to_check = [col for col in data.columns if any(f'{DV}-{keyword}' in col for keyword in keywords)]
+ DVs = ['Cost', 'Time'] # noqa: N806
+
+ DVReplacementDict = {} # noqa: N806
+ for DV in DVs: # noqa: N806
+ columns_to_check = [
+ col
+ for col in data.columns
+ if any(f'{DV}-{keyword}' in col for keyword in keywords)
+ ]
# Create a boolean vector indicating whether non-zero values are present in each column
result_vector = data[columns_to_check].apply(max, axis=1)
DVReplacementDict[DV] = result_vector
-
# Find columns containing replace or collapse keywords
- buildingirreparableOrCollapsed = (data[columns_to_check] != 0).any(axis=1)
+ buildingirreparableOrCollapsed = (data[columns_to_check] != 0).any(axis=1) # noqa: N806
sum_collapsed_buildings = sum(buildingirreparableOrCollapsed)
-
- print(f"There are {sum_collapsed_buildings} collapsed or irreparable buildings from Pelicun")
+
+ print( # noqa: T201
+ f'There are {sum_collapsed_buildings} collapsed or irreparable buildings from Pelicun'
+ )
# Get some general information
gen_info = AIM['DL']['Asset']
- nStories = int(gen_info['NumberOfStories'])
+ nStories = int(gen_info['NumberOfStories']) # noqa: N806
rediInputDict['nFloor'] = nStories
# Get the plan area
@@ -239,13 +253,13 @@ def main(args):
# Get the units
input_units = {'length': AIM['GeneralInformation']['units']['length']}
- output_units = {'length':'ft'}
-
+ output_units = {'length': 'ft'}
+
# scale the input data to the length unit used internally
f_scale_units = get_scale_factors(input_units, output_units)['length']
# Scale the plan area
- plan_area = plan_area*f_scale_units*f_scale_units
+ plan_area = plan_area * f_scale_units * f_scale_units
floor_areas = [plan_area for i in range(nStories + 1)]
rediInputDict['floor_areas'] = floor_areas
@@ -255,36 +269,38 @@ def main(args):
# Estimate the number of workers
# PACT provides a default setting of 0.001 which corresponds to one worker per 1000 square feet of floor area. Users should generally execute their assessment with this default value,
- num_workers = max(int(total_building_area/1000), 1)
-
+ num_workers = max(int(total_building_area / 1000), 1)
+
# Get the replacement cost and time
- DL_info = AIM['DL']['Losses']['Repair']
-
+ DL_info = AIM['DL']['Losses']['Repair'] # noqa: N806
+
# Note these are not the random
- replacementCost = DL_info['ReplacementCost']['Median']
- rediInputDict['replacement_cost'] = float(replacementCost)/1e6 #Needs to be in the millions of dollars
+ replacementCost = DL_info['ReplacementCost']['Median'] # noqa: N806
+ rediInputDict['replacement_cost'] = (
+ float(replacementCost) / 1e6
+ ) # Needs to be in the millions of dollars
- replacementTime = float(DL_info['ReplacementTime']['Median'])
+ replacementTime = float(DL_info['ReplacementTime']['Median']) # noqa: N806
# convert replacement time to days from worker_days
- replacementTime = replacementTime / num_workers
+ replacementTime = replacementTime / num_workers # noqa: N806, PLR6104
rediInputDict['replacement_time'] = replacementTime
- final_results_dict = dict()
- log_output : List[str] = []
-
- for sample in range(num_samples) :
+ final_results_dict = dict() # noqa: C408
+ log_output: List[str] = [] # noqa: FA100
- if buildingirreparableOrCollapsed[sample] :
-
+ for sample in range(num_samples): # noqa: PLR1702
+ if buildingirreparableOrCollapsed[sample]:
# Convert the replacement time coming out of Pelicun (worker-days) into days by dividing by the number of workers
replacement_time = DVReplacementDict['Time'][sample] / num_workers
-
- final_results_dict[sample] = get_replacement_response(replacement_time=replacement_time)
- continue
-
- ### REDi input map ###
+
+ final_results_dict[sample] = get_replacement_response(
+ replacement_time=replacement_time
+ )
+ continue
+
+ # REDi input map ###
# Assemble the component quantity vector
# components object is a list of lists where each item is a list of component on a particular floor.
# The components are a dictionary containing the component tag (NISTR) and an array of quantities (Qty) in each direction, i.e., [dir_1, dir_2]
@@ -294,192 +310,193 @@ def main(args):
# ...,
# {'NISTR' : nistr_id_n,
# 'Qty' : [dir_1, dir_2]}]
- components : List[List[Dict[str,Any]]]= [[] for i in range(nStories + 1)]
+ components: List[List[Dict[str, Any]]] = [[] for i in range(nStories + 1)] # noqa: FA100
- ### Pelicun output map ###
+ # Pelicun output map ###
# "B1033.061b": { <- component nistr
# "4": { <- floor
# "1": [ <- direction
- CMP = clean_up_results(res=CMP, keys_to_remove = ["collapse", "excessiveRID", "irreparable"])
- for nistr, floors in CMP.items() :
+ CMP = clean_up_results( # noqa: N806
+ res=CMP, keys_to_remove=['collapse', 'excessiveRID', 'irreparable']
+ )
+ for nistr, floors in CMP.items():
+ nistr = clean_up_nistr(nistr=nistr) # noqa: PLW2901
- nistr = clean_up_nistr(nistr=nistr)
-
- for floor, dirs in floors.items() :
-
- floor = int(floor)
+ for floor, dirs in floors.items():
+ floor = int(floor) # noqa: PLW2901
dir_1 = 0.0
dir_2 = 0.0
# If no directionality, i.e., direction is 0, divide the components evenly in the two directions
- if '0' in dirs :
+ if '0' in dirs:
qnty = float(dirs['0'][sample])
dir_1 = 0.5 * qnty
dir_2 = 0.5 * qnty
- elif '1' in dirs or '2' in dirs :
-
- if '1' in dirs :
+ elif '1' in dirs or '2' in dirs:
+ if '1' in dirs:
dir_1 = float(dirs['1'][sample])
- if '2' in dirs :
+ if '2' in dirs:
dir_2 = float(dirs['2'][sample])
- else :
- raise ValueError('Could not parse the directionality in the Pelicun output.')
+ else:
+ raise ValueError( # noqa: TRY003
+ 'Could not parse the directionality in the Pelicun output.' # noqa: EM101
+ )
- cmp_dict = {
- 'NISTR' : nistr,
- 'Qty' : [dir_1, dir_2]
- }
- components[floor-1].append(cmp_dict)
+ cmp_dict = {'NISTR': nistr, 'Qty': [dir_1, dir_2]}
+ components[floor - 1].append(cmp_dict)
-
- ### REDi input map ###
+ # REDi input map ###
# total_consequences = dict()
# Assemble the component damage vector
# component_damage object is a dictionary where each key is a component tag (NISTR) and the values is a list of a list.
# The highest level, outer list is associated with the number of damage states while the inner list corresponds to the number of floors
# [ds_1, ds_2, ..., ds_n]
# where ds_n = [num_dmg_units_floor_1, num_dmg_units_floor_2, ..., num_dmg_units_floor_n]
- component_damage : Dict[str,List[List[float]]] = {}
+ component_damage: Dict[str, List[List[float]]] = {} # noqa: FA100
- ### Pelicun output map ###
+ # Pelicun output map ###
# "B1033.061b": { <- component nistr
# "4": { <- floor
# "1": { <- direction
# "0": [ <- damage state -> Note that zero.. means undamaged
- CMP_DMG = clean_up_results(res=CMP_DMG, keys_to_remove = ["collapse", "excessiveRID", "irreparable"])
+ CMP_DMG = clean_up_results( # noqa: N806
+ res=CMP_DMG, keys_to_remove=['collapse', 'excessiveRID', 'irreparable']
+ )
collapse_flag = False
- for nistr, floors in CMP_DMG.items() :
-
- nistr = clean_up_nistr(nistr=nistr)
+ for nistr, floors in CMP_DMG.items():
+ nistr = clean_up_nistr(nistr=nistr) # noqa: PLW2901
# Get the number of damage states
- num_ds = len(get_first_value(val=floors,num_levels=1))
+ num_ds = len(get_first_value(val=floors, num_levels=1))
floor_qtys = [0.0 for i in range(nStories + 1)]
ds_qtys = [floor_qtys for i in range(num_ds)]
-
- for floor, dirs in floors.items() :
-
- floor = int(floor)
-
- for dir, dir_qty in dirs.items() :
- for ds, qtys in dir_qty.items() :
+ for floor, dirs in floors.items():
+ floor = int(floor) # noqa: PLW2901
- ds = int(ds)
+ for dir, dir_qty in dirs.items(): # noqa: B007, A001
+ for ds, qtys in dir_qty.items():
+ ds = int(ds) # noqa: PLW2901
qty = float(qtys[sample])
- if math.isnan(qty) :
- log_output.append(f'Collapse detected sample {sample}. Skipping REDi run.\n')
+ if math.isnan(qty):
+ log_output.append(
+ f'Collapse detected sample {sample}. Skipping REDi run.\n'
+ )
collapse_flag = True
break
# Sum up the damage states
- ds_qtys[ds][floor-1] += qty
+ ds_qtys[ds][floor - 1] += qty
- if collapse_flag :
+ if collapse_flag:
break
- if collapse_flag :
+ if collapse_flag:
break
- if collapse_flag :
+ if collapse_flag:
break
component_damage[nistr] = ds_qtys
# total_consequences[nistr] = component_damage
- if collapse_flag :
+ if collapse_flag:
continue
# Assemble the component decision variable vector
cost_dict = CMP_DV['Cost']
- cost_dict = clean_up_results(res=cost_dict, keys_to_remove = ["replacement"])
+ cost_dict = clean_up_results(res=cost_dict, keys_to_remove=['replacement'])
time_dict = CMP_DV['Time']
- time_dict = clean_up_results(res=time_dict, keys_to_remove = ["replacement"])
+ time_dict = clean_up_results(res=time_dict, keys_to_remove=['replacement'])
- ### REDi input map ###
+ # REDi input map ###
# Total_consequences is a list of lists of lists.
# The highest-level list (always length 4) corresponds to the 4 types of consequences at the component level: (1) repair cost [dollars], (2) repair time [worker days], (3) injuries, (4) fatalities.
# The second level list contains the number of stories, so a list with length 5 will be a 4-story building with a roof.
# The third-level list is based on the number of damage states (not including Damage State 0).
- total_consequences : Dict[str,List[List[float]]] = {}
+ total_consequences: Dict[str, List[List[float]]] = {} # noqa: FA100
- ### Pelicun output map ###
+ # Pelicun output map ###
# "COST": { <- cost/time key
# "B1033.061b": { <- component nistr *special case - this one to evaluate consequences (depends on occupancy type). Note that Component name will match in FEMA P-58 analysis (this case)
# "B1033.061b": { <- component nistr *special case - this one tells you about damage (depends on perhaps location in building or something else). Note that Component name will match in FEMA P-58 analysis (this case)
# "1": { <- damage state
# "4": { <- floor
# "1": [ <- direction
- for nistr in cost_dict.keys() :
-
+ for nistr in cost_dict.keys(): # noqa: SIM118
# Handle the case of the nested nistr which will be the same for FEMA P-58
cost_res = cost_dict[nistr][nistr]
time_res = time_dict[nistr][nistr]
num_ds = len(cost_res)
- ds_list = np.array([ 0.0 for i in range(num_ds)])
- floor_list = np.array([ ds_list for i in range(nStories+1)])
+ ds_list = np.array([0.0 for i in range(num_ds)])
+ floor_list = np.array([ds_list for i in range(nStories + 1)])
cost_floor_list = floor_list.copy()
time_floor_list = floor_list.copy()
- for ds in cost_res.keys() :
-
+ for ds in cost_res.keys(): # noqa: SIM118
cost_floor_dict = cost_res[ds]
time_floor_dict = time_res[ds]
- ds = int(ds)
+ ds = int(ds) # noqa: PLW2901
- for floor in cost_floor_dict.keys() :
-
+ for floor in cost_floor_dict.keys(): # noqa: SIM118
cost_dirs_dict = cost_floor_dict[floor]
time_dirs_dict = time_floor_dict[floor]
- floor = int(floor)
+ floor = int(floor) # noqa: PLW2901
- total_cost=0.0
- total_time=0.0
- for dir in cost_dirs_dict.keys() :
+ total_cost = 0.0
+ total_time = 0.0
+ for dir in cost_dirs_dict.keys(): # noqa: A001, SIM118
total_cost += float(cost_dirs_dict[dir][sample])
total_time += float(time_dirs_dict[dir][sample])
- cost_floor_list[floor-1][ds-1] = total_cost
- time_floor_list[floor-1][ds-1] = total_time
+ cost_floor_list[floor - 1][ds - 1] = total_cost
+ time_floor_list[floor - 1][ds - 1] = total_time
- nistr = clean_up_nistr(nistr=nistr)
+ nistr = clean_up_nistr(nistr=nistr) # noqa: PLW2901
# Last two items are empty because pelicun does not return injuries and fatalities.
- total_consequences[nistr] = [cost_floor_list,time_floor_list,floor_list,floor_list]
+ total_consequences[nistr] = [
+ cost_floor_list,
+ time_floor_list,
+ floor_list,
+ floor_list,
+ ]
# Save the building input file
this_it_input = rediInputDict
- this_it_input['components']=components
- this_it_input['component_damage']=component_damage
- this_it_input['total_consequences']=total_consequences
+ this_it_input['components'] = components
+ this_it_input['component_damage'] = component_damage
+ this_it_input['total_consequences'] = total_consequences
rediInputDict['_id'] = f'SimCenter_{sample}'
# Save the dictionary to a JSON file
- with open(redi_input_dir/f'redi_{sample}.json', 'w', encoding="utf-8") as f:
+ with open( # noqa: PTH123
+ redi_input_dir / f'redi_{sample}.json', 'w', encoding='utf-8'
+ ) as f:
json.dump(this_it_input, f, indent=4, cls=NumpyEncoder)
# Create a StringIO object to capture the stdout
captured_output = StringIO()
-
+
# Redirect sys.stdout to the captured_output stream
sys.stdout = captured_output
-
+
try:
res = go_redi(building_dict=this_it_input)
finally:
@@ -493,20 +510,20 @@ def main(args):
final_results_dict[sample] = res
-
# Create a high-level json with detailed results
- print(f'Saving all samples to: {redi_output_dir}/redi_results_all_samples.json')
- with open(redi_output_dir/f'redi_results_all_samples.json', 'w', encoding="utf-8") as f:
+ print(f'Saving all samples to: {redi_output_dir}/redi_results_all_samples.json') # noqa: T201
+ with open( # noqa: PTH123
+ redi_output_dir / 'redi_results_all_samples.json', 'w', encoding='utf-8'
+ ) as f:
json.dump(final_results_dict, f, cls=NumpyEncoder)
# Create a smaller summary stats json for recovery time and max delay
- dt_all_samples = [[]for i in range(3)]
+ dt_all_samples = [[] for i in range(3)]
max_delay_list = []
- for sample, res in final_results_dict.items() :
-
+ for sample, res in final_results_dict.items(): # noqa: B007
total_downtime = res['building_total_downtime']
# full recovery - functional recovery - immediate occupancy
- for i in range(3) :
+ for i in range(3):
dt_all_samples[i].append(total_downtime[i])
max_delay_list.append(res['max_delay'])
@@ -516,51 +533,68 @@ def main(args):
functional_recovery_list = np.array(dt_all_samples[1])
immediate_occupancy_list = np.array(dt_all_samples[2])
- summary_stats = {"Max delay" : get_stats(max_delay_list),
- "Full Recovery" : get_stats(full_recovery_list),
- "Functional Recovery" : get_stats(functional_recovery_list),
- "Immediate Occupancy" : get_stats(immediate_occupancy_list)
- }
+ summary_stats = {
+ 'Max delay': get_stats(max_delay_list),
+ 'Full Recovery': get_stats(full_recovery_list),
+ 'Functional Recovery': get_stats(functional_recovery_list),
+ 'Immediate Occupancy': get_stats(immediate_occupancy_list),
+ }
- print(f'Saving all samples to: {redi_output_dir}/redi_summary_stats.json')
- with open(redi_output_dir/f'redi_summary_stats.json', 'w', encoding="utf-8") as f:
+ print(f'Saving all samples to: {redi_output_dir}/redi_summary_stats.json') # noqa: T201
+ with open( # noqa: PTH123
+ redi_output_dir / 'redi_summary_stats.json', 'w', encoding='utf-8'
+ ) as f:
json.dump(summary_stats, f, indent=4, cls=NumpyEncoder)
# Write the log file
- print(f'Saving REDi log file at: {redi_output_dir}/redi_log.txt')
- with open(redi_output_dir/f'redi_log.txt', 'w', encoding="utf-8") as file:
+ print(f'Saving REDi log file at: {redi_output_dir}/redi_log.txt') # noqa: T201
+ with open(redi_output_dir / 'redi_log.txt', 'w', encoding='utf-8') as file: # noqa: PTH123
# Iterate through the list of strings and write each one to the file
for string in log_output:
file.write(string + '\n')
-
-if __name__ == "__main__":
-
-
- parser = argparse.ArgumentParser(description='REDi-Pelicun Python Package Wrapper')
- parser.add_argument('-w','--dirnameOutput', type=str, default=None, help='Path to the working directory containing the Pelicun results [str]')
- parser.add_argument('-r','--riskParametersPath', type=str, default=None, help='Path to the risk parameters json file [str]')
+if __name__ == '__main__':
+ parser = argparse.ArgumentParser(
+ description='REDi-Pelicun Python Package Wrapper'
+ )
+ parser.add_argument(
+ '-w',
+ '--dirnameOutput',
+ type=str,
+ default=None,
+ help='Path to the working directory containing the Pelicun results [str]',
+ )
+ parser.add_argument(
+ '-r',
+ '--riskParametersPath',
+ type=str,
+ default=None,
+ help='Path to the risk parameters json file [str]',
+ )
args = parser.parse_args()
-
-
+
# Check for the required arguments
- if not args.dirnameOutput :
- print("Path to the working directory containing the Pelicun results is required")
- exit()
- else :
- if not Path(args.dirnameOutput).exists() :
- print(f"Provided path to the working directory {args.dirnameOutput} does not exist")
- exit()
-
- if not args.riskParametersPath :
- print("Path to the risk parameters JSON file is required")
- exit()
- else :
- if not Path(args.riskParametersPath).exists() :
- print(f"Provided path to the risk parameters JSON file {args.riskParametersPath} does not exist")
- exit()
+ if not args.dirnameOutput:
+ print( # noqa: T201
+ 'Path to the working directory containing the Pelicun results is required'
+ )
+ exit() # noqa: PLR1722
+ elif not Path(args.dirnameOutput).exists():
+ print( # noqa: T201
+ f'Provided path to the working directory {args.dirnameOutput} does not exist'
+ )
+ exit() # noqa: PLR1722
+
+ if not args.riskParametersPath:
+ print('Path to the risk parameters JSON file is required') # noqa: T201
+ exit() # noqa: PLR1722
+ elif not Path(args.riskParametersPath).exists():
+ print( # noqa: T201
+ f'Provided path to the risk parameters JSON file {args.riskParametersPath} does not exist'
+ )
+ exit() # noqa: PLR1722
start_time = time.time()
@@ -568,7 +602,7 @@ def main(args):
end_time = time.time()
elapsed_time = end_time - start_time
- print(f"REDi finished. Elapsed time: {elapsed_time:.2f} seconds")
+ print(f'REDi finished. Elapsed time: {elapsed_time:.2f} seconds') # noqa: T201
-"/opt/homebrew/anaconda3/envs/simcenter/bin/python" "/Users/stevan.gavrilovic/Desktop/SimCenter/SimCenterBackendApplications/applications/performanceAssessment/REDi/REDiWrapper.py" "--riskParametersPath" "/Users/stevan.gavrilovic/Desktop/SimCenter/build-PBE-Qt_6_5_1_for_macOS-Debug/PBE.app/Contents/MacOS/Examples/pbdl-0003/src/risk_params.json" "--dirnameOutput" "/Users/stevan.gavrilovic/Documents/PBE/LocalWorkDir/tmp.SimCenter"
+'/opt/homebrew/anaconda3/envs/simcenter/bin/python' '/Users/stevan.gavrilovic/Desktop/SimCenter/SimCenterBackendApplications/applications/performanceAssessment/REDi/REDiWrapper.py' '--riskParametersPath' '/Users/stevan.gavrilovic/Desktop/SimCenter/build-PBE-Qt_6_5_1_for_macOS-Debug/PBE.app/Contents/MacOS/Examples/pbdl-0003/src/risk_params.json' '--dirnameOutput' '/Users/stevan.gavrilovic/Documents/PBE/LocalWorkDir/tmp.SimCenter'
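
REDiWrapper serializes numpy-typed results through the NumpyEncoder defined above and condenses downtime arrays with get_stats(). A small sketch of the same serialization idea, with the summary reduced to two entries for brevity:

    import json

    import numpy as np


    class NumpyEncoder(json.JSONEncoder):
        # Same idea as the encoder above: convert numpy arrays to lists for JSON.
        def default(self, obj):
            if isinstance(obj, np.ndarray):
                return obj.tolist()
            return json.JSONEncoder.default(self, obj)


    downtimes = np.array([12.0, 30.0, 45.0, 60.0])
    summary = {'mean': np.mean(downtimes), '90%': np.percentile(downtimes, 90)}
    print(json.dumps({'Functional Recovery': summary, 'samples': downtimes},
                     cls=NumpyEncoder, indent=2))
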
diff --git a/modules/systemPerformance/REWET/REWET/Damage.py b/modules/systemPerformance/REWET/REWET/Damage.py
index 9b9b1ba60..a7141352c 100644
--- a/modules/systemPerformance/REWET/REWET/Damage.py
+++ b/modules/systemPerformance/REWET/REWET/Damage.py
@@ -1,245 +1,308 @@
-# -*- coding: utf-8 -*-
-"""
-Created on Mon Mar 23 13:54:21 2020
-This module is responsble for calculating damage to t=different componenst of
-teh system, including pipe lines. pupmo and so.
+"""Created on Mon Mar 23 13:54:21 2020
+This module is responsible for calculating damage to different components of
+the system, including pipelines, pumps, and so on.
@author: snaeimi
-"""
-import wntrfr
-import pandas as pd
-from scipy.stats import lognorm
+""" # noqa: CPY001, D205, D400, N999
+
import logging
-import pickle
-import random
+import math
+import pickle # noqa: S403
+
import numpy as np
+import pandas as pd
+import wntrfr
+from EnhancedWNTR.morph.link import break_pipe, split_pipe
+from scipy.stats import lognorm
from wntrfr.network.model import LinkStatus
-from EnhancedWNTR.morph.link import split_pipe, break_pipe
logger = logging.getLogger(__name__)
-class EarthquakeScenario():
- def __init__(self, magnitude , depth, x_coord , y_coord , eq_time):
- self.M=abs(magnitude)
- self.depth=abs(depth)
- self.coordinate={}
- self.coordinate['X']=x_coord
- self.coordinate['Y']=y_coord
- self.time=abs(eq_time)
-
- def getWNTREarthquakeObject(self):
- return wntrfr.scenario.Earthquake((self.coordinate['X'] , self.coordinate['Y']), self.M, self.depth)
-
-class Damage:
+
+class EarthquakeScenario: # noqa: D101
+ def __init__(self, magnitude, depth, x_coord, y_coord, eq_time):
+ self.M = abs(magnitude)
+ self.depth = abs(depth)
+ self.coordinate = {}
+ self.coordinate['X'] = x_coord
+ self.coordinate['Y'] = y_coord
+ self.time = abs(eq_time)
+
+ def getWNTREarthquakeObject(self): # noqa: N802, D102
+ return wntrfr.scenario.Earthquake(
+ (self.coordinate['X'], self.coordinate['Y']), self.M, self.depth
+ )
+
+
+class Damage: # noqa: D101, PLR0904
def __init__(self, registry, scenario_set):
- self.scenario_set = scenario_set
- self.pipe_leak = pd.Series(dtype="O")
- self.pipe_break = pd.Series(dtype="O")
+ self.scenario_set = scenario_set
+ self.pipe_leak = pd.Series(dtype='O')
+ self.pipe_break = pd.Series(dtype='O')
self.pipe_all_damages = None
- self.tank_damage = pd.Series(dtype="O")
- self.node_damage = pd.Series(dtype="O")
- #self._earthquake = pd.Series(dtype="O")
- self._registry = registry
- self.default_time = 4
- #if damageEndTime==None:
- self.end_time = 100
- #else:
- #self.end_time=damageEndTime
+ self.tank_damage = pd.Series(dtype='O')
+ self.node_damage = pd.Series(dtype='O')
+ # self._earthquake = pd.Series(dtype="O")
+ self._registry = registry
+ self.default_time = 4
+ # if damageEndTime==None:
+ self.end_time = 100
+ # else:
+ # self.end_time=damageEndTime
self.is_timely_sorted = False
-
+
self._pipe_last_ratio = pd.Series(dtype='float64')
- self.damaged_pumps = pd.Series(dtype='float64')
+ self.damaged_pumps = pd.Series(dtype='float64')
self.nodal_equavalant_diameter = None
-
- #self._nodal_damage_method = None
- self._pipe_damage_method = 1
-
-
- def readDamageFromPickleFile(self, pickle_file_name, csv_file_name, csv_index = None ):
- """
- This function is only for teh sake of reading picke file that nafiseg gives to me
+
+ # self._nodal_damage_method = None
+ self._pipe_damage_method = 1
+
+ def readDamageFromPickleFile( # noqa: N802
+ self,
+ pickle_file_name,
+ csv_file_name,
+ csv_index=None,
+ ):
+ """This function is only for the sake of reading the pickle file that nafiseg gives to me
This function shall not be in any distribution that we release
Parameters
----------
pickle_file_name : string
- name file ot path + name file of the pickle file
- csv_file_name : name file ot path + name file of the csv file
+ file name, or path plus file name, of the pickle file
+ csv_file_name : file name, or path plus file name, of the csv file
Returns
-------
- """
- with open(pickle_file_name, 'rb') as pckf:
- w = pickle.load(pckf)
-
- name_list = pd.read_csv(csv_file_name, index_col = csv_index)
+ """ # noqa: D205, D400, D401, D404, D414
+ with open(pickle_file_name, 'rb') as pckf: # noqa: PTH123
+ w = pickle.load(pckf) # noqa: S301
+
+ name_list = pd.read_csv(csv_file_name, index_col=csv_index)
damage_name_list = []
- damage_mat_list = []
-
+ damage_mat_list = []
+
for ind, val in w.items():
- if ind[4] == 0 and val== 1:
+ if ind[4] == 0 and val == 1:
refr = ind[2]
damage_name_list.append(name_list.index[refr])
damage_mat_list.append(name_list['material'][refr])
-
- damage_list = pd.DataFrame()
- damage_list['name'] = damage_name_list
- damage_list['material'] = damage_mat_list
- damage_state_probability = {'STL':[0.2,0.8], 'CI':[0.2,0.8], 'DI':[0,1], 'CON':[0.2,0.8], 'RS':[0.2,0.8]}
- damage_list = self.sampleDamageStatesBasedOnMaterialFragility(damage_list, damage_state_probability)
- #return damage_list
+
+ damage_list = pd.DataFrame()
+ damage_list['name'] = damage_name_list
+ damage_list['material'] = damage_mat_list
+ damage_state_probability = {
+ 'STL': [0.2, 0.8],
+ 'CI': [0.2, 0.8],
+ 'DI': [0, 1],
+ 'CON': [0.2, 0.8],
+ 'RS': [0.2, 0.8],
+ }
+ damage_list = self.sampleDamageStatesBasedOnMaterialFragility(
+ damage_list, damage_state_probability
+ )
+ # return damage_list
self.addPipeDamageByDamageList(damage_list, 1, 0)
-
- #print(name_list)
-
- def readPumpDamage(self, file_name):
+
+ # print(name_list)
+
+ def readPumpDamage(self, file_name): # noqa: N802, D102
pump_list = pd.read_csv(file_name)
self.damaged_pumps = pump_list['Pump_ID']
-
- def readNodalDamage(self, file_address):
+
+ def readNodalDamage(self, file_address): # noqa: N802, D102
temp = pd.read_csv(file_address)
- for ind, val in temp.iterrows():
+ for ind, val in temp.iterrows(): # noqa: B007
temp_data = {}
- temp_data['node_name'] = str(val['NodeID'])
- temp_data['node_RR'] = val['RR']
- temp_data['node_Pre_EQ_Demand'] = val['Pre_EQ_Demand'] * 6.30901964/100000#*0.5
- temp_data['node_Post_EQ_Demand'] = val['Post_EQ_Demand'] * 6.30901964/100000#*0.5 # * 6.30901964/100000*(1+0.01*val['setNumDamages'])
- temp_data['node_Pipe_Length'] = val['PipeLength']
- temp_data['Number_of_damages'] = val['setNumDamages']
- temp_data['node_Customer'] = val['#Customer']
- temp_data['node_LargeUser'] = val['LargeUser']
-
+ temp_data['node_name'] = str(val['NodeID'])
+ temp_data['node_RR'] = val['RR']
+ temp_data['node_Pre_EQ_Demand'] = (
+ val['Pre_EQ_Demand'] * 6.30901964 / 100000
+ ) # *0.5
+ temp_data['node_Post_EQ_Demand'] = (
+ val['Post_EQ_Demand'] * 6.30901964 / 100000
+ ) # *0.5 # * 6.30901964/100000*(1+0.01*val['setNumDamages'])
+ temp_data['node_Pipe_Length'] = val['PipeLength']
+ temp_data['Number_of_damages'] = val['setNumDamages']
+ temp_data['node_Customer'] = val['#Customer']
+ temp_data['node_LargeUser'] = val['LargeUser']
+
self.node_damage = self.node_damage.append(pd.Series(data=[temp_data]))
-
- self.node_damage.reset_index(drop=True, inplace=True)
-
- def setNodalDamageModelParameter(self, damage_param):
+
+ self.node_damage.reset_index(drop=True, inplace=True) # noqa: PD002
+
+ def setNodalDamageModelParameter(self, damage_param): # noqa: N802, D102
self._registry.nodal_equavalant_diameter = damage_param
-
- def readDamageGiraffeFormat(self, break_file_name, leak_file_name):
+
+ def readDamageGiraffeFormat(self, break_file_name, leak_file_name): # noqa: N802, D102
break_temp = pd.read_csv(break_file_name)
leak_temp = pd.read_csv(leak_file_name)
-
- temp_break_pipe_ID = break_temp['PipeID']
- temp_leak_pipe_ID = leak_temp['PipeID']
-
+
+ temp_break_pipe_ID = break_temp['PipeID'] # noqa: N806
+ temp_leak_pipe_ID = leak_temp['PipeID'] # noqa: N806
+
if temp_break_pipe_ID.dtype != 'O':
- temp_break_pipe_ID = temp_break_pipe_ID.apply(lambda x: str(x))
+ temp_break_pipe_ID = temp_break_pipe_ID.apply(lambda x: str(x)) # noqa: N806, PLW0108
break_temp['PipeID'] = temp_break_pipe_ID
-
- if temp_leak_pipe_ID.dtype !='O':
- temp_leak_pipe_ID = temp_leak_pipe_ID.apply(lambda x: str(x))
+
+ if temp_leak_pipe_ID.dtype != 'O':
+ temp_leak_pipe_ID = temp_leak_pipe_ID.apply(lambda x: str(x)) # noqa: N806, PLW0108
leak_temp['PipeID'] = temp_leak_pipe_ID
-
-
- temp1 = break_temp[['PipeID','BreakRatio']]
- temp1._is_copy=None
- temp1['damage']='break'
- temp1.rename(columns={'BreakRatio':'ratio'}, inplace=True)
-
- temp2 = leak_temp[['PipeID','LeakRatio']]
- temp2._is_copy=None
- temp2.rename(columns={'LeakRatio':'ratio'}, inplace=True)
- temp2['damage']='leak'
-
- temp=pd.concat([temp1, temp2])
-
- temp=temp.sort_values(['PipeID','ratio'],ascending = (True, False))
-
- unique_pipe_ID=temp['PipeID'].unique().tolist()
-
- for pipe_ID in unique_pipe_ID:
- selected_damage=temp[temp['PipeID']==pipe_ID]
+
+ temp1 = break_temp[['PipeID', 'BreakRatio']]
+ temp1._is_copy = None # noqa: SLF001
+ temp1['damage'] = 'break'
+ temp1.rename(columns={'BreakRatio': 'ratio'}, inplace=True) # noqa: PD002
+
+ temp2 = leak_temp[['PipeID', 'LeakRatio']]
+ temp2._is_copy = None # noqa: SLF001
+ temp2.rename(columns={'LeakRatio': 'ratio'}, inplace=True) # noqa: PD002
+ temp2['damage'] = 'leak'
+
+ temp = pd.concat([temp1, temp2])
+
+ temp = temp.sort_values(['PipeID', 'ratio'], ascending=(True, False))
+
+ unique_pipe_ID = temp['PipeID'].unique().tolist() # noqa: N806
+
+ for pipe_ID in unique_pipe_ID: # noqa: N806
+ selected_damage = temp[temp['PipeID'] == pipe_ID]
if 'break' in selected_damage['damage'].tolist():
- number=len(selected_damage)
- tmp_break = {'pipe_id' : pipe_ID,'break_loc' : 0.5, 'break_time' : self.default_time, 'number':number}
- self.pipe_break=self.pipe_break.append(pd.Series(data=[tmp_break],index=[int(tmp_break['break_time']*3600)]))
-
+ number = len(selected_damage)
+ tmp_break = {
+ 'pipe_id': pipe_ID,
+ 'break_loc': 0.5,
+ 'break_time': self.default_time,
+ 'number': number,
+ }
+ self.pipe_break = self.pipe_break.append(
+ pd.Series(
+ data=[tmp_break], index=[int(tmp_break['break_time'] * 3600)]
+ )
+ )
+
else:
- number=len(selected_damage)
- temp_leak_D = pd.Series(data=selected_damage.index)
- temp_leak_D = temp_leak_D.apply(lambda x: leak_temp.loc[x,'LeakD'])
-
- leak_D=((temp_leak_D**2).sum())**0.5
- tmp_leak = {'pipe_id' : pipe_ID,'leak_loc' : 0.5, 'leakD':leak_D/100*2.54 , 'leak_type' : 1, 'leak_time' : self.default_time, 'number':number}
- self.pipe_leak=self.pipe_leak.append(pd.Series(data=[tmp_leak] , index=[int(tmp_leak['leak_time']*3600)]))
-
-
-
- def addPipeDamageByDamageList(self, damage_list, leak_type_ref, break_type_ref):
- #leaked_damage = damage_list[damage_list['damage_state']==leak_type_ref]
-
- for ind, row in damage_list.iterrows():
- if row['damage_state'] == 0: # break
- tmp_break = {'pipe_id' : row['name'],'break_loc' : 0.5, 'break_time' : self.default_time}
- self.pipe_break=self.pipe_break.append(pd.Series(data=[tmp_break],index=[int(tmp_break['break_time']*3600)]))
- elif row.damage_state == 1: # leak
- tmp_leak = {'pipe_id' : row['name'],'leak_loc' : 0.5, 'leak_type' : 1, 'leak_time' : self.default_time}
- self.pipe_leak=self.pipe_leak.append(pd.Series(data=[tmp_leak] , index=[int(tmp_leak['leak_time']*3600)]))
+ number = len(selected_damage)
+ temp_leak_D = pd.Series(data=selected_damage.index) # noqa: N806
+ temp_leak_D = temp_leak_D.apply(lambda x: leak_temp.loc[x, 'LeakD']) # noqa: N806
+
+ leak_D = ((temp_leak_D**2).sum()) ** 0.5 # noqa: N806
+ tmp_leak = {
+ 'pipe_id': pipe_ID,
+ 'leak_loc': 0.5,
+ 'leakD': leak_D / 100 * 2.54,
+ 'leak_type': 1,
+ 'leak_time': self.default_time,
+ 'number': number,
+ }
+ self.pipe_leak = self.pipe_leak.append(
+ pd.Series(
+ data=[tmp_leak], index=[int(tmp_leak['leak_time'] * 3600)]
+ )
+ )
+
+ def addPipeDamageByDamageList(self, damage_list, leak_type_ref, break_type_ref): # noqa: ARG002, N802, D102
+ # leaked_damage = damage_list[damage_list['damage_state']==leak_type_ref]
+
+ for ind, row in damage_list.iterrows(): # noqa: B007
+ if row['damage_state'] == 0: # break
+ tmp_break = {
+ 'pipe_id': row['name'],
+ 'break_loc': 0.5,
+ 'break_time': self.default_time,
+ }
+ self.pipe_break = self.pipe_break.append(
+ pd.Series(
+ data=[tmp_break], index=[int(tmp_break['break_time'] * 3600)]
+ )
+ )
+ elif row.damage_state == 1: # leak
+ tmp_leak = {
+ 'pipe_id': row['name'],
+ 'leak_loc': 0.5,
+ 'leak_type': 1,
+ 'leak_time': self.default_time,
+ }
+ self.pipe_leak = self.pipe_leak.append(
+ pd.Series(
+ data=[tmp_leak], index=[int(tmp_leak['leak_time'] * 3600)]
+ )
+ )
else:
- raise ValueError("There is an unknow damage type")
-
-
-
-
- def readDamageFromTextFile(self, path):
- """
- Reads a damage from scenario from a text file and add the information
+ raise ValueError('There is an unknown damage type') # noqa: EM101, TRY003
+
+ def readDamageFromTextFile(self, path): # noqa: N802
+ """Reads a damage scenario from a text file and adds the information
to the damage class object.
-
+
Parameters
----------
[path] : str
The input file name
- """
- if path==None:
- raise ValueError('None in path')
- file=open(path)
- lines=file.readlines()
- line_cnt=0;
+
+ """ # noqa: D205, D401
+ if path == None: # noqa: E711
+ raise ValueError('None in path') # noqa: DOC501, EM101, TRY003
+ file = open(path) # noqa: PLW1514, PTH123, SIM115
+ lines = file.readlines()
+ line_cnt = 0
for line in lines:
- line_cnt+=1
- sline=line.split()
- line_length=len(sline)
-
- if sline[0].lower()=='leak':
- #print(len(sline))
- temp_leak={}
- if line_length<4:
- raise IOError("There must be at least 4 arguement in line"+repr(line_cnt))
- #print('Probelm 1')
- temp_leak['pipe_id']=sline[1]
- temp_leak['leak_loc']=float(sline[2])
- temp_leak['leak_type']=int(sline[3])
- if line_length >4:
- temp_leak['leak_time']=float(sline[4])
+ line_cnt += 1 # noqa: SIM113
+ sline = line.split()
+ line_length = len(sline)
+
+ if sline[0].lower() == 'leak':
+ # print(len(sline))
+ temp_leak = {}
+ if line_length < 4: # noqa: PLR2004
+ raise OSError( # noqa: DOC501
+ 'There must be at least 4 arguments in line ' + repr(line_cnt)
+ )
+ # print('Probelm 1')
+ temp_leak['pipe_id'] = sline[1]
+ temp_leak['leak_loc'] = float(sline[2])
+ temp_leak['leak_type'] = int(sline[3])
+ if line_length > 4: # noqa: PLR2004
+ temp_leak['leak_time'] = float(sline[4])
else:
- temp_leak['leak_time']=self.default_time
- self.pipe_leak=self.pipe_leak.append(pd.Series(data=[temp_leak] , index=[int(temp_leak['leak_time']*3600)]))
-
- elif sline[0].lower()=='break':
- if line_length<3:
- raise IOError("There most be at least 3 arguement in line")
- # print('Probelm 2')
- temp_break={}
- temp_break['pipe_id']=sline[1]
- temp_break['break_loc']=float(sline[2])
- if line_length >3:
- temp_break['break_time']=float(sline[3])
+ temp_leak['leak_time'] = self.default_time
+ self.pipe_leak = self.pipe_leak.append(
+ pd.Series(
+ data=[temp_leak], index=[int(temp_leak['leak_time'] * 3600)]
+ )
+ )
+
+ elif sline[0].lower() == 'break':
+ if line_length < 3: # noqa: PLR2004
+ raise OSError( # noqa: DOC501
+ 'There must be at least 3 arguments in line ' + repr(line_cnt)
+ )
+ # print('Probelm 2')
+ temp_break = {}
+ temp_break['pipe_id'] = sline[1]
+ temp_break['break_loc'] = float(sline[2])
+ if line_length > 3: # noqa: PLR2004
+ temp_break['break_time'] = float(sline[3])
else:
- temp_break['break_time']=self.default_time
- #print( type(temp_break['break_time']))
- self.pipe_break=self.pipe_break.append(pd.Series(data=[temp_break],index=[int(temp_break['break_time']*3600)]))
+ temp_break['break_time'] = self.default_time
+ # print( type(temp_break['break_time']))
+ self.pipe_break = self.pipe_break.append(
+ pd.Series(
+ data=[temp_break],
+ index=[int(temp_break['break_time'] * 3600)],
+ )
+ )
else:
logger.warning(sline)
- logger.warning('No recogniziable command in damage file, line' + repr(line_cnt) + '\n')
+ logger.warning(
+ 'No recognizable command in damage file, line ' # noqa: G003
+ + repr(line_cnt)
+ + '\n'
+ )
file.close()
-
-
- def applyNodalDamage(self, WaterNetwork, current_time):
- """
- Apply Nodal Damage
+
+ def applyNodalDamage(self, WaterNetwork, current_time): # noqa: C901, N802, N803
+ """Apply Nodal Damage
Parameters
----------
@@ -250,489 +313,720 @@ def applyNodalDamage(self, WaterNetwork, current_time):
-------
None.
- """
-
+ """ # noqa: D400
if self.node_damage.empty:
- print("no node damage at all")
+ print('no node damage at all') # noqa: T201
return
-
+
curren_time_node_damage = self.node_damage[current_time]
-
- if type(curren_time_node_damage) == dict:
- curren_time_node_damage = pd.Series([curren_time_node_damage], index=[current_time])
- elif type(curren_time_node_damage) == pd.Series:
+
+ if type(curren_time_node_damage) == dict: # noqa: E721
+ curren_time_node_damage = pd.Series(
+ [curren_time_node_damage], index=[current_time]
+ )
+ elif type(curren_time_node_damage) == pd.Series: # noqa: E721
if curren_time_node_damage.empty:
- print("No node damage at time " +str(current_time))
+ print('No node damage at time ' + str(current_time)) # noqa: T201
return
else:
- raise ValueError("Node damage has a unknown type: " + str(type(curren_time_node_damage) ) + " at time: " + str(current_time) )
-
- #self._nodal_damage_method = self._registry.settings['damage_node_model']
+ raise ValueError( # noqa: DOC501
+ 'Node damage has an unknown type: '
+ + str(type(curren_time_node_damage))
+ + ' at time: '
+ + str(current_time)
+ )
+
+ # self._nodal_damage_method = self._registry.settings['damage_node_model']
method = self._registry.settings['damage_node_model']
if method == 'Predefined_demand':
- for ind, val in curren_time_node_damage.items():
+ for ind, val in curren_time_node_damage.items(): # noqa: B007, PERF102
node_name = val['node_name']
- pre_EQ_Demand = val['node_Pre_EQ_Demand']
- post_EQ_Demand = val['node_Post_EQ_Demand']
-
-
- #if node_name not in WaterNetwork.node_name_list and icheck==True:
- #raise ValueError('Node in damage list is not in water network model: '+repr(node_name))
- #elif icheck==False:
- #continue
- node_cur_dem = WaterNetwork.get_node(node_name).demand_timeseries_list._list[0].base_value
- #print(str(pre_EQ_Demand) + ' ' + str(node_cur_dem))
- #print(node_name)
- if abs(pre_EQ_Demand - node_cur_dem) > 0.001:
- raise
-
- ratio = post_EQ_Demand/pre_EQ_Demand
-
- WaterNetwork.get_node(node_name).demand_timeseries_list._list[0].base_value = node_cur_dem*ratio
-
- self._registry.addNodalDemandChange(val['node_name'], node_cur_dem, post_EQ_Demand)
+ pre_EQ_Demand = val['node_Pre_EQ_Demand'] # noqa: N806
+ post_EQ_Demand = val['node_Post_EQ_Demand'] # noqa: N806
+
+ # if node_name not in WaterNetwork.node_name_list and icheck==True:
+ # raise ValueError('Node in damage list is not in water network model: '+repr(node_name))
+ # elif icheck==False:
+ # continue
+ node_cur_dem = (
+ WaterNetwork.get_node(node_name) # noqa: SLF001
+ .demand_timeseries_list._list[0]
+ .base_value
+ )
+ # print(str(pre_EQ_Demand) + ' ' + str(node_cur_dem))
+ # print(node_name)
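+ # Sanity check: the pre-earthquake demand in the damage data must match
+ # the base demand already in the hydraulic model (within 0.001).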
+ if abs(pre_EQ_Demand - node_cur_dem) > 0.001: # noqa: PLR2004
+ raise # noqa: PLE0704
+
+ ratio = post_EQ_Demand / pre_EQ_Demand
+
+ WaterNetwork.get_node(node_name).demand_timeseries_list._list[ # noqa: SLF001
+ 0
+ ].base_value = node_cur_dem * ratio
+
+ self._registry.addNodalDemandChange(
+ val['node_name'], node_cur_dem, post_EQ_Demand
+ )
demand_damage = self.estimateNodalDamage()
self._registry.addNodalDamage(demand_damage)
-
- elif method == 'equal_diameter_emitter' or method == 'equal_diameter_reservoir':
- temp1=[]
- temp2=[]
+
+ elif (
+ method == 'equal_diameter_emitter' # noqa: PLR1714
+ or method == 'equal_diameter_reservoir'
+ ):
+ temp1 = []
+ temp2 = []
temp_new_explicit_leak_data = []
- for ind, val in curren_time_node_damage.items():
+ for ind, val in curren_time_node_damage.items(): # noqa: B007, PERF102
node_name = val['node_name']
number_of_damages = val['Number_of_damages']
- pipe_length = val['node_Pipe_Length']*1000
-
+ pipe_length = val['node_Pipe_Length'] * 1000
+
if node_name not in WaterNetwork.node_name_list:
- raise ValueError('Node name of damages not in node name list: '+ node_name)
-
- new_node_name, new_pipe_name, mp, q = self.addExplicitLeakWithReservoir(node_name, number_of_damages, pipe_length, WaterNetwork)
- #self._registry.nodal_damage_nodes.add(new_node_name)
- #post_EQ_Demand = val['node_Post_EQ_Demand']
- #pre_EQ_Demand = val['node_Pre_EQ_Demand']
- #self._registry.addNodalDemandChange(val['node_name'], pre_EQ_Demand, post_EQ_Demand)
+ raise ValueError( # noqa: DOC501
+ 'Node name of damages not in node name list: ' + node_name
+ )
+
+ new_node_name, new_pipe_name, mp, q = (
+ self.addExplicitLeakWithReservoir(
+ node_name, number_of_damages, pipe_length, WaterNetwork
+ )
+ )
+ # self._registry.nodal_damage_nodes.add(new_node_name)
+ # post_EQ_Demand = val['node_Post_EQ_Demand']
+ # pre_EQ_Demand = val['node_Pre_EQ_Demand']
+ # self._registry.addNodalDemandChange(val['node_name'], pre_EQ_Demand, post_EQ_Demand)
temp1.append(node_name)
temp2.append(val['Number_of_damages'])
-
- self._registry.active_nodal_damages.update({new_node_name:node_name})
- temp_data = {'mean_pressure':mp, 'new_pipe_name': new_pipe_name, 'new_node_name':new_node_name, 'pipe_length':pipe_length, 'orginal_flow':q}
+
+ self._registry.active_nodal_damages.update(
+ {new_node_name: node_name}
+ )
+ temp_data = {
+ 'mean_pressure': mp,
+ 'new_pipe_name': new_pipe_name,
+ 'new_node_name': new_node_name,
+ 'pipe_length': pipe_length,
+ 'orginal_flow': q,
+ }
temp_new_explicit_leak_data.append(temp_data)
-
- demand_damage=pd.Series(data=temp2, index=temp1)
+
+ demand_damage = pd.Series(data=temp2, index=temp1)
new_pipe_name_list = dict(zip(temp1, temp_new_explicit_leak_data))
self._registry.addNodalDamage(demand_damage, new_pipe_name_list)
elif method == 'SDD':
- for ind, val in curren_time_node_damage.items():
+ for ind, val in curren_time_node_damage.items(): # noqa: B007, PERF102
node_name = val['node_name']
number_of_damages = val['Number_of_damages']
- pipe_length = val['node_Pipe_Length']*1000
+ pipe_length = val['node_Pipe_Length'] * 1000
if node_name not in WaterNetwork.node_name_list:
- raise ValueError('Node name of damages not in node name list: '+ node_name)
+ raise ValueError( # noqa: DOC501
+ 'Node name of damages not in node name list: ' + node_name
+ )
maximum_node_demand = 10
- pipe_equal_length = pipe_length/10
+ pipe_equal_length = pipe_length / 10
_hl = 8
- _C = 100
- before_damage_pipe_length = pipe_equal_length/2
- over_designed_diameter = 10.67*(maximum_node_demand/_C)**1.852*(pipe_length/_hl)
- over_designed_diameter = over_designed_diameter**(1/4.8704)
-
- equavalant_damaged_pipe_diameter = self.scenario_set['equavalant_damage_diameter']
- equavalant_pipe_diameter = np.sqrt(number_of_damages)*equavalant_damaged_pipe_diameter
-
- node = WaterNetwork.get_node(node_name)
+ _C = 100 # noqa: N806
+ before_damage_pipe_length = pipe_equal_length / 2
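+ # Hazen-Williams sizing (SI form): hl = 10.67 * L * (Q / C)**1.852 / D**4.8704,
+ # solved for D so that maximum_node_demand passes with head loss _hl.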
+ over_designed_diameter = (
+ 10.67 * (maximum_node_demand / _C) ** 1.852 * (pipe_length / _hl)
+ )
+ over_designed_diameter = over_designed_diameter ** (1 / 4.8704) # noqa: PLR6104
+
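+ # Lump the individual damage openings into one opening with the same
+ # total area: d_eq = sqrt(N) * d_damage.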
+ equavalant_damaged_pipe_diameter = self.scenario_set[
+ 'equavalant_damage_diameter'
+ ]
+ equavalant_pipe_diameter = (
+ np.sqrt(number_of_damages) * equavalant_damaged_pipe_diameter
+ )
+
+ node = WaterNetwork.get_node(node_name)
new_elavation = node.elevation
-
- #Midlle junction definition
- new_coord = (node.coordinates[0]+10,node.coordinates[1]+10)
- middle_node_name = 'lk_mdl_'+node_name
- WaterNetwork.add_junction(middle_node_name, elevation = new_elavation, coordinates=new_coord)
-
- #Leak reservoir definition
- new_coord = (new_coord[0]+10,new_coord[1]+10)
- new_resevoir_name = 'lk_aux_'+node_name
- WaterNetwork.add_reservoir(new_resevoir_name, base_head=new_elavation, coordinates=new_coord)
-
- #Node-to-middle-junction pipe definition
- OVD_pipe_name = 'lk_ODP_'+node_name
- WaterNetwork.add_pipe(OVD_pipe_name, node_name, middle_node_name, length=before_damage_pipe_length, diameter= over_designed_diameter, roughness = _C)
-
- #Middle_node_to_reservoir pipe definition
- new_pipe_name = 'lk_pipe_'+node_name
- WaterNetwork.add_pipe(new_pipe_name, middle_node_name, new_resevoir_name, length=1, diameter= equavalant_pipe_diameter, roughness = 1000000)
-
- self._registry.explicit_nodal_damages[node_name] = {'ODD',over_designed_diameter, }
+
+ # Middle junction definition
+ new_coord = (node.coordinates[0] + 10, node.coordinates[1] + 10)
+ middle_node_name = 'lk_mdl_' + node_name
+ WaterNetwork.add_junction(
+ middle_node_name, elevation=new_elavation, coordinates=new_coord
+ )
+
+ # Leak reservoir definition
+ new_coord = (new_coord[0] + 10, new_coord[1] + 10)
+ new_resevoir_name = 'lk_aux_' + node_name
+ WaterNetwork.add_reservoir(
+ new_resevoir_name, base_head=new_elavation, coordinates=new_coord
+ )
+
+ # Node-to-middle-junction pipe definition
+ OVD_pipe_name = 'lk_ODP_' + node_name # noqa: N806
+ WaterNetwork.add_pipe(
+ OVD_pipe_name,
+ node_name,
+ middle_node_name,
+ length=before_damage_pipe_length,
+ diameter=over_designed_diameter,
+ roughness=_C,
+ )
+
+ # Middle_node_to_reservoir pipe definition
+ new_pipe_name = 'lk_pipe_' + node_name
+ WaterNetwork.add_pipe(
+ new_pipe_name,
+ middle_node_name,
+ new_resevoir_name,
+ length=1,
+ diameter=equavalant_pipe_diameter,
+ roughness=1000000,
+ )
+
+ self._registry.explicit_nodal_damages[node_name] = {
+ 'ODD': over_designed_diameter,
+ }
else:
- raise ValueError('Unknown nodal damage method')
-
- #return WaterNetwork
-
- def getNd(self, mp, number_of_damages, sum_of_length):
- rr = number_of_damages/sum_of_length*1000
-
- node_damage_parametrs = self._registry.settings['node_damage_model']
- #{'a':0.0036, 'aa':1, 'b':0, 'bb':0, 'c':-0.877, 'cc':1, 'd':0, 'dd':0, 'e':0.0248, 'ee1':1, 'ee2':1, 'f':0, 'ff1':0, 'ff2':0, "damage_node_model": "equal_diameter_emitter"}
- x = node_damage_parametrs['x' ]
- a = node_damage_parametrs['a' ]
- aa = node_damage_parametrs['aa' ]
- b = node_damage_parametrs['b' ]
- bb = node_damage_parametrs['bb' ]
- c = node_damage_parametrs['c' ]
- cc = node_damage_parametrs['cc' ]
- d = node_damage_parametrs['d' ]
- dd = node_damage_parametrs['dd' ]
- e = node_damage_parametrs['e' ]
- ee1 = node_damage_parametrs['ee1']
- ee2 = node_damage_parametrs['ee2']
- f = node_damage_parametrs['f' ]
- ff1 = node_damage_parametrs['ff1']
- ff2 = node_damage_parametrs['ff2']
-
- #nd = 0.0036*mp + 0.9012 + (0.0248*mp-0.877)*rr
- nd = a*mp**aa + b*mp**bb + c*rr**cc + d*rr**dd + e*(mp**ee1)*(rr**ee2) + f*(mp**ff1)*(rr**ff2) + x
- nd = 0.0036*float(mp) + 0.9012 + (0.0248*float(mp) - 0.877) * float(rr)
- return nd
-
- def getNd2(self, mp, number_of_damages, sum_of_length):
- rr = number_of_damages/sum_of_length*1000
-
- node_damage_parametrs = self._registry.settings['node_damage_model']
- #{'a':0.0036, 'aa':1, 'b':0, 'bb':0, 'c':-0.877, 'cc':1, 'd':0, 'dd':0, 'e':0.0248, 'ee1':1, 'ee2':1, 'f':0, 'ff1':0, 'ff2':0, "damage_node_model": "equal_diameter_emitter"}
- x = node_damage_parametrs['x' ]
- a = node_damage_parametrs['a' ]
- aa = node_damage_parametrs['aa' ]
- b = node_damage_parametrs['b' ]
- bb = node_damage_parametrs['bb' ]
- c = node_damage_parametrs['c' ]
- cc = node_damage_parametrs['cc' ]
- d = node_damage_parametrs['d' ]
- dd = node_damage_parametrs['dd' ]
- e = node_damage_parametrs['e' ]
- ee1 = node_damage_parametrs['ee1']
- ee2 = node_damage_parametrs['ee2']
- f = node_damage_parametrs['f' ]
- ff1 = node_damage_parametrs['ff1']
- ff2 = node_damage_parametrs['ff2']
-
- nd = a*mp**aa + b*mp**bb + c*rr**cc + d*rr**dd + e*(mp**ee1)*(rr**ee2) + f*(mp**ff1)*(rr**ff2) + x
-
- return nd
-
- def getEmitterCdAndElevation(self, real_node_name, wn, number_of_damages, sum_of_length, mp, q):
- mp = mp*1.4223 # this is because our CURRENT relationship is base on psi
- rr = number_of_damages/sum_of_length*1000
+ raise ValueError('Unknown nodal damage method') # noqa: DOC501, EM101, TRY003
+
+ # return WaterNetwork
+
+ def getNd(self, mp, number_of_damages, sum_of_length): # noqa: N802, D102
+ rr = number_of_damages / sum_of_length * 1000
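+ # rr: repair rate in damages per kilometre (assuming sum_of_length is in metres).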
+
+ node_damage_parametrs = self._registry.settings['node_damage_model']
+ # {'a':0.0036, 'aa':1, 'b':0, 'bb':0, 'c':-0.877, 'cc':1, 'd':0, 'dd':0, 'e':0.0248, 'ee1':1, 'ee2':1, 'f':0, 'ff1':0, 'ff2':0, "damage_node_model": "equal_diameter_emitter"}
+ x = node_damage_parametrs['x']
+ a = node_damage_parametrs['a']
+ aa = node_damage_parametrs['aa']
+ b = node_damage_parametrs['b']
+ bb = node_damage_parametrs['bb']
+ c = node_damage_parametrs['c']
+ cc = node_damage_parametrs['cc']
+ d = node_damage_parametrs['d']
+ dd = node_damage_parametrs['dd']
+ e = node_damage_parametrs['e']
+ ee1 = node_damage_parametrs['ee1']
+ ee2 = node_damage_parametrs['ee2']
+ f = node_damage_parametrs['f']
+ ff1 = node_damage_parametrs['ff1']
+ ff2 = node_damage_parametrs['ff2']
+
+ # nd = 0.0036*mp + 0.9012 + (0.0248*mp-0.877)*rr
+ nd = (
+ a * mp**aa
+ + b * mp**bb
+ + c * rr**cc
+ + d * rr**dd
+ + e * (mp**ee1) * (rr**ee2)
+ + f * (mp**ff1) * (rr**ff2)
+ + x
+ )
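+ # Note: the hard-coded fit on the next line overrides the parameterized
+ # value just computed; getNd2 returns the parameterized form instead.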
+ nd = 0.0036 * float(mp) + 0.9012 + (0.0248 * float(mp) - 0.877) * float(rr)
+ return nd # noqa: RET504
+
+ def getNd2(self, mp, number_of_damages, sum_of_length): # noqa: N802, D102
+ rr = number_of_damages / sum_of_length * 1000
+
+ node_damage_parametrs = self._registry.settings['node_damage_model']
+ # {'a':0.0036, 'aa':1, 'b':0, 'bb':0, 'c':-0.877, 'cc':1, 'd':0, 'dd':0, 'e':0.0248, 'ee1':1, 'ee2':1, 'f':0, 'ff1':0, 'ff2':0, "damage_node_model": "equal_diameter_emitter"}
+ x = node_damage_parametrs['x']
+ a = node_damage_parametrs['a']
+ aa = node_damage_parametrs['aa']
+ b = node_damage_parametrs['b']
+ bb = node_damage_parametrs['bb']
+ c = node_damage_parametrs['c']
+ cc = node_damage_parametrs['cc']
+ d = node_damage_parametrs['d']
+ dd = node_damage_parametrs['dd']
+ e = node_damage_parametrs['e']
+ ee1 = node_damage_parametrs['ee1']
+ ee2 = node_damage_parametrs['ee2']
+ f = node_damage_parametrs['f']
+ ff1 = node_damage_parametrs['ff1']
+ ff2 = node_damage_parametrs['ff2']
+
+ nd = (
+ a * mp**aa
+ + b * mp**bb
+ + c * rr**cc
+ + d * rr**dd
+ + e * (mp**ee1) * (rr**ee2)
+ + f * (mp**ff1) * (rr**ff2)
+ + x
+ )
+
+ return nd # noqa: RET504
+
+ def getEmitterCdAndElevation( # noqa: N802, D102
+ self,
+ real_node_name,
+ wn,
+ number_of_damages,
+ sum_of_length,
+ mp,
+ q,
+ ):
+ mp = ( # noqa: PLR6104
+ mp * 1.4223
+ ) # the damage relationship below is based on psi (1 m of water head = 1.4223 psi)
+ rr = number_of_damages / sum_of_length * 1000 # noqa: F841
nd = self.getNd(mp, number_of_damages, sum_of_length)
- #equavalant_pipe_diameter = ( ((nd-1)*q)**2 /(0.125*9.81*3.14**2 * mp/1.4223) )**(1/4) * 1
-
- if real_node_name == "CC1381":
- print(nd)
+ # equavalant_pipe_diameter = ( ((nd-1)*q)**2 /(0.125*9.81*3.14**2 * mp/1.4223) )**(1/4) * 1
+
+ if real_node_name == 'CC1381':
+ print(nd) # noqa: T201
nd2 = self.getNd2(mp, number_of_damages, sum_of_length)
- print(nd2)
-
-
- node = wn.get_node(real_node_name)
- #new_elavation = node.elevation
-
- nd = nd -1
- #nd0 = 0.0036*0 + 0.9012 + (0.0248*0-0.877)*rr
+ print(nd2) # noqa: T201
+
+ node = wn.get_node(real_node_name) # noqa: F841
+ # new_elavation = node.elevation
+
+ nd = nd - 1 # noqa: PLR6104
+ # nd0 = 0.0036*0 + 0.9012 + (0.0248*0-0.877)*rr
nd0 = self.getNd(0, number_of_damages, sum_of_length)
- if real_node_name == "CC1381":
- print(nd0)
+ if real_node_name == 'CC1381':
+ print(nd0) # noqa: T201
nd02 = self.getNd2(0, number_of_damages, sum_of_length)
- print(nd02)
- nd0 = nd0 -1
- alpha = (nd - nd0)/(mp)
+ print(nd02) # noqa: T201
+ nd0 = nd0 - 1 # noqa: PLR6104
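+ # Linearize the damage model: alpha is the slope of the excess-demand
+ # fraction (nd - 1) with respect to pressure (psi), mp0 is the pressure
+ # intercept where the excess demand vanishes, and cd = alpha * q becomes
+ # the emitter coefficient used for the leak node.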
+ alpha = (nd - nd0) / (mp)
mp0 = -1 * (nd0) / alpha
- mp0 = mp0 / 1.4223
+ mp0 = mp0 / 1.4223 # noqa: PLR6104
cd = alpha * q
return cd, mp0
-
- def addExplicitLeakWithReservoir(self, node_name, number_of_damages, sum_of_length, wn):
+
+ def addExplicitLeakWithReservoir( # noqa: N802, D102
+ self,
+ node_name,
+ number_of_damages,
+ sum_of_length,
+ wn,
+ ):
method = self._registry.settings['damage_node_model']
- if method == 'equal_diameter_emitter' or method == 'equal_diameter_reservoir':
- node = wn.get_node(node_name)
+ if (
+ method == 'equal_diameter_emitter' # noqa: PLR1714
+ or method == 'equal_diameter_reservoir'
+ ):
+ node = wn.get_node(node_name)
new_elavation = node.elevation
- new_coord = (node.coordinates[0]+10,node.coordinates[1]+10)
-
+ new_coord = (node.coordinates[0] + 10, node.coordinates[1] + 10)
+
pressure = self._registry.result.node['pressure'][node_name]
mp = pressure.mean()
-
- if mp < 0 :
+
+ if mp < 0:
mp = 1
- node = wn.get_node(node_name)
+ node = wn.get_node(node_name)
new_elavation = node.elevation
- new_coord = (node.coordinates[0]+10,node.coordinates[1]+10)
-
- new_node_name = 'lk_aux_'+node_name
- new_pipe_name = 'lk_pipe_'+node_name
- new_C = 100000000000
-
- equavalant_pipe_diameter =1
+ new_coord = (node.coordinates[0] + 10, node.coordinates[1] + 10)
+
+ new_node_name = 'lk_aux_' + node_name
+ new_pipe_name = 'lk_pipe_' + node_name
+ new_C = 100000000000 # noqa: N806
+
+ equavalant_pipe_diameter = 1
q = node.demand_timeseries_list[0].base_value
if method == 'equal_diameter_emitter':
- cd, mp0 = self.getEmitterCdAndElevation(node_name, wn, number_of_damages, sum_of_length, mp, q)
- wn.add_junction(new_node_name, elevation = new_elavation + mp0, coordinates = new_coord)
+ cd, mp0 = self.getEmitterCdAndElevation(
+ node_name, wn, number_of_damages, sum_of_length, mp, q
+ )
+ wn.add_junction(
+ new_node_name,
+ elevation=new_elavation + mp0,
+ coordinates=new_coord,
+ )
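+ # The auxiliary junction sits mp0 above the damaged node and carries an
+ # emitter with exponent 1, so the leak flow is roughly cd times the node
+ # pressure in excess of mp0; the check valve prevents backflow.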
nn = wn.get_node(new_node_name)
- nn._emitter_coefficient = cd
- wn.options.hydraulic.emitter_exponent=1;
- wn.add_pipe(new_pipe_name, node_name, new_node_name, diameter=equavalant_pipe_diameter, length=1, roughness=new_C, check_valve=True)
- #wn.add_reservoir(new_node_name+'_res', base_head = new_elavation + 10000, coordinates = new_coord)
- #wn.add_pipe(new_pipe_name+'_res', node_name, new_node_name+'_res', diameter=1, length=1, roughness=new_C, check_valve_flag=True)
-
+ nn._emitter_coefficient = cd # noqa: SLF001
+ wn.options.hydraulic.emitter_exponent = 1
+ wn.add_pipe(
+ new_pipe_name,
+ node_name,
+ new_node_name,
+ diameter=equavalant_pipe_diameter,
+ length=1,
+ roughness=new_C,
+ check_valve=True,
+ )
+ # wn.add_reservoir(new_node_name+'_res', base_head = new_elavation + 10000, coordinates = new_coord)
+ # wn.add_pipe(new_pipe_name+'_res', node_name, new_node_name+'_res', diameter=1, length=1, roughness=new_C, check_valve_flag=True)
+
elif method == 'equal_diameter_reservoir':
nd = self.getNd(mp, number_of_damages, sum_of_length)
- equavalant_pipe_diameter = ( ((nd-1)*q)**2 /(0.125*9.81*3.14**2 * mp) )**(1/4) * 1
- wn.add_reservoir(new_node_name, base_head=new_elavation, coordinates=new_coord)
- wn.add_pipe(new_pipe_name, node_name, new_node_name, diameter=equavalant_pipe_diameter, length=1, roughness=new_C, check_valve=True, minor_loss=1)
- self._registry.addEquavalantDamageHistory(node_name, new_node_name, new_pipe_name, equavalant_pipe_diameter, number_of_damages)
-
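+ # Equivalent orifice: solving Q = (pi * D**2 / 4) * sqrt(2 * g * h) for D,
+ # with Q = (nd - 1) * q and h = mp, gives D = (Q**2 / (0.125 * g * pi**2 * h))**(1/4).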
+ equavalant_pipe_diameter = (
+ ((nd - 1) * q) ** 2 / (0.125 * 9.81 * math.pi**2 * mp)
+ ) ** (1 / 4) * 1
+ wn.add_reservoir(
+ new_node_name, base_head=new_elavation, coordinates=new_coord
+ )
+ wn.add_pipe(
+ new_pipe_name,
+ node_name,
+ new_node_name,
+ diameter=equavalant_pipe_diameter,
+ length=1,
+ roughness=new_C,
+ check_valve=True,
+ minor_loss=1,
+ )
+ self._registry.addEquavalantDamageHistory(
+ node_name,
+ new_node_name,
+ new_pipe_name,
+ equavalant_pipe_diameter,
+ number_of_damages,
+ )
+
elif method == 'SOD':
pass
- #first_pipe_length = sum_of_length/5*2
- #second_pipe_length = sum_of_length/5*3
- #new_coord_mid = (node.xxcoordinates[0]+10,node.coordinates[1]+10)
- #new_coord_dem = (node.coordinates[0]+20,node.coordinates[1]+20)
- #new_coord_res = (node.coordinates[0]+10,node.coordinates[1]+20)
-
-
+ # first_pipe_length = sum_of_length/5*2
+ # second_pipe_length = sum_of_length/5*3
+ # new_coord_mid = (node.xxcoordinates[0]+10,node.coordinates[1]+10)
+ # new_coord_dem = (node.coordinates[0]+20,node.coordinates[1]+20)
+ # new_coord_res = (node.coordinates[0]+10,node.coordinates[1]+20)
+
else:
- raise ValueError('Unkown Method')
+ raise ValueError('Unknown method') # noqa: EM101, TRY003
return new_node_name, new_pipe_name, mp, q
-
- def estimateNodalDamage(self):
- #res = pd.Series()
+
+ def estimateNodalDamage(self): # noqa: N802, D102
+ # res = pd.Series()
temp1 = []
temp2 = []
- for ind, val in self.node_damage.items():
+ for ind, val in self.node_damage.items(): # noqa: B007, PERF102
pipes_length = val['node_Pipe_Length']
- pipes_RR = val['node_RR']
+ pipes_RR = val['node_RR'] # noqa: N806
temp1.append(val['node_name'])
- temp2.append(int(np.round(pipes_RR*pipes_length)))
+ temp2.append(int(np.round(pipes_RR * pipes_length)))
res = pd.Series(data=temp2, index=temp1)
- return res
-
- def getPipeDamageListAt(self, time):
+ return res # noqa: RET504
+
+ def getPipeDamageListAt(self, time): # noqa: N802, D102
damaged_pipe_name_list = []
-
+
if self.pipe_all_damages.empty:
return damaged_pipe_name_list
-
+
current_time_pipe_damages = self.pipe_all_damages[time]
- if type(current_time_pipe_damages) == pd.core.series.Series:
+ if type(current_time_pipe_damages) == pd.core.series.Series: # noqa: E721
current_time_pipe_damages = current_time_pipe_damages.to_list()
else:
current_time_pipe_damages = [current_time_pipe_damages]
-
- damaged_pipe_name_list = [cur_damage["pipe_id"] for cur_damage in current_time_pipe_damages]
- damaged_pipe_name_list = list(set(damaged_pipe_name_list) )
- return damaged_pipe_name_list
-
- def applyPipeDamages(self, WaterNetwork, current_time):
+
+ damaged_pipe_name_list = [
+ cur_damage['pipe_id'] for cur_damage in current_time_pipe_damages
+ ]
+ damaged_pipe_name_list = list(set(damaged_pipe_name_list))
+ return damaged_pipe_name_list # noqa: RET504
+
+ def applyPipeDamages(self, WaterNetwork, current_time): # noqa: C901, N802, N803
"""Apply the damage that we have in damage object. the damage is either
predicted or read from somewhere.
-
+
Parameters
----------
WaterNetwork : wntrfr.network.model.WaterNetworkModel
- water network model ro be modified accroding to the damage
-
+ water network model to be modified according to the damage
+
registry : Registry object
-
+
current_time : int
current time
- """
+
+ """ # noqa: D205
last_pipe_id = None
same_pipe_damage_cnt = None
-
+
if self.pipe_all_damages.empty:
- print("No Pipe damages at all")
+ print('No Pipe damages at all') # noqa: T201
return
-
+
current_time_pipe_damages = self.pipe_all_damages[current_time]
- if type(current_time_pipe_damages) == dict:
- current_time_pipe_damages = pd.Series([current_time_pipe_damages], index=[current_time])
- elif type(current_time_pipe_damages) == pd.Series:
+ if type(current_time_pipe_damages) == dict: # noqa: E721
+ current_time_pipe_damages = pd.Series(
+ [current_time_pipe_damages], index=[current_time]
+ )
+ elif type(current_time_pipe_damages) == pd.Series: # noqa: E721
if current_time_pipe_damages.empty:
- print("No Pipe damages at time " + str(current_time))
+ print('No Pipe damages at time ' + str(current_time)) # noqa: T201
return
else:
- raise ValueError("Pipe damage has a unknown type: " + str(type(current_time_pipe_damages) ) + " at time: " + str(current_time) )
+ raise ValueError( # noqa: DOC501
+ 'Pipe damage has an unknown type: '
+ + str(type(current_time_pipe_damages))
+ + ' at time: '
+ + str(current_time)
+ )
all_damages = current_time_pipe_damages.to_list()
for cur_damage in all_damages:
- #print(cur_damage)
-
+ # print(cur_damage)
+
pipe_id = cur_damage['pipe_id']
- #same_pipe_damage_cnt = 1
- if pipe_id==last_pipe_id:
- same_pipe_damage_cnt+=1;
+ # same_pipe_damage_cnt = 1
+ if pipe_id == last_pipe_id:
+ same_pipe_damage_cnt += 1
else:
last_pipe_id = pipe_id
same_pipe_damage_cnt = 1
-
+
if cur_damage['type'] == 'leak':
- damage_time = current_time/3600 #cur_damage['damage_time']
- new_node_id = pipe_id+'_leak_'+repr(same_pipe_damage_cnt);
- new_pipe_id = pipe_id+'_leak_B_'+repr(same_pipe_damage_cnt)
- material = cur_damage['Material']
+ damage_time = current_time / 3600 # cur_damage['damage_time']
+ new_node_id = pipe_id + '_leak_' + repr(same_pipe_damage_cnt)
+ new_pipe_id = pipe_id + '_leak_B_' + repr(same_pipe_damage_cnt)
+ material = cur_damage['Material']
area = None
if 'leakD' in cur_damage:
diam = cur_damage['leakD']
- area = 3.14*(diam/2)**2
+ area = math.pi * (diam / 2) ** 2
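+ # Assumed unit conversion: the demands appear to be given in gpm and are
+ # converted to m^3/s (1 gpm = 6.30901964e-5 m^3/s).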
else:
- #diam = 100*WaterNetwork.get_link(pipe_id).diameter
- #area= 0.6032*diam/10000
- #pipe_damage_factor = self.scenario_set['pipe_damage_diameter_factor']
+ # diam = 100*WaterNetwork.get_link(pipe_id).diameter
+ # area= 0.6032*diam/10000
+ # pipe_damage_factor = self.scenario_set['pipe_damage_diameter_factor']
diam_m = WaterNetwork.get_link(pipe_id).diameter
-
- #print(material)
+
+ # print(material)
if material in self._registry.settings['pipe_damage_model']:
- damage_parameters = self._registry.settings['pipe_damage_model'][material]
+ damage_parameters = self._registry.settings[
+ 'pipe_damage_model'
+ ][material]
else:
- damage_parameters = self._registry.settings['default_pipe_damage_model']
+ damage_parameters = self._registry.settings[
+ 'default_pipe_damage_model'
+ ]
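+ # Leak effective diameter dd is estimated from the pipe diameter with the
+ # material damage model dd = alpha*D**a + beta*D**b + gamma, inflated by
+ # 20%, and the leak area is the circular area of dd.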
alpha = damage_parameters['alpha']
- beta = damage_parameters['beta' ]
+ beta = damage_parameters['beta']
gamma = damage_parameters['gamma']
- a = damage_parameters['a' ]
- b = damage_parameters['b' ]
+ a = damage_parameters['a']
+ b = damage_parameters['b']
- dd = alpha*diam_m**a + beta*diam_m**b + gamma
- dd = dd * 1.2
+ dd = alpha * diam_m**a + beta * diam_m**b + gamma
+ dd = dd * 1.2 # noqa: PLR6104
- area = 3.14*dd**2/4
- last_ratio=1
+ area = math.pi * dd**2 / 4
+ last_ratio = 1
if pipe_id in self._pipe_last_ratio:
last_ratio = self._pipe_last_ratio.loc[pipe_id]
-
- ratio = cur_damage['damage_loc']/last_ratio
+
+ ratio = cur_damage['damage_loc'] / last_ratio
if ratio >= 1:
- raise ValueError('IN LEAK: ratio is bigger than or equal to 1 for pipe:'+repr(pipe_id)+' '+repr(ratio)+' '+repr(cur_damage['damage_loc'])+' '+repr(last_ratio))
+ raise ValueError( # noqa: DOC501
+ 'IN LEAK: ratio is bigger than or equal to 1 for pipe:'
+ + repr(pipe_id)
+ + ' '
+ + repr(ratio)
+ + ' '
+ + repr(cur_damage['damage_loc'])
+ + ' '
+ + repr(last_ratio)
+ )
self._pipe_last_ratio.loc[pipe_id] = ratio
-
+
number = 1
if 'number' in cur_damage:
number = cur_damage['number']
-
+
sub_type = 1
if 'sub_type' in cur_damage:
sub_type = cur_damage['sub_type']
-
- WaterNetwork = split_pipe(WaterNetwork, pipe_id, new_pipe_id, new_node_id, split_at_point=ratio, return_copy=False)
+
+ WaterNetwork = split_pipe( # noqa: N806
+ WaterNetwork,
+ pipe_id,
+ new_pipe_id,
+ new_node_id,
+ split_at_point=ratio,
+ return_copy=False,
+ )
leak_node = WaterNetwork.get_node(new_node_id)
- leak_node.add_leak(WaterNetwork, area=area, discharge_coeff=1, start_time=damage_time, end_time=self.end_time+1)
- self._registry.addPipeDamageToRegistry(new_node_id,{'number':number,'damage_type':'leak', 'damage_subtype':sub_type , 'pipe_A':pipe_id, 'pipe_B':new_pipe_id, 'orginal_pipe':pipe_id})
- #self._registry.addPipeDamageToDamageRestorationData(pipe_id, 'leak', damage_time)
-
+ leak_node.add_leak(
+ WaterNetwork,
+ area=area,
+ discharge_coeff=1,
+ start_time=damage_time,
+ end_time=self.end_time + 1,
+ )
+ self._registry.addPipeDamageToRegistry(
+ new_node_id,
+ {
+ 'number': number,
+ 'damage_type': 'leak',
+ 'damage_subtype': sub_type,
+ 'pipe_A': pipe_id,
+ 'pipe_B': new_pipe_id,
+ 'orginal_pipe': pipe_id,
+ },
+ )
+ # self._registry.addPipeDamageToDamageRestorationData(pipe_id, 'leak', damage_time)
+
elif cur_damage['type'] == 'break':
last_ratio = 1
if pipe_id in self._pipe_last_ratio:
last_ratio = self._pipe_last_ratio.loc[pipe_id]
-
- ratio = cur_damage['damage_loc']/last_ratio
+
+ ratio = cur_damage['damage_loc'] / last_ratio
if ratio >= 1:
- raise ValueError('IN BREAK: ratio is bigger than or equal to 1 for pipe:'+repr(pipe_id)+' '+repr(ratio)+' '+repr(cur_damage['damage_loc'])+' '+repr(last_ratio))
-
+ raise ValueError( # noqa: DOC501
+ 'IN BREAK: ratio is bigger than or equal to 1 for pipe:'
+ + repr(pipe_id)
+ + ' '
+ + repr(ratio)
+ + ' '
+ + repr(cur_damage['damage_loc'])
+ + ' '
+ + repr(last_ratio)
+ )
+
self._pipe_last_ratio.loc[pipe_id] = ratio
-
- number=1
+
+ number = 1
if 'number' in cur_damage:
number = cur_damage['number']
-
- damage_time = current_time/3600
- logger.debug("trying to break: " + cur_damage['pipe_id'] + repr(damage_time))
- #Naming new nodes and new pipe
- new_node_id_for_old_pipe = pipe_id + '_breakA_' + repr(same_pipe_damage_cnt)
- new_node_id_for_new_pipe = pipe_id + '_breakB_' + repr(same_pipe_damage_cnt)
+
+ damage_time = current_time / 3600
+ logger.debug(
+ 'trying to break: ' + cur_damage['pipe_id'] + repr(damage_time) # noqa: G003
+ )
+ # Naming new nodes and new pipe
+ new_node_id_for_old_pipe = (
+ pipe_id + '_breakA_' + repr(same_pipe_damage_cnt)
+ )
+ new_node_id_for_new_pipe = (
+ pipe_id + '_breakB_' + repr(same_pipe_damage_cnt)
+ )
new_pipe_id = pipe_id + '_Break_' + repr(same_pipe_damage_cnt)
new_node_id = new_node_id_for_old_pipe
- #breaking the node
- WaterNetwork = break_pipe(WaterNetwork, pipe_id, new_pipe_id, new_node_id_for_old_pipe, new_node_id_for_new_pipe, split_at_point=ratio, return_copy=False)
-
- diam=WaterNetwork.get_link(pipe_id).diameter
- area=(diam**2)*3.14/4
- break_node_for_old_pipe = WaterNetwork.get_node(new_node_id_for_old_pipe)
- break_node_for_old_pipe.add_leak(WaterNetwork, area=area, discharge_coeff=1, start_time=float(damage_time), end_time=self.end_time+0.1)
- break_node_for_new_pipe = WaterNetwork.get_node(new_node_id_for_new_pipe)
- break_node_for_new_pipe.add_leak(WaterNetwork, area=area, start_time=float(damage_time), end_time=self.end_time+0.1)
-
-
- self._registry.addPipeDamageToRegistry(new_node_id_for_old_pipe, {'number':number, 'damage_type':'break', 'pipe_A':pipe_id, 'pipe_B':new_pipe_id, 'orginal_pipe':pipe_id, 'node_A':new_node_id_for_old_pipe, 'node_B':new_node_id_for_new_pipe})
- #self._registry.addPipeDamageToDamageRestorationData(pipe_id, 'break', damage_time)
+ # breaking the node
+ WaterNetwork = break_pipe( # noqa: N806
+ WaterNetwork,
+ pipe_id,
+ new_pipe_id,
+ new_node_id_for_old_pipe,
+ new_node_id_for_new_pipe,
+ split_at_point=ratio,
+ return_copy=False,
+ )
+
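+ # A break is modeled as two leaks, one on each side of the split, each
+ # with the full cross-sectional area of the broken pipe.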
+ diam = WaterNetwork.get_link(pipe_id).diameter
+ area = (diam**2) * math.pi / 4
+ break_node_for_old_pipe = WaterNetwork.get_node(
+ new_node_id_for_old_pipe
+ )
+ break_node_for_old_pipe.add_leak(
+ WaterNetwork,
+ area=area,
+ discharge_coeff=1,
+ start_time=float(damage_time),
+ end_time=self.end_time + 0.1,
+ )
+ break_node_for_new_pipe = WaterNetwork.get_node(
+ new_node_id_for_new_pipe
+ )
+ break_node_for_new_pipe.add_leak(
+ WaterNetwork,
+ area=area,
+ start_time=float(damage_time),
+ end_time=self.end_time + 0.1,
+ )
+
+ self._registry.addPipeDamageToRegistry(
+ new_node_id_for_old_pipe,
+ {
+ 'number': number,
+ 'damage_type': 'break',
+ 'pipe_A': pipe_id,
+ 'pipe_B': new_pipe_id,
+ 'orginal_pipe': pipe_id,
+ 'node_A': new_node_id_for_old_pipe,
+ 'node_B': new_node_id_for_new_pipe,
+ },
+ )
+ # self._registry.addPipeDamageToDamageRestorationData(pipe_id, 'break', damage_time)
else:
- raise ValueError('undefined damage type: '+repr(cur_damage['type'])+". Accpetale type of famages are either 'creack' or 'break'.")
- self._registry.addRestorationDataOnPipe(new_node_id, damage_time, cur_damage['type'])
- #return WaterNetwork
+ raise ValueError( # noqa: DOC501
+ 'undefined damage type: '
+ + repr(cur_damage['type'])
+ + ". Acceptable damage types are either 'leak' or 'break'."
+ )
+ self._registry.addRestorationDataOnPipe(
+ new_node_id, damage_time, cur_damage['type']
+ )
+ # return WaterNetwork
- def applyTankDamages(self, WaterNetwork, current_time):
+ def applyTankDamages(self, WaterNetwork, current_time): # noqa: N802, N803, D102
if self.tank_damage.empty:
- print('No Tank Damage at all')
+ print('No Tank Damage at all') # noqa: T201
return
-
+
current_time_tank_damage = self.tank_damage[current_time]
- if type(current_time_tank_damage) != str:
+ if type(current_time_tank_damage) != str: # noqa: E721
if current_time_tank_damage.empty:
- print('No Tank Damage at time '+str(current_time))
+ print('No Tank Damage at time ' + str(current_time)) # noqa: T201
return
else:
- current_time_tank_damage = pd.Series([current_time_tank_damage], index=[current_time])
- #print(current_time_tank_damage)
- for ind, value in current_time_tank_damage.items():
-
- #if value not in WaterNetwork.tank_name_list:
- #continue #contibue if there is not a tank with such damage
- #connected_link_list = []
- link_name_list_connected_to_node = WaterNetwork.get_links_for_node(value) # must be here
- #for link_name in link_name_list_connected_to_node:
-
- #link = WaterNetwork.get_link(link_name)
- #if value == link.start_node.name:
- #connected_link_list.append((0, link_name))
- #elif value == link.end_node.name:
- #connected_link_list.append((1, link_name) )
-
+ current_time_tank_damage = pd.Series(
+ [current_time_tank_damage], index=[current_time]
+ )
+ # print(current_time_tank_damage)
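+ # Each damaged tank is isolated: a new junction is added next to the tank,
+ # every link that was attached to the tank is re-attached to that junction,
+ # and the tank stays connected only through a pipe that starts CLOSED.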
+ for ind, value in current_time_tank_damage.items(): # noqa: B007, PERF102
+ # if value not in WaterNetwork.tank_name_list:
+ # continue #contibue if there is not a tank with such damage
+ # connected_link_list = []
+ link_name_list_connected_to_node = WaterNetwork.get_links_for_node(
+ value
+ ) # must be here
+ # for link_name in link_name_list_connected_to_node:
+
+ # link = WaterNetwork.get_link(link_name)
+ # if value == link.start_node.name:
+ # connected_link_list.append((0, link_name))
+ # elif value == link.end_node.name:
+ # connected_link_list.append((1, link_name) )
+
tank = WaterNetwork.get_node(value)
coord = tank.coordinates
- new_coord = (coord[0]+10,coord[1]+10)
+ new_coord = (coord[0] + 10, coord[1] + 10)
elevation = tank.elevation
- new_mid_node_name = value+'_tank_mid'
- WaterNetwork.add_junction(new_mid_node_name, elevation = elevation, coordinates = new_coord)
-
- new_pipe_name = value+'_tank_mid_pipe'
- #print(value + str(" -> " ) + new_pipe_name)
- WaterNetwork.add_pipe(new_pipe_name, value, new_mid_node_name, initial_status = 'CLOSED')
-
+ new_mid_node_name = value + '_tank_mid'
+ WaterNetwork.add_junction(
+ new_mid_node_name, elevation=elevation, coordinates=new_coord
+ )
+
+ new_pipe_name = value + '_tank_mid_pipe'
+ # print(value + str(" -> " ) + new_pipe_name)
+ WaterNetwork.add_pipe(
+ new_pipe_name, value, new_mid_node_name, initial_status='CLOSED'
+ )
+
new_node = WaterNetwork.get_node(new_mid_node_name)
-
+
for link_name in link_name_list_connected_to_node:
link = WaterNetwork.get_link(link_name)
-
+
if value == link.start_node.name:
link.start_node = new_node
elif value == link.end_node.name:
link.end_node = new_node
else:
- raise
-
- def applyPumpDamages(self, WaterNetwork, current_time):
- #print(type(self.damaged_pumps))
+ raise # noqa: PLE0704
+
+ def applyPumpDamages(self, WaterNetwork, current_time): # noqa: N802, N803, D102
+ # print(type(self.damaged_pumps))
if self.damaged_pumps.empty:
- print("No pump damage at all")
+ print('No pump damage at all') # noqa: T201
return
-
+
pump_damage_at_time = self.damaged_pumps[current_time]
- if type(pump_damage_at_time) != str:
+ if type(pump_damage_at_time) != str: # noqa: E721
if pump_damage_at_time.empty:
- print('No Pump Damage at time '+str(current_time))
+ print('No Pump Damage at time ' + str(current_time)) # noqa: T201
return
else:
- pump_damage_at_time = pd.Series([pump_damage_at_time], index=[current_time])
- for ind, values in pump_damage_at_time.items():
+ pump_damage_at_time = pd.Series(
+ [pump_damage_at_time], index=[current_time]
+ )
+ for ind, values in pump_damage_at_time.items(): # noqa: B007, PERF102
WaterNetwork.get_link(values).initial_status = LinkStatus(0)
-
-
+
def read_earthquake(self, earthquake_file_name):
- """
-
- Parameters
+ """Parameters
----------
earthquake_file_name : str
path to the text file that include earthquake definition file
@@ -740,122 +1034,145 @@ def read_earthquake(self, earthquake_file_name):
Raises
------
ValueError
- If teh file name is not provided, a valueError will be returned
+ If the file name is not provided, a ValueError will be raised
IOError
If the information inside the text file is not valid, then IOError
- will be retuned
+ will be raised
Returns
-------
None.
- """
- if type(earthquake_file_name)!=str:
- raise ValueError('string is wanted for earthqiake fie name')
-
- file = open(earthquake_file_name)
+ """ # noqa: D205, DOC502
+ if type(earthquake_file_name) != str: # noqa: E721
+ raise ValueError('string is wanted for earthquake file name') # noqa: EM101, TRY003
+
+ file = open(earthquake_file_name) # noqa: PLW1514, PTH123, SIM115
lines = file.readlines()
- ct=0
+ ct = 0
for line in lines:
- ct += 1
+ ct += 1 # noqa: SIM113
sline = line.split()
line_length = len(sline)
- if line_length!=5:
- raise IOError("there should be 5 valie in line " + repr(ct) + "\n M[SPACE]depth[SPACE]X coordinate[SPACE]Y coordinate{SPACE]Time")
- temp_EQ = EarthquakeScenario(float(sline[0]) , float(sline[1]) , float(sline[2]) , float(sline[3]) , float(sline[4]) )
- self._earthquake = self._earthquake.append(pd.Series(temp_EQ, index=[int(temp_EQ.time)]))
+ if line_length != 5: # noqa: PLR2004
+ raise OSError( # noqa: DOC501
+ 'there should be 5 values in line '
+ + repr(ct)
+ + '\n M[SPACE]depth[SPACE]X coordinate[SPACE]Y coordinate[SPACE]Time'
+ )
+ temp_EQ = EarthquakeScenario( # noqa: N806
+ float(sline[0]),
+ float(sline[1]),
+ float(sline[2]),
+ float(sline[3]),
+ float(sline[4]),
+ )
+ self._earthquake = self._earthquake.append(
+ pd.Series(temp_EQ, index=[int(temp_EQ.time)])
+ )
file.close()
self.sortEarthquakeListTimely()
-
- def sortEarthquakeListTimely(self):
- """
- This functions sorts the list of earthquakes in a timely manner
+
+ def sortEarthquakeListTimely(self): # noqa: N802
+ """This function sorts the list of earthquakes chronologically
Returns
-------
None.
- """
+ """ # noqa: D400, D401, D404
self._earthquake.sort_index()
- self.is_timely_sorted=True
-
- def predictDamage(self, wn, iClear= False):
- """
- This function predict the water network model damage based on probabilistic method.
+ self.is_timely_sorted = True
+
+ def predictDamage(self, wn, iClear=False): # noqa: FBT002, N802, N803
+ """This function predicts the water network model damage based on a probabilistic method.
+
Parameters
----------
wn : wntrfr.network.model.WaterNetworkModel
- Water Netwrok Model to be used to model the damages
+ Water Network Model to be used to model the damages
clear : TYPE, optional
Boolian value, determining if the leak and break list must be
- cleared before prediciting and adding. The default is False.
+ cleared before predicting and adding. The default is False.
Returns
-------
None.
- """
-
+ """ # noqa: D401, D404
if iClear:
- self.pipe_leak=pd.Series()
- self.pipe_break=pd.Series()
-
- for eq_in, eq in self._earthquake.items():
+ self.pipe_leak = pd.Series()
+ self.pipe_break = pd.Series()
+
+ for eq_in, eq in self._earthquake.items(): # noqa: B007, PERF102
wntr_eq = eq.getWNTREarthquakeObject()
- distance_to_pipes = wntr_eq.distance_to_epicenter(wn, element_type=wntrfr.network.Pipe)
+ distance_to_pipes = wntr_eq.distance_to_epicenter(
+ wn, element_type=wntrfr.network.Pipe
+ )
pga = wntr_eq.pga_attenuation_model(distance_to_pipes)
pgv = wntr_eq.pgv_attenuation_model(distance_to_pipes)
- repair_rate = wntr_eq.repair_rate_model(pgv)
+ repair_rate = wntr_eq.repair_rate_model(pgv) # noqa: F841
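+ # Hard-coded fragility curves (lognormal in PGA): damage state 1 = leak,
+ # state 2 = break; a damage state is then sampled for every pipe.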
fc = wntrfr.scenario.FragilityCurve()
- fc.add_state('leak' , 1 , {'Default': lognorm(0.5 , scale=0.2)})
- fc.add_state('break' , 2 , {'Default': lognorm(0.5, scale=0.5)})
+ fc.add_state('leak', 1, {'Default': lognorm(0.5, scale=0.2)})
+ fc.add_state('break', 2, {'Default': lognorm(0.5, scale=0.5)})
failure_probability = fc.cdf_probability(pga)
damage_state = fc.sample_damage_state(failure_probability)
-
- for pipe_ID , ds in damage_state.items():
- #if wn.get_link(pipe_ID).status==0:
- #continue
- if ds==None:
+
+ for pipe_ID, ds in damage_state.items(): # noqa: N806
+ # if wn.get_link(pipe_ID).status==0:
+ # continue
+ if ds == None: # noqa: E711
continue
- if ds.lower()=='leak':
- temp={'pipe_id':pipe_ID , 'leak_loc':0.5 , 'leak_type':1 , 'leak_time':eq.time/3600}
- self.pipe_leak=self.pipe_leak.append(pd.Series(data=[temp], index=[int(eq.time)]))
- if ds.lower()=='break':
- temp={'pipe_id':pipe_ID , 'break_loc':0.5 , 'break_time':eq.time/3600}
- self.pipe_break=self.pipe_break.append(pd.Series(data=[temp], index=[int(eq.time)]))
-
+ if ds.lower() == 'leak':
+ temp = {
+ 'pipe_id': pipe_ID,
+ 'leak_loc': 0.5,
+ 'leak_type': 1,
+ 'leak_time': eq.time / 3600,
+ }
+ self.pipe_leak = self.pipe_leak.append(
+ pd.Series(data=[temp], index=[int(eq.time)])
+ )
+ if ds.lower() == 'break':
+ temp = {
+ 'pipe_id': pipe_ID,
+ 'break_loc': 0.5,
+ 'break_time': eq.time / 3600,
+ }
+ self.pipe_break = self.pipe_break.append(
+ pd.Series(data=[temp], index=[int(eq.time)])
+ )
+
def get_damage_distinct_time(self):
- """
- get distinct time for all kind of damages
+ """Get distinct times for all kinds of damage
Returns
-------
damage_time_list : list
- Distict time for all kind of damages
+ Distinct times for all kinds of damage
- """
+ """ # noqa: D400
pipe_damage_unique_time = self.pipe_all_damages.index.unique().tolist()
node_damage_unique_time = self.node_damage.index.unique().tolist()
tank_damage_unique_time = self.tank_damage.index.unique().tolist()
pump_damage_unique_time = self.damaged_pumps.index.unique().tolist()
-
+
all_damages_time = []
all_damages_time.extend(pipe_damage_unique_time)
all_damages_time.extend(node_damage_unique_time)
all_damages_time.extend(tank_damage_unique_time)
all_damages_time.extend(pump_damage_unique_time)
-
- all_damages_time = list(set(all_damages_time ) )
+
+ all_damages_time = list(set(all_damages_time))
all_damages_time.sort()
-
- #damage_time_list = all_pipe_damage_time.unique().tolist()
- #damage_time_list.sort()
+
+ # damage_time_list = all_pipe_damage_time.unique().tolist()
+ # damage_time_list.sort()
return all_damages_time
-
+
def get_earthquake_distict_time(self):
- """
- checks if the earthquake time are in order. Then the it will get
- distict earthquake time sand return it
+ """Checks that the earthquake times are in order, then gets the
+ distinct earthquake times and returns them
Raises
------
@@ -867,20 +1184,16 @@ def get_earthquake_distict_time(self):
pandas.Series()
a list of distinct time of earthquake.
- """
- reg=[]
- if self.is_timely_sorted==False:
+ """ # noqa: D205, D400, D401, DOC502
+ reg = []
+ if self.is_timely_sorted == False: # noqa: E712
self.sortEarthquakeListTimely()
-
+
time_list = self._earthquake.index
- last_value=None
+ last_value = None
for time in iter(time_list):
- if last_value==None:
+ if last_value == None or last_value < time: # noqa: E711
reg.append(time)
- last_value=time
- elif last_value < time:
-_TANK_LABEL = '{:21s} {:>20s} {:>20s} {:>20s} {:>20s} {:>20s} {:>20s} {:20s} {:20s}\n'
+_TANK_LABEL = (
+ '{:21s} {:>20s} {:>20s} {:>20s} {:>20s} {:>20s} {:>20s} {:20s} {:20s}\n'
+)
_PIPE_ENTRY = ' {name:20s} {node1:20s} {node2:20s} {len:15.11g} {diam:15.11g} {rough:15.11g} {mloss:15.11g} {status:>20s} {com:>3s}\n'
_PIPE_LABEL = '{:21s} {:20s} {:20s} {:>20s} {:>20s} {:>20s} {:>20s} {:>20s}\n'
-_PUMP_ENTRY = ' {name:20s} {node1:20s} {node2:20s} {ptype:8s} {params:20s} {com:>3s}\n'
+_PUMP_ENTRY = (
+ ' {name:20s} {node1:20s} {node2:20s} {ptype:8s} {params:20s} {com:>3s}\n'
+)
_PUMP_LABEL = '{:21s} {:20s} {:20s} {:20s}\n'
_VALVE_ENTRY = ' {name:20s} {node1:20s} {node2:20s} {diam:15.11g} {vtype:4s} {set:15.11g} {mloss:15.11g} {com:>3s}\n'
@@ -76,6 +117,7 @@
_CURVE_ENTRY = ' {name:10s} {x:12f} {y:12f} {com:>3s}\n'
_CURVE_LABEL = '{:11s} {:12s} {:12s}\n'
+
def _split_line(line):
_vc = line.split(';', 1)
_cmnt = None
@@ -84,17 +126,16 @@ def _split_line(line):
pass
elif len(_vc) == 1:
_vals = _vc[0].split()
- elif _vc[0] == '':
+ elif _vc[0] == '': # noqa: PLC1901
_cmnt = _vc[1]
else:
_vals = _vc[0].split()
_cmnt = _vc[1]
return _vals, _cmnt
-def _is_number(s):
- """
- Checks if input is a number
+def _is_number(s):
+ """Checks if input is a number
Parameters
----------
@@ -104,19 +145,17 @@ def _is_number(s):
-------
bool
Input is a number
- """
+ """ # noqa: D400, D401
try:
float(s)
- return True
+ return True # noqa: TRY300
except ValueError:
return False
def _str_time_to_sec(s):
- """
- Converts EPANET time format to seconds.
-
+ """Converts EPANET time format to seconds.
Parameters
----------
@@ -128,33 +167,35 @@ def _str_time_to_sec(s):
-------
int
Integer value of time in seconds.
- """
+
+ """ # noqa: D401
pattern1 = re.compile(r'^(\d+):(\d+):(\d+)$')
time_tuple = pattern1.search(s)
if bool(time_tuple):
- return (int(time_tuple.groups()[0])*60*60 +
- int(time_tuple.groups()[1])*60 +
- int(round(float(time_tuple.groups()[2]))))
- else:
+ return (
+ int(time_tuple.groups()[0]) * 60 * 60
+ + int(time_tuple.groups()[1]) * 60
+ + int(round(float(time_tuple.groups()[2])))
+ )
+ else: # noqa: RET505
pattern2 = re.compile(r'^(\d+):(\d+)$')
time_tuple = pattern2.search(s)
if bool(time_tuple):
- return (int(time_tuple.groups()[0])*60*60 +
- int(time_tuple.groups()[1])*60)
- else:
+ return (
+ int(time_tuple.groups()[0]) * 60 * 60
+ + int(time_tuple.groups()[1]) * 60
+ )
+ else: # noqa: RET505
pattern3 = re.compile(r'^(\d+)$')
time_tuple = pattern3.search(s)
if bool(time_tuple):
- return int(time_tuple.groups()[0])*60*60
- else:
- raise RuntimeError("Time format in "
- "INP file not recognized. ")
-
+ return int(time_tuple.groups()[0]) * 60 * 60
+ else: # noqa: RET505
+ raise RuntimeError('Time format in INP file not recognized. ') # noqa: DOC501, EM101, TRY003
-def _clock_time_to_sec(s, am_pm):
- """
- Converts EPANET clocktime format to seconds.
+def _clock_time_to_sec(s, am_pm): # noqa: C901
+ """Converts EPANET clocktime format to seconds.
Parameters
----------
@@ -170,72 +211,81 @@ def _clock_time_to_sec(s, am_pm):
int
Integer value of time in seconds
- """
+ """ # noqa: D401
if am_pm.upper() == 'AM':
am = True
elif am_pm.upper() == 'PM':
am = False
else:
- raise RuntimeError('am_pm option not recognized; options are AM or PM')
+ raise RuntimeError('am_pm option not recognized; options are AM or PM') # noqa: DOC501, EM101, TRY003
pattern1 = re.compile(r'^(\d+):(\d+):(\d+)$')
time_tuple = pattern1.search(s)
if bool(time_tuple):
- time_sec = (int(time_tuple.groups()[0])*60*60 +
- int(time_tuple.groups()[1])*60 +
- int(round(float(time_tuple.groups()[2]))))
+ time_sec = (
+ int(time_tuple.groups()[0]) * 60 * 60
+ + int(time_tuple.groups()[1]) * 60
+ + int(round(float(time_tuple.groups()[2])))
+ )
if s.startswith('12'):
- time_sec -= 3600*12
+ time_sec -= 3600 * 12
if not am:
- if time_sec >= 3600*12:
- raise RuntimeError('Cannot specify am/pm for times greater than 12:00:00')
- time_sec += 3600*12
+ if time_sec >= 3600 * 12:
+ raise RuntimeError( # noqa: DOC501, TRY003
+ 'Cannot specify am/pm for times greater than 12:00:00' # noqa: EM101
+ )
+ time_sec += 3600 * 12
return time_sec
- else:
+ else: # noqa: RET505
pattern2 = re.compile(r'^(\d+):(\d+)$')
time_tuple = pattern2.search(s)
if bool(time_tuple):
- time_sec = (int(time_tuple.groups()[0])*60*60 +
- int(time_tuple.groups()[1])*60)
+ time_sec = (
+ int(time_tuple.groups()[0]) * 60 * 60
+ + int(time_tuple.groups()[1]) * 60
+ )
if s.startswith('12'):
- time_sec -= 3600*12
+ time_sec -= 3600 * 12
if not am:
if time_sec >= 3600 * 12:
- raise RuntimeError('Cannot specify am/pm for times greater than 12:00:00')
- time_sec += 3600*12
+ raise RuntimeError( # noqa: DOC501, TRY003
+ 'Cannot specify am/pm for times greater than 12:00:00' # noqa: EM101
+ )
+ time_sec += 3600 * 12
return time_sec
- else:
+ else: # noqa: RET505
pattern3 = re.compile(r'^(\d+)$')
time_tuple = pattern3.search(s)
if bool(time_tuple):
- time_sec = int(time_tuple.groups()[0])*60*60
+ time_sec = int(time_tuple.groups()[0]) * 60 * 60
if s.startswith('12'):
- time_sec -= 3600*12
+ time_sec -= 3600 * 12
if not am:
if time_sec >= 3600 * 12:
- raise RuntimeError('Cannot specify am/pm for times greater than 12:00:00')
- time_sec += 3600*12
+ raise RuntimeError( # noqa: DOC501, TRY003
+ 'Cannot specify am/pm for times greater than 12:00:00' # noqa: EM101
+ )
+ time_sec += 3600 * 12
return time_sec
- else:
- raise RuntimeError("Time format in "
- "INP file not recognized. ")
+ else: # noqa: RET505
+                raise RuntimeError('Time format in INP file not recognized. ')  # noqa: DOC501, EM101, TRY003
def _sec_to_string(sec):
- hours = int(sec/3600.)
- sec -= hours*3600
- mm = int(sec/60.)
- sec -= mm*60
+ hours = int(sec / 3600.0)
+ sec -= hours * 3600
+ mm = int(sec / 60.0)
+ sec -= mm * 60
return (hours, mm, int(sec))
-class InpFile(object):
- """
- EPANET INP file reader and writer class.
+class InpFile:
+ """EPANET INP file reader and writer class.
This class provides read and write functionality for EPANET INP files.
The EPANET Users Manual provides full documentation for the INP file format.
"""
+
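+    # Minimal usage sketch (hypothetical file names; assumes the INP file exists on disk):
+    #     wn = InpFile().read(['Net1.inp'])
+    #     InpFile().write('Net1_copy.inp', wn, units='GPM', version=2.2)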
def __init__(self):
self.sections = OrderedDict()
for sec in _INP_SECTIONS:
@@ -245,9 +295,8 @@ def __init__(self):
self.top_comments = []
self.curves = OrderedDict()
- def read(self, inp_files, wn=None):
- """
- Read an EPANET INP file and load data into a water network model object.
+ def read(self, inp_files, wn=None): # noqa: C901
+ """Read an EPANET INP file and load data into a water network model object.
Both EPANET 2.0 and EPANET 2.2 INP file options are recognized and handled.
Parameters
@@ -262,7 +311,7 @@ def read(self, inp_files, wn=None):
:class:`~wntrfr.network.model.WaterNetworkModel`
A water network model object
- """
+ """ # noqa: D205
if wn is None:
wn = WaterNetworkModel()
self.wn = wn
@@ -278,115 +327,121 @@ def read(self, inp_files, wn=None):
self.mass_units = None
self.flow_units = None
- for filename in inp_files:
- section = None
- lnum = 0
- edata = {'fname': filename}
- with io.open(filename, 'r', encoding='utf-8') as f:
- for line in f:
- lnum += 1
- edata['lnum'] = lnum
- line = line.strip()
- nwords = len(line.split())
- if len(line) == 0 or nwords == 0:
- # Blank line
- continue
- elif line.startswith('['):
- vals = line.split(None, 1)
- sec = vals[0].upper()
- # Add handlers to deal with extra 'S'es (or missing 'S'es) in INP file
- if sec not in _INP_SECTIONS:
- trsec = sec.replace(']','S]')
- if trsec in _INP_SECTIONS:
- sec = trsec
- if sec not in _INP_SECTIONS:
- trsec = sec.replace('S]',']')
- if trsec in _INP_SECTIONS:
- sec = trsec
- edata['sec'] = sec
- if sec in _INP_SECTIONS:
- section = sec
- #logger.info('%(fname)s:%(lnum)-6d %(sec)13s section found' % edata)
+ for filename in inp_files: # noqa: PLR1702
+ section = None
+ lnum = 0
+ edata = {'fname': filename}
+ with open(filename, encoding='utf-8') as f: # noqa: PTH123
+ for line in f:
+ lnum += 1
+ edata['lnum'] = lnum
+ line = line.strip() # noqa: PLW2901
+ nwords = len(line.split())
+ if len(line) == 0 or nwords == 0:
+ # Blank line
continue
- elif sec == '[END]':
- #logger.info('%(fname)s:%(lnum)-6d %(sec)13s end of file found' % edata)
- section = None
- break
- else:
- raise RuntimeError('%(fname)s:%(lnum)d: Invalid section "%(sec)s"' % edata)
- elif section is None and line.startswith(';'):
- self.top_comments.append(line[1:])
- continue
- elif section is None:
- logger.debug('Found confusing line: %s', repr(line))
- raise RuntimeError('%(fname)s:%(lnum)d: Non-comment outside of valid section!' % edata)
- # We have text, and we are in a section
- self.sections[section].append((lnum, line))
+ elif line.startswith('['): # noqa: RET507
+ vals = line.split(None, 1)
+ sec = vals[0].upper()
+ # Add handlers to deal with extra 'S'es (or missing 'S'es) in INP file
+ if sec not in _INP_SECTIONS:
+ trsec = sec.replace(']', 'S]')
+ if trsec in _INP_SECTIONS:
+ sec = trsec
+ if sec not in _INP_SECTIONS:
+ trsec = sec.replace('S]', ']')
+ if trsec in _INP_SECTIONS:
+ sec = trsec
+ edata['sec'] = sec
+ if sec in _INP_SECTIONS:
+ section = sec
+ # logger.info('%(fname)s:%(lnum)-6d %(sec)13s section found' % edata)
+ continue
+ elif sec == '[END]': # noqa: RET507
+ # logger.info('%(fname)s:%(lnum)-6d %(sec)13s end of file found' % edata)
+ section = None
+ break
+ else:
+ raise RuntimeError( # noqa: DOC501
+ '%(fname)s:%(lnum)d: Invalid section "%(sec)s"'
+ % edata
+ )
+ elif section is None and line.startswith(';'):
+ self.top_comments.append(line[1:])
+ continue
+ elif section is None:
+ logger.debug('Found confusing line: %s', repr(line))
+ raise RuntimeError( # noqa: DOC501
+ '%(fname)s:%(lnum)d: Non-comment outside of valid section!'
+ % edata
+ )
+ # We have text, and we are in a section
+ self.sections[section].append((lnum, line))
# Parse each of the sections
# The order of operations is important as certain things require prior knowledge
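+        # For example, [OPTIONS] and [TIMES] establish units and durations, and [CURVES] and [PATTERNS]
+        # are parsed before [JUNCTIONS], [TANKS], and [PUMPS] so those components can resolve the
+        # curves and patterns they reference.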
- ### OPTIONS
+ # OPTIONS
self._read_options()
- ### TIMES
+ # TIMES
self._read_times()
- ### CURVES
+ # CURVES
self._read_curves()
- ### PATTERNS
+ # PATTERNS
self._read_patterns()
- ### JUNCTIONS
+ # JUNCTIONS
self._read_junctions()
- ### RESERVOIRS
+ # RESERVOIRS
self._read_reservoirs()
- ### TANKS
+ # TANKS
self._read_tanks()
- ### PIPES
+ # PIPES
self._read_pipes()
- ### PUMPS
+ # PUMPS
self._read_pumps()
- ### VALVES
+ # VALVES
self._read_valves()
- ### COORDINATES
+ # COORDINATES
self._read_coordinates()
- ### SOURCES
+ # SOURCES
self._read_sources()
- ### STATUS
+ # STATUS
self._read_status()
- ### CONTROLS
+ # CONTROLS
self._read_controls()
- ### RULES
+ # RULES
self._read_rules()
- ### REACTIONS
+ # REACTIONS
self._read_reactions()
- ### TITLE
+ # TITLE
self._read_title()
- ### ENERGY
+ # ENERGY
self._read_energy()
- ### DEMANDS
+ # DEMANDS
self._read_demands()
- ### EMITTERS
+ # EMITTERS
self._read_emitters()
-
- ### QUALITY
+
+ # QUALITY
self._read_quality()
self._read_mixing()
@@ -394,30 +449,29 @@ def read(self, inp_files, wn=None):
self._read_vertices()
self._read_labels()
- ### Parse Backdrop
+ # Parse Backdrop
self._read_backdrop()
- ### TAGS
+ # TAGS
self._read_tags()
# Set the _inpfile io data inside the water network, so it is saved somewhere
- wn._inpfile = self
-
- ### Finish tags
+ wn._inpfile = self # noqa: SLF001
+
+ # Finish tags
self._read_end()
-
+
return self.wn
- def write(self, filename, wn, units=None, version=2.2, force_coordinates=False):
- """
- Write a water network model into an EPANET INP file.
+ def write(self, filename, wn, units=None, version=2.2, force_coordinates=False): # noqa: FBT002
+ """Write a water network model into an EPANET INP file.
.. note::
Please note that by default, an EPANET 2.2 formatted file is written by wntrfr. An INP file
- with version 2.2 options *will not* work with EPANET 2.0 (neither command line nor GUI).
+ with version 2.2 options *will not* work with EPANET 2.0 (neither command line nor GUI).
By default, WNTR will use the EPANET 2.2 toolkit.
-
+
Parameters
----------
@@ -426,20 +480,20 @@ def write(self, filename, wn, units=None, version=2.2, force_coordinates=False):
units : str, int or FlowUnits
Name of the units for the EPANET INP file to be written in.
version : float, {2.0, **2.2**}
- Defaults to 2.2; use 2.0 to guarantee backward compatability, but this will turn off PDD mode
- and supress the writing of other EPANET 2.2-specific options. If PDD mode is specified, a
+            Defaults to 2.2; use 2.0 to guarantee backward compatibility, but this will turn off PDD mode
+ and suppress the writing of other EPANET 2.2-specific options. If PDD mode is specified, a
warning will be issued.
force_coordinates : bool
This only applies if `self.options.graphics.map_filename` is not `None`,
and will force the COORDINATES section to be written even if a MAP file is
provided. False by default, but coordinates **are** written by default since
the MAP file is `None` by default.
- """
+ """
if not isinstance(wn, WaterNetworkModel):
- raise ValueError('Must pass a WaterNetworkModel object')
+ raise ValueError('Must pass a WaterNetworkModel object') # noqa: DOC501, EM101, TRY003, TRY004
if units is not None and isinstance(units, str):
- units=units.upper()
+ units = units.upper()
self.flow_units = FlowUnits[units]
elif units is not None and isinstance(units, FlowUnits):
self.flow_units = units
@@ -454,7 +508,7 @@ def write(self, filename, wn, units=None, version=2.2, force_coordinates=False):
self.flow_units = FlowUnits.GPM
if self.mass_units is None:
self.mass_units = MassUnits.mg
- with io.open(filename, 'wb') as f:
+ with open(filename, 'wb') as f: # noqa: PTH123
self._write_title(f, wn)
self._write_junctions(f, wn)
self._write_reservoirs(f, wn)
@@ -490,64 +544,78 @@ def write(self, filename, wn, units=None, version=2.2, force_coordinates=False):
self._write_end(f, wn)
- ### Network Components
+ # Network Components
def _read_title(self):
lines = []
- for lnum, line in self.sections['[TITLE]']:
- line = line.split(';')[0]
+ for lnum, line in self.sections['[TITLE]']: # noqa: B007
+ line = line.split(';')[0] # noqa: PLW2901
current = line.split()
if current == []:
continue
lines.append(line)
self.wn.title = lines
- def _write_title(self, f, wn):
+ def _write_title(self, f, wn): # noqa: PLR6301
if wn.name is not None:
- f.write('; Filename: {0}\n'.format(wn.name).encode(sys_default_enc))
- f.write('; WNTR: {}\n; Created: {:%Y-%m-%d %H:%M:%S}\n'.format(wntrfr.__version__, datetime.datetime.now()).encode(sys_default_enc))
+ f.write(f'; Filename: {wn.name}\n'.encode(sys_default_enc))
+ f.write(
+ f'; WNTR: {wntrfr.__version__}\n; Created: {datetime.datetime.now():%Y-%m-%d %H:%M:%S}\n'.encode( # noqa: DTZ005
+ sys_default_enc
+ )
+ )
f.write('[TITLE]\n'.encode(sys_default_enc))
if hasattr(wn, 'title'):
for line in wn.title:
- f.write('{}\n'.format(line).encode(sys_default_enc))
+ f.write(f'{line}\n'.encode(sys_default_enc))
f.write('\n'.encode(sys_default_enc))
def _read_junctions(self):
-# try:
- for lnum, line in self.sections['[JUNCTIONS]']:
- line = line.split(';')[0]
- current = line.split()
- if current == []:
- continue
- if len(current) > 3:
- pat = current[3]
- elif self.wn.options.hydraulic.pattern:
- pat = self.wn.options.hydraulic.pattern
- else:
- pat = self.wn.patterns.default_pattern
- base_demand = 0.0
- if len(current) > 2:
- base_demand = to_si(self.flow_units, float(current[2]), HydParam.Demand)
- self.wn.add_junction(current[0],
- base_demand,
- pat,
- to_si(self.flow_units, float(current[1]), HydParam.Elevation),
- demand_category=None)
-# except Exception as e:
-# print(line)
-# raise e
+ # try:
+ for lnum, line in self.sections['[JUNCTIONS]']: # noqa: B007
+ line = line.split(';')[0] # noqa: PLW2901
+ current = line.split()
+ if current == []:
+ continue
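+            # Demand pattern precedence: explicit fourth column, then the model-wide hydraulic pattern, then the default pattern.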
+ if len(current) > 3: # noqa: PLR2004
+ pat = current[3]
+ elif self.wn.options.hydraulic.pattern:
+ pat = self.wn.options.hydraulic.pattern
+ else:
+ pat = self.wn.patterns.default_pattern
+ base_demand = 0.0
+ if len(current) > 2: # noqa: PLR2004
+ base_demand = to_si(
+ self.flow_units, float(current[2]), HydParam.Demand
+ )
+ self.wn.add_junction(
+ current[0],
+ base_demand,
+ pat,
+ to_si(self.flow_units, float(current[1]), HydParam.Elevation),
+ demand_category=None,
+ )
+
+ # except Exception as e:
+ # print(line)
+ # raise e
def _write_junctions(self, f, wn):
f.write('[JUNCTIONS]\n'.encode(sys_default_enc))
- f.write(_JUNC_LABEL.format(';ID', 'Elevation', 'Demand', 'Pattern').encode(sys_default_enc))
+ f.write(
+ _JUNC_LABEL.format(';ID', 'Elevation', 'Demand', 'Pattern').encode(
+ sys_default_enc
+ )
+ )
nnames = list(wn.junction_name_list)
# nnames.sort()
for junction_name in nnames:
junction = wn.nodes[junction_name]
-
- if junction._is_isolated==True: #sina added this
+
+ # sina added this
+ if junction._is_isolated == True: # noqa: SLF001, E712
continue
-
+
if junction.demand_timeseries_list:
base_demands = junction.demand_timeseries_list.base_demand_list()
demand_patterns = junction.demand_timeseries_list.pattern_list()
@@ -565,29 +633,41 @@ def _write_junctions(self, f, wn):
else:
base_demand = 0.0
demand_pattern = None
- E = {'name': junction_name,
- 'elev': from_si(self.flow_units, junction.elevation, HydParam.Elevation),
- 'dem': from_si(self.flow_units, base_demand, HydParam.Demand),
- 'pat': '',
- 'com': ';'}
+ E = { # noqa: N806
+ 'name': junction_name,
+ 'elev': from_si(
+ self.flow_units, junction.elevation, HydParam.Elevation
+ ),
+ 'dem': from_si(self.flow_units, base_demand, HydParam.Demand),
+ 'pat': '',
+ 'com': ';',
+ }
if demand_pattern is not None:
E['pat'] = str(demand_pattern)
f.write(_JUNC_ENTRY.format(**E).encode(sys_default_enc))
f.write('\n'.encode(sys_default_enc))
def _read_reservoirs(self):
- for lnum, line in self.sections['[RESERVOIRS]']:
- line = line.split(';')[0]
+ for lnum, line in self.sections['[RESERVOIRS]']: # noqa: B007
+ line = line.split(';')[0] # noqa: PLW2901
current = line.split()
if current == []:
continue
- if len(current) == 2:
- self.wn.add_reservoir(current[0],
- to_si(self.flow_units, float(current[1]), HydParam.HydraulicHead))
+ if len(current) == 2: # noqa: PLR2004
+ self.wn.add_reservoir(
+ current[0],
+ to_si(
+ self.flow_units, float(current[1]), HydParam.HydraulicHead
+ ),
+ )
else:
- self.wn.add_reservoir(current[0],
- to_si(self.flow_units, float(current[1]), HydParam.HydraulicHead),
- current[2])
+ self.wn.add_reservoir(
+ current[0],
+ to_si(
+ self.flow_units, float(current[1]), HydParam.HydraulicHead
+ ),
+ current[2],
+ )
def _write_reservoirs(self, f, wn):
f.write('[RESERVOIRS]\n'.encode(sys_default_enc))
@@ -596,13 +676,20 @@ def _write_reservoirs(self, f, wn):
# nnames.sort()
for reservoir_name in nnames:
reservoir = wn.nodes[reservoir_name]
-
- if reservoir._is_isolated==True: #sina added this
+
+ # sina added this
+ if reservoir._is_isolated == True: # noqa: SLF001, E712
continue
-
- E = {'name': reservoir_name,
- 'head': from_si(self.flow_units, reservoir.head_timeseries.base_value, HydParam.HydraulicHead),
- 'com': ';'}
+
+ E = { # noqa: N806
+ 'name': reservoir_name,
+ 'head': from_si(
+ self.flow_units,
+ reservoir.head_timeseries.base_value,
+ HydParam.HydraulicHead,
+ ),
+ 'com': ';',
+ }
if reservoir.head_timeseries.pattern is None:
E['pat'] = ''
else:
@@ -611,13 +698,13 @@ def _write_reservoirs(self, f, wn):
f.write('\n'.encode(sys_default_enc))
def _read_tanks(self):
- for lnum, line in self.sections['[TANKS]']:
- line = line.split(';')[0]
+ for lnum, line in self.sections['[TANKS]']: # noqa: B007
+ line = line.split(';')[0] # noqa: PLW2901
current = line.split()
if current == []:
continue
volume = None
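+            # Column count selects the tank form: 8+ columns include a volume curve (a 9th adds the overflow flag),
+            # 7 give a minimum volume, and 6 fall back to defaults.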
- if len(current) >= 8: # Volume curve provided
+ if len(current) >= 8: # Volume curve provided # noqa: PLR2004
volume = float(current[6])
curve_name = current[7]
if curve_name == '*':
@@ -629,59 +716,94 @@ def _read_tanks(self):
y = to_si(self.flow_units, point[1], HydParam.Volume)
curve_points.append((x, y))
self.wn.add_curve(curve_name, 'VOLUME', curve_points)
-# curve = self.wn.get_curve(curve_name)
- if len(current) == 9:
+ # curve = self.wn.get_curve(curve_name)
+ if len(current) == 9: # noqa: PLR2004
overflow = current[8]
else:
overflow = False
- elif len(current) == 7:
+ elif len(current) == 7: # noqa: PLR2004
curve_name = None
overflow = False
volume = float(current[6])
- elif len(current) == 6:
+ elif len(current) == 6: # noqa: PLR2004
curve_name = None
overflow = False
volume = 0.0
else:
- raise RuntimeError('Tank entry format not recognized.')
- self.wn.add_tank(current[0],
- to_si(self.flow_units, float(current[1]), HydParam.Elevation),
- to_si(self.flow_units, float(current[2]), HydParam.Length),
- to_si(self.flow_units, float(current[3]), HydParam.Length),
- to_si(self.flow_units, float(current[4]), HydParam.Length),
- to_si(self.flow_units, float(current[5]), HydParam.TankDiameter),
- to_si(self.flow_units, float(volume), HydParam.Volume),
- curve_name, overflow)
+ raise RuntimeError('Tank entry format not recognized.') # noqa: EM101, TRY003
+ self.wn.add_tank(
+ current[0],
+ to_si(self.flow_units, float(current[1]), HydParam.Elevation),
+ to_si(self.flow_units, float(current[2]), HydParam.Length),
+ to_si(self.flow_units, float(current[3]), HydParam.Length),
+ to_si(self.flow_units, float(current[4]), HydParam.Length),
+ to_si(self.flow_units, float(current[5]), HydParam.TankDiameter),
+ to_si(self.flow_units, float(volume), HydParam.Volume),
+ curve_name,
+ overflow,
+ )
def _write_tanks(self, f, wn, version=2.2):
f.write('[TANKS]\n'.encode(sys_default_enc))
- if version != 2.2:
- f.write(_TANK_LABEL.format(';ID', 'Elevation', 'Init Level', 'Min Level', 'Max Level',
- 'Diameter', 'Min Volume', 'Volume Curve','').encode(sys_default_enc))
+ if version != 2.2: # noqa: PLR2004
+ f.write(
+ _TANK_LABEL.format(
+ ';ID',
+ 'Elevation',
+ 'Init Level',
+ 'Min Level',
+ 'Max Level',
+ 'Diameter',
+ 'Min Volume',
+ 'Volume Curve',
+ '',
+ ).encode(sys_default_enc)
+ )
else:
- f.write(_TANK_LABEL.format(';ID', 'Elevation', 'Init Level', 'Min Level', 'Max Level',
- 'Diameter', 'Min Volume', 'Volume Curve','Overflow').encode(sys_default_enc))
+ f.write(
+ _TANK_LABEL.format(
+ ';ID',
+ 'Elevation',
+ 'Init Level',
+ 'Min Level',
+ 'Max Level',
+ 'Diameter',
+ 'Min Volume',
+ 'Volume Curve',
+ 'Overflow',
+ ).encode(sys_default_enc)
+ )
nnames = list(wn.tank_name_list)
# nnames.sort()
for tank_name in nnames:
tank = wn.nodes[tank_name]
-
- if tank._is_isolated==True: #sina added this
+
+ if tank._is_isolated == True: # sina added this # noqa: SLF001, E712
continue
-
- E = {'name': tank_name,
- 'elev': from_si(self.flow_units, tank.elevation, HydParam.Elevation),
- 'initlev': from_si(self.flow_units, tank.init_level, HydParam.HydraulicHead),
- 'minlev': from_si(self.flow_units, tank.min_level, HydParam.HydraulicHead),
- 'maxlev': from_si(self.flow_units, tank.max_level, HydParam.HydraulicHead),
- 'diam': from_si(self.flow_units, tank.diameter, HydParam.TankDiameter),
- 'minvol': from_si(self.flow_units, tank.min_vol, HydParam.Volume),
- 'curve': '',
- 'overflow': '',
- 'com': ';'}
+
+ E = { # noqa: N806
+ 'name': tank_name,
+ 'elev': from_si(self.flow_units, tank.elevation, HydParam.Elevation),
+ 'initlev': from_si(
+ self.flow_units, tank.init_level, HydParam.HydraulicHead
+ ),
+ 'minlev': from_si(
+ self.flow_units, tank.min_level, HydParam.HydraulicHead
+ ),
+ 'maxlev': from_si(
+ self.flow_units, tank.max_level, HydParam.HydraulicHead
+ ),
+ 'diam': from_si(
+ self.flow_units, tank.diameter, HydParam.TankDiameter
+ ),
+ 'minvol': from_si(self.flow_units, tank.min_vol, HydParam.Volume),
+ 'curve': '',
+ 'overflow': '',
+ 'com': ';',
+ }
if tank.vol_curve is not None:
E['curve'] = tank.vol_curve.name
- if version ==2.2:
+ if version == 2.2: # noqa: PLR2004
if tank.overflow:
E['overflow'] = 'YES'
if tank.vol_curve is None:
@@ -690,12 +812,12 @@ def _write_tanks(self, f, wn, version=2.2):
f.write('\n'.encode(sys_default_enc))
def _read_pipes(self):
- for lnum, line in self.sections['[PIPES]']:
- line = line.split(';')[0]
+ for lnum, line in self.sections['[PIPES]']: # noqa: B007
+ line = line.split(';')[0] # noqa: PLW2901
current = line.split()
if current == []:
continue
- if len(current) == 8:
+ if len(current) == 8: # noqa: PLR2004
minor_loss = float(current[6])
if current[7].upper() == 'CV':
link_status = LinkStatus.Open
@@ -703,66 +825,84 @@ def _read_pipes(self):
else:
link_status = LinkStatus[current[7].upper()]
check_valve = False
- elif len(current) == 7:
+ elif len(current) == 7: # noqa: PLR2004
minor_loss = float(current[6])
link_status = LinkStatus.Open
check_valve = False
- elif len(current) == 6:
- minor_loss = 0.
+ elif len(current) == 6: # noqa: PLR2004
+ minor_loss = 0.0
link_status = LinkStatus.Open
check_valve = False
- self.wn.add_pipe(current[0],
- current[1],
- current[2],
- to_si(self.flow_units, float(current[3]), HydParam.Length),
- to_si(self.flow_units, float(current[4]), HydParam.PipeDiameter),
- float(current[5]),
- minor_loss,
- link_status,
- check_valve)
+ self.wn.add_pipe(
+ current[0],
+ current[1],
+ current[2],
+ to_si(self.flow_units, float(current[3]), HydParam.Length),
+ to_si(self.flow_units, float(current[4]), HydParam.PipeDiameter),
+ float(current[5]),
+ minor_loss,
+ link_status,
+ check_valve,
+ )
def _write_pipes(self, f, wn):
f.write('[PIPES]\n'.encode(sys_default_enc))
- f.write(_PIPE_LABEL.format(';ID', 'Node1', 'Node2', 'Length', 'Diameter',
- 'Roughness', 'Minor Loss', 'Status').encode(sys_default_enc))
+ f.write(
+ _PIPE_LABEL.format(
+ ';ID',
+ 'Node1',
+ 'Node2',
+ 'Length',
+ 'Diameter',
+ 'Roughness',
+ 'Minor Loss',
+ 'Status',
+ ).encode(sys_default_enc)
+ )
lnames = list(wn.pipe_name_list)
# lnames.sort()
for pipe_name in lnames:
pipe = wn.links[pipe_name]
-
- if pipe._is_isolated == True: #Sina added this
+
+ if pipe._is_isolated == True: # Sina added this # noqa: SLF001, E712
continue
-
- E = {'name': pipe_name,
- 'node1': pipe.start_node_name,
- 'node2': pipe.end_node_name,
- 'len': from_si(self.flow_units, pipe.length, HydParam.Length),
- 'diam': from_si(self.flow_units, pipe.diameter, HydParam.PipeDiameter),
- 'rough': pipe.roughness,
- 'mloss': pipe.minor_loss,
- 'status': str(pipe.initial_status),
- 'com': ';'}
+
+ E = { # noqa: N806
+ 'name': pipe_name,
+ 'node1': pipe.start_node_name,
+ 'node2': pipe.end_node_name,
+ 'len': from_si(self.flow_units, pipe.length, HydParam.Length),
+ 'diam': from_si(
+ self.flow_units, pipe.diameter, HydParam.PipeDiameter
+ ),
+ 'rough': pipe.roughness,
+ 'mloss': pipe.minor_loss,
+ 'status': str(pipe.initial_status),
+ 'com': ';',
+ }
if pipe.check_valve:
E['status'] = 'CV'
f.write(_PIPE_ENTRY.format(**E).encode(sys_default_enc))
f.write('\n'.encode(sys_default_enc))
- def _read_pumps(self):
+ def _read_pumps(self): # noqa: C901
def create_curve(curve_name):
curve_points = []
- if curve_name not in self.wn.curve_name_list or \
- self.wn.get_curve(curve_name) is None:
+ if (
+ curve_name not in self.wn.curve_name_list
+ or self.wn.get_curve(curve_name) is None
+ ):
for point in self.curves[curve_name]:
x = to_si(self.flow_units, point[0], HydParam.Flow)
y = to_si(self.flow_units, point[1], HydParam.HydraulicHead)
- curve_points.append((x,y))
+ curve_points.append((x, y))
self.wn.add_curve(curve_name, 'HEAD', curve_points)
curve = self.wn.get_curve(curve_name)
- return curve
+ return curve # noqa: RET504
- for lnum, line in self.sections['[PUMPS]']:
- line = line.split(';')[0]
+ for lnum, line in self.sections['[PUMPS]']: # noqa: B007
+ line = line.split(';')[0] # noqa: PLW2901
current = line.split()
if current == []:
continue
@@ -774,82 +914,103 @@ def create_curve(curve_name):
for i in range(3, len(current), 2):
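+                # Columns after the two node names are keyword/value pairs: HEAD <curve>, POWER <value>, SPEED <ratio>, PATTERN <name>.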
if current[i].upper() == 'HEAD':
-# assert pump_type is None, 'In [PUMPS] entry, specify either HEAD or POWER once.'
+ # assert pump_type is None, 'In [PUMPS] entry, specify either HEAD or POWER once.'
pump_type = 'HEAD'
- value = create_curve(current[i+1]).name
+ value = create_curve(current[i + 1]).name
elif current[i].upper() == 'POWER':
-# assert pump_type is None, 'In [PUMPS] entry, specify either HEAD or POWER once.'
+ # assert pump_type is None, 'In [PUMPS] entry, specify either HEAD or POWER once.'
pump_type = 'POWER'
- value = to_si(self.flow_units, float(current[i+1]), HydParam.Power)
+ value = to_si(
+ self.flow_units, float(current[i + 1]), HydParam.Power
+ )
elif current[i].upper() == 'SPEED':
-# assert speed is None, 'In [PUMPS] entry, SPEED may only be specified once.'
- speed = float(current[i+1])
+ # assert speed is None, 'In [PUMPS] entry, SPEED may only be specified once.'
+ speed = float(current[i + 1])
elif current[i].upper() == 'PATTERN':
-# assert pattern is None, 'In [PUMPS] entry, PATTERN may only be specified once.'
- pattern = self.wn.get_pattern(current[i+1]).name
+ # assert pattern is None, 'In [PUMPS] entry, PATTERN may only be specified once.'
+ pattern = self.wn.get_pattern(current[i + 1]).name
else:
- raise RuntimeError('Pump keyword in inp file not recognized.')
+ raise RuntimeError('Pump keyword in inp file not recognized.') # noqa: EM101, TRY003
if speed is None:
speed = 1.0
if pump_type is None:
- raise RuntimeError('Either head curve id or pump power must be specified for all pumps.')
- self.wn.add_pump(current[0], current[1], current[2], pump_type, value, speed, pattern)
+ raise RuntimeError( # noqa: TRY003
+ 'Either head curve id or pump power must be specified for all pumps.' # noqa: EM101
+ )
+ self.wn.add_pump(
+ current[0], current[1], current[2], pump_type, value, speed, pattern
+ )
def _write_pumps(self, f, wn):
f.write('[PUMPS]\n'.encode(sys_default_enc))
- f.write(_PUMP_LABEL.format(';ID', 'Node1', 'Node2', 'Properties').encode(sys_default_enc))
+ f.write(
+ _PUMP_LABEL.format(';ID', 'Node1', 'Node2', 'Properties').encode(
+ sys_default_enc
+ )
+ )
lnames = list(wn.pump_name_list)
# lnames.sort()
for pump_name in lnames:
pump = wn.links[pump_name]
-
- if pump._is_isolated == True: #Sina added this
+
+ if pump._is_isolated == True: # Sina added this # noqa: SLF001, E712
continue
-
- E = {'name': pump_name,
- 'node1': pump.start_node_name,
- 'node2': pump.end_node_name,
- 'ptype': pump.pump_type,
- 'params': '',
-# 'speed_keyword': 'SPEED',
-# 'speed': pump.speed_timeseries.base_value,
- 'com': ';'}
+
+ E = { # noqa: N806
+ 'name': pump_name,
+ 'node1': pump.start_node_name,
+ 'node2': pump.end_node_name,
+ 'ptype': pump.pump_type,
+ 'params': '',
+ # 'speed_keyword': 'SPEED',
+ # 'speed': pump.speed_timeseries.base_value,
+ 'com': ';',
+ }
if pump.pump_type == 'HEAD':
E['params'] = pump.pump_curve_name
elif pump.pump_type == 'POWER':
- E['params'] = str(from_si(self.flow_units, pump.power, HydParam.Power))
+ E['params'] = str(
+ from_si(self.flow_units, pump.power, HydParam.Power)
+ )
else:
- raise RuntimeError('Only head or power info is supported of pumps.')
+                raise RuntimeError('Only head or power info is supported for pumps.')  # noqa: EM101, TRY003
tmp_entry = _PUMP_ENTRY
if pump.speed_timeseries.base_value != 1:
E['speed_keyword'] = 'SPEED'
E['speed'] = pump.speed_timeseries.base_value
- tmp_entry = (tmp_entry.rstrip('\n').rstrip('}').rstrip('com:>3s').rstrip(' {') +
- ' {speed_keyword:8s} {speed:15.11g} {com:>3s}\n')
+ tmp_entry = (
+ tmp_entry.rstrip('\n').rstrip('}').rstrip('com:>3s').rstrip(' {')
+ + ' {speed_keyword:8s} {speed:15.11g} {com:>3s}\n'
+ )
if pump.speed_timeseries.pattern is not None:
- tmp_entry = (tmp_entry.rstrip('\n').rstrip('}').rstrip('com:>3s').rstrip(' {') +
- ' {pattern_keyword:10s} {pattern:20s} {com:>3s}\n')
+ tmp_entry = (
+ tmp_entry.rstrip('\n').rstrip('}').rstrip('com:>3s').rstrip(' {')
+ + ' {pattern_keyword:10s} {pattern:20s} {com:>3s}\n'
+ )
E['pattern_keyword'] = 'PATTERN'
E['pattern'] = pump.speed_timeseries.pattern.name
f.write(tmp_entry.format(**E).encode(sys_default_enc))
f.write('\n'.encode(sys_default_enc))
def _read_valves(self):
- for lnum, line in self.sections['[VALVES]']:
- line = line.split(';')[0]
+ for lnum, line in self.sections['[VALVES]']: # noqa: B007
+ line = line.split(';')[0] # noqa: PLW2901
current = line.split()
if current == []:
continue
- if len(current) == 6:
+ if len(current) == 6: # noqa: PLR2004
current.append(0.0)
- else:
- if len(current) != 7:
- raise RuntimeError('The [VALVES] section of an INP file must have 6 or 7 entries.')
+ elif len(current) != 7: # noqa: PLR2004
+ raise RuntimeError( # noqa: TRY003
+ 'The [VALVES] section of an INP file must have 6 or 7 entries.' # noqa: EM101
+ )
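+            # The setting column is interpreted per valve type: PRV/PSV/PBV as pressure, FCV as flow,
+            # TCV as a unitless loss coefficient, and GPV as a head-loss curve name.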
valve_type = current[4].upper()
- if valve_type in ['PRV', 'PSV', 'PBV']:
- valve_set = to_si(self.flow_units, float(current[5]), HydParam.Pressure)
+ if valve_type in ['PRV', 'PSV', 'PBV']: # noqa: PLR6201
+ valve_set = to_si(
+ self.flow_units, float(current[5]), HydParam.Pressure
+ )
elif valve_type == 'FCV':
valve_set = to_si(self.flow_units, float(current[5]), HydParam.Flow)
elif valve_type == 'TCV':
@@ -864,40 +1025,54 @@ def _read_valves(self):
self.wn.add_curve(curve_name, 'HEADLOSS', curve_points)
valve_set = curve_name
else:
- raise RuntimeError('VALVE type "%s" unrecognized' % valve_type)
- self.wn.add_valve(current[0],
- current[1],
- current[2],
- to_si(self.flow_units, float(current[3]), HydParam.PipeDiameter),
- current[4].upper(),
- float(current[6]),
- valve_set)
+ raise RuntimeError('VALVE type "%s" unrecognized' % valve_type) # noqa: UP031
+ self.wn.add_valve(
+ current[0],
+ current[1],
+ current[2],
+ to_si(self.flow_units, float(current[3]), HydParam.PipeDiameter),
+ current[4].upper(),
+ float(current[6]),
+ valve_set,
+ )
def _write_valves(self, f, wn):
f.write('[VALVES]\n'.encode(sys_default_enc))
- f.write(_VALVE_LABEL.format(';ID', 'Node1', 'Node2', 'Diameter', 'Type', 'Setting', 'Minor Loss').encode(sys_default_enc))
+ f.write(
+ _VALVE_LABEL.format(
+ ';ID', 'Node1', 'Node2', 'Diameter', 'Type', 'Setting', 'Minor Loss'
+ ).encode(sys_default_enc)
+ )
lnames = list(wn.valve_name_list)
# lnames.sort()
for valve_name in lnames:
valve = wn.links[valve_name]
-
- if valve._is_isolated == True: #Sina added this
+
+ if valve._is_isolated == True: # Sina added this # noqa: SLF001, E712
continue
-
- E = {'name': valve_name,
- 'node1': valve.start_node_name,
- 'node2': valve.end_node_name,
- 'diam': from_si(self.flow_units, valve.diameter, HydParam.PipeDiameter),
- 'vtype': valve.valve_type,
- 'set': valve.initial_setting,
- 'mloss': valve.minor_loss,
- 'com': ';'}
+
+ E = { # noqa: N806
+ 'name': valve_name,
+ 'node1': valve.start_node_name,
+ 'node2': valve.end_node_name,
+ 'diam': from_si(
+ self.flow_units, valve.diameter, HydParam.PipeDiameter
+ ),
+ 'vtype': valve.valve_type,
+ 'set': valve.initial_setting,
+ 'mloss': valve.minor_loss,
+ 'com': ';',
+ }
valve_type = valve.valve_type
formatter = _VALVE_ENTRY
- if valve_type in ['PRV', 'PSV', 'PBV']:
- valve_set = from_si(self.flow_units, valve.initial_setting, HydParam.Pressure)
+ if valve_type in ['PRV', 'PSV', 'PBV']: # noqa: PLR6201
+ valve_set = from_si(
+ self.flow_units, valve.initial_setting, HydParam.Pressure
+ )
elif valve_type == 'FCV':
- valve_set = from_si(self.flow_units, valve.initial_setting, HydParam.Flow)
+ valve_set = from_si(
+ self.flow_units, valve.initial_setting, HydParam.Flow
+ )
elif valve_type == 'TCV':
valve_set = valve.initial_setting
elif valve_type == 'GPV':
@@ -908,13 +1083,17 @@ def _write_valves(self, f, wn):
f.write('\n'.encode(sys_default_enc))
def _read_emitters(self):
- for lnum, line in self.sections['[EMITTERS]']: # Private attribute on junctions
- line = line.split(';')[0]
+ for lnum, line in self.sections[ # noqa: B007
+ '[EMITTERS]'
+ ]: # Private attribute on junctions
+ line = line.split(';')[0] # noqa: PLW2901
current = line.split()
if current == []:
continue
junction = self.wn.get_node(current[0])
- junction.emitter_coefficient = to_si(self.flow_units, float(current[1]), HydParam.EmitterCoeff)
+ junction.emitter_coefficient = to_si(
+ self.flow_units, float(current[1]), HydParam.EmitterCoeff
+ )
def _write_emitters(self, f, wn):
f.write('[EMITTERS]\n'.encode(sys_default_enc))
@@ -925,81 +1104,108 @@ def _write_emitters(self, f, wn):
# njunctions.sort()
for junction_name in njunctions:
junction = wn.nodes[junction_name]
-
- if junction._is_isolated == True: #Sina added this
+
+ # Sina added this
+ if junction._is_isolated == True: # noqa: SLF001, E712
continue
-
+
if junction.emitter_coefficient:
- val = from_si(self.flow_units, junction.emitter_coefficient, HydParam.EmitterCoeff)
- f.write(entry.format(junction_name, str(val)).encode(sys_default_enc))
+ val = from_si(
+ self.flow_units,
+ junction.emitter_coefficient,
+ HydParam.EmitterCoeff,
+ )
+ f.write(
+ entry.format(junction_name, str(val)).encode(sys_default_enc)
+ )
f.write('\n'.encode(sys_default_enc))
- ### System Operation
+ # System Operation
def _read_curves(self):
- for lnum, line in self.sections['[CURVES]']:
+ for lnum, line in self.sections['[CURVES]']: # noqa: B007
# It should be noted carefully that these lines are never directly
# applied to the WaterNetworkModel object. Because different curve
# types are treated differently, each of the curves are converted
# the first time they are used, and this is used to build up a
# dictionary for those conversions to take place.
- line = line.split(';')[0]
+ line = line.split(';')[0] # noqa: PLW2901
current = line.split()
if current == []:
continue
curve_name = current[0]
if curve_name not in self.curves:
self.curves[curve_name] = []
- self.curves[curve_name].append((float(current[1]),
- float(current[2])))
+ self.curves[curve_name].append((float(current[1]), float(current[2])))
self.wn.curves[curve_name] = None
-
- def _write_curves(self, f, wn):
+ def _write_curves(self, f, wn): # noqa: C901
f.write('[CURVES]\n'.encode(sys_default_enc))
- f.write(_CURVE_LABEL.format(';ID', 'X-Value', 'Y-Value').encode(sys_default_enc))
+ f.write(
+ _CURVE_LABEL.format(';ID', 'X-Value', 'Y-Value').encode(sys_default_enc)
+ )
curves = list(wn.curve_name_list)
# curves.sort()
for curve_name in curves:
curve = wn.get_curve(curve_name)
if curve.curve_type == 'VOLUME':
- f.write(';VOLUME: {}\n'.format(curve_name).encode(sys_default_enc))
+ f.write(f';VOLUME: {curve_name}\n'.encode(sys_default_enc))
for point in curve.points:
x = from_si(self.flow_units, point[0], HydParam.Length)
y = from_si(self.flow_units, point[1], HydParam.Volume)
- f.write(_CURVE_ENTRY.format(name=curve_name, x=x, y=y, com=';').encode(sys_default_enc))
+ f.write(
+ _CURVE_ENTRY.format(
+ name=curve_name, x=x, y=y, com=';'
+ ).encode(sys_default_enc)
+ )
elif curve.curve_type == 'HEAD':
- f.write(';PUMP: {}\n'.format(curve_name).encode(sys_default_enc))
+ f.write(f';PUMP: {curve_name}\n'.encode(sys_default_enc))
for point in curve.points:
x = from_si(self.flow_units, point[0], HydParam.Flow)
y = from_si(self.flow_units, point[1], HydParam.HydraulicHead)
- f.write(_CURVE_ENTRY.format(name=curve_name, x=x, y=y, com=';').encode(sys_default_enc))
+ f.write(
+ _CURVE_ENTRY.format(
+ name=curve_name, x=x, y=y, com=';'
+ ).encode(sys_default_enc)
+ )
elif curve.curve_type == 'EFFICIENCY':
- f.write(';EFFICIENCY: {}\n'.format(curve_name).encode(sys_default_enc))
+ f.write(f';EFFICIENCY: {curve_name}\n'.encode(sys_default_enc))
for point in curve.points:
x = from_si(self.flow_units, point[0], HydParam.Flow)
y = point[1]
- f.write(_CURVE_ENTRY.format(name=curve_name, x=x, y=y, com=';').encode(sys_default_enc))
+ f.write(
+ _CURVE_ENTRY.format(
+ name=curve_name, x=x, y=y, com=';'
+ ).encode(sys_default_enc)
+ )
elif curve.curve_type == 'HEADLOSS':
- f.write(';HEADLOSS: {}\n'.format(curve_name).encode(sys_default_enc))
+ f.write(f';HEADLOSS: {curve_name}\n'.encode(sys_default_enc))
for point in curve.points:
x = from_si(self.flow_units, point[0], HydParam.Flow)
y = from_si(self.flow_units, point[1], HydParam.HeadLoss)
- f.write(_CURVE_ENTRY.format(name=curve_name, x=x, y=y, com=';').encode(sys_default_enc))
+ f.write(
+ _CURVE_ENTRY.format(
+ name=curve_name, x=x, y=y, com=';'
+ ).encode(sys_default_enc)
+ )
else:
- f.write(';UNKNOWN: {}\n'.format(curve_name).encode(sys_default_enc))
+ f.write(f';UNKNOWN: {curve_name}\n'.encode(sys_default_enc))
for point in curve.points:
x = point[0]
y = point[1]
- f.write(_CURVE_ENTRY.format(name=curve_name, x=x, y=y, com=';').encode(sys_default_enc))
+ f.write(
+ _CURVE_ENTRY.format(
+ name=curve_name, x=x, y=y, com=';'
+ ).encode(sys_default_enc)
+ )
f.write('\n'.encode(sys_default_enc))
f.write('\n'.encode(sys_default_enc))
def _read_patterns(self):
_patterns = OrderedDict()
- for lnum, line in self.sections['[PATTERNS]']:
+ for lnum, line in self.sections['[PATTERNS]']: # noqa: B007
# read the lines for each pattern -- patterns can be multiple lines of arbitrary length
- line = line.split(';')[0]
+ line = line.split(';')[0] # noqa: PLW2901
current = line.split()
if current == []:
continue
@@ -1012,23 +1218,30 @@ def _read_patterns(self):
for i in current[1:]:
_patterns[pattern_name].append(float(i))
for pattern_name, pattern in _patterns.items():
- # add the patterns to the water newtork model
+ # add the patterns to the water network model
self.wn.add_pattern(pattern_name, pattern)
- if not self.wn.options.hydraulic.pattern and '1' in _patterns.keys():
+ if not self.wn.options.hydraulic.pattern and '1' in _patterns.keys(): # noqa: SIM118
# If there is a pattern called "1", then it is the default pattern if no other is supplied
self.wn.options.hydraulic.pattern = '1'
- elif self.wn.options.hydraulic.pattern not in _patterns.keys():
+ elif self.wn.options.hydraulic.pattern not in _patterns.keys(): # noqa: SIM118
# Sanity check - if the default pattern does not exist and it is not '1' then balk
# If default is '1' but it does not exist, then it is constant
# Any other default that does not exist is an error
- if self.wn.options.hydraulic.pattern is not None and self.wn.options.hydraulic.pattern != '1':
- raise KeyError('Default pattern {} is undefined'.format(self.wn.options.hydraulic.pattern))
+ if (
+ self.wn.options.hydraulic.pattern is not None
+ and self.wn.options.hydraulic.pattern != '1'
+ ):
+ raise KeyError( # noqa: TRY003
+ f'Default pattern {self.wn.options.hydraulic.pattern} is undefined' # noqa: EM102
+ )
self.wn.options.hydraulic.pattern = None
- def _write_patterns(self, f, wn):
+ def _write_patterns(self, f, wn): # noqa: PLR6301
num_columns = 6
f.write('[PATTERNS]\n'.encode(sys_default_enc))
- f.write('{:10s} {:10s}\n'.format(';ID', 'Multipliers').encode(sys_default_enc))
+ f.write(
+ '{:10s} {:10s}\n'.format(';ID', 'Multipliers').encode(sys_default_enc)
+ )
patterns = list(wn.pattern_name_list)
# patterns.sort()
for pattern_name in patterns:
@@ -1036,26 +1249,28 @@ def _write_patterns(self, f, wn):
count = 0
for i in pattern.multipliers:
if count % num_columns == 0:
- f.write('\n{:s} {:f}'.format(pattern_name, i).encode(sys_default_enc))
+ f.write(f'\n{pattern_name:s} {i:f}'.encode(sys_default_enc))
else:
- f.write(' {:f}'.format(i).encode(sys_default_enc))
- count += 1
+ f.write(f' {i:f}'.encode(sys_default_enc))
+ count += 1 # noqa: SIM113
f.write('\n'.encode(sys_default_enc))
f.write('\n'.encode(sys_default_enc))
- def _read_energy(self):
- for lnum, line in self.sections['[ENERGY]']:
- line = line.split(';')[0]
+ def _read_energy(self): # noqa: C901
+ for lnum, line in self.sections['[ENERGY]']: # noqa: B007
+ line = line.split(';')[0] # noqa: PLW2901
current = line.split()
if current == []:
continue
# Only add head curves for pumps
if current[0].upper() == 'GLOBAL':
if current[1].upper() == 'PRICE':
- self.wn.options.energy.global_price = from_si(self.flow_units, float(current[2]), HydParam.Energy)
+ self.wn.options.energy.global_price = from_si(
+ self.flow_units, float(current[2]), HydParam.Energy
+ )
elif current[1].upper() == 'PATTERN':
self.wn.options.energy.global_pattern = current[2]
- elif current[1].upper() in ['EFFIC', 'EFFICIENCY']:
+ elif current[1].upper() in ['EFFIC', 'EFFICIENCY']: # noqa: PLR6201
self.wn.options.energy.global_efficiency = float(current[2])
else:
logger.warning('Unknown entry in ENERGY section: %s', line)
@@ -1065,10 +1280,12 @@ def _read_energy(self):
pump_name = current[1]
pump = self.wn.links[pump_name]
if current[2].upper() == 'PRICE':
- pump.energy_price = from_si(self.flow_units, float(current[3]), HydParam.Energy)
+ pump.energy_price = from_si(
+ self.flow_units, float(current[3]), HydParam.Energy
+ )
elif current[2].upper() == 'PATTERN':
pump.energy_pattern = current[3]
- elif current[2].upper() in ['EFFIC', 'EFFICIENCY']:
+ elif current[2].upper() in ['EFFIC', 'EFFICIENCY']: # noqa: PLR6201
curve_name = current[3]
curve_points = []
for point in self.curves[curve_name]:
@@ -1085,49 +1302,87 @@ def _read_energy(self):
def _write_energy(self, f, wn):
f.write('[ENERGY]\n'.encode(sys_default_enc))
- if True: #wn.energy is not None:
+ if True: # wn.energy is not None:
if wn.options.energy.global_efficiency is not None:
- f.write('GLOBAL EFFICIENCY {:.4f}\n'.format(wn.options.energy.global_efficiency).encode(sys_default_enc))
+ f.write(
+ f'GLOBAL EFFICIENCY {wn.options.energy.global_efficiency:.4f}\n'.encode(
+ sys_default_enc
+ )
+ )
if wn.options.energy.global_price is not None:
- f.write('GLOBAL PRICE {:.4f}\n'.format(to_si(self.flow_units, wn.options.energy.global_price, HydParam.Energy)).encode(sys_default_enc))
+ f.write(
+ 'GLOBAL PRICE {:.4f}\n'.format(
+ to_si(
+ self.flow_units,
+ wn.options.energy.global_price,
+ HydParam.Energy,
+ )
+ ).encode(sys_default_enc)
+ )
if wn.options.energy.demand_charge is not None:
- f.write('DEMAND CHARGE {:.4f}\n'.format(wn.options.energy.demand_charge).encode(sys_default_enc))
+ f.write(
+ f'DEMAND CHARGE {wn.options.energy.demand_charge:.4f}\n'.encode(
+ sys_default_enc
+ )
+ )
if wn.options.energy.global_pattern is not None:
- f.write('GLOBAL PATTERN {:s}\n'.format(wn.options.energy.global_pattern).encode(sys_default_enc))
+ f.write(
+ f'GLOBAL PATTERN {wn.options.energy.global_pattern:s}\n'.encode(
+ sys_default_enc
+ )
+ )
lnames = list(wn.pump_name_list)
lnames.sort()
for pump_name in lnames:
pump = wn.links[pump_name]
if pump.efficiency is not None:
- f.write('PUMP {:10s} EFFIC {:s}\n'.format(pump_name, pump.efficiency.name).encode(sys_default_enc))
+ f.write(
+ f'PUMP {pump_name:10s} EFFIC {pump.efficiency.name:s}\n'.encode(
+ sys_default_enc
+ )
+ )
if pump.energy_price is not None:
- f.write('PUMP {:10s} PRICE {:.4f}\n'.format(pump_name, to_si(self.flow_units, pump.energy_price, HydParam.Energy)).encode(sys_default_enc))
+ f.write(
+ f'PUMP {pump_name:10s} PRICE {to_si(self.flow_units, pump.energy_price, HydParam.Energy):.4f}\n'.encode(
+ sys_default_enc
+ )
+ )
if pump.energy_pattern is not None:
- f.write('PUMP {:10s} PATTERN {:s}\n'.format(pump_name, pump.energy_pattern).encode(sys_default_enc))
+ f.write(
+ f'PUMP {pump_name:10s} PATTERN {pump.energy_pattern:s}\n'.encode(
+ sys_default_enc
+ )
+ )
f.write('\n'.encode(sys_default_enc))
def _read_status(self):
- for lnum, line in self.sections['[STATUS]']:
- line = line.split(';')[0]
+ for lnum, line in self.sections['[STATUS]']: # noqa: B007
+ line = line.split(';')[0] # noqa: PLW2901
current = line.split()
if current == []:
continue
-# assert(len(current) == 2), ("Error reading [STATUS] block, Check format.")
+ # assert(len(current) == 2), ("Error reading [STATUS] block, Check format.")
link = self.wn.get_link(current[0])
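+            # A [STATUS] value is either a keyword (OPEN/CLOSED/ACTIVE) or a numeric initial setting;
+            # numeric valve settings are converted according to the valve type.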
- if (current[1].upper() == 'OPEN' or
- current[1].upper() == 'CLOSED' or
- current[1].upper() == 'ACTIVE'):
+ if (
+ current[1].upper() == 'OPEN'
+ or current[1].upper() == 'CLOSED'
+ or current[1].upper() == 'ACTIVE'
+ ):
new_status = LinkStatus[current[1].upper()]
link.initial_status = new_status
- link._user_status = new_status
+ link._user_status = new_status # noqa: SLF001
else:
if isinstance(link, wntrfr.network.Valve):
new_status = LinkStatus.Active
valve_type = link.valve_type
- if valve_type in ['PRV', 'PSV', 'PBV']:
- setting = to_si(self.flow_units, float(current[1]), HydParam.Pressure)
+ if valve_type in ['PRV', 'PSV', 'PBV']: # noqa: PLR6201
+ setting = to_si(
+ self.flow_units, float(current[1]), HydParam.Pressure
+ )
elif valve_type == 'FCV':
- setting = to_si(self.flow_units, float(current[1]), HydParam.Flow)
+ setting = to_si(
+ self.flow_units, float(current[1]), HydParam.Flow
+ )
elif valve_type == 'TCV':
setting = float(current[1])
else:
@@ -1135,41 +1390,53 @@ def _read_status(self):
else:
new_status = LinkStatus.Open
setting = float(current[1])
-# link.setting = setting
+ # link.setting = setting
link.initial_setting = setting
- link._user_status = new_status
+ link._user_status = new_status # noqa: SLF001
link.initial_status = new_status
- def _write_status(self, f, wn):
+ def _write_status(self, f, wn): # noqa: PLR6301
f.write('[STATUS]\n'.encode(sys_default_enc))
- f.write( '{:10s} {:10s}\n'.format(';ID', 'Setting').encode(sys_default_enc))
+ f.write('{:10s} {:10s}\n'.format(';ID', 'Setting').encode(sys_default_enc))
pnames = list(wn.pump_name_list)
for pump_name in pnames:
pump = wn.links[pump_name]
-
- if pump._is_isolated == True: #Sina added this
+
+ if pump._is_isolated == True: # Sina added this # noqa: SLF001, E712
continue
-
- if pump.initial_status in (LinkStatus.Closed,):
- f.write('{:10s} {:10s}\n'.format(pump_name, LinkStatus(pump.initial_status).name).encode(sys_default_enc))
+
+ if pump.initial_status == LinkStatus.Closed:
+ f.write(
+ f'{pump_name:10s} {LinkStatus(pump.initial_status).name:10s}\n'.encode(
+ sys_default_enc
+ )
+ )
else:
setting = pump.initial_setting
if isinstance(setting, float) and setting != 1.0:
- f.write('{:10s} {:10.7g}\n'.format(pump_name, setting).encode(sys_default_enc))
-
+ f.write(
+ f'{pump_name:10s} {setting:10.7g}\n'.encode(sys_default_enc)
+ )
+
vnames = list(wn.valve_name_list)
# lnames.sort()
for valve_name in vnames:
valve = wn.links[valve_name]
-
- if valve._is_isolated == True: #Sina added this
+
+ if valve._is_isolated == True: # Sina added this # noqa: SLF001, E712
continue
-
- #valve_type = valve.valve_type
- if valve.initial_status not in (LinkStatus.Active,): #LinkStatus.Opened, LinkStatus.Open,
- f.write('{:10s} {:10s}\n'.format(valve_name, LinkStatus(valve.initial_status).name).encode(sys_default_enc))
+ # valve_type = valve.valve_type
+
+ if (
+ valve.initial_status != LinkStatus.Active
+ ): # LinkStatus.Opened, LinkStatus.Open,
+ f.write(
+ f'{valve_name:10s} {LinkStatus(valve.initial_status).name:10s}\n'.encode(
+ sys_default_enc
+ )
+ )
# if valve_type in ['PRV', 'PSV', 'PBV']:
# valve_set = from_si(self.flow_units, valve.initial_setting, HydParam.Pressure)
# elif valve_type == 'FCV':
@@ -1180,39 +1447,46 @@ def _write_status(self, f, wn):
# valve_set = None
# if valve_set is not None:
# f.write('{:10s} {:10.7g}\n'.format(valve_name, float(valve_set)).encode(sys_default_enc))
-
+
f.write('\n'.encode(sys_default_enc))
def _read_controls(self):
control_count = 0
- for lnum, line in self.sections['[CONTROLS]']:
-
+ for lnum, line in self.sections['[CONTROLS]']: # noqa: B007
control_count += 1
- control_name = 'control '+str(control_count)
-
- control_obj = _read_control_line(line, self.wn, self.flow_units, control_name)
+ control_name = 'control ' + str(control_count)
+
+ control_obj = _read_control_line(
+ line, self.wn, self.flow_units, control_name
+ )
if control_obj is None:
- control_count -= 1 # control was not found
+ control_count -= 1 # control was not found
continue
-
+
if control_name in self.wn.control_name_list:
- warnings.warn('One or more [CONTROLS] were duplicated in "{}"; duplicates are ignored.'.format(self.wn.name), stacklevel=0)
- logger.warning('Control already exists: "{}"'.format(control_name))
+ warnings.warn(
+ f'One or more [CONTROLS] were duplicated in "{self.wn.name}"; duplicates are ignored.',
+ stacklevel=0,
+ )
+ logger.warning(f'Control already exists: "{control_name}"')
else:
self.wn.add_control(control_name, control_obj)
- def _write_controls(self, f, wn):
+ def _write_controls(self, f, wn): # noqa: C901
def get_setting(control_action, control_name):
- value = control_action._value
- attribute = control_action._attribute.lower()
+ value = control_action._value # noqa: SLF001
+ attribute = control_action._attribute.lower() # noqa: SLF001
if attribute == 'status':
setting = LinkStatus(value).name
elif attribute == 'base_speed':
setting = str(value)
- elif attribute == 'setting' and isinstance(control_action._target_obj, Valve):
- valve = control_action._target_obj
+ elif attribute == 'setting' and isinstance(
+ control_action._target_obj, # noqa: SLF001
+ Valve,
+ ):
+ valve = control_action._target_obj # noqa: SLF001
valve_type = valve.valve_type
- if valve_type == 'PRV' or valve_type == 'PSV' or valve_type == 'PBV':
+ if valve_type == 'PRV' or valve_type == 'PSV' or valve_type == 'PBV': # noqa: PLR1714
setting = str(from_si(self.flow_units, value, HydParam.Pressure))
elif valve_type == 'FCV':
setting = str(from_si(self.flow_units, value, HydParam.Flow))
@@ -1226,95 +1500,126 @@ def get_setting(control_action, control_name):
setting = value
else:
setting = None
- logger.warning('Could not write control '+str(control_name)+' - skipping')
+ logger.warning(
+ 'Could not write control ' + str(control_name) + ' - skipping' # noqa: G003
+ )
return setting
f.write('[CONTROLS]\n'.encode(sys_default_enc))
# Time controls and conditional controls only
for text, all_control in wn.controls():
- control_action = all_control._then_actions[0]
-
- if control_action._target_obj._is_isolated==True: #Sina added this
+ control_action = all_control._then_actions[0] # noqa: SLF001
+
+ # Sina added this
+ if control_action._target_obj._is_isolated == True: # noqa: E712, SLF001
continue
-
+
if all_control.epanet_control_type is not _ControlType.rule:
- if len(all_control._then_actions) != 1 or len(all_control._else_actions) != 0:
- logger.error('Too many actions on CONTROL "%s"'%text)
- raise RuntimeError('Too many actions on CONTROL "%s"'%text)
+ if (
+ len(all_control._then_actions) != 1 # noqa: SLF001
+ or len(all_control._else_actions) != 0 # noqa: SLF001
+ ):
+ logger.error('Too many actions on CONTROL "%s"' % text) # noqa: G002, UP031
+ raise RuntimeError('Too many actions on CONTROL "%s"' % text) # noqa: UP031
if not isinstance(control_action.target()[0], Link):
continue
- if isinstance(all_control._condition, (SimTimeCondition, TimeOfDayCondition)):
+ if isinstance(
+ all_control._condition, # noqa: SLF001
+ (SimTimeCondition, TimeOfDayCondition),
+ ):
entry = '{ltype} {link} {setting} AT {compare} {time:g}\n'
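+                # The condition threshold is stored internally in seconds; it is divided by 3600 below
+                # so the control time is written in hours.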
- vals = {'ltype': control_action._target_obj.link_type,
- 'link': control_action._target_obj.name,
- 'setting': get_setting(control_action, text),
- 'compare': 'TIME',
- 'time': all_control._condition._threshold / 3600.0}
+ vals = {
+ 'ltype': control_action._target_obj.link_type, # noqa: SLF001
+ 'link': control_action._target_obj.name, # noqa: SLF001
+ 'setting': get_setting(control_action, text),
+ 'compare': 'TIME',
+ 'time': all_control._condition._threshold / 3600.0, # noqa: SLF001
+ }
if vals['setting'] is None:
continue
- if isinstance(all_control._condition, TimeOfDayCondition):
+ if isinstance(all_control._condition, TimeOfDayCondition): # noqa: SLF001
vals['compare'] = 'CLOCKTIME'
f.write(entry.format(**vals).encode(sys_default_enc))
- elif all_control._condition._source_obj._is_isolated == True: #Sina added this
+ elif (
+ all_control._condition._source_obj._is_isolated == True # noqa: SLF001, E712
+ ): # Sina added this
continue
- elif isinstance(all_control._condition, (ValueCondition)):
+ elif isinstance(all_control._condition, (ValueCondition)): # noqa: SLF001
entry = '{ltype} {link} {setting} IF {ntype} {node} {compare} {thresh}\n'
- vals = {'ltype': control_action._target_obj.link_type,
- 'link': control_action._target_obj.name,
- 'setting': get_setting(control_action, text),
- 'ntype': all_control._condition._source_obj.node_type,
- 'node': all_control._condition._source_obj.name,
- 'compare': 'above',
- 'thresh': 0.0}
+ vals = {
+ 'ltype': control_action._target_obj.link_type, # noqa: SLF001
+ 'link': control_action._target_obj.name, # noqa: SLF001
+ 'setting': get_setting(control_action, text),
+ 'ntype': all_control._condition._source_obj.node_type, # noqa: SLF001
+ 'node': all_control._condition._source_obj.name, # noqa: SLF001
+ 'compare': 'above',
+ 'thresh': 0.0,
+ }
if vals['setting'] is None:
continue
- if all_control._condition._relation in [np.less, np.less_equal, Comparison.le, Comparison.lt]:
+ if all_control._condition._relation in [ # noqa: PLR6201, SLF001
+ np.less,
+ np.less_equal,
+ Comparison.le,
+ Comparison.lt,
+ ]:
vals['compare'] = 'below'
- threshold = all_control._condition._threshold
- if isinstance(all_control._condition._source_obj, Tank):
- vals['thresh'] = from_si(self.flow_units, threshold, HydParam.HydraulicHead)
- elif isinstance(all_control._condition._source_obj, Junction):
- vals['thresh'] = from_si(self.flow_units, threshold, HydParam.Pressure)
- else:
- raise RuntimeError('Unknown control for EPANET INP files: %s' %type(all_control))
+ threshold = all_control._condition._threshold # noqa: SLF001
+ if isinstance(all_control._condition._source_obj, Tank): # noqa: SLF001
+ vals['thresh'] = from_si(
+ self.flow_units, threshold, HydParam.HydraulicHead
+ )
+ elif isinstance(all_control._condition._source_obj, Junction): # noqa: SLF001
+ vals['thresh'] = from_si(
+ self.flow_units, threshold, HydParam.Pressure
+ )
+ else:
+ raise RuntimeError( # noqa: TRY004
+ 'Unknown control for EPANET INP files: %s' # noqa: UP031
+ % type(all_control)
+ )
f.write(entry.format(**vals).encode(sys_default_enc))
elif not isinstance(all_control, Control):
- raise RuntimeError('Unknown control for EPANET INP files: %s' % type(all_control))
+ raise RuntimeError(
+ 'Unknown control for EPANET INP files: %s' # noqa: UP031
+ % type(all_control)
+ )
f.write('\n'.encode(sys_default_enc))
def _read_rules(self):
- rules = _EpanetRule.parse_rules_lines(self.sections['[RULES]'], self.flow_units, self.mass_units)
+ rules = _EpanetRule.parse_rules_lines(
+ self.sections['[RULES]'], self.flow_units, self.mass_units
+ )
for rule in rules:
ctrl = rule.generate_control(self.wn)
self.wn.add_control(ctrl.name, ctrl)
logger.debug('Added %s', str(ctrl))
# wn._en_rules = '\n'.join(self.sections['[RULES]'])
- #logger.warning('RULES are reapplied directly to an Epanet INP file on write; otherwise unsupported.')
+ # logger.warning('RULES are reapplied directly to an Epanet INP file on write; otherwise unsupported.')
def _write_rules(self, f, wn):
f.write('[RULES]\n'.encode(sys_default_enc))
for text, all_control in wn.controls():
entry = '{}\n'
if all_control.epanet_control_type == _ControlType.rule:
-
- #Sina added thsi begin
+ # Sina added this begin
try:
- if all_control._then_actions[0]._target_obj._is_isolated==True:
+ if all_control._then_actions[0]._target_obj._is_isolated == True: # noqa: SLF001, E712
continue
- except:
+ except: # noqa: S110, E722
pass
-
+
try:
- if all_control.condition._source_obj._is_isolated==True:
+ if all_control.condition._source_obj._is_isolated == True: # noqa: SLF001, E712
continue
- except:
+ except: # noqa: S110, E722
pass
-
- #Sina added thsi end
-
- if all_control.name == '':
- all_control._name = text
+
+ # Sina added this end
+
+ if all_control.name == '': # noqa: PLC1901
+ all_control._name = text # noqa: SLF001
rule = _EpanetRule('blah', self.flow_units, self.mass_units)
rule.from_if_then_else(all_control)
f.write(entry.format(str(rule)).encode(sys_default_enc))
@@ -1323,18 +1628,18 @@ def _write_rules(self, f, wn):
def _read_demands(self):
demand_num = 0
has_been_read = set()
- for lnum, line in self.sections['[DEMANDS]']:
+ for lnum, line in self.sections['[DEMANDS]']: # noqa: B007
ldata = line.split(';')
- if len(ldata) > 1 and (ldata[1] != ""):
+ if len(ldata) > 1 and (ldata[1] != ''): # noqa: PLC1901
category = ldata[1]
else:
category = None
current = ldata[0].split()
if current == []:
continue
- demand_num = demand_num + 1
+ demand_num = demand_num + 1 # noqa: PLR6104
node = self.wn.get_node(current[0])
- if len(current) == 2:
+ if len(current) == 2: # noqa: PLR2004
pattern = None
else:
pattern = self.wn.get_pattern(current[2])
@@ -1344,8 +1649,13 @@ def _read_demands(self):
del node.demand_timeseries_list[-1]
# In EPANET, the [DEMANDS] section overrides demands specified in [JUNCTIONS]
# node.demand_timeseries_list.remove_category('EN2 base')
- node.demand_timeseries_list.append((to_si(self.flow_units, float(current[1]), HydParam.Demand),
- pattern, category))
+ node.demand_timeseries_list.append(
+ (
+ to_si(self.flow_units, float(current[1]), HydParam.Demand),
+ pattern,
+ category,
+ )
+ )
def _write_demands(self, f, wn):
f.write('[DEMANDS]\n'.encode(sys_default_enc))
@@ -1355,76 +1665,101 @@ def _write_demands(self, f, wn):
nodes = list(wn.junction_name_list)
# nodes.sort()
for node in nodes:
- if wn.get_node(node)._is_isolated == True: #Sina added this
+ # Sina added this
+ if wn.get_node(node)._is_isolated == True: # noqa: E712, SLF001
continue
demands = wn.get_node(node).demand_timeseries_list
if len(demands) > 1:
- for ct, demand in enumerate(demands):
+ for ct, demand in enumerate(demands): # noqa: B007, FURB148
cat = str(demand.category)
- #if cat == 'EN2 base':
+ # if cat == 'EN2 base':
# cat = ''
if cat.lower() == 'none':
cat = ''
else:
cat = ' ;' + demand.category
- E = {'node': node,
- 'base': from_si(self.flow_units, demand.base_value, HydParam.Demand),
- 'pat': '',
- 'cat': cat }
+ E = { # noqa: N806
+ 'node': node,
+ 'base': from_si(
+ self.flow_units, demand.base_value, HydParam.Demand
+ ),
+ 'pat': '',
+ 'cat': cat,
+ }
if demand.pattern_name in wn.pattern_name_list:
E['pat'] = demand.pattern_name
- f.write(entry.format(E['node'], str(E['base']), E['pat'], E['cat']).encode(sys_default_enc))
+ f.write(
+ entry.format(
+ E['node'], str(E['base']), E['pat'], E['cat']
+ ).encode(sys_default_enc)
+ )
f.write('\n'.encode(sys_default_enc))
- ### Water Quality
+ # Water Quality
def _read_quality(self):
- for lnum, line in self.sections['[QUALITY]']:
- line = line.split(';')[0]
+ for lnum, line in self.sections['[QUALITY]']: # noqa: B007
+ line = line.split(';')[0] # noqa: PLW2901
current = line.split()
if current == []:
continue
node = self.wn.get_node(current[0])
if self.wn.options.quality.parameter == 'CHEMICAL':
- quality = to_si(self.flow_units, float(current[1]), QualParam.Concentration, mass_units=self.mass_units)
+ quality = to_si(
+ self.flow_units,
+ float(current[1]),
+ QualParam.Concentration,
+ mass_units=self.mass_units,
+ )
elif self.wn.options.quality.parameter == 'AGE':
- quality = to_si(self.flow_units, float(current[1]), QualParam.WaterAge)
- else :
+ quality = to_si(
+ self.flow_units, float(current[1]), QualParam.WaterAge
+ )
+ else:
quality = float(current[1])
node.initial_quality = quality
def _write_quality(self, f, wn):
f.write('[QUALITY]\n'.encode(sys_default_enc))
entry = '{:10s} {:10s}\n'
- label = '{:10s} {:10s}\n'
+ label = '{:10s} {:10s}\n' # noqa: F841
nnodes = list(wn.nodes.keys())
# nnodes.sort()
for node_name in nnodes:
node = wn.nodes[node_name]
- if node._is_isolated==True: #Sina added this
+ if node._is_isolated == True: # Sina added this # noqa: SLF001, E712
continue
if node.initial_quality:
if wn.options.quality.parameter == 'CHEMICAL':
- quality = from_si(self.flow_units, node.initial_quality, QualParam.Concentration, mass_units=self.mass_units)
+ quality = from_si(
+ self.flow_units,
+ node.initial_quality,
+ QualParam.Concentration,
+ mass_units=self.mass_units,
+ )
elif wn.options.quality.parameter == 'AGE':
- quality = from_si(self.flow_units, node.initial_quality, QualParam.WaterAge)
+ quality = from_si(
+ self.flow_units, node.initial_quality, QualParam.WaterAge
+ )
else:
quality = node.initial_quality
- f.write(entry.format(node_name, str(quality)).encode(sys_default_enc))
+ f.write(
+ entry.format(node_name, str(quality)).encode(sys_default_enc)
+ )
f.write('\n'.encode(sys_default_enc))
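Reviewer note: the branching in `_read_quality` (mirrored in `_write_quality` above) picks the conversion from the active quality parameter. A small sketch of the same decision, assuming the WNTR unit helpers from wntr.epanet.util:

from wntr.epanet.util import FlowUnits, MassUnits, QualParam, to_si

def initial_quality_to_si(value, parameter, flow_units=FlowUnits.GPM):
    if parameter == 'CHEMICAL':
        return to_si(flow_units, value, QualParam.Concentration, mass_units=MassUnits.mg)
    if parameter == 'AGE':
        return to_si(flow_units, value, QualParam.WaterAge)
    return value  # TRACE: percent of the traced node, left unconverted

print(initial_quality_to_si(24.0, 'AGE'))  # water age in hours -> seconds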
- def _read_reactions(self):
- BulkReactionCoeff = QualParam.BulkReactionCoeff
- WallReactionCoeff = QualParam.WallReactionCoeff
+ def _read_reactions(self): # noqa: C901
+ BulkReactionCoeff = QualParam.BulkReactionCoeff # noqa: N806
+ WallReactionCoeff = QualParam.WallReactionCoeff # noqa: N806
if self.mass_units is None:
self.mass_units = MassUnits.mg
- for lnum, line in self.sections['[REACTIONS]']:
- line = line.split(';')[0]
+ for lnum, line in self.sections['[REACTIONS]']: # noqa: B007
+ line = line.split(';')[0] # noqa: PLW2901
current = line.split()
if current == []:
continue
-# assert len(current) == 3, ('INP file option in [REACTIONS] block '
-# 'not recognized: ' + line)
+ # assert len(current) == 3, ('INP file option in [REACTIONS] block '
+ # 'not recognized: ' + line)
key1 = current[0].upper()
key2 = current[1].upper()
val3 = float(current[2])
@@ -1437,138 +1772,255 @@ def _read_reactions(self):
self.wn.options.reaction.tank_order = int(float(current[2]))
elif key1 == 'GLOBAL':
if key2 == 'BULK':
- self.wn.options.reaction.bulk_coeff = to_si(self.flow_units, val3, BulkReactionCoeff,
- mass_units=self.mass_units,
- reaction_order=self.wn.options.reaction.bulk_order)
+ self.wn.options.reaction.bulk_coeff = to_si(
+ self.flow_units,
+ val3,
+ BulkReactionCoeff,
+ mass_units=self.mass_units,
+ reaction_order=self.wn.options.reaction.bulk_order,
+ )
elif key2 == 'WALL':
- self.wn.options.reaction.wall_coeff = to_si(self.flow_units, val3, WallReactionCoeff,
- mass_units=self.mass_units,
- reaction_order=self.wn.options.reaction.wall_order)
+ self.wn.options.reaction.wall_coeff = to_si(
+ self.flow_units,
+ val3,
+ WallReactionCoeff,
+ mass_units=self.mass_units,
+ reaction_order=self.wn.options.reaction.wall_order,
+ )
elif key1 == 'BULK':
pipe = self.wn.get_link(current[1])
- pipe.bulk_coeff = to_si(self.flow_units, val3, BulkReactionCoeff,
- mass_units=self.mass_units,
- reaction_order=self.wn.options.reaction.bulk_order)
+ pipe.bulk_coeff = to_si(
+ self.flow_units,
+ val3,
+ BulkReactionCoeff,
+ mass_units=self.mass_units,
+ reaction_order=self.wn.options.reaction.bulk_order,
+ )
elif key1 == 'WALL':
pipe = self.wn.get_link(current[1])
- pipe.wall_coeff = to_si(self.flow_units, val3, WallReactionCoeff,
- mass_units=self.mass_units,
- reaction_order=self.wn.options.reaction.wall_order)
+ pipe.wall_coeff = to_si(
+ self.flow_units,
+ val3,
+ WallReactionCoeff,
+ mass_units=self.mass_units,
+ reaction_order=self.wn.options.reaction.wall_order,
+ )
elif key1 == 'TANK':
tank = self.wn.get_node(current[1])
- tank.bulk_coeff = to_si(self.flow_units, val3, BulkReactionCoeff,
- mass_units=self.mass_units,
- reaction_order=self.wn.options.reaction.bulk_order)
+ tank.bulk_coeff = to_si(
+ self.flow_units,
+ val3,
+ BulkReactionCoeff,
+ mass_units=self.mass_units,
+ reaction_order=self.wn.options.reaction.bulk_order,
+ )
elif key1 == 'LIMITING':
self.wn.options.reaction.limiting_potential = float(current[2])
elif key1 == 'ROUGHNESS':
self.wn.options.reaction.roughness_correl = float(current[2])
else:
- raise RuntimeError('Reaction option not recognized: %s'%key1)
+ raise RuntimeError('Reaction option not recognized: %s' % key1) # noqa: UP031
def _write_reactions(self, f, wn):
- f.write( '[REACTIONS]\n'.encode(sys_default_enc))
- f.write(';Type Pipe/Tank Coefficient\n'.encode(sys_default_enc))
+ f.write('[REACTIONS]\n'.encode(sys_default_enc))
+ f.write(
+ ';Type Pipe/Tank Coefficient\n'.encode(
+ sys_default_enc
+ )
+ )
entry_int = ' {:s} {:s} {:d}\n'
entry_float = ' {:s} {:s} {:<10.4f}\n'
for tank_name, tank in wn.nodes(Tank):
- if tank._is_isolated==True: #Sina added this
+ if tank._is_isolated == True: # Sina added this # noqa: SLF001, E712
continue
if tank.bulk_coeff is not None:
- f.write(entry_float.format('TANK',tank_name,
- from_si(self.flow_units,
- tank.bulk_coeff,
- QualParam.BulkReactionCoeff,
- mass_units=self.mass_units,
- reaction_order=wn.options.reaction.bulk_order)).encode(sys_default_enc))
+ f.write(
+ entry_float.format(
+ 'TANK',
+ tank_name,
+ from_si(
+ self.flow_units,
+ tank.bulk_coeff,
+ QualParam.BulkReactionCoeff,
+ mass_units=self.mass_units,
+ reaction_order=wn.options.reaction.bulk_order,
+ ),
+ ).encode(sys_default_enc)
+ )
for pipe_name, pipe in wn.links(Pipe):
- if pipe._is_isolated==True: #Sina added this
+ if pipe._is_isolated == True: # Sina added this # noqa: SLF001, E712
continue
if pipe.bulk_coeff is not None:
- f.write(entry_float.format('BULK',pipe_name,
- from_si(self.flow_units,
- pipe.bulk_coeff,
- QualParam.BulkReactionCoeff,
- mass_units=self.mass_units,
- reaction_order=wn.options.reaction.bulk_order)).encode(sys_default_enc))
+ f.write(
+ entry_float.format(
+ 'BULK',
+ pipe_name,
+ from_si(
+ self.flow_units,
+ pipe.bulk_coeff,
+ QualParam.BulkReactionCoeff,
+ mass_units=self.mass_units,
+ reaction_order=wn.options.reaction.bulk_order,
+ ),
+ ).encode(sys_default_enc)
+ )
if pipe.wall_coeff is not None:
- f.write(entry_float.format('WALL',pipe_name,
- from_si(self.flow_units,
- pipe.wall_coeff,
- QualParam.WallReactionCoeff,
- mass_units=self.mass_units,
- reaction_order=wn.options.reaction.wall_order)).encode(sys_default_enc))
+ f.write(
+ entry_float.format(
+ 'WALL',
+ pipe_name,
+ from_si(
+ self.flow_units,
+ pipe.wall_coeff,
+ QualParam.WallReactionCoeff,
+ mass_units=self.mass_units,
+ reaction_order=wn.options.reaction.wall_order,
+ ),
+ ).encode(sys_default_enc)
+ )
f.write('\n'.encode(sys_default_enc))
-# f.write('[REACTIONS]\n'.encode(sys_default_enc)) # EPANET GUI puts this line in here
- f.write(entry_int.format('ORDER', 'BULK', int(wn.options.reaction.bulk_order)).encode(sys_default_enc))
- f.write(entry_int.format('ORDER', 'TANK', int(wn.options.reaction.tank_order)).encode(sys_default_enc))
- f.write(entry_int.format('ORDER', 'WALL', int(wn.options.reaction.wall_order)).encode(sys_default_enc))
- f.write(entry_float.format('GLOBAL','BULK',
- from_si(self.flow_units,
- wn.options.reaction.bulk_coeff,
- QualParam.BulkReactionCoeff,
- mass_units=self.mass_units,
- reaction_order=wn.options.reaction.bulk_order)).encode(sys_default_enc))
- f.write(entry_float.format('GLOBAL','WALL',
- from_si(self.flow_units,
- wn.options.reaction.wall_coeff,
- QualParam.WallReactionCoeff,
- mass_units=self.mass_units,
- reaction_order=wn.options.reaction.wall_order)).encode(sys_default_enc))
+ # f.write('[REACTIONS]\n'.encode(sys_default_enc)) # EPANET GUI puts this line in here
+ f.write(
+ entry_int.format(
+ 'ORDER', 'BULK', int(wn.options.reaction.bulk_order)
+ ).encode(sys_default_enc)
+ )
+ f.write(
+ entry_int.format(
+ 'ORDER', 'TANK', int(wn.options.reaction.tank_order)
+ ).encode(sys_default_enc)
+ )
+ f.write(
+ entry_int.format(
+ 'ORDER', 'WALL', int(wn.options.reaction.wall_order)
+ ).encode(sys_default_enc)
+ )
+ f.write(
+ entry_float.format(
+ 'GLOBAL',
+ 'BULK',
+ from_si(
+ self.flow_units,
+ wn.options.reaction.bulk_coeff,
+ QualParam.BulkReactionCoeff,
+ mass_units=self.mass_units,
+ reaction_order=wn.options.reaction.bulk_order,
+ ),
+ ).encode(sys_default_enc)
+ )
+ f.write(
+ entry_float.format(
+ 'GLOBAL',
+ 'WALL',
+ from_si(
+ self.flow_units,
+ wn.options.reaction.wall_coeff,
+ QualParam.WallReactionCoeff,
+ mass_units=self.mass_units,
+ reaction_order=wn.options.reaction.wall_order,
+ ),
+ ).encode(sys_default_enc)
+ )
if wn.options.reaction.limiting_potential is not None:
- f.write(entry_float.format('LIMITING','POTENTIAL',wn.options.reaction.limiting_potential).encode(sys_default_enc))
+ f.write(
+ entry_float.format(
+ 'LIMITING', 'POTENTIAL', wn.options.reaction.limiting_potential
+ ).encode(sys_default_enc)
+ )
if wn.options.reaction.roughness_correl is not None:
- f.write(entry_float.format('ROUGHNESS','CORRELATION',wn.options.reaction.roughness_correl).encode(sys_default_enc))
+ f.write(
+ entry_float.format(
+ 'ROUGHNESS', 'CORRELATION', wn.options.reaction.roughness_correl
+ ).encode(sys_default_enc)
+ )
f.write('\n'.encode(sys_default_enc))
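Reviewer note: both `_read_reactions` and `_write_reactions` pass the configured reaction order through to the unit converters. A hedged round-trip sketch for a first-order bulk coefficient (units and values chosen purely for illustration):

from wntr.epanet.util import FlowUnits, MassUnits, QualParam, from_si, to_si

k_inp = 0.5  # first-order bulk coefficient as it would appear in an INP file (1/day)
k_si = to_si(FlowUnits.GPM, k_inp, QualParam.BulkReactionCoeff,
             mass_units=MassUnits.mg, reaction_order=1)
k_back = from_si(FlowUnits.GPM, k_si, QualParam.BulkReactionCoeff,
                 mass_units=MassUnits.mg, reaction_order=1)
assert abs(k_back - k_inp) < 1e-12  # to_si/from_si invert each other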
def _read_sources(self):
source_num = 0
- for lnum, line in self.sections['[SOURCES]']:
- line = line.split(';')[0]
+ for lnum, line in self.sections['[SOURCES]']: # noqa: B007
+ line = line.split(';')[0] # noqa: PLW2901
current = line.split()
if current == []:
continue
-# assert(len(current) >= 3), ("Error reading sources. Check format.")
- source_num = source_num + 1
+ # assert(len(current) >= 3), ("Error reading sources. Check format.")
+ source_num = source_num + 1 # noqa: PLR6104
if current[0].upper() == 'MASS':
- strength = to_si(self.flow_units, float(current[2]), QualParam.SourceMassInject, self.mass_units)
+ strength = to_si(
+ self.flow_units,
+ float(current[2]),
+ QualParam.SourceMassInject,
+ self.mass_units,
+ )
else:
- strength = to_si(self.flow_units, float(current[2]), QualParam.Concentration, self.mass_units)
- if len(current) == 3:
- self.wn.add_source('INP'+str(source_num), current[0], current[1], strength, None)
+ strength = to_si(
+ self.flow_units,
+ float(current[2]),
+ QualParam.Concentration,
+ self.mass_units,
+ )
+ if len(current) == 3: # noqa: PLR2004
+ self.wn.add_source(
+ 'INP' + str(source_num), current[0], current[1], strength, None
+ )
else:
- self.wn.add_source('INP'+str(source_num), current[0], current[1], strength, current[3])
+ self.wn.add_source(
+ 'INP' + str(source_num),
+ current[0],
+ current[1],
+ strength,
+ current[3],
+ )
def _write_sources(self, f, wn):
f.write('[SOURCES]\n'.encode(sys_default_enc))
entry = '{:10s} {:10s} {:10s} {:10s}\n'
label = '{:10s} {:10s} {:10s} {:10s}\n'
- f.write(label.format(';Node', 'Type', 'Quality', 'Pattern').encode(sys_default_enc))
- nsources = list(wn._sources.keys())
+ f.write(
+ label.format(';Node', 'Type', 'Quality', 'Pattern').encode(
+ sys_default_enc
+ )
+ )
+ nsources = list(wn._sources.keys()) # noqa: SLF001
# nsources.sort()
for source_name in nsources:
- source = wn._sources[source_name]
-
- if source._is_isolated==True: #Sina added this
+ source = wn._sources[source_name] # noqa: SLF001
+
+ if source._is_isolated == True: # Sina added this # noqa: SLF001, E712
continue
-
+
if source.source_type.upper() == 'MASS':
- strength = from_si(self.flow_units, source.strength_timeseries.base_value, QualParam.SourceMassInject, self.mass_units)
- else: # CONC, SETPOINT, FLOWPACED
- strength = from_si(self.flow_units, source.strength_timeseries.base_value, QualParam.Concentration, self.mass_units)
-
- E = {'node': source.node_name,
- 'type': source.source_type,
- 'quality': str(strength),
- 'pat': ''}
+ strength = from_si(
+ self.flow_units,
+ source.strength_timeseries.base_value,
+ QualParam.SourceMassInject,
+ self.mass_units,
+ )
+ else: # CONC, SETPOINT, FLOWPACED
+ strength = from_si(
+ self.flow_units,
+ source.strength_timeseries.base_value,
+ QualParam.Concentration,
+ self.mass_units,
+ )
+
+ E = { # noqa: N806
+ 'node': source.node_name,
+ 'type': source.source_type,
+ 'quality': str(strength),
+ 'pat': '',
+ }
if source.strength_timeseries.pattern_name is not None:
E['pat'] = source.strength_timeseries.pattern_name
- f.write(entry.format(E['node'], E['type'], str(E['quality']), E['pat']).encode(sys_default_enc))
+ f.write(
+ entry.format(
+ E['node'], E['type'], str(E['quality']), E['pat']
+ ).encode(sys_default_enc)
+ )
f.write('\n'.encode(sys_default_enc))
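Reviewer note: the only unit difference between source types above is MASS boosters versus everything else. A minimal sketch of that choice, assuming the same WNTR helpers:

from wntr.epanet.util import FlowUnits, MassUnits, QualParam, to_si

def source_strength_to_si(source_type, value, flow_units=FlowUnits.GPM):
    param = (QualParam.SourceMassInject if source_type.upper() == 'MASS'
             else QualParam.Concentration)  # CONCEN, SETPOINT, FLOWPACED
    return to_si(flow_units, value, param, MassUnits.mg)

print(source_strength_to_si('MASS', 12.0))
print(source_strength_to_si('CONCEN', 1.5))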
def _read_mixing(self):
- for lnum, line in self.sections['[MIXING]']:
- line = line.split(';')[0]
+ for lnum, line in self.sections['[MIXING]']: # noqa: B007
+ line = line.split(';')[0] # noqa: PLW2901
current = line.split()
if current == []:
continue
@@ -1577,56 +2029,86 @@ def _read_mixing(self):
tank = self.wn.get_node(tank_name)
if key == 'MIXED':
tank.mixing_model = MixType.Mix1
- elif key == '2COMP' and len(current) > 2:
+ elif key == '2COMP' and len(current) > 2: # noqa: PLR2004
tank.mixing_model = MixType.Mix2
tank.mixing_fraction = float(current[2])
- elif key == '2COMP' and len(current) < 3:
- raise RuntimeError('Mixing model 2COMP requires fraction on tank %s'%tank_name)
+ elif key == '2COMP' and len(current) < 3: # noqa: PLR2004
+ raise RuntimeError(
+ 'Mixing model 2COMP requires fraction on tank %s' % tank_name # noqa: UP031
+ )
elif key == 'FIFO':
tank.mixing_model = MixType.FIFO
elif key == 'LIFO':
tank.mixing_model = MixType.LIFO
- def _write_mixing(self, f, wn):
+ def _write_mixing(self, f, wn): # noqa: PLR6301
f.write('[MIXING]\n'.encode(sys_default_enc))
- f.write('{:20s} {:5s} {}\n'.format(';Tank ID', 'Model', 'Fraction').encode(sys_default_enc))
+ f.write(
+ '{:20s} {:5s} {}\n'.format(';Tank ID', 'Model', 'Fraction').encode(
+ sys_default_enc
+ )
+ )
lnames = list(wn.tank_name_list)
# lnames.sort()
for tank_name in lnames:
tank = wn.nodes[tank_name]
- if tank._mixing_model is not None:
- if tank._is_isolated == True: #Sina added this
+ if tank._mixing_model is not None: # noqa: SLF001
+ # Sina added this
+ if tank._is_isolated == True: # noqa: SLF001, E712
continue
- if tank._mixing_model in [MixType.Mixed, MixType.Mix1, 0]:
- f.write(' {:19s} MIXED\n'.format(tank_name).encode(sys_default_enc))
- elif tank._mixing_model in [MixType.TwoComp, MixType.Mix2, '2comp', '2COMP', 1]:
- f.write(' {:19s} 2COMP {}\n'.format(tank_name, tank.mixing_fraction).encode(sys_default_enc))
- elif tank._mixing_model in [MixType.FIFO, 2]:
- f.write(' {:19s} FIFO\n'.format(tank_name).encode(sys_default_enc))
- elif tank._mixing_model in [MixType.LIFO, 3]:
- f.write(' {:19s} LIFO\n'.format(tank_name).encode(sys_default_enc))
- elif isinstance(tank._mixing_model, str) and tank.mixing_fraction is not None:
- f.write(' {:19s} {} {}\n'.format(tank_name, tank._mixing_model, tank.mixing_fraction).encode(sys_default_enc))
- elif isinstance(tank._mixing_model, str):
- f.write(' {:19s} {}\n'.format(tank_name, tank._mixing_model).encode(sys_default_enc))
+ if tank._mixing_model in [MixType.Mixed, MixType.Mix1, 0]: # noqa: PLR6201, SLF001
+ f.write(f' {tank_name:19s} MIXED\n'.encode(sys_default_enc))
+ elif tank._mixing_model in [ # noqa: PLR6201, SLF001
+ MixType.TwoComp,
+ MixType.Mix2,
+ '2comp',
+ '2COMP',
+ 1,
+ ]:
+ f.write(
+ f' {tank_name:19s} 2COMP {tank.mixing_fraction}\n'.encode(
+ sys_default_enc
+ )
+ )
+ elif tank._mixing_model in [MixType.FIFO, 2]: # noqa: PLR6201, SLF001
+ f.write(f' {tank_name:19s} FIFO\n'.encode(sys_default_enc))
+ elif tank._mixing_model in [MixType.LIFO, 3]: # noqa: PLR6201, SLF001
+ f.write(f' {tank_name:19s} LIFO\n'.encode(sys_default_enc))
+ elif (
+ isinstance(tank._mixing_model, str) # noqa: SLF001
+ and tank.mixing_fraction is not None
+ ):
+ f.write(
+ f' {tank_name:19s} {tank._mixing_model} {tank.mixing_fraction}\n'.encode( # noqa: SLF001
+ sys_default_enc
+ )
+ )
+ elif isinstance(tank._mixing_model, str): # noqa: SLF001
+ f.write(
+ f' {tank_name:19s} {tank._mixing_model}\n'.encode( # noqa: SLF001
+ sys_default_enc
+ )
+ )
else:
- logger.warning('Unknown mixing model: %s', tank._mixing_model)
+ logger.warning('Unknown mixing model: %s', tank._mixing_model) # noqa: SLF001
f.write('\n'.encode(sys_default_enc))
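Reviewer note: a compact view of the [MIXING] records emitted above; 2COMP is the only model that carries the extra mixing-fraction field (sketch only, field widths copied from the patch):

def mixing_record(tank_name, model, fraction=None):
    if model == '2COMP':
        return f' {tank_name:19s} 2COMP {fraction}\n'
    return f' {tank_name:19s} {model}\n'

print(mixing_record('T1', 'MIXED'), end='')
print(mixing_record('T2', '2COMP', 0.2), end='')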
- ### Options and Reporting
+ # Options and Reporting
- def _read_options(self):
+ def _read_options(self): # noqa: C901, PLR0912
edata = OrderedDict()
wn = self.wn
opts = wn.options
- for lnum, line in self.sections['[OPTIONS]']:
+ for lnum, line in self.sections['[OPTIONS]']: # noqa: PLR1702
edata['lnum'] = lnum
edata['sec'] = '[OPTIONS]'
- words, comments = _split_line(line)
+ words, comments = _split_line(line) # noqa: F841
if words is not None and len(words) > 0:
- if len(words) < 2:
+ if len(words) < 2: # noqa: PLR2004
edata['key'] = words[0]
- raise RuntimeError('%(lnum)-6d %(sec)13s no value provided for %(key)s' % edata)
+ raise RuntimeError(
+ '%(lnum)-6d %(sec)13s no value provided for %(key)s' % edata
+ )
key = words[0].upper()
if key == 'UNITS':
self.flow_units = FlowUnits[words[1].upper()]
@@ -1638,15 +2120,15 @@ def _read_options(self):
opts.hydraulic.hydraulics_filename = words[2]
elif key == 'QUALITY':
mode = words[1].upper()
- if mode in ['NONE', 'AGE']:
+ if mode in ['NONE', 'AGE']: # noqa: PLR6201
opts.quality.parameter = words[1].upper()
- elif mode in ['TRACE']:
+ elif mode == 'TRACE':
opts.quality.parameter = 'TRACE'
opts.quality.trace_node = words[2]
else:
opts.quality.parameter = 'CHEMICAL'
opts.quality.chemical_name = words[1]
- if len(words) > 2:
+ if len(words) > 2: # noqa: PLR2004
if 'mg' in words[2].lower():
self.mass_units = MassUnits.mg
opts.quality.inpfile_units = words[2]
@@ -1654,10 +2136,12 @@ def _read_options(self):
self.mass_units = MassUnits.ug
opts.quality.inpfile_units = words[2]
else:
- raise ValueError('Invalid chemical units in OPTIONS section')
+ raise ValueError( # noqa: TRY003
+ 'Invalid chemical units in OPTIONS section' # noqa: EM101
+ )
else:
self.mass_units = MassUnits.mg
- opts.quality.inpfile_units = 'mg/L'
+ opts.quality.inpfile_units = 'mg/L'
elif key == 'VISCOSITY':
opts.hydraulic.viscosity = float(words[1])
elif key == 'DIFFUSIVITY':
@@ -1674,43 +2158,57 @@ def _read_options(self):
opts.hydraulic.flowchange = float(words[1])
elif key == 'UNBALANCED':
opts.hydraulic.unbalanced = words[1].upper()
- if len(words) > 2:
+ if len(words) > 2: # noqa: PLR2004
opts.hydraulic.unbalanced_value = int(words[2])
elif key == 'MINIMUM':
- minimum_pressure = to_si(self.flow_units, float(words[2]), HydParam.Pressure)
+ minimum_pressure = to_si(
+ self.flow_units, float(words[2]), HydParam.Pressure
+ )
opts.hydraulic.minimum_pressure = minimum_pressure
elif key == 'REQUIRED':
- required_pressure = to_si(self.flow_units, float(words[2]), HydParam.Pressure)
+ required_pressure = to_si(
+ self.flow_units, float(words[2]), HydParam.Pressure
+ )
opts.hydraulic.required_pressure = required_pressure
elif key == 'PRESSURE':
- if len(words) > 2:
+ if len(words) > 2: # noqa: PLR2004
if words[1].upper() == 'EXPONENT':
opts.hydraulic.pressure_exponent = float(words[2])
else:
edata['key'] = ' '.join(words)
- raise RuntimeError('%(lnum)-6d %(sec)13s unknown option %(key)s' % edata)
+ raise RuntimeError(
+ '%(lnum)-6d %(sec)13s unknown option %(key)s' % edata
+ )
else:
opts.hydraulic.inpfile_pressure_units = words[1]
elif key == 'PATTERN':
opts.hydraulic.pattern = words[1]
elif key == 'DEMAND':
- if len(words) > 2:
+ if len(words) > 2: # noqa: PLR2004
if words[1].upper() == 'MULTIPLIER':
opts.hydraulic.demand_multiplier = float(words[2])
elif words[1].upper() == 'MODEL':
opts.hydraulic.demand_model = words[2]
else:
edata['key'] = ' '.join(words)
- raise RuntimeError('%(lnum)-6d %(sec)13s unknown option %(key)s' % edata)
+ raise RuntimeError(
+ '%(lnum)-6d %(sec)13s unknown option %(key)s' % edata
+ )
else:
edata['key'] = ' '.join(words)
- raise RuntimeError('%(lnum)-6d %(sec)13s no value provided for %(key)s' % edata)
+ raise RuntimeError(
+ '%(lnum)-6d %(sec)13s no value provided for %(key)s'
+ % edata
+ )
elif key == 'EMITTER':
- if len(words) > 2:
+ if len(words) > 2: # noqa: PLR2004
opts.hydraulic.emitter_exponent = float(words[2])
else:
edata['key'] = 'EMITTER EXPONENT'
- raise RuntimeError('%(lnum)-6d %(sec)13s no value provided for %(key)s' % edata)
+ raise RuntimeError(
+ '%(lnum)-6d %(sec)13s no value provided for %(key)s'
+ % edata
+ )
elif key == 'TOLERANCE':
opts.quality.tolerance = float(words[1])
elif key == 'CHECKFREQ':
@@ -1721,139 +2219,309 @@ def _read_options(self):
opts.hydraulic.damplimit = float(words[1])
elif key == 'MAP':
opts.graphics.map_filename = words[1]
- else:
- if len(words) == 2:
- edata['key'] = words[0]
- setattr(opts, words[0].lower(), float(words[1]))
- logger.warn('%(lnum)-6d %(sec)13s option "%(key)s" is undocumented; adding, but please verify syntax', edata)
- elif len(words) == 3:
- edata['key'] = words[0] + ' ' + words[1]
- setattr(opts, words[0].lower() + '_' + words[1].lower(), float(words[2]))
- logger.warn('%(lnum)-6d %(sec)13s option "%(key)s" is undocumented; adding, but please verify syntax', edata)
+ elif len(words) == 2: # noqa: PLR2004
+ edata['key'] = words[0]
+ setattr(opts, words[0].lower(), float(words[1]))
+ logger.warning(
+ '%(lnum)-6d %(sec)13s option "%(key)s" is undocumented; adding, but please verify syntax',
+ edata,
+ )
+ elif len(words) == 3: # noqa: PLR2004
+ edata['key'] = words[0] + ' ' + words[1]
+ setattr(
+ opts,
+ words[0].lower() + '_' + words[1].lower(),
+ float(words[2]),
+ )
+ logger.warning(
+ '%(lnum)-6d %(sec)13s option "%(key)s" is undocumented; adding, but please verify syntax',
+ edata,
+ )
if isinstance(opts.time.report_timestep, (float, int)):
if opts.time.report_timestep < opts.time.hydraulic_timestep:
- raise RuntimeError('opts.report_timestep must be greater than or equal to opts.hydraulic_timestep.')
+ raise RuntimeError( # noqa: TRY003
+ 'opts.report_timestep must be greater than or equal to opts.hydraulic_timestep.' # noqa: EM101
+ )
if opts.time.report_timestep % opts.time.hydraulic_timestep != 0:
- raise RuntimeError('opts.report_timestep must be a multiple of opts.hydraulic_timestep')
+ raise RuntimeError( # noqa: TRY003
+ 'opts.report_timestep must be a multiple of opts.hydraulic_timestep' # noqa: EM101
+ )
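Reviewer note: the two RuntimeErrors above encode a simple invariant on the timing options. An isolated sketch of the same check (both values in seconds):

def check_report_step(report_step, hydraulic_step):
    if report_step < hydraulic_step:
        raise RuntimeError('report_timestep must be >= hydraulic_timestep')
    if report_step % hydraulic_step != 0:
        raise RuntimeError('report_timestep must be a multiple of hydraulic_timestep')

check_report_step(3600, 900)      # OK: four hydraulic steps per report step
# check_report_step(900, 3600)    # would raise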
- def _write_options(self, f, wn, version=2.2):
+ def _write_options(self, f, wn, version=2.2): # noqa: C901
f.write('[OPTIONS]\n'.encode(sys_default_enc))
entry_string = '{:20s} {:20s}\n'
entry_float = '{:20s} {:.11g}\n'
- f.write(entry_string.format('UNITS', self.flow_units.name).encode(sys_default_enc))
-
- f.write(entry_string.format('HEADLOSS', wn.options.hydraulic.headloss).encode(sys_default_enc))
-
- f.write(entry_float.format('SPECIFIC GRAVITY', wn.options.hydraulic.specific_gravity).encode(sys_default_enc))
-
- f.write(entry_float.format('VISCOSITY', wn.options.hydraulic.viscosity).encode(sys_default_enc))
-
- f.write(entry_float.format('TRIALS', wn.options.hydraulic.trials).encode(sys_default_enc))
-
- f.write(entry_float.format('ACCURACY', wn.options.hydraulic.accuracy).encode(sys_default_enc))
-
- f.write(entry_float.format('CHECKFREQ', wn.options.hydraulic.checkfreq).encode(sys_default_enc))
-
- f.write(entry_float.format('MAXCHECK', wn.options.hydraulic.maxcheck).encode(sys_default_enc))
+ f.write(
+ entry_string.format('UNITS', self.flow_units.name).encode(
+ sys_default_enc
+ )
+ )
+
+ f.write(
+ entry_string.format('HEADLOSS', wn.options.hydraulic.headloss).encode(
+ sys_default_enc
+ )
+ )
+
+ f.write(
+ entry_float.format(
+ 'SPECIFIC GRAVITY', wn.options.hydraulic.specific_gravity
+ ).encode(sys_default_enc)
+ )
+
+ f.write(
+ entry_float.format('VISCOSITY', wn.options.hydraulic.viscosity).encode(
+ sys_default_enc
+ )
+ )
+
+ f.write(
+ entry_float.format('TRIALS', wn.options.hydraulic.trials).encode(
+ sys_default_enc
+ )
+ )
+
+ f.write(
+ entry_float.format('ACCURACY', wn.options.hydraulic.accuracy).encode(
+ sys_default_enc
+ )
+ )
+
+ f.write(
+ entry_float.format('CHECKFREQ', wn.options.hydraulic.checkfreq).encode(
+ sys_default_enc
+ )
+ )
+
+ f.write(
+ entry_float.format('MAXCHECK', wn.options.hydraulic.maxcheck).encode(
+ sys_default_enc
+ )
+ )
# EPANET 2.2 OPTIONS
- if version == 2.0:
+ if version == 2.0: # noqa: PLR2004
pass
else:
- if wn.options.hydraulic.headerror != 0:
- f.write(entry_float.format('HEADERROR', wn.options.hydraulic.headerror).encode(sys_default_enc))
+ if wn.options.hydraulic.headerror != 0:
+ f.write(
+ entry_float.format(
+ 'HEADERROR', wn.options.hydraulic.headerror
+ ).encode(sys_default_enc)
+ )
if wn.options.hydraulic.flowchange != 0:
- f.write(entry_float.format('FLOWCHANGE', wn.options.hydraulic.flowchange).encode(sys_default_enc))
+ f.write(
+ entry_float.format(
+ 'FLOWCHANGE', wn.options.hydraulic.flowchange
+ ).encode(sys_default_enc)
+ )
# EPANET 2.x OPTIONS
if wn.options.hydraulic.damplimit != 0:
- f.write(entry_float.format('DAMPLIMIT', wn.options.hydraulic.damplimit).encode(sys_default_enc))
+ f.write(
+ entry_float.format(
+ 'DAMPLIMIT', wn.options.hydraulic.damplimit
+ ).encode(sys_default_enc)
+ )
if wn.options.hydraulic.unbalanced_value is None:
- f.write(entry_string.format('UNBALANCED', wn.options.hydraulic.unbalanced).encode(sys_default_enc))
+ f.write(
+ entry_string.format(
+ 'UNBALANCED', wn.options.hydraulic.unbalanced
+ ).encode(sys_default_enc)
+ )
else:
- f.write('{:20s} {:s} {:d}\n'.format('UNBALANCED', wn.options.hydraulic.unbalanced, wn.options.hydraulic.unbalanced_value).encode(sys_default_enc))
+ f.write(
+ '{:20s} {:s} {:d}\n'.format(
+ 'UNBALANCED',
+ wn.options.hydraulic.unbalanced,
+ wn.options.hydraulic.unbalanced_value,
+ ).encode(sys_default_enc)
+ )
if wn.options.hydraulic.pattern is not None:
- f.write(entry_string.format('PATTERN', wn.options.hydraulic.pattern).encode(sys_default_enc))
-
- f.write(entry_float.format('DEMAND MULTIPLIER', wn.options.hydraulic.demand_multiplier).encode(sys_default_enc))
+ f.write(
+ entry_string.format('PATTERN', wn.options.hydraulic.pattern).encode(
+ sys_default_enc
+ )
+ )
+
+ f.write(
+ entry_float.format(
+ 'DEMAND MULTIPLIER', wn.options.hydraulic.demand_multiplier
+ ).encode(sys_default_enc)
+ )
# EPANET 2.2 OPTIONS
- if version == 2.0:
- if wn.options.hydraulic.demand_model in ['PDA', 'PDD']:
- logger.critical('You have specified a PDD analysis using EPANET 2.0. This is not supported in EPANET 2.0. The analysis will default to DD mode.')
- else:
- if wn.options.hydraulic.demand_model in ['PDA', 'PDD']:
- f.write('{:20s} {}\n'.format('DEMAND MODEL', wn.options.hydraulic.demand_model).encode(sys_default_enc))
-
- minimum_pressure = from_si(self.flow_units, wn.options.hydraulic.minimum_pressure, HydParam.Pressure)
- f.write('{:20s} {:.2f}\n'.format('MINIMUM PRESSURE', minimum_pressure).encode(sys_default_enc))
+ if version == 2.0: # noqa: PLR2004
+ if wn.options.hydraulic.demand_model in ['PDA', 'PDD']: # noqa: PLR6201
+ logger.critical(
+ 'You have specified a PDD analysis using EPANET 2.0. This is not supported in EPANET 2.0. The analysis will default to DD mode.'
+ )
+ elif wn.options.hydraulic.demand_model in ['PDA', 'PDD']: # noqa: PLR6201
+ f.write(
+ '{:20s} {}\n'.format(
+ 'DEMAND MODEL', wn.options.hydraulic.demand_model
+ ).encode(sys_default_enc)
+ )
+
+ minimum_pressure = from_si(
+ self.flow_units,
+ wn.options.hydraulic.minimum_pressure,
+ HydParam.Pressure,
+ )
+ f.write(
+ '{:20s} {:.2f}\n'.format(
+ 'MINIMUM PRESSURE', minimum_pressure
+ ).encode(sys_default_enc)
+ )
+
+ required_pressure = from_si(
+ self.flow_units,
+ wn.options.hydraulic.required_pressure,
+ HydParam.Pressure,
+ )
+ if (
+ required_pressure >= 0.1 # noqa: PLR2004
+ ): # EPANET lower limit on required pressure = 0.1 (in psi or m)
+ f.write(
+ '{:20s} {:.2f}\n'.format(
+ 'REQUIRED PRESSURE', required_pressure
+ ).encode(sys_default_enc)
+ )
+ else:
+ warnings.warn( # noqa: B028
+ 'REQUIRED PRESSURE is below the lower limit for EPANET (0.1 in psi or m). The value has been set to 0.1 in the INP file.'
+ )
+ logger.warning(
+ 'REQUIRED PRESSURE is below the lower limit for EPANET (0.1 in psi or m). The value has been set to 0.1 in the INP file.'
+ )
+ f.write(
+ '{:20s} {:.2f}\n'.format('REQUIRED PRESSURE', 0.1).encode(
+ sys_default_enc
+ )
+ )
+ f.write(
+ '{:20s} {}\n'.format(
+ 'PRESSURE EXPONENT', wn.options.hydraulic.pressure_exponent
+ ).encode(sys_default_enc)
+ )
- required_pressure = from_si(self.flow_units, wn.options.hydraulic.required_pressure, HydParam.Pressure)
- if required_pressure >= 0.1: # EPANET lower limit on required pressure = 0.1 (in psi or m)
- f.write('{:20s} {:.2f}\n'.format('REQUIRED PRESSURE', required_pressure).encode(sys_default_enc))
- else:
- warnings.warn('REQUIRED PRESSURE is below the lower limit for EPANET (0.1 in psi or m). The value has been set to 0.1 in the INP file.')
- logger.warning('REQUIRED PRESSURE is below the lower limit for EPANET (0.1 in psi or m). The value has been set to 0.1 in the INP file.')
- f.write('{:20s} {:.2f}\n'.format('REQUIRED PRESSURE', 0.1).encode(sys_default_enc))
- f.write('{:20s} {}\n'.format('PRESSURE EXPONENT', wn.options.hydraulic.pressure_exponent).encode(sys_default_enc))
-
if wn.options.hydraulic.inpfile_pressure_units is not None:
- f.write(entry_string.format('PRESSURE', wn.options.hydraulic.inpfile_pressure_units).encode(sys_default_enc))
-
- # EPANET 2.0+ OPTIONS
- f.write(entry_float.format('EMITTER EXPONENT', wn.options.hydraulic.emitter_exponent).encode(sys_default_enc))
+ f.write(
+ entry_string.format(
+ 'PRESSURE', wn.options.hydraulic.inpfile_pressure_units
+ ).encode(sys_default_enc)
+ )
- if wn.options.quality.parameter.upper() in ['NONE', 'AGE']:
- f.write(entry_string.format('QUALITY', wn.options.quality.parameter).encode(sys_default_enc))
- elif wn.options.quality.parameter.upper() in ['TRACE']:
- f.write('{:20s} {} {}\n'.format('QUALITY', wn.options.quality.parameter, wn.options.quality.trace_node).encode(sys_default_enc))
+ # EPANET 2.0+ OPTIONS
+ f.write(
+ entry_float.format(
+ 'EMITTER EXPONENT', wn.options.hydraulic.emitter_exponent
+ ).encode(sys_default_enc)
+ )
+
+ if wn.options.quality.parameter.upper() in ['NONE', 'AGE']: # noqa: PLR6201
+ f.write(
+ entry_string.format('QUALITY', wn.options.quality.parameter).encode(
+ sys_default_enc
+ )
+ )
+ elif wn.options.quality.parameter.upper() == 'TRACE':
+ f.write(
+ '{:20s} {} {}\n'.format(
+ 'QUALITY',
+ wn.options.quality.parameter,
+ wn.options.quality.trace_node,
+ ).encode(sys_default_enc)
+ )
else:
- f.write('{:20s} {} {}\n'.format('QUALITY', wn.options.quality.chemical_name, wn.options.quality.inpfile_units).encode(sys_default_enc))
-
- f.write(entry_float.format('DIFFUSIVITY', wn.options.quality.diffusivity).encode(sys_default_enc))
-
- f.write(entry_float.format('TOLERANCE', wn.options.quality.tolerance).encode(sys_default_enc))
+ f.write(
+ '{:20s} {} {}\n'.format(
+ 'QUALITY',
+ wn.options.quality.chemical_name,
+ wn.options.quality.inpfile_units,
+ ).encode(sys_default_enc)
+ )
+
+ f.write(
+ entry_float.format('DIFFUSIVITY', wn.options.quality.diffusivity).encode(
+ sys_default_enc
+ )
+ )
+
+ f.write(
+ entry_float.format('TOLERANCE', wn.options.quality.tolerance).encode(
+ sys_default_enc
+ )
+ )
if wn.options.hydraulic.hydraulics is not None:
- f.write('{:20s} {:s} {:<30s}\n'.format('HYDRAULICS', wn.options.hydraulic.hydraulics, wn.options.hydraulic.hydraulics_filename).encode(sys_default_enc))
+ f.write(
+ '{:20s} {:s} {:<30s}\n'.format(
+ 'HYDRAULICS',
+ wn.options.hydraulic.hydraulics,
+ wn.options.hydraulic.hydraulics_filename,
+ ).encode(sys_default_enc)
+ )
if wn.options.graphics.map_filename is not None:
- f.write(entry_string.format('MAP', wn.options.graphics.map_filename).encode(sys_default_enc))
+ f.write(
+ entry_string.format('MAP', wn.options.graphics.map_filename).encode(
+ sys_default_enc
+ )
+ )
f.write('\n'.encode(sys_default_enc))
def _read_times(self):
opts = self.wn.options
time_format = ['am', 'AM', 'pm', 'PM']
- for lnum, line in self.sections['[TIMES]']:
- line = line.split(';')[0]
+ for lnum, line in self.sections['[TIMES]']: # noqa: B007
+ line = line.split(';')[0] # noqa: PLW2901
current = line.split()
if current == []:
continue
- if (current[0].upper() == 'DURATION'):
- opts.time.duration = int(float(current[1]) * 3600) if _is_number(current[1]) else int(_str_time_to_sec(current[1]))
- elif (current[0].upper() == 'HYDRAULIC'):
- opts.time.hydraulic_timestep = int(float(current[2]) * 3600) if _is_number(current[2]) else int(_str_time_to_sec(current[2]))
- elif (current[0].upper() == 'QUALITY'):
- opts.time.quality_timestep = int(float(current[2]) * 3600) if _is_number(current[2]) else int(_str_time_to_sec(current[2]))
- elif (current[1].upper() == 'CLOCKTIME'):
- if len(current) > 3:
+ if current[0].upper() == 'DURATION':
+ opts.time.duration = (
+ int(float(current[1]) * 3600)
+ if _is_number(current[1])
+ else int(_str_time_to_sec(current[1]))
+ )
+ elif current[0].upper() == 'HYDRAULIC':
+ opts.time.hydraulic_timestep = (
+ int(float(current[2]) * 3600)
+ if _is_number(current[2])
+ else int(_str_time_to_sec(current[2]))
+ )
+ elif current[0].upper() == 'QUALITY':
+ opts.time.quality_timestep = (
+ int(float(current[2]) * 3600)
+ if _is_number(current[2])
+ else int(_str_time_to_sec(current[2]))
+ )
+ elif current[1].upper() == 'CLOCKTIME':
+ if len(current) > 3: # noqa: PLR2004
time_format = current[3].upper()
else:
# Kludge for 24hr time that needs an AM/PM
time_format = 'AM'
time = current[2]
opts.time.start_clocktime = _clock_time_to_sec(time, time_format)
- elif (current[0].upper() == 'STATISTIC'):
+ elif current[0].upper() == 'STATISTIC':
opts.time.statistic = current[1].upper()
else:
# Other time options: RULE TIMESTEP, PATTERN TIMESTEP, REPORT TIMESTEP, REPORT START
key_string = current[0] + '_' + current[1]
- setattr(opts.time, key_string.lower(), int(float(current[2]) * 3600) if _is_number(current[2]) else int(_str_time_to_sec(current[2])))
-
- def _write_times(self, f, wn):
+ setattr(
+ opts.time,
+ key_string.lower(),
+ int(float(current[2]) * 3600)
+ if _is_number(current[2])
+ else int(_str_time_to_sec(current[2])),
+ )
+
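Reviewer note: `_read_times` accepts either decimal hours or clock strings for every entry. A simplified stand-in for the `_is_number`/`_str_time_to_sec` pair used above (handles hours:minutes[:seconds] only):

def to_seconds(token):
    try:
        return int(float(token) * 3600)              # '1.5'  -> 5400
    except ValueError:
        parts = [int(p) for p in token.split(':')] + [0, 0]
        return parts[0] * 3600 + parts[1] * 60 + parts[2]   # '1:30' -> 5400

assert to_seconds('1.5') == to_seconds('1:30') == 5400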
+ def _write_times(self, f, wn): # noqa: PLR6301
f.write('[TIMES]\n'.encode(sys_default_enc))
entry = '{:20s} {:10s}\n'
time_entry = '{:20s} {:02d}:{:02d}:{:02d}\n'
@@ -1863,248 +2531,305 @@ def _write_times(self, f, wn):
f.write(time_entry.format('DURATION', hrs, mm, sec).encode(sys_default_enc))
hrs, mm, sec = _sec_to_string(time.hydraulic_timestep)
- f.write(time_entry.format('HYDRAULIC TIMESTEP', hrs, mm, sec).encode(sys_default_enc))
+ f.write(
+ time_entry.format('HYDRAULIC TIMESTEP', hrs, mm, sec).encode(
+ sys_default_enc
+ )
+ )
hrs, mm, sec = _sec_to_string(time.quality_timestep)
- f.write(time_entry.format('QUALITY TIMESTEP', hrs, mm, sec).encode(sys_default_enc))
+ f.write(
+ time_entry.format('QUALITY TIMESTEP', hrs, mm, sec).encode(
+ sys_default_enc
+ )
+ )
hrs, mm, sec = _sec_to_string(time.pattern_timestep)
- f.write(time_entry.format('PATTERN TIMESTEP', hrs, mm, sec).encode(sys_default_enc))
+ f.write(
+ time_entry.format('PATTERN TIMESTEP', hrs, mm, sec).encode(
+ sys_default_enc
+ )
+ )
hrs, mm, sec = _sec_to_string(time.pattern_start)
- f.write(time_entry.format('PATTERN START', hrs, mm, sec).encode(sys_default_enc))
+ f.write(
+ time_entry.format('PATTERN START', hrs, mm, sec).encode(sys_default_enc)
+ )
hrs, mm, sec = _sec_to_string(time.report_timestep)
- f.write(time_entry.format('REPORT TIMESTEP', hrs, mm, sec).encode(sys_default_enc))
+ f.write(
+ time_entry.format('REPORT TIMESTEP', hrs, mm, sec).encode(
+ sys_default_enc
+ )
+ )
hrs, mm, sec = _sec_to_string(time.report_start)
- f.write(time_entry.format('REPORT START', hrs, mm, sec).encode(sys_default_enc))
+ f.write(
+ time_entry.format('REPORT START', hrs, mm, sec).encode(sys_default_enc)
+ )
hrs, mm, sec = _sec_to_string(time.start_clocktime)
-
- #Sina added this to WNTR-1: thsi adds the abikity to run corerctly for
- #time steps that are more than the first day
- day = int(hrs/24)
- hrs -=day*24
-
- if hrs < 12:
+
+        # Sina added this to WNTR-1: handle start clocktimes that extend beyond
+        # the first day so the INP file is still written correctly
+ day = int(hrs / 24)
+ hrs -= day * 24
+
+ if hrs < 12: # noqa: PLR2004
time_format = ' AM'
else:
hrs -= 12
time_format = ' PM'
- f.write('{:20s} {:02d}:{:02d}:{:02d}{:s}\n'.format('START CLOCKTIME', hrs, mm, sec, time_format).encode(sys_default_enc))
+ f.write(
+ '{:20s} {:02d}:{:02d}:{:02d}{:s}\n'.format(
+ 'START CLOCKTIME', hrs, mm, sec, time_format
+ ).encode(sys_default_enc)
+ )
hrs, mm, sec = _sec_to_string(time.rule_timestep)
- f.write(time_entry.format('RULE TIMESTEP', hrs, mm, int(sec)).encode(sys_default_enc))
- f.write(entry.format('STATISTIC', wn.options.time.statistic).encode(sys_default_enc))
+ f.write(
+ time_entry.format('RULE TIMESTEP', hrs, mm, int(sec)).encode(
+ sys_default_enc
+ )
+ )
+ f.write(
+ entry.format('STATISTIC', wn.options.time.statistic).encode(
+ sys_default_enc
+ )
+ )
f.write('\n'.encode(sys_default_enc))
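Reviewer note: the START CLOCKTIME block above first folds whole days out of the hour count (the WNTR-1 fix flagged in the comment) and then switches to a 12-hour value with an AM/PM suffix. Sketched standalone:

def clocktime_label(total_hours, minutes=0, seconds=0):
    hrs = total_hours % 24                 # drop whole days, as in the fix above
    suffix = ' AM' if hrs < 12 else ' PM'
    if hrs >= 12:
        hrs -= 12
    return f'{hrs:02d}:{minutes:02d}:{seconds:02d}{suffix}'

assert clocktime_label(30) == '06:00:00 AM'   # 30 h in -> day 2, 6 AM
assert clocktime_label(13) == '01:00:00 PM'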
- def _read_report(self):
- for lnum, line in self.sections['[REPORT]']:
- line = line.split(';')[0]
+ def _read_report(self): # noqa: C901
+ for lnum, line in self.sections['[REPORT]']: # noqa: B007
+ line = line.split(';')[0] # noqa: PLW2901
current = line.split()
if current == []:
continue
- if current[0].upper() in ['PAGE', 'PAGESIZE']:
+ if current[0].upper() in ['PAGE', 'PAGESIZE']: # noqa: PLR6201
self.wn.options.report.pagesize = int(current[1])
- elif current[0].upper() in ['FILE']:
+ elif current[0].upper() == 'FILE':
self.wn.options.report.file = current[1]
- elif current[0].upper() in ['STATUS']:
+ elif current[0].upper() == 'STATUS':
self.wn.options.report.status = current[1].upper()
- elif current[0].upper() in ['SUMMARY']:
+ elif current[0].upper() == 'SUMMARY':
self.wn.options.report.summary = current[1].upper()
- elif current[0].upper() in ['ENERGY']:
+ elif current[0].upper() == 'ENERGY':
self.wn.options.report.energy = current[1].upper()
- elif current[0].upper() in ['NODES']:
- if current[1].upper() in ['NONE']:
+ elif current[0].upper() == 'NODES':
+ if current[1].upper() == 'NONE':
self.wn.options.report.nodes = False
- elif current[1].upper() in ['ALL']:
+ elif current[1].upper() == 'ALL':
self.wn.options.report.nodes = True
elif not isinstance(self.wn.options.report.nodes, list):
self.wn.options.report.nodes = []
- for ct in range(len(current)-2):
+ for ct in range(len(current) - 2):
i = ct + 2
self.wn.options.report.nodes.append(current[i])
else:
- for ct in range(len(current)-2):
+ for ct in range(len(current) - 2):
i = ct + 2
self.wn.options.report.nodes.append(current[i])
- elif current[0].upper() in ['LINKS']:
- if current[1].upper() in ['NONE']:
+ elif current[0].upper() == 'LINKS':
+ if current[1].upper() == 'NONE':
self.wn.options.report.links = False
- elif current[1].upper() in ['ALL']:
+ elif current[1].upper() == 'ALL':
self.wn.options.report.links = True
elif not isinstance(self.wn.options.report.links, list):
self.wn.options.report.links = []
- for ct in range(len(current)-2):
+ for ct in range(len(current) - 2):
i = ct + 2
self.wn.options.report.links.append(current[i])
else:
- for ct in range(len(current)-2):
+ for ct in range(len(current) - 2):
i = ct + 2
self.wn.options.report.links.append(current[i])
+ elif (
+ current[0].lower() not in self.wn.options.report.report_params.keys() # noqa: SIM118
+ ):
+ logger.warning('Unknown report parameter: %s', current[0])
+ continue
+ elif current[1].upper() == 'YES':
+ self.wn.options.report.report_params[current[0].lower()] = True
+ elif current[1].upper() == 'NO':
+ self.wn.options.report.report_params[current[0].lower()] = False
else:
- if current[0].lower() not in self.wn.options.report.report_params.keys():
- logger.warning('Unknown report parameter: %s', current[0])
- continue
- elif current[1].upper() in ['YES']:
- self.wn.options.report.report_params[current[0].lower()] = True
- elif current[1].upper() in ['NO']:
- self.wn.options.report.report_params[current[0].lower()] = False
- else:
- self.wn.options.report.param_opts[current[0].lower()][current[1].upper()] = float(current[2])
+ self.wn.options.report.param_opts[current[0].lower()][
+ current[1].upper()
+ ] = float(current[2])
- def _write_report(self, f, wn):
+ def _write_report(self, f, wn): # noqa: C901, PLR6301
f.write('[REPORT]\n'.encode(sys_default_enc))
report = wn.options.report
if report.status.upper() != 'NO':
- f.write('STATUS {}\n'.format(report.status).encode(sys_default_enc))
+ f.write(f'STATUS {report.status}\n'.encode(sys_default_enc))
if report.summary.upper() != 'YES':
- f.write('SUMMARY {}\n'.format(report.summary).encode(sys_default_enc))
+ f.write(f'SUMMARY {report.summary}\n'.encode(sys_default_enc))
if report.pagesize is not None:
- f.write('PAGE {}\n'.format(report.pagesize).encode(sys_default_enc))
+ f.write(f'PAGE {report.pagesize}\n'.encode(sys_default_enc))
if report.report_filename is not None:
- f.write('FILE {}\n'.format(report.report_filename).encode(sys_default_enc))
+ f.write(f'FILE {report.report_filename}\n'.encode(sys_default_enc))
if report.energy.upper() != 'NO':
- f.write('ENERGY {}\n'.format(report.status).encode(sys_default_enc))
+            f.write(f'ENERGY {report.energy}\n'.encode(sys_default_enc))
if report.nodes is True:
f.write('NODES ALL\n'.encode(sys_default_enc))
elif isinstance(report.nodes, str):
- f.write('NODES {}\n'.format(report.nodes).encode(sys_default_enc))
+ f.write(f'NODES {report.nodes}\n'.encode(sys_default_enc))
elif isinstance(report.nodes, list):
for ct, node in enumerate(report.nodes):
if ct == 0:
- f.write('NODES {}'.format(node).encode(sys_default_enc))
+ f.write(f'NODES {node}'.encode(sys_default_enc))
elif ct % 10 == 0:
- f.write('\nNODES {}'.format(node).encode(sys_default_enc))
+ f.write(f'\nNODES {node}'.encode(sys_default_enc))
else:
- f.write(' {}'.format(node).encode(sys_default_enc))
+ f.write(f' {node}'.encode(sys_default_enc))
f.write('\n'.encode(sys_default_enc))
if report.links is True:
f.write('LINKS ALL\n'.encode(sys_default_enc))
elif isinstance(report.links, str):
- f.write('LINKS {}\n'.format(report.links).encode(sys_default_enc))
+ f.write(f'LINKS {report.links}\n'.encode(sys_default_enc))
elif isinstance(report.links, list):
for ct, link in enumerate(report.links):
if ct == 0:
- f.write('LINKS {}'.format(link).encode(sys_default_enc))
+ f.write(f'LINKS {link}'.encode(sys_default_enc))
elif ct % 10 == 0:
- f.write('\nLINKS {}'.format(link).encode(sys_default_enc))
+ f.write(f'\nLINKS {link}'.encode(sys_default_enc))
else:
- f.write(' {}'.format(link).encode(sys_default_enc))
+ f.write(f' {link}'.encode(sys_default_enc))
f.write('\n'.encode(sys_default_enc))
- # FIXME: defaults no longer located here
-# for key, item in report.report_params.items():
-# if item[1] != item[0]:
-# f.write('{:10s} {}\n'.format(key.upper(), item[1]).encode(sys_default_enc))
+ # FIXME: defaults no longer located here # noqa: FIX001, TD001, TD002
+ # for key, item in report.report_params.items():
+ # if item[1] != item[0]:
+ # f.write('{:10s} {}\n'.format(key.upper(), item[1]).encode(sys_default_enc))
for key, item in report.param_opts.items():
for opt, val in item.items():
- f.write('{:10s} {:10s} {}\n'.format(key.upper(), opt.upper(), val).encode(sys_default_enc))
+ f.write(
+ f'{key.upper():10s} {opt.upper():10s} {val}\n'.encode(
+ sys_default_enc
+ )
+ )
f.write('\n'.encode(sys_default_enc))
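Reviewer note: the NODES/LINKS loops above wrap long identifier lists at ten per record, each continuation starting a new NODES/LINKS line. The same layout as a standalone helper (a sketch, not the patch's code):

def wrap_ids(keyword, ids, per_line=10):
    lines = [f'{keyword} ' + ' '.join(ids[i:i + per_line])
             for i in range(0, len(ids), per_line)]
    return '\n'.join(lines) + '\n'

print(wrap_ids('NODES', [f'J{i}' for i in range(12)]), end='')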
- ### Network Map/Tags
+ # Network Map/Tags
def _read_coordinates(self):
- for lnum, line in self.sections['[COORDINATES]']:
- line = line.split(';')[0]
+ for lnum, line in self.sections['[COORDINATES]']: # noqa: B007
+ line = line.split(';')[0] # noqa: PLW2901
current = line.split()
if current == []:
continue
-# assert(len(current) == 3), ("Error reading node coordinates. Check format.")
+ # assert(len(current) == 3), ("Error reading node coordinates. Check format.")
node = self.wn.get_node(current[0])
node.coordinates = (float(current[1]), float(current[2]))
- def _write_coordinates(self, f, wn):
+ def _write_coordinates(self, f, wn): # noqa: PLR6301
f.write('[COORDINATES]\n'.encode(sys_default_enc))
entry = '{:10s} {:20.9f} {:20.9f}\n'
label = '{:10s} {:10s} {:10s}\n'
f.write(label.format(';Node', 'X-Coord', 'Y-Coord').encode(sys_default_enc))
for name, node in wn.nodes():
- if node._is_isolated == True: #Sina added this
+ if node._is_isolated == True: # Sina added this # noqa: SLF001, E712
continue
val = node.coordinates
f.write(entry.format(name, val[0], val[1]).encode(sys_default_enc))
f.write('\n'.encode(sys_default_enc))
def _read_vertices(self):
- for lnum, line in self.sections['[VERTICES]']:
- line = line.split(';')[0].strip()
+ for lnum, line in self.sections['[VERTICES]']: # noqa: B007
+ line = line.split(';')[0].strip() # noqa: PLW2901
current = line.split()
if current == []:
continue
- if len(current) != 3:
+ if len(current) != 3: # noqa: PLR2004
logger.warning('Invalid VERTICES line: %s', line)
continue
link_name = current[0]
link = self.wn.get_link(link_name)
- link._vertices.append((float(current[1]), float(current[2])))
+ link._vertices.append((float(current[1]), float(current[2]))) # noqa: SLF001
- def _write_vertices(self, f, wn):
+ def _write_vertices(self, f, wn): # noqa: PLR6301
f.write('[VERTICES]\n'.encode(sys_default_enc))
entry = '{:10s} {:20.9f} {:20.9f}\n'
label = '{:10s} {:10s} {:10s}\n'
f.write(label.format(';Link', 'X-Coord', 'Y-Coord').encode(sys_default_enc))
for name, link in wn.links():
- if link._is_isolated == True: #Sina added this
+ if link._is_isolated == True: # Sina added this # noqa: SLF001, E712
continue
- for vert in link._vertices: #Sina: I unindented this and the next line. Possible Bug in WNTR-1
+ for vert in (
+ link._vertices # noqa: SLF001
+ ): # Sina: I unindented this and the next line. Possible Bug in WNTR-1
f.write(entry.format(name, vert[0], vert[1]).encode(sys_default_enc))
-
+
f.write('\n'.encode(sys_default_enc))
def _read_labels(self):
labels = []
- for lnum, line in self.sections['[LABELS]']:
- line = line.split(';')[0].strip()
+ for lnum, line in self.sections['[LABELS]']: # noqa: B007
+ line = line.split(';')[0].strip() # noqa: PLW2901
current = line.split()
if current == []:
continue
labels.append(line)
- self.wn._labels = labels
+ self.wn._labels = labels # noqa: SLF001
- def _write_labels(self, f, wn):
+ def _write_labels(self, f, wn): # noqa: PLR6301
f.write('[LABELS]\n'.encode(sys_default_enc))
- if wn._labels is not None:
- for label in wn._labels:
- f.write(' {}\n'.format(label).encode(sys_default_enc))
+ if wn._labels is not None: # noqa: SLF001
+ for label in wn._labels: # noqa: SLF001
+ f.write(f' {label}\n'.encode(sys_default_enc))
f.write('\n'.encode(sys_default_enc))
def _read_backdrop(self):
- for lnum, line in self.sections['[BACKDROP]']:
- line = line.split(';')[0]
+ for lnum, line in self.sections['[BACKDROP]']: # noqa: B007
+ line = line.split(';')[0] # noqa: PLW2901
current = line.split()
if current == []:
continue
key = current[0].upper()
- if key == 'DIMENSIONS' and len(current) > 4:
- self.wn.options.graphics.dimensions = [current[1], current[2], current[3], current[4]]
+ if key == 'DIMENSIONS' and len(current) > 4: # noqa: PLR2004
+ self.wn.options.graphics.dimensions = [
+ current[1],
+ current[2],
+ current[3],
+ current[4],
+ ]
elif key == 'UNITS' and len(current) > 1:
self.wn.options.graphics.units = current[1]
elif key == 'FILE' and len(current) > 1:
self.wn.options.graphics.image_filename = current[1]
- elif key == 'OFFSET' and len(current) > 2:
+ elif key == 'OFFSET' and len(current) > 2: # noqa: PLR2004
self.wn.options.graphics.offset = [current[1], current[2]]
- def _write_backdrop(self, f, wn):
+ def _write_backdrop(self, f, wn): # noqa: PLR6301
if wn.options.graphics is not None:
f.write('[BACKDROP]\n'.encode(sys_default_enc))
if wn.options.graphics.dimensions is not None:
- f.write('DIMENSIONS {0} {1} {2} {3}\n'.format(wn.options.graphics.dimensions[0],
- wn.options.graphics.dimensions[1],
- wn.options.graphics.dimensions[2],
- wn.options.graphics.dimensions[3]).encode(sys_default_enc))
+ f.write(
+ f'DIMENSIONS {wn.options.graphics.dimensions[0]} {wn.options.graphics.dimensions[1]} {wn.options.graphics.dimensions[2]} {wn.options.graphics.dimensions[3]}\n'.encode(
+ sys_default_enc
+ )
+ )
if wn.options.graphics.units is not None:
- f.write('UNITS {0}\n'.format(wn.options.graphics.units).encode(sys_default_enc))
+ f.write(
+ f'UNITS {wn.options.graphics.units}\n'.encode(sys_default_enc)
+ )
if wn.options.graphics.image_filename is not None:
- f.write('FILE {0}\n'.format(wn.options.graphics.image_filename).encode(sys_default_enc))
+ f.write(
+ f'FILE {wn.options.graphics.image_filename}\n'.encode(
+ sys_default_enc
+ )
+ )
if wn.options.graphics.offset is not None:
- f.write('OFFSET {0} {1}\n'.format(wn.options.graphics.offset[0], wn.options.graphics.offset[1]).encode(sys_default_enc))
+ f.write(
+ f'OFFSET {wn.options.graphics.offset[0]} {wn.options.graphics.offset[1]}\n'.encode(
+ sys_default_enc
+ )
+ )
f.write('\n'.encode(sys_default_enc))
def _read_tags(self):
- for lnum, line in self.sections['[TAGS]']:
- line = line.split(';')[0]
+ for lnum, line in self.sections['[TAGS]']: # noqa: B007
+ line = line.split(';')[0] # noqa: PLW2901
current = line.split()
if current == []:
continue
@@ -2117,7 +2842,7 @@ def _read_tags(self):
else:
continue
- def _write_tags(self, f, wn):
+ def _write_tags(self, f, wn): # noqa: PLR6301
f.write('[TAGS]\n'.encode(sys_default_enc))
entry = '{:10s} {:10s} {:10s}\n'
label = '{:10s} {:10s} {:10s}\n'
@@ -2126,50 +2851,63 @@ def _write_tags(self, f, wn):
# nnodes.sort()
for node_name in nnodes:
node = wn.nodes[node_name]
- if node._is_isolated == True: #Sina added this
+ if node._is_isolated == True: # Sina added this # noqa: SLF001, E712
continue
if node.tag:
- f.write(entry.format('NODE', node_name, node.tag).encode(sys_default_enc))
+ f.write(
+ entry.format('NODE', node_name, node.tag).encode(sys_default_enc)
+ )
nlinks = list(wn.link_name_list)
nlinks.sort()
for link_name in nlinks:
link = wn.links[link_name]
-
- if link._is_isolated == True: #Sina added this
+
+ if link._is_isolated == True: # Sina added this # noqa: SLF001, E712
continue
if link.tag:
- f.write(entry.format('LINK', link_name, link.tag).encode(sys_default_enc))
+ f.write(
+ entry.format('LINK', link_name, link.tag).encode(sys_default_enc)
+ )
f.write('\n'.encode(sys_default_enc))
- ### End of File
+ # End of File
def _read_end(self):
- """Finalize read by verifying that all curves have been dealt with"""
+ """Finalize read by verifying that all curves have been dealt with""" # noqa: D400
+
def create_curve(curve_name):
curve_points = []
- if curve_name not in self.wn.curve_name_list or self.wn.get_curve(curve_name) is None:
+ if (
+ curve_name not in self.wn.curve_name_list
+ or self.wn.get_curve(curve_name) is None
+ ):
for point in self.curves[curve_name]:
x = point[0]
y = point[1]
- curve_points.append((x,y))
+ curve_points.append((x, y))
self.wn.add_curve(curve_name, None, curve_points)
curve = self.wn.get_curve(curve_name)
- return curve
+ return curve # noqa: RET504
curve_name_list = self.wn.curve_name_list
- for name, curvedata in self.curves.items():
+ for name, curvedata in self.curves.items(): # noqa: B007, PERF102
if name not in curve_name_list or self.wn.get_curve(name) is None:
- warnings.warn('Not all curves were used in "{}"; added with type None, units conversion left to user'.format(self.wn.name))
- logger.warning('Curve was not used: "{}"; saved as curve type None and unit conversion not performed'.format(name))
+ warnings.warn( # noqa: B028
+ f'Not all curves were used in "{self.wn.name}"; added with type None, units conversion left to user'
+ )
+ logger.warning(
+ f'Curve was not used: "{name}"; saved as curve type None and unit conversion not performed'
+ )
create_curve(name)
- def _write_end(self, f, wn):
+ def _write_end(self, f, wn): # noqa: ARG002, PLR6301
f.write('[END]\n'.encode(sys_default_enc))
-class _EpanetRule(object):
- """contains the text for an EPANET rule"""
- def __init__(self, ruleID, inp_units=None, mass_units=None):
+class _EpanetRule:
+ """contains the text for an EPANET rule""" # noqa: D400
+
+ def __init__(self, ruleID, inp_units=None, mass_units=None): # noqa: N803
self.inp_units = inp_units
self.mass_units = mass_units
self.ruleID = ruleID
@@ -2179,25 +2917,38 @@ def __init__(self, ruleID, inp_units=None, mass_units=None):
self.priority = 0
@classmethod
- def parse_rules_lines(cls, lines, flow_units=FlowUnits.SI, mass_units=MassUnits.mg) -> list:
- rules = list()
+ def parse_rules_lines( # noqa: C901
+ cls,
+ lines,
+ flow_units=FlowUnits.SI,
+ mass_units=MassUnits.mg,
+ ) -> list:
+ rules = list() # noqa: C408
rule = None
in_if = False
in_then = False
in_else = False
- new_lines = list()
- new_line = list()
+ new_lines = list() # noqa: C408
+ new_line = list() # noqa: C408
for line in lines:
if isinstance(line, (tuple, list)):
- line = line[1]
- line = line.split(';')[0]
+ line = line[1] # noqa: PLW2901
+ line = line.split(';')[0] # noqa: PLW2901
words = line.strip().split()
for word in words:
- if word.upper() in ['RULE', 'IF', 'THEN', 'ELSE', 'AND', 'OR', 'PRIORITY']:
+ if word.upper() in [ # noqa: PLR6201
+ 'RULE',
+ 'IF',
+ 'THEN',
+ 'ELSE',
+ 'AND',
+ 'OR',
+ 'PRIORITY',
+ ]:
if len(new_line) > 0:
text = ' '.join(new_line)
new_lines.append(text)
- new_line = list()
+ new_line = list() # noqa: C408
new_line.append(word)
if len(new_line) > 0:
text = ' '.join(new_line)
@@ -2249,112 +3000,141 @@ def parse_rules_lines(cls, lines, flow_units=FlowUnits.SI, mass_units=MassUnits.
return rules
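Reviewer note: `parse_rules_lines` first re-tokenizes the raw [RULES] text so that every keyword opens a new logical clause, independent of how the INP file wrapped its lines. A minimal sketch of that regrouping step:

KEYWORDS = {'RULE', 'IF', 'THEN', 'ELSE', 'AND', 'OR', 'PRIORITY'}

def regroup(words):
    clauses, current = [], []
    for word in words:
        if word.upper() in KEYWORDS and current:
            clauses.append(' '.join(current))
            current = []
        current.append(word)
    if current:
        clauses.append(' '.join(current))
    return clauses

text = 'RULE 1 IF TANK T1 LEVEL > 2 THEN PUMP P1 STATUS IS OPEN'
assert regroup(text.split()) == ['RULE 1', 'IF TANK T1 LEVEL > 2',
                                 'THEN PUMP P1 STATUS IS OPEN']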
def from_if_then_else(self, control):
- """Create a rule from a Rule object"""
+ """Create a rule from a Rule object""" # noqa: D400
if isinstance(control, Rule):
self.ruleID = control.name
- self.add_control_condition(control._condition)
- for ct, action in enumerate(control._then_actions):
+ self.add_control_condition(control._condition) # noqa: SLF001
+ for ct, action in enumerate(control._then_actions): # noqa: SLF001
if ct == 0:
self.add_action_on_true(action)
else:
self.add_action_on_true(action, ' AND')
- for ct, action in enumerate(control._else_actions):
+ for ct, action in enumerate(control._else_actions): # noqa: SLF001
if ct == 0:
self.add_action_on_false(action)
else:
self.add_action_on_false(action, ' AND')
- self.set_priority(control._priority)
+ self.set_priority(control._priority) # noqa: SLF001
else:
- raise ValueError('Invalid control type for rules: %s'%control.__class__.__name__)
+ raise ValueError( # noqa: DOC501, TRY004
+ 'Invalid control type for rules: %s' % control.__class__.__name__ # noqa: UP031
+ )
def add_if(self, clause):
- """Add an "if/and/or" clause from an INP file"""
+ """Add an "if/and/or" clause from an INP file""" # noqa: D400
self._if_clauses.append(clause)
- def add_control_condition(self, condition, prefix=' IF'):
- """Add a ControlCondition from an IfThenElseControl"""
+ def add_control_condition(self, condition, prefix=' IF'): # noqa: C901
+ """Add a ControlCondition from an IfThenElseControl""" # noqa: D400
if isinstance(condition, OrCondition):
- self.add_control_condition(condition._condition_1, prefix)
- self.add_control_condition(condition._condition_2, ' OR')
+ self.add_control_condition(condition._condition_1, prefix) # noqa: SLF001
+ self.add_control_condition(condition._condition_2, ' OR') # noqa: SLF001
elif isinstance(condition, AndCondition):
- self.add_control_condition(condition._condition_1, prefix)
- self.add_control_condition(condition._condition_2, ' AND')
+ self.add_control_condition(condition._condition_1, prefix) # noqa: SLF001
+ self.add_control_condition(condition._condition_2, ' AND') # noqa: SLF001
elif isinstance(condition, TimeOfDayCondition):
fmt = '{} SYSTEM CLOCKTIME {} {}'
- clause = fmt.format(prefix, condition._relation.text, condition._sec_to_clock(condition._threshold))
+ clause = fmt.format(
+ prefix,
+ condition._relation.text, # noqa: SLF001
+ condition._sec_to_clock(condition._threshold), # noqa: SLF001
+ )
self.add_if(clause)
elif isinstance(condition, SimTimeCondition):
fmt = '{} SYSTEM TIME {} {}'
- clause = fmt.format(prefix, condition._relation.text, condition._sec_to_hours_min_sec(condition._threshold))
+ clause = fmt.format(
+ prefix,
+ condition._relation.text, # noqa: SLF001
+ condition._sec_to_hours_min_sec(condition._threshold), # noqa: SLF001
+ )
self.add_if(clause)
elif isinstance(condition, ValueCondition):
- fmt = '{} {} {} {} {} {}' # CONJ, TYPE, ID, ATTRIBUTE, RELATION, THRESHOLD
- attr = condition._source_attr
- val_si = condition._repr_value(attr, condition._threshold)
- if attr.lower() in ['demand']:
- value = '{:.6g}'.format(from_si(self.inp_units, val_si, HydParam.Demand))
- elif attr.lower() in ['head', 'level']:
- value = '{:.6g}'.format(from_si(self.inp_units, val_si, HydParam.HydraulicHead))
- elif attr.lower() in ['flow']:
- value = '{:.6g}'.format(from_si(self.inp_units, val_si, HydParam.Flow))
- elif attr.lower() in ['pressure']:
- value = '{:.6g}'.format(from_si(self.inp_units, val_si, HydParam.Pressure))
- elif attr.lower() in ['setting']:
- if isinstance(condition._source_obj, Valve):
- if condition._source_obj.valve_type.upper() in ['PRV', 'PBV', 'PSV']:
+ fmt = (
+ '{} {} {} {} {} {}' # CONJ, TYPE, ID, ATTRIBUTE, RELATION, THRESHOLD
+ )
+ attr = condition._source_attr # noqa: SLF001
+ val_si = condition._repr_value(attr, condition._threshold) # noqa: SLF001
+ if attr.lower() == 'demand':
+ value = f'{from_si(self.inp_units, val_si, HydParam.Demand):.6g}'
+ elif attr.lower() in ['head', 'level']: # noqa: PLR6201
+ value = (
+ f'{from_si(self.inp_units, val_si, HydParam.HydraulicHead):.6g}'
+ )
+ elif attr.lower() == 'flow':
+ value = f'{from_si(self.inp_units, val_si, HydParam.Flow):.6g}'
+ elif attr.lower() == 'pressure':
+ value = f'{from_si(self.inp_units, val_si, HydParam.Pressure):.6g}'
+ elif attr.lower() == 'setting':
+ if isinstance(condition._source_obj, Valve): # noqa: SLF001
+ if condition._source_obj.valve_type.upper() in [ # noqa: PLR6201, SLF001
+ 'PRV',
+ 'PBV',
+ 'PSV',
+ ]:
value = from_si(self.inp_units, val_si, HydParam.Pressure)
- elif condition._source_obj.valve_type.upper() in ['FCV']:
+ elif condition._source_obj.valve_type.upper() == 'FCV': # noqa: SLF001
value = from_si(self.inp_units, val_si, HydParam.Flow)
else:
value = val_si
else:
value = val_si
- value = '{:.6g}'.format(value)
- else: # status
+ value = f'{value:.6g}'
+ else: # status
value = val_si
- if isinstance(condition._source_obj, Valve):
+ if isinstance(condition._source_obj, Valve): # noqa: SLF001
cls = 'Valve'
- elif isinstance(condition._source_obj, Pump):
+ elif isinstance(condition._source_obj, Pump): # noqa: SLF001
cls = 'Pump'
else:
- cls = condition._source_obj.__class__.__name__
- clause = fmt.format(prefix, cls,
- condition._source_obj.name, condition._source_attr,
- condition._relation.symbol, value)
+ cls = condition._source_obj.__class__.__name__ # noqa: SLF001
+ clause = fmt.format(
+ prefix,
+ cls,
+ condition._source_obj.name, # noqa: SLF001
+ condition._source_attr, # noqa: SLF001
+ condition._relation.symbol, # noqa: SLF001
+ value,
+ )
self.add_if(clause)
else:
- raise ValueError('Unknown ControlCondition for EPANET Rules')
+ raise ValueError('Unknown ControlCondition for EPANET Rules') # noqa: DOC501, EM101, TRY003, TRY004
def add_then(self, clause):
- """Add a "then/and" clause from an INP file"""
+ """Add a "then/and" clause from an INP file""" # noqa: D400
self._then_clauses.append(clause)
- def add_action_on_true(self, action, prefix=' THEN'):
- """Add a "then" action from an IfThenElseControl"""
+ def add_action_on_true(self, action, prefix=' THEN'): # noqa: C901
+ """Add a "then" action from an IfThenElseControl""" # noqa: D400
if isinstance(action, ControlAction):
fmt = '{} {} {} {} = {}'
- attr = action._attribute
- val_si = action._repr_value()
- if attr.lower() in ['demand']:
- value = '{:.6g}'.format(from_si(self.inp_units, val_si, HydParam.Demand))
- elif attr.lower() in ['head', 'level']:
- value = '{:.6g}'.format(from_si(self.inp_units, val_si, HydParam.HydraulicHead))
- elif attr.lower() in ['flow']:
- value = '{:.6g}'.format(from_si(self.inp_units, val_si, HydParam.Flow))
- elif attr.lower() in ['pressure']:
- value = '{:.6g}'.format(from_si(self.inp_units, val_si, HydParam.Pressure))
- elif attr.lower() in ['setting']:
+ attr = action._attribute # noqa: SLF001
+ val_si = action._repr_value() # noqa: SLF001
+ if attr.lower() == 'demand':
+ value = f'{from_si(self.inp_units, val_si, HydParam.Demand):.6g}'
+ elif attr.lower() in ['head', 'level']: # noqa: PLR6201
+ value = (
+ f'{from_si(self.inp_units, val_si, HydParam.HydraulicHead):.6g}'
+ )
+ elif attr.lower() == 'flow':
+ value = f'{from_si(self.inp_units, val_si, HydParam.Flow):.6g}'
+ elif attr.lower() == 'pressure':
+ value = f'{from_si(self.inp_units, val_si, HydParam.Pressure):.6g}'
+ elif attr.lower() == 'setting':
if isinstance(action.target()[0], Valve):
- if action.target()[0].valve_type.upper() in ['PRV', 'PBV', 'PSV']:
+ if action.target()[0].valve_type.upper() in [ # noqa: PLR6201
+ 'PRV',
+ 'PBV',
+ 'PSV',
+ ]:
value = from_si(self.inp_units, val_si, HydParam.Pressure)
- elif action.target()[0].valve_type.upper() in ['FCV']:
+ elif action.target()[0].valve_type.upper() == 'FCV':
value = from_si(self.inp_units, val_si, HydParam.Flow)
else:
value = val_si
else:
value = val_si
- value = '{:.6g}'.format(value)
- else: # status
+ value = f'{value:.6g}'
+ else: # status
value = val_si
if isinstance(action.target()[0], Valve):
cls = 'Valve'
@@ -2362,41 +3142,47 @@ def add_action_on_true(self, action, prefix=' THEN'):
cls = 'Pump'
else:
cls = action.target()[0].__class__.__name__
- clause = fmt.format(prefix, cls,
- action.target()[0].name, action.target()[1],
- value)
+ clause = fmt.format(
+ prefix, cls, action.target()[0].name, action.target()[1], value
+ )
self.add_then(clause)
def add_else(self, clause):
- """Add an "else/and" clause from an INP file"""
+ """Add an "else/and" clause from an INP file""" # noqa: D400
self._else_clauses.append(clause)
- def add_action_on_false(self, action, prefix=' ELSE'):
- """Add an "else" action from an IfThenElseControl"""
+ def add_action_on_false(self, action, prefix=' ELSE'): # noqa: C901
+ """Add an "else" action from an IfThenElseControl""" # noqa: D400
if isinstance(action, ControlAction):
fmt = '{} {} {} {} = {}'
- attr = action._attribute
- val_si = action._repr_value()
- if attr.lower() in ['demand']:
- value = '{:.6g}'.format(from_si(self.inp_units, val_si, HydParam.Demand))
- elif attr.lower() in ['head', 'level']:
- value = '{:.6g}'.format(from_si(self.inp_units, val_si, HydParam.HydraulicHead))
- elif attr.lower() in ['flow']:
- value = '{:.6g}'.format(from_si(self.inp_units, val_si, HydParam.Flow))
- elif attr.lower() in ['pressure']:
- value = '{:.6g}'.format(from_si(self.inp_units, val_si, HydParam.Pressure))
- elif attr.lower() in ['setting']:
+ attr = action._attribute # noqa: SLF001
+ val_si = action._repr_value() # noqa: SLF001
+ if attr.lower() == 'demand':
+ value = f'{from_si(self.inp_units, val_si, HydParam.Demand):.6g}'
+ elif attr.lower() in ['head', 'level']: # noqa: PLR6201
+ value = (
+ f'{from_si(self.inp_units, val_si, HydParam.HydraulicHead):.6g}'
+ )
+ elif attr.lower() == 'flow':
+ value = f'{from_si(self.inp_units, val_si, HydParam.Flow):.6g}'
+ elif attr.lower() == 'pressure':
+ value = f'{from_si(self.inp_units, val_si, HydParam.Pressure):.6g}'
+ elif attr.lower() == 'setting':
if isinstance(action.target()[0], Valve):
- if action.target()[0].valve_type.upper() in ['PRV', 'PBV', 'PSV']:
+ if action.target()[0].valve_type.upper() in [ # noqa: PLR6201
+ 'PRV',
+ 'PBV',
+ 'PSV',
+ ]:
value = from_si(self.inp_units, val_si, HydParam.Pressure)
- elif action.target()[0].valve_type.upper() in ['FCV']:
+ elif action.target()[0].valve_type.upper() == 'FCV':
value = from_si(self.inp_units, val_si, HydParam.Flow)
else:
value = val_si
else:
value = val_si
- value = '{:.6g}'.format(value)
- else: # status
+ value = f'{value:.6g}'
+ else: # status
value = val_si
if isinstance(action.target()[0], Valve):
cls = 'Valve'
@@ -2404,9 +3190,9 @@ def add_action_on_false(self, action, prefix=' ELSE'):
cls = 'Pump'
else:
cls = action.target()[0].__class__.__name__
- clause = fmt.format(prefix, cls,
- action.target()[0].name, action.target()[1],
- value)
+ clause = fmt.format(
+ prefix, cls, action.target()[0].name, action.target()[1], value
+ )
self.add_else(clause)
def set_priority(self, priority):
@@ -2415,67 +3201,96 @@ def set_priority(self, priority):
def __str__(self):
if self.priority >= 0:
if len(self._else_clauses) > 0:
- return 'RULE {}\n{}\n{}\n{}\n PRIORITY {}\n ; end of rule\n'.format(self.ruleID, '\n'.join(self._if_clauses), '\n'.join(self._then_clauses), '\n'.join(self._else_clauses), self.priority)
- else:
- return 'RULE {}\n{}\n{}\n PRIORITY {}\n ; end of rule\n'.format(self.ruleID, '\n'.join(self._if_clauses), '\n'.join(self._then_clauses), self.priority)
+ return 'RULE {}\n{}\n{}\n{}\n PRIORITY {}\n ; end of rule\n'.format(
+ self.ruleID,
+ '\n'.join(self._if_clauses),
+ '\n'.join(self._then_clauses),
+ '\n'.join(self._else_clauses),
+ self.priority,
+ )
+ else: # noqa: RET505
+ return 'RULE {}\n{}\n{}\n PRIORITY {}\n ; end of rule\n'.format(
+ self.ruleID,
+ '\n'.join(self._if_clauses),
+ '\n'.join(self._then_clauses),
+ self.priority,
+ )
+ elif len(self._else_clauses) > 0:
+ return 'RULE {}\n{}\n{}\n{}\n ; end of rule\n'.format(
+ self.ruleID,
+ '\n'.join(self._if_clauses),
+ '\n'.join(self._then_clauses),
+ '\n'.join(self._else_clauses),
+ )
else:
- if len(self._else_clauses) > 0:
- return 'RULE {}\n{}\n{}\n{}\n ; end of rule\n'.format(self.ruleID, '\n'.join(self._if_clauses), '\n'.join(self._then_clauses), '\n'.join(self._else_clauses))
- else:
- return 'RULE {}\n{}\n{}\n ; end of rule\n'.format(self.ruleID, '\n'.join(self._if_clauses), '\n'.join(self._then_clauses))
+ return 'RULE {}\n{}\n{}\n ; end of rule\n'.format(
+ self.ruleID,
+ '\n'.join(self._if_clauses),
+ '\n'.join(self._then_clauses),
+ )
- def generate_control(self, model):
+ def generate_control(self, model): # noqa: C901
condition_list = []
for line in self._if_clauses:
condition = None
words = line.split()
if words[1].upper() == 'SYSTEM':
if words[2].upper() == 'DEMAND':
- ### TODO: system demand
+ # TODO: system demand # noqa: TD002
pass
elif words[2].upper() == 'TIME':
- condition = SimTimeCondition(model, words[3], ' '.join(words[4:]))
+ condition = SimTimeCondition(
+ model, words[3], ' '.join(words[4:])
+ )
else:
- condition = TimeOfDayCondition(model, words[3], ' '.join(words[4:]))
+ condition = TimeOfDayCondition(
+ model, words[3], ' '.join(words[4:])
+ )
else:
attr = words[3].lower()
- value = ValueCondition._parse_value(words[5])
- if attr.lower() in ['demand']:
+ value = ValueCondition._parse_value(words[5]) # noqa: SLF001
+ if attr.lower() == 'demand':
value = to_si(self.inp_units, value, HydParam.Demand)
- elif attr.lower() in ['head']:
- value = to_si(self.inp_units, value, HydParam.HydraulicHead)
- elif attr.lower() in ['level']:
+ elif attr.lower() == 'head' or attr.lower() == 'level':
value = to_si(self.inp_units, value, HydParam.HydraulicHead)
- elif attr.lower() in ['flow']:
+ elif attr.lower() == 'flow':
value = to_si(self.inp_units, value, HydParam.Flow)
- elif attr.lower() in ['pressure']:
+ elif attr.lower() == 'pressure':
value = to_si(self.inp_units, value, HydParam.Pressure)
- elif attr.lower() in ['setting']:
+ elif attr.lower() == 'setting':
link = model.get_link(words[2])
if isinstance(link, wntrfr.network.Pump):
- value = value
+ value = value # noqa: PLW0127
elif isinstance(link, wntrfr.network.Valve):
- if link.valve_type.upper() in ['PRV', 'PBV', 'PSV']:
+ if link.valve_type.upper() in ['PRV', 'PBV', 'PSV']: # noqa: PLR6201
value = to_si(self.inp_units, value, HydParam.Pressure)
- elif link.valve_type.upper() in ['FCV']:
+ elif link.valve_type.upper() == 'FCV':
value = to_si(self.inp_units, value, HydParam.Flow)
- if words[1].upper() in ['NODE', 'JUNCTION', 'RESERVOIR', 'TANK']:
- condition = ValueCondition(model.get_node(words[2]), words[3].lower(), words[4].lower(), value)
- elif words[1].upper() in ['LINK', 'PIPE', 'PUMP', 'VALVE']:
- condition = ValueCondition(model.get_link(words[2]), words[3].lower(), words[4].lower(), value)
+ if words[1].upper() in ['NODE', 'JUNCTION', 'RESERVOIR', 'TANK']: # noqa: PLR6201
+ condition = ValueCondition(
+ model.get_node(words[2]),
+ words[3].lower(),
+ words[4].lower(),
+ value,
+ )
+ elif words[1].upper() in ['LINK', 'PIPE', 'PUMP', 'VALVE']: # noqa: PLR6201
+ condition = ValueCondition(
+ model.get_link(words[2]),
+ words[3].lower(),
+ words[4].lower(),
+ value,
+ )
else:
- ### FIXME: raise error
+ # FIXME: raise error # noqa: FIX001, TD001, TD002
pass
- if words[0].upper() == 'IF':
- condition_list.append(condition)
- elif words[0].upper() == 'AND':
+ if words[0].upper() == 'IF' or words[0].upper() == 'AND':
condition_list.append(condition)
elif words[0].upper() == 'OR':
if len(condition_list) > 0:
other = condition_list[-1]
condition_list.remove(other)
else:
- ### FIXME: raise error
+ # FIXME: raise error # noqa: FIX001, TD001, TD002
pass
conj = OrCondition(other, condition)
condition_list.append(conj)
@@ -2488,65 +3303,70 @@ def generate_control(self, model):
then_acts = []
for act in self._then_clauses:
words = act.strip().split()
- if len(words) < 6:
- # TODO: raise error
+ if len(words) < 6: # noqa: PLR2004
+ # TODO: raise error # noqa: TD002
pass
link = model.get_link(words[2])
attr = words[3].lower()
- value = ValueCondition._parse_value(words[5])
- if attr.lower() in ['demand']:
+ value = ValueCondition._parse_value(words[5]) # noqa: SLF001
+ if attr.lower() == 'demand':
value = to_si(self.inp_units, value, HydParam.Demand)
- elif attr.lower() in ['head', 'level']:
+ elif attr.lower() in ['head', 'level']: # noqa: PLR6201
value = to_si(self.inp_units, value, HydParam.HydraulicHead)
- elif attr.lower() in ['flow']:
+ elif attr.lower() == 'flow':
value = to_si(self.inp_units, value, HydParam.Flow)
- elif attr.lower() in ['pressure']:
+ elif attr.lower() == 'pressure':
value = to_si(self.inp_units, value, HydParam.Pressure)
- elif attr.lower() in ['setting']:
+ elif attr.lower() == 'setting':
if isinstance(link, Valve):
- if link.valve_type.upper() in ['PRV', 'PBV', 'PSV']:
+ if link.valve_type.upper() in ['PRV', 'PBV', 'PSV']: # noqa: PLR6201
value = to_si(self.inp_units, value, HydParam.Pressure)
- elif link.valve_type.upper() in ['FCV']:
+ elif link.valve_type.upper() == 'FCV':
value = to_si(self.inp_units, value, HydParam.Flow)
then_acts.append(ControlAction(link, attr, value))
else_acts = []
for act in self._else_clauses:
words = act.strip().split()
- if len(words) < 6:
- # TODO: raise error
+ if len(words) < 6: # noqa: PLR2004
+ # TODO: raise error # noqa: TD002
pass
link = model.get_link(words[2])
attr = words[3].lower()
- value = ValueCondition._parse_value(words[5])
- if attr.lower() in ['demand']:
+ value = ValueCondition._parse_value(words[5]) # noqa: SLF001
+ if attr.lower() == 'demand':
value = to_si(self.inp_units, value, HydParam.Demand)
- elif attr.lower() in ['head', 'level']:
+ elif attr.lower() in ['head', 'level']: # noqa: PLR6201
value = to_si(self.inp_units, value, HydParam.HydraulicHead)
- elif attr.lower() in ['flow']:
+ elif attr.lower() == 'flow':
value = to_si(self.inp_units, value, HydParam.Flow)
- elif attr.lower() in ['pressure']:
+ elif attr.lower() == 'pressure':
value = to_si(self.inp_units, value, HydParam.Pressure)
- elif attr.lower() in ['setting']:
+ elif attr.lower() == 'setting':
if isinstance(link, Valve):
- if link.valve_type.upper() in ['PRV', 'PBV', 'PSV']:
+ if link.valve_type.upper() in ['PRV', 'PBV', 'PSV']: # noqa: PLR6201
value = to_si(self.inp_units, value, HydParam.Pressure)
- elif link.valve_type.upper() in ['FCV']:
+ elif link.valve_type.upper() == 'FCV':
value = to_si(self.inp_units, value, HydParam.Flow)
else_acts.append(ControlAction(link, attr, value))
- return Rule(final_condition, then_acts, else_acts, priority=self.priority, name=self.ruleID)
+ return Rule(
+ final_condition,
+ then_acts,
+ else_acts,
+ priority=self.priority,
+ name=self.ruleID,
+ )
-class BinFile(object):
- """
- EPANET binary output file reader.
-
+class BinFile:
+ """EPANET binary output file reader.
+
This class provides read functionality for EPANET binary output files.
-
+
Parameters
----------
results_type : list of :class:`~wntrfr.epanet.util.ResultType`, default=None
This parameter is *only* active when using a subclass of the BinFile that implements
- a custom reader or writer.
+ a custom reader or writer.
If ``None``, then all results will be saved (node quality, demand, link flow, etc.).
Otherwise, a list of result types can be passed to limit the memory used.
network : bool, default=False
@@ -2558,19 +3378,26 @@ class BinFile(object):
Save the statistics lines (different from the stats flag in the inp file) that are
automatically calculated regarding hydraulic conditions.
convert_status : bool, default=True
- Convert the EPANET link status (8 values) to simpler WNTR status (3 values). By
+ Convert the EPANET link status (8 values) to simpler WNTR status (3 values). By
default, this is done, and the encoded-cause status values are converted simple state
values, instead.
Returns
- ----------
+ -------
:class:`~wntrfr.sim.results.SimulationResults`
A WNTR results object will be created and added to the instance after read.
"""
- def __init__(self, result_types=None, network=False, energy=False, statistics=False,
- convert_status=True):
- if os.name in ['nt', 'dos'] or sys.platform in ['darwin']:
+
+ def __init__(
+ self,
+ result_types=None,
+ network=False, # noqa: FBT002
+ energy=False, # noqa: FBT002
+ statistics=False, # noqa: FBT002
+ convert_status=True, # noqa: FBT002
+ ):
+ if os.name in ['nt', 'dos'] or sys.platform == 'darwin': # noqa: PLR6201
self.ftype = '=f4'
else:
self.ftype = '=f4'
@@ -2599,21 +3426,21 @@ def __init__(self, result_types=None, network=False, energy=False, statistics=Fa
self.report_file = None
self.results = wntrfr.sim.SimulationResults()
if result_types is None:
- self.items = [ member for name, member in ResultType.__members__.items() ]
+ self.items = [member for name, member in ResultType.__members__.items()]
else:
self.items = result_types
self.create_network = network
self.keep_energy = energy
self.keep_statistics = statistics
- def _get_time(self, t):
+ def _get_time(self, t): # noqa: PLR6301
s = int(t)
- h = int(s/3600)
- s -= h*3600
- m = int(s/60)
- s -= m*60
+ h = int(s / 3600)
+ s -= h * 3600
+ m = int(s / 60)
+ s -= m * 60
s = int(s)
- return '{:02}:{:02}:{:02}'.format(h, m, s)
+ return f'{h:02}:{m:02}:{s:02}'
def save_network_desc_line(self, element, values):
"""Save network description meta-data and element characteristics.
@@ -2630,7 +3457,6 @@ def save_network_desc_line(self, element, values):
The values that go with the information
"""
- pass
def save_energy_line(self, pump_idx, pump_name, values):
"""Save pump energy from the output file.
@@ -2646,28 +3472,32 @@ def save_energy_line(self, pump_idx, pump_name, values):
the pump name
values : numpy.array
the values to save
-
+
"""
- pass
def finalize_save(self, good_read, sim_warnings):
"""Post-process data before writing results.
-
- This method, by default, does nothing. It is available to be overloaded
+
+ This method, by default, does nothing. It is available to be overloaded
in order to post process data.
-
+
Parameters
----------
good_read : bool
was the full file read correctly
sim_warnings : int
were there warnings issued during the simulation
-
+
"""
- pass
-# @run_lineprofile()
- def read(self, filename, convergence_error=False, darcy_weisbach=False, convert=True):
+ # @run_lineprofile()
+ def read( # noqa: C901, PLR0914, PLR0915
+ self,
+ filename,
+ convergence_error=False, # noqa: FBT002
+ darcy_weisbach=False, # noqa: FBT002
+ convert=True, # noqa: FBT002
+ ):
"""Read a binary file and create a results object.
Parameters
@@ -2676,22 +3506,23 @@ def read(self, filename, convergence_error=False, darcy_weisbach=False, convert=
An EPANET BIN output file
convergence_error: bool (optional)
If convergence_error is True, an error will be raised if the
- simulation does not converge. If convergence_error is False, partial results are returned,
+ simulation does not converge. If convergence_error is False, partial results are returned,
a warning will be issued, and results.error_code will be set to 0
if the simulation does not converge. Default = False.
Returns
-------
object
- returns a WaterNetworkResults object
+ returns a WaterNetworkResults object
+
"""
self.results = wntrfr.sim.SimulationResults()
-
- logger.debug('Read binary EPANET data from %s',filename)
- dt_str = 'u1' #.format(self.idlen)
- with open(filename, 'rb') as fin:
+
+ logger.debug('Read binary EPANET data from %s', filename)
+ dt_str = 'u1' # .format(self.idlen)
+ with open(filename, 'rb') as fin: # noqa: PTH123
ftype = self.ftype
- idlen = self.idlen
+ idlen = self.idlen # noqa: F841
logger.debug('... read prolog information ...')
prolog = np.fromfile(fin, dtype=np.int32, count=15)
magic1 = prolog[0]
@@ -2709,26 +3540,46 @@ def read(self, filename, convergence_error=False, darcy_weisbach=False, convert=
reportstart = prolog[12]
reportstep = prolog[13]
duration = prolog[14]
- logger.debug('EPANET/Toolkit version %d',version)
- logger.debug('Nodes: %d; Tanks/Resrv: %d Links: %d; Pumps: %d; Valves: %d',
- nnodes, ntanks, nlinks, npumps, nvalve)
- logger.debug('WQ opt: %s; Trace Node: %s; Flow Units %s; Pressure Units %s',
- wqopt, srctrace, flowunits, presunits)
- logger.debug('Statistics: %s; Report Start %d, step %d; Duration=%d sec',
- statsflag, reportstart, reportstep, duration)
+ logger.debug('EPANET/Toolkit version %d', version)
+ logger.debug(
+ 'Nodes: %d; Tanks/Resrv: %d Links: %d; Pumps: %d; Valves: %d',
+ nnodes,
+ ntanks,
+ nlinks,
+ npumps,
+ nvalve,
+ )
+ logger.debug(
+ 'WQ opt: %s; Trace Node: %s; Flow Units %s; Pressure Units %s',
+ wqopt,
+ srctrace,
+ flowunits,
+ presunits,
+ )
+ logger.debug(
+ 'Statistics: %s; Report Start %d, step %d; Duration=%d sec',
+ statsflag,
+ reportstart,
+ reportstep,
+ duration,
+ )
# Ignore the title lines
np.fromfile(fin, dtype=np.uint8, count=240)
inpfile = np.fromfile(fin, dtype=np.uint8, count=260)
rptfile = np.fromfile(fin, dtype=np.uint8, count=260)
- chemical = bytes(np.fromfile(fin, dtype=dt_str, count=self.idlen)[:]).decode(sys_default_enc)
-# wqunits = ''.join([chr(f) for f in np.fromfile(fin, dtype=np.uint8, count=idlen) if f!=0 ])
- wqunits = bytes(np.fromfile(fin, dtype=dt_str, count=self.idlen)[:]).decode(sys_default_enc)
- mass = wqunits.split('/',1)[0]
- if mass in ['mg', 'ug', u'mg', u'ug']:
+ chemical = bytes(
+ np.fromfile(fin, dtype=dt_str, count=self.idlen)[:]
+ ).decode(sys_default_enc)
+ # wqunits = ''.join([chr(f) for f in np.fromfile(fin, dtype=np.uint8, count=idlen) if f!=0 ])
+ wqunits = bytes(
+ np.fromfile(fin, dtype=dt_str, count=self.idlen)[:]
+ ).decode(sys_default_enc)
+ mass = wqunits.split('/', 1)[0]
+ if mass in ['mg', 'ug']: # noqa: PLR6201
massunits = MassUnits[mass]
else:
- massunits = MassUnits.mg
+ massunits = MassUnits.mg
self.flow_units = flowunits
self.pres_units = presunits
self.quality_type = wqopt
@@ -2747,18 +3598,32 @@ def read(self, filename, convergence_error=False, darcy_weisbach=False, convert=
self.report_file = rptfile
nodenames = []
linknames = []
- nodenames = [bytes(np.fromfile(fin, dtype=dt_str, count=self.idlen)).decode(sys_default_enc).replace('\x00','') for _ in range(nnodes)]
- linknames = [bytes(np.fromfile(fin, dtype=dt_str, count=self.idlen)).decode(sys_default_enc).replace('\x00','') for _ in range(nlinks)]
+ nodenames = [
+ bytes(np.fromfile(fin, dtype=dt_str, count=self.idlen))
+ .decode(sys_default_enc)
+ .replace('\x00', '')
+ for _ in range(nnodes)
+ ]
+ linknames = [
+ bytes(np.fromfile(fin, dtype=dt_str, count=self.idlen))
+ .decode(sys_default_enc)
+ .replace('\x00', '')
+ for _ in range(nlinks)
+ ]
self.node_names = np.array(nodenames)
self.link_names = np.array(linknames)
- linkstart = np.array(np.fromfile(fin, dtype=np.int32, count=nlinks), dtype=int)
- linkend = np.array(np.fromfile(fin, dtype=np.int32, count=nlinks), dtype=int)
+ linkstart = np.array( # noqa: F841
+ np.fromfile(fin, dtype=np.int32, count=nlinks), dtype=int
+ )
+ linkend = np.array( # noqa: F841
+ np.fromfile(fin, dtype=np.int32, count=nlinks), dtype=int
+ )
linktype = np.fromfile(fin, dtype=np.int32, count=nlinks)
- tankidxs = np.fromfile(fin, dtype=np.int32, count=ntanks)
- tankarea = np.fromfile(fin, dtype=np.dtype(ftype), count=ntanks)
- elevation = np.fromfile(fin, dtype=np.dtype(ftype), count=nnodes)
- linklen = np.fromfile(fin, dtype=np.dtype(ftype), count=nlinks)
- diameter = np.fromfile(fin, dtype=np.dtype(ftype), count=nlinks)
+ tankidxs = np.fromfile(fin, dtype=np.int32, count=ntanks) # noqa: F841
+ tankarea = np.fromfile(fin, dtype=np.dtype(ftype), count=ntanks) # noqa: F841
+ elevation = np.fromfile(fin, dtype=np.dtype(ftype), count=nnodes) # noqa: F841
+ linklen = np.fromfile(fin, dtype=np.dtype(ftype), count=nlinks) # noqa: F841
+ diameter = np.fromfile(fin, dtype=np.dtype(ftype), count=nlinks) # noqa: F841
"""
self.save_network_desc_line('link_start', linkstart)
self.save_network_desc_line('link_end', linkend)
@@ -2770,18 +3635,26 @@ def read(self, filename, convergence_error=False, darcy_weisbach=False, convert=
self.save_network_desc_line('link_diameter', diameter)
"""
logger.debug('... read energy data ...')
- for i in range(npumps):
- pidx = int(np.fromfile(fin,dtype=np.int32, count=1))
+ for i in range(npumps): # noqa: B007
+ pidx = int(np.fromfile(fin, dtype=np.int32, count=1))
energy = np.fromfile(fin, dtype=np.dtype(ftype), count=6)
- self.save_energy_line(pidx, linknames[pidx-1], energy)
+ self.save_energy_line(pidx, linknames[pidx - 1], energy)
peakenergy = np.fromfile(fin, dtype=np.dtype(ftype), count=1)
self.peak_energy = peakenergy
logger.debug('... read EP simulation data ...')
- reporttimes = np.arange(reportstart, duration+reportstep-(duration%reportstep), reportstep)
+ reporttimes = np.arange(
+ reportstart,
+ duration + reportstep - (duration % reportstep),
+ reportstep,
+ )
nrptsteps = len(reporttimes)
- statsN = nrptsteps
- if statsflag in [StatisticsType.Maximum, StatisticsType.Minimum, StatisticsType.Range]:
+ statsN = nrptsteps # noqa: N806, F841
+ if statsflag in [ # noqa: PLR6201
+ StatisticsType.Maximum,
+ StatisticsType.Minimum,
+ StatisticsType.Range,
+ ]:
nrptsteps = 1
reporttimes = [reportstart + reportstep]
self.num_periods = nrptsteps
@@ -2829,96 +3702,181 @@ def read(self, filename, convergence_error=False, darcy_weisbach=False, convert=
self.save_network_desc_line('link_start', pd.Series(data=names[linkstart-1], index=linknames, copy=True))
self.save_network_desc_line('link_end', pd.Series(data=names[linkend-1], index=linknames, copy=True))
"""
-
-# type_list = 4*nnodes*['node'] + 8*nlinks*['link']
- name_list = nodenames*4 + linknames*8
- valuetype = nnodes*['demand']+nnodes*['head']+nnodes*['pressure']+nnodes*['quality'] + nlinks*['flow']+nlinks*['velocity']+nlinks*['headloss']+nlinks*['linkquality']+nlinks*['linkstatus']+nlinks*['linksetting']+nlinks*['reactionrate']+nlinks*['frictionfactor']
-
-# tuples = zip(type_list, valuetype, name_list)
+
+ # type_list = 4*nnodes*['node'] + 8*nlinks*['link']
+ name_list = nodenames * 4 + linknames * 8
+ valuetype = (
+ nnodes * ['demand']
+ + nnodes * ['head']
+ + nnodes * ['pressure']
+ + nnodes * ['quality']
+ + nlinks * ['flow']
+ + nlinks * ['velocity']
+ + nlinks * ['headloss']
+ + nlinks * ['linkquality']
+ + nlinks * ['linkstatus']
+ + nlinks * ['linksetting']
+ + nlinks * ['reactionrate']
+ + nlinks * ['frictionfactor']
+ )
+
+ # tuples = zip(type_list, valuetype, name_list)
tuples = list(zip(valuetype, name_list))
-# tuples = [(valuetype[i], v) for i, v in enumerate(name_list)]
- index = pd.MultiIndex.from_tuples(tuples, names=['value','name'])
-
+ # tuples = [(valuetype[i], v) for i, v in enumerate(name_list)]
+ index = pd.MultiIndex.from_tuples(tuples, names=['value', 'name'])
+
try:
- data = np.fromfile(fin, dtype = np.dtype(ftype), count = (4*nnodes+8*nlinks)*nrptsteps)
+ data = np.fromfile(
+ fin,
+ dtype=np.dtype(ftype),
+ count=(4 * nnodes + 8 * nlinks) * nrptsteps,
+ )
except Exception as e:
- logger.exception('Failed to process file: %s', e)
-
- N = int(np.floor(len(data)/(4*nnodes+8*nlinks)))
- if N < nrptsteps:
+ logger.exception('Failed to process file: %s', e) # noqa: TRY401
+
+ N = int(np.floor(len(data) / (4 * nnodes + 8 * nlinks))) # noqa: N806
+ if nrptsteps > N:
t = reporttimes[N]
if convergence_error:
- logger.error('Simulation did not converge at time ' + self._get_time(t) + '.')
- raise RuntimeError('Simulation did not converge at time ' + self._get_time(t) + '.')
- else:
- data = data[0:N*(4*nnodes+8*nlinks)]
- data = np.reshape(data, (N, (4*nnodes+8*nlinks)))
+ logger.error(
+ 'Simulation did not converge at time ' # noqa: G003
+ + self._get_time(t)
+ + '.'
+ )
+ raise RuntimeError( # noqa: DOC501
+ 'Simulation did not converge at time '
+ + self._get_time(t)
+ + '.'
+ )
+ else: # noqa: RET506
+ data = data[0 : N * (4 * nnodes + 8 * nlinks)]
+ data = np.reshape(data, (N, (4 * nnodes + 8 * nlinks)))
reporttimes = reporttimes[0:N]
- warnings.warn('Simulation did not converge at time ' + self._get_time(t) + '.')
+ warnings.warn( # noqa: B028
+ 'Simulation did not converge at time '
+ + self._get_time(t)
+ + '.'
+ )
self.results.error_code = wntrfr.sim.results.ResultsStatus.error
else:
- data = np.reshape(data, (nrptsteps, (4*nnodes+8*nlinks)))
+ data = np.reshape(data, (nrptsteps, (4 * nnodes + 8 * nlinks)))
self.results.error_code = None
- df = pd.DataFrame(data.transpose(), index =index, columns = reporttimes)
- df = df.transpose()
-
+ df = pd.DataFrame(data.transpose(), index=index, columns=reporttimes) # noqa: PD901
+ df = df.transpose() # noqa: PD901
+
self.results.node = {}
self.results.link = {}
self.results.network_name = self.inp_file
-
+
if convert:
# Node Results
- self.results.node['demand'] = HydParam.Demand._to_si(self.flow_units, df['demand'])
- self.results.node['head'] = HydParam.HydraulicHead._to_si(self.flow_units, df['head'])
- self.results.node['pressure'] = HydParam.Pressure._to_si(self.flow_units, df['pressure'])
-
+ self.results.node['demand'] = HydParam.Demand._to_si( # noqa: SLF001
+ self.flow_units, df['demand']
+ )
+ self.results.node['head'] = HydParam.HydraulicHead._to_si( # noqa: SLF001
+ self.flow_units, df['head']
+ )
+ self.results.node['pressure'] = HydParam.Pressure._to_si( # noqa: SLF001
+ self.flow_units, df['pressure']
+ )
+
# Water Quality Results (node and link)
if self.quality_type is QualType.Chem:
- self.results.node['quality'] = QualParam.Concentration._to_si(self.flow_units, df['quality'], mass_units=self.mass_units)
- self.results.link['quality'] = QualParam.Concentration._to_si(self.flow_units, df['linkquality'], mass_units=self.mass_units)
+ self.results.node['quality'] = QualParam.Concentration._to_si( # noqa: SLF001
+ self.flow_units, df['quality'], mass_units=self.mass_units
+ )
+ self.results.link['quality'] = QualParam.Concentration._to_si( # noqa: SLF001
+ self.flow_units,
+ df['linkquality'],
+ mass_units=self.mass_units,
+ )
elif self.quality_type is QualType.Age:
- self.results.node['quality'] = QualParam.WaterAge._to_si(self.flow_units, df['quality'], mass_units=self.mass_units)
- self.results.link['quality'] = QualParam.WaterAge._to_si(self.flow_units, df['linkquality'], mass_units=self.mass_units)
+ self.results.node['quality'] = QualParam.WaterAge._to_si( # noqa: SLF001
+ self.flow_units, df['quality'], mass_units=self.mass_units
+ )
+ self.results.link['quality'] = QualParam.WaterAge._to_si( # noqa: SLF001
+ self.flow_units,
+ df['linkquality'],
+ mass_units=self.mass_units,
+ )
else:
self.results.node['quality'] = df['quality']
self.results.link['quality'] = df['linkquality']
-
+
# Link Results
- self.results.link['flowrate'] = HydParam.Flow._to_si(self.flow_units, df['flow'])
- self.results.link['velocity'] = HydParam.Velocity._to_si(self.flow_units, df['velocity'])
-
+ self.results.link['flowrate'] = HydParam.Flow._to_si( # noqa: SLF001
+ self.flow_units, df['flow']
+ )
+ self.results.link['velocity'] = HydParam.Velocity._to_si( # noqa: SLF001
+ self.flow_units, df['velocity']
+ )
+
headloss = np.array(df['headloss'])
- headloss[:, linktype < 2] = to_si(self.flow_units, headloss[:, linktype < 2], HydParam.HeadLoss) # Pipe or CV
- headloss[:, linktype >= 2] = to_si(self.flow_units, headloss[:, linktype >= 2], HydParam.Length) # Pump or Valve
- self.results.link["headloss"] = pd.DataFrame(data=headloss, columns=linknames, index=reporttimes)
-
+ headloss[:, linktype < 2] = to_si( # noqa: PLR2004
+ self.flow_units,
+ headloss[:, linktype < 2], # noqa: PLR2004
+ HydParam.HeadLoss,
+ ) # Pipe or CV
+ headloss[:, linktype >= 2] = to_si( # noqa: PLR2004
+ self.flow_units,
+ headloss[:, linktype >= 2], # noqa: PLR2004
+ HydParam.Length,
+ ) # Pump or Valve
+ self.results.link['headloss'] = pd.DataFrame(
+ data=headloss, columns=linknames, index=reporttimes
+ )
+
status = np.array(df['linkstatus'])
if self.convert_status:
- status[status <= 2] = 0
- status[status == 3] = 1
- status[status >= 5] = 1
- status[status == 4] = 2
- self.results.link['status'] = pd.DataFrame(data=status, columns=linknames, index=reporttimes)
-
+ status[status <= 2] = 0 # noqa: PLR2004
+ status[status == 3] = 1 # noqa: PLR2004
+ status[status >= 5] = 1 # noqa: PLR2004
+ status[status == 4] = 2 # noqa: PLR2004
+ self.results.link['status'] = pd.DataFrame(
+ data=status, columns=linknames, index=reporttimes
+ )
+
setting = np.array(df['linksetting'])
# pump setting is relative speed (unitless)
- setting[:, linktype == EN.PIPE] = to_si(self.flow_units, setting[:, linktype == EN.PIPE], HydParam.RoughnessCoeff,
- darcy_weisbach=darcy_weisbach)
- setting[:, linktype == EN.PRV] = to_si(self.flow_units, setting[:, linktype == EN.PRV], HydParam.Pressure)
- setting[:, linktype == EN.PSV] = to_si(self.flow_units, setting[:, linktype == EN.PSV], HydParam.Pressure)
- setting[:, linktype == EN.PBV] = to_si(self.flow_units, setting[:, linktype == EN.PBV], HydParam.Pressure)
- setting[:, linktype == EN.FCV] = to_si(self.flow_units, setting[:, linktype == EN.FCV], HydParam.Flow)
- self.results.link['setting'] = pd.DataFrame(data=setting, columns=linknames, index=reporttimes)
-
+ setting[:, linktype == EN.PIPE] = to_si(
+ self.flow_units,
+ setting[:, linktype == EN.PIPE],
+ HydParam.RoughnessCoeff,
+ darcy_weisbach=darcy_weisbach,
+ )
+ setting[:, linktype == EN.PRV] = to_si(
+ self.flow_units,
+ setting[:, linktype == EN.PRV],
+ HydParam.Pressure,
+ )
+ setting[:, linktype == EN.PSV] = to_si(
+ self.flow_units,
+ setting[:, linktype == EN.PSV],
+ HydParam.Pressure,
+ )
+ setting[:, linktype == EN.PBV] = to_si(
+ self.flow_units,
+ setting[:, linktype == EN.PBV],
+ HydParam.Pressure,
+ )
+ setting[:, linktype == EN.FCV] = to_si(
+ self.flow_units, setting[:, linktype == EN.FCV], HydParam.Flow
+ )
+ self.results.link['setting'] = pd.DataFrame(
+ data=setting, columns=linknames, index=reporttimes
+ )
+
self.results.link['friction_factor'] = df['frictionfactor']
- self.results.link['reaction_rate'] = QualParam.ReactionRate._to_si(self.flow_units, df['reactionrate'],self.mass_units)
+ self.results.link['reaction_rate'] = QualParam.ReactionRate._to_si( # noqa: SLF001
+ self.flow_units, df['reactionrate'], self.mass_units
+ )
else:
self.results.node['demand'] = df['demand']
self.results.node['head'] = df['head']
self.results.node['pressure'] = df['pressure']
self.results.node['quality'] = df['quality']
-
+
self.results.link['flowrate'] = df['flow']
self.results.link['headloss'] = df['headloss']
self.results.link['velocity'] = df['velocity']
@@ -2927,7 +3885,7 @@ def read(self, filename, convergence_error=False, darcy_weisbach=False, convert=
self.results.link['setting'] = df['linksetting']
self.results.link['friction_factor'] = df['frictionfactor']
self.results.link['reaction_rate'] = df['reactionrate']
-
+
logger.debug('... read epilog ...')
# Read the averages and then the number of periods for checks
averages = np.fromfile(fin, dtype=np.dtype(ftype), count=4)
@@ -2936,27 +3894,30 @@ def read(self, filename, convergence_error=False, darcy_weisbach=False, convert=
warnflag = np.fromfile(fin, dtype=np.int32, count=1)
magic2 = np.fromfile(fin, dtype=np.int32, count=1)
if magic1 != magic2:
- logger.critical('The magic number did not match -- binary incomplete or incorrectly read. If you believe this file IS complete, please try a different float type. Current type is "%s"',ftype)
- #print numperiods, warnflag, magic
+ logger.critical(
+ 'The magic number did not match -- binary incomplete or incorrectly read. If you believe this file IS complete, please try a different float type. Current type is "%s"',
+ ftype,
+ )
+ # print numperiods, warnflag, magic
if warnflag != 0:
logger.warning('Warnings were issued during simulation')
- self.finalize_save(magic1==magic2, warnflag)
-
+ self.finalize_save(magic1 == magic2, warnflag)
+
return self.results
-class NoSectionError(Exception):
+class NoSectionError(Exception): # noqa: D101
pass
-class _InpFileDifferHelper(object): # pragma: no cover
+class _InpFileDifferHelper: # pragma: no cover
def __init__(self, f):
- """
- Parameters
+ """Parameters
----------
f: str
- """
- self._f = open(f, 'r')
+
+ """ # noqa: D205
+ self._f = open(f) # noqa: PLW1514, PTH123, SIM115
self._num_lines = len(self._f.readlines())
self._end = self._f.tell()
self._f.seek(0)
@@ -2965,7 +3926,7 @@ def __init__(self, f):
def f(self):
return self._f
- def iter(self, start=0, stop=None, skip_section_headings=True):
+ def iter(self, start=0, stop=None, skip_section_headings=True): # noqa: FBT002
if stop is None:
stop = self._end
f = self.f
@@ -2984,8 +3945,7 @@ def iter(self, start=0, stop=None, skip_section_headings=True):
yield loc, line
def get_section(self, sec):
- """
- Parameters
+ """Parameters
----------
sec: str
The section
@@ -2996,13 +3956,13 @@ def get_section(self, sec):
The starting point in the file for sec
end: int
The ending point in the file for sec
-
- """
+
+ """ # noqa: D205
start = None
end = None
in_sec = False
for loc, line in self.iter(0, None, skip_section_headings=False):
- line = line.split(';')[0]
+ line = line.split(';')[0] # noqa: PLW2901
if sec in line:
start = loc
in_sec = True
@@ -3012,54 +3972,53 @@ def get_section(self, sec):
in_sec = False
break
if start is None:
- raise NoSectionError('Could not find section ' + sec)
+ raise NoSectionError('Could not find section ' + sec) # noqa: DOC501
if end is None:
end = self._end
return start, end
def contains_section(self, sec):
- """
- Parameters
+ """Parameters
----------
sec: str
- """
+
+ """ # noqa: D205
try:
self.get_section(sec)
- return True
+ return True # noqa: TRY300
except NoSectionError:
return False
def _convert_line(line): # pragma: no cover
- """
- Parameters
+ """Parameters
----------
line: str
Returns
-------
list
-
- """
+
+ """ # noqa: D205
line = line.upper().split()
tmp = []
for i in line:
if '.' in i:
try:
tmp.append(float(i))
- except:
+ except: # noqa: E722
tmp.append(i)
else:
try:
tmp.append(int(i))
- except:
+ except: # noqa: E722
tmp.append(i)
return tmp
-def _compare_lines(line1, line2, tol=1e-14): # pragma: no cover
- """
- Parameters
+def _compare_lines(line1, line2, tol=1e-14): # pragma: no cover
+ """Parameters
----------
line1: list of str
line2: list of str
@@ -3067,32 +4026,29 @@ def _compare_lines(line1, line2, tol=1e-14): # pragma: no cover
Returns
-------
bool
-
- """
+
+ """ # noqa: D205
if len(line1) != len(line2):
return False
for i, a in enumerate(line1):
b = line2[i]
- if isinstance(a, (int, float)):
- if a != b:
- return False
- elif isinstance(a, int) and isinstance(b, int):
+ if isinstance(a, (int, float)) or (
+ isinstance(a, int) and isinstance(b, int)
+ ):
if a != b:
return False
elif isinstance(a, (int, float)) and isinstance(b, (int, float)):
if abs(a - b) > tol:
return False
- else:
- if a != b:
- return False
+ elif a != b:
+ return False
return True
def _clean_line(wn, sec, line): # pragma: no cover
- """
- Parameters
+ """Parameters
----------
wn: wntrfr.network.WaterNetworkModel
sec: str
@@ -3101,10 +4057,10 @@ def _clean_line(wn, sec, line): # pragma: no cover
Returns
-------
new_list: list of str
-
- """
+
+ """ # noqa: D205
if sec == '[JUNCTIONS]':
- if len(line) == 4:
+ if len(line) == 4: # noqa: PLR2004
other = wn.options.hydraulic.pattern
if other is None:
other = 1
@@ -3116,9 +4072,8 @@ def _clean_line(wn, sec, line): # pragma: no cover
return line
-def _read_control_line(line, wn, flow_units, control_name):
- """
- Parameters
+def _read_control_line(line, wn, flow_units, control_name): # noqa: C901
+ """Parameters
----------
line: str
wn: wntrfr.network.WaterNetworkModel
@@ -3128,47 +4083,61 @@ def _read_control_line(line, wn, flow_units, control_name):
Returns
-------
control_obj: Control
-
- """
+
+ """ # noqa: D205
line = line.split(';')[0]
current = line.split()
if current == []:
- return
+ return None
link_name = current[1]
link = wn.get_link(link_name)
if current[5].upper() != 'TIME' and current[5].upper() != 'CLOCKTIME':
node_name = current[5]
current = [i.upper() for i in current]
- current[1] = link_name # don't capitalize the link name
+ current[1] = link_name # don't capitalize the link name
# Create the control action object
status = current[2].upper()
- if status == 'OPEN' or status == 'OPENED' or status == 'CLOSED' or status == 'ACTIVE':
+ if (
+ status == 'OPEN' # noqa: PLR1714
+ or status == 'OPENED'
+ or status == 'CLOSED'
+ or status == 'ACTIVE'
+ ):
setting = LinkStatus[status].value
action_obj = wntrfr.network.ControlAction(link, 'status', setting)
- else:
- if isinstance(link, wntrfr.network.Pump):
- action_obj = wntrfr.network.ControlAction(link, 'base_speed', float(current[2]))
- elif isinstance(link, wntrfr.network.Valve):
- if link.valve_type == 'PRV' or link.valve_type == 'PSV' or link.valve_type == 'PBV':
- setting = to_si(flow_units, float(current[2]), HydParam.Pressure)
- elif link.valve_type == 'FCV':
- setting = to_si(flow_units, float(current[2]), HydParam.Flow)
- elif link.valve_type == 'TCV':
- setting = float(current[2])
- elif link.valve_type == 'GPV':
- setting = current[2]
- else:
- raise ValueError('Unrecognized valve type {0} while parsing control {1}'.format(link.valve_type, line))
- action_obj = wntrfr.network.ControlAction(link, 'setting', setting)
+ elif isinstance(link, wntrfr.network.Pump):
+ action_obj = wntrfr.network.ControlAction(
+ link, 'base_speed', float(current[2])
+ )
+ elif isinstance(link, wntrfr.network.Valve):
+ if (
+ link.valve_type == 'PRV' # noqa: PLR1714
+ or link.valve_type == 'PSV'
+ or link.valve_type == 'PBV'
+ ):
+ setting = to_si(flow_units, float(current[2]), HydParam.Pressure)
+ elif link.valve_type == 'FCV':
+ setting = to_si(flow_units, float(current[2]), HydParam.Flow)
+ elif link.valve_type == 'TCV':
+ setting = float(current[2])
+ elif link.valve_type == 'GPV':
+ setting = current[2]
else:
- raise RuntimeError(('Links of type {0} can only have controls that change\n'.format(type(link))+
- 'the link status. Control: {0}'.format(line)))
+ raise ValueError( # noqa: DOC501, TRY003
+ f'Unrecognized valve type {link.valve_type} while parsing control {line}' # noqa: EM102
+ )
+ action_obj = wntrfr.network.ControlAction(link, 'setting', setting)
+ else:
+ raise RuntimeError( # noqa: DOC501
+ f'Links of type {type(link)} can only have controls that change\n' # noqa: ISC003
+ + f'the link status. Control: {line}'
+ )
# Create the control object
- #control_count += 1
- #control_name = 'control '+str(control_count)
+ # control_count += 1
+ # control_name = 'control '+str(control_count)
if 'TIME' not in current and 'CLOCKTIME' not in current:
threshold = None
if 'IF' in current:
@@ -3178,66 +4147,92 @@ def _read_control_line(line, wn, flow_units, control_name):
elif current[6] == 'BELOW':
oper = np.less
else:
- raise RuntimeError("The following control is not recognized: " + line)
+ raise RuntimeError( # noqa: DOC501
+ 'The following control is not recognized: ' + line
+ )
# OKAY - we are adding in the elevation. This is A PROBLEM
# IN THE INP WRITER. Now that we know, we can fix it, but
# if this changes, it will affect multiple pieces, just an
# FYI.
if node.node_type == 'Junction':
- threshold = to_si(flow_units,
- float(current[7]), HydParam.Pressure)# + node.elevation
- control_obj = Control._conditional_control(node, 'pressure', oper, threshold, action_obj, control_name)
+ threshold = to_si(
+ flow_units, float(current[7]), HydParam.Pressure
+ ) # + node.elevation
+ control_obj = Control._conditional_control( # noqa: SLF001
+ node, 'pressure', oper, threshold, action_obj, control_name
+ )
elif node.node_type == 'Tank':
- threshold = to_si(flow_units,
- float(current[7]), HydParam.HydraulicHead)# + node.elevation
- control_obj = Control._conditional_control(node, 'level', oper, threshold, action_obj, control_name)
+ threshold = to_si(
+ flow_units, float(current[7]), HydParam.HydraulicHead
+ ) # + node.elevation
+ control_obj = Control._conditional_control( # noqa: SLF001
+ node, 'level', oper, threshold, action_obj, control_name
+ )
else:
- raise RuntimeError("The following control is not recognized: " + line)
-# control_name = ''
-# for i in range(len(current)-1):
-# control_name = control_name + '/' + current[i]
-# control_name = control_name + '/' + str(round(threshold, 2))
- else:
- if 'CLOCKTIME' not in current: # at time
- if 'TIME' not in current:
- raise ValueError('Unrecognized line in inp file: {0}'.format(line))
-
+ raise RuntimeError('The following control is not recognized: ' + line) # noqa: DOC501
+ # control_name = ''
+ # for i in range(len(current)-1):
+ # control_name = control_name + '/' + current[i]
+ # control_name = control_name + '/' + str(round(threshold, 2))
+ elif 'CLOCKTIME' not in current: # at time
+ if 'TIME' not in current:
+ raise ValueError(f'Unrecognized line in inp file: {line}') # noqa: DOC501, EM102, TRY003
+
+ if ':' in current[5]:
+ run_at_time = int(_str_time_to_sec(current[5]))
+ else:
+ run_at_time = int(float(current[5]) * 3600)
+ control_obj = Control._time_control( # noqa: SLF001
+ wn,
+ run_at_time,
+ 'SIM_TIME',
+ False, # noqa: FBT003
+ action_obj,
+ control_name,
+ )
+ # control_name = ''
+ # for i in range(len(current)-1):
+ # control_name = control_name + '/' + current[i]
+ # control_name = control_name + '/' + str(run_at_time)
+ else: # at clocktime
+ if len(current) < 7: # noqa: PLR2004
if ':' in current[5]:
run_at_time = int(_str_time_to_sec(current[5]))
else:
- run_at_time = int(float(current[5])*3600)
- control_obj = Control._time_control(wn, run_at_time, 'SIM_TIME', False, action_obj, control_name)
-# control_name = ''
-# for i in range(len(current)-1):
-# control_name = control_name + '/' + current[i]
-# control_name = control_name + '/' + str(run_at_time)
- else: # at clocktime
- if len(current) < 7:
- if ':' in current[5]:
- run_at_time = int(_str_time_to_sec(current[5]))
- else:
- run_at_time = int(float(current[5])*3600)
- else:
- run_at_time = int(_clock_time_to_sec(current[5], current[6]))
- control_obj = Control._time_control(wn, run_at_time, 'CLOCK_TIME', True, action_obj, control_name)
-# control_name = ''
-# for i in range(len(current)-1):
-# control_name = control_name + '/' + current[i]
-# control_name = control_name + '/' + str(run_at_time)
+ run_at_time = int(float(current[5]) * 3600)
+ else:
+ run_at_time = int(_clock_time_to_sec(current[5], current[6]))
+ control_obj = Control._time_control( # noqa: SLF001
+ wn,
+ run_at_time,
+ 'CLOCK_TIME',
+ True, # noqa: FBT003
+ action_obj,
+ control_name,
+ )
+ # control_name = ''
+ # for i in range(len(current)-1):
+ # control_name = control_name + '/' + current[i]
+ # control_name = control_name + '/' + str(run_at_time)
return control_obj
-def _diff_inp_files(file1, file2=None, float_tol=1e-8, max_diff_lines_per_section=5,
- htmldiff_file='diff.html'): # pragma: no cover
- """
- Parameters
+def _diff_inp_files( # noqa: C901
+ file1,
+ file2=None,
+ float_tol=1e-8,
+ max_diff_lines_per_section=5,
+ htmldiff_file='diff.html',
+): # pragma: no cover
+ """Parameters
----------
file1: str
file2: str
float_tol: float
max_diff_lines_per_section: int
htmldiff_file: str
- """
+
+ """ # noqa: D205
wn = InpFile().read(file1)
f1 = _InpFileDifferHelper(file1)
if file2 is None:
@@ -3248,11 +4243,11 @@ def _diff_inp_files(file1, file2=None, float_tol=1e-8, max_diff_lines_per_sectio
different_lines_1 = []
different_lines_2 = []
n = 0
-
+
for section in _INP_SECTIONS:
if not f1.contains_section(section):
if f2.contains_section(section):
- print('\tfile1 does not contain section {0} but file2 does.'.format(section))
+ print(f'\tfile1 does not contain section {section} but file2 does.') # noqa: T201
continue
start1, stop1 = f1.get_section(section)
start2, stop2 = f2.get_section(section)
@@ -3272,7 +4267,7 @@ def _diff_inp_files(file1, file2=None, float_tol=1e-8, max_diff_lines_per_sectio
tmp_line = line1
label = tmp_label
else:
- tmp_line += " " + " ".join(line1.split()[1:])
+ tmp_line += ' ' + ' '.join(line1.split()[1:])
if tmp_line is not None:
new_lines_1.append((tmp_loc, tmp_line))
label = None
@@ -3287,7 +4282,7 @@ def _diff_inp_files(file1, file2=None, float_tol=1e-8, max_diff_lines_per_sectio
tmp_line = line2
label = tmp_label
else:
- tmp_line += " " + " ".join(line2.split()[1:])
+ tmp_line += ' ' + ' '.join(line2.split()[1:])
if tmp_line is not None:
new_lines_2.append((tmp_loc, tmp_line))
else:
@@ -3301,53 +4296,53 @@ def _diff_inp_files(file1, file2=None, float_tol=1e-8, max_diff_lines_per_sectio
assert len(different_lines_1) == len(different_lines_2)
n1 = 0
n2 = 0
- for loc1, line1 in new_lines_1:
+ for loc1, line1 in new_lines_1: # noqa: B007
different_lines_1.append(line1)
n1 += 1
- for loc2, line2 in new_lines_2:
+ for loc2, line2 in new_lines_2: # noqa: B007
different_lines_2.append(line2)
n2 += 1
if n1 > n2:
n = n1 - n2
- for i in range(n):
- different_lines_2.append("")
+ for i in range(n): # noqa: B007
+ different_lines_2.append('') # noqa: PERF401
elif n2 > n1:
n = n2 - n1
- for i in range(n):
- different_lines_1.append("")
+ for i in range(n): # noqa: B007
+ different_lines_1.append('') # noqa: PERF401
else:
- raise RuntimeError('Unexpected')
+ raise RuntimeError('Unexpected') # noqa: DOC501, EM101
continue
-
+
section_line_counter = 0
f2_iter = iter(new_lines_2)
- for loc1, line1 in new_lines_1:
+ for loc1, line1 in new_lines_1: # noqa: B007
orig_line_1 = line1
loc2, line2 = next(f2_iter)
orig_line_2 = line2
- line1 = _convert_line(line1)
+ line1 = _convert_line(line1) # noqa: PLW2901
line2 = _convert_line(line2)
- line1 = _clean_line(wn, section, line1)
+ line1 = _clean_line(wn, section, line1) # noqa: PLW2901
line2 = _clean_line(wn, section, line2)
if not _compare_lines(line1, line2, tol=float_tol):
if section_line_counter < max_diff_lines_per_section:
- section_line_counter = section_line_counter+1
+ section_line_counter = section_line_counter + 1 # noqa: PLR6104
else:
break
different_lines_1.append(orig_line_1)
different_lines_2.append(orig_line_2)
-
- if len(different_lines_1) < 200: # If lines < 200 use difflib
+
+ if len(different_lines_1) < 200: # If lines < 200 use difflib # noqa: PLR2004
differ = difflib.HtmlDiff()
html_diff = differ.make_file(different_lines_1, different_lines_2)
- else: # otherwise, create a simple html file
- differ_df = pd.DataFrame([different_lines_1, different_lines_2],
- index=[file1, file2]).transpose()
+ else: # otherwise, create a simple html file
+ differ_df = pd.DataFrame(
+ [different_lines_1, different_lines_2], index=[file1, file2]
+ ).transpose()
html_diff = differ_df.to_html()
-
- g = open(htmldiff_file, 'w')
+
+ g = open(htmldiff_file, 'w') # noqa: PLW1514, PTH123, SIM115
g.write(html_diff)
g.close()
-
- return n
+ return n
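Reviewer note (not part of the patch): the sketch below shows how the BinFile reader refactored above is typically driven. The import path and the binary file name are assumptions — the diff only shows the class body — and only calls visible in this diff are used.

    # Hedged usage sketch, assuming BinFile lives in EnhancedWNTR.epanet.io
    # next to the toolkit module patched below; adjust the import as needed.
    from EnhancedWNTR.epanet.io import BinFile  # hypothetical path

    reader = BinFile()  # defaults: keep all result types, no network/energy/statistics extras
    # read() fills and returns a wntrfr.sim.SimulationResults object; with
    # convert=True the node/link tables are converted from EPANET to SI units.
    results = reader.read('example.bin', convergence_error=False,
                          darcy_weisbach=False, convert=True)
    pressure = results.node['pressure']   # DataFrame indexed by report time
    flowrate = results.link['flowrate']   # SI units because convert=True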
diff --git a/modules/systemPerformance/REWET/REWET/EnhancedWNTR/epanet/toolkit.py b/modules/systemPerformance/REWET/REWET/EnhancedWNTR/epanet/toolkit.py
index 67ef6f855..c779ffadb 100644
--- a/modules/systemPerformance/REWET/REWET/EnhancedWNTR/epanet/toolkit.py
+++ b/modules/systemPerformance/REWET/REWET/EnhancedWNTR/epanet/toolkit.py
@@ -1,79 +1,95 @@
-# -*- coding: utf-8 -*-
-"""
-Created on Wed May 26 16:11:36 2021
+"""Created on Wed May 26 16:11:36 2021
@author: snaeimi
-"""
-import wntrfr.epanet.toolkit
-import numpy as np
+""" # noqa: CPY001, D400
+
import ctypes
-import os, sys
-from pkg_resources import resource_filename
+import logging
+import os
import platform
+import sys
+
+import numpy as np
+import wntrfr.epanet.toolkit
+from pkg_resources import resource_filename
-import logging
logger = logging.getLogger(__name__)
-class EpanetException(Exception):
+
+class EpanetException(Exception): # noqa: N818, D101
pass
-class ENepanet(wntrfr.epanet.toolkit.ENepanet):
- def __init__(self, inpfile='', rptfile='', binfile='', changed_epanet=False, version=2.2):
- if changed_epanet==False or changed_epanet==True:
- self.changed_epanet=changed_epanet
+
+class ENepanet(wntrfr.epanet.toolkit.ENepanet): # noqa: D101
+ def __init__( # noqa: C901
+ self,
+ inpfile='',
+ rptfile='',
+ binfile='',
+ changed_epanet=False, # noqa: FBT002
+ version=2.2,
+ ):
+ if changed_epanet == False or changed_epanet == True: # noqa: E712, PLR1714
+ self.changed_epanet = changed_epanet
else:
- raise ValueError("changed_epanet must be a boolean value")
-
- if changed_epanet==False:
+ raise ValueError('changed_epanet must be a boolean value') # noqa: EM101, TRY003
+
+ if changed_epanet == False: # noqa: E712
super().__init__(inpfile, rptfile, binfile, version=version)
else:
- try:
+ try: # noqa: SIM105
super().__init__(inpfile, rptfile, binfile, version=version)
- except:
- pass # to add robustness for the time when for the WNTR
- #cannot load the umodified DLLs for any reason
-
- if float(version) != 2.2:
- raise ValueError("EPANET version must be 2.2 when using the changed version")
-
- elif float(version) == 2.2:
- libnames = ["epanet22_mod", "epanet22_win32_mod"]
- if "64" in platform.machine():
- libnames.insert(0, "epanet22_amd64_mod")
+ except: # noqa: S110, E722
+ pass # to add robustness for when WNTR
+ # cannot load the unmodified DLLs for any reason
+
+ if float(version) != 2.2: # noqa: PLR2004
+ raise ValueError( # noqa: TRY003
+ 'EPANET version must be 2.2 when using the changed version' # noqa: EM101
+ )
+
+ elif float(version) == 2.2: # noqa: RET506, PLR2004
+ libnames = ['epanet22_mod', 'epanet22_win32_mod']
+ if '64' in platform.machine():
+ libnames.insert(0, 'epanet22_amd64_mod')
for lib in libnames:
try:
- if os.name in ["nt", "dos"]:
+ if os.name in ['nt', 'dos']: # noqa: PLR6201
libepanet = resource_filename(
- __name__, "Windows/%s.dll" % lib
+ __name__,
+ 'Windows/%s.dll' % lib, # noqa: UP031
)
self.ENlib = ctypes.windll.LoadLibrary(libepanet)
- elif sys.platform in ["darwin"]:
+ elif sys.platform == 'darwin':
libepanet = resource_filename(
- __name__, "Darwin/lib%s.dylib" % lib
+ __name__,
+ 'Darwin/lib%s.dylib' % lib, # noqa: UP031
)
self.ENlib = ctypes.cdll.LoadLibrary(libepanet)
else:
libepanet = resource_filename(
- __name__, "Linux/lib%s.so" % lib
+ __name__,
+ 'Linux/lib%s.so' % lib, # noqa: UP031
)
self.ENlib = ctypes.cdll.LoadLibrary(libepanet)
- return
- except Exception as E1:
+ return # noqa: TRY300
+ except Exception as E1: # noqa: PERF203
if lib == libnames[-1]:
- raise E1
- pass
+ raise E1 # noqa: TRY201
finally:
- if version >= 2.2 and '32' not in lib:
+ if version >= 2.2 and '32' not in lib: # noqa: PLR2004
self._project = ctypes.c_uint64()
- elif version >= 2.2:
+ elif version >= 2.2: # noqa: PLR2004
self._project = ctypes.c_uint32()
else:
- self._project = None
+ self._project = None
-
- def ENSetIgnoreFlag(self, ignore_flag=0):
- if abs(ignore_flag - np.round(ignore_flag))>0.00001 or ignore_flag<0:
- logger.error('ignore_flag must be int value and bigger than zero'+str(ignore_flag))
- flag=ctypes.c_int(int(ignore_flag))
- #print('++++++++++++++++++++++')
- #self.ENlib.ENEXTENDEDsetignoreflag(flag)
\ No newline at end of file
+ def ENSetIgnoreFlag(self, ignore_flag=0): # noqa: D102, N802, PLR6301
+ if abs(ignore_flag - np.round(ignore_flag)) > 0.00001 or ignore_flag < 0: # noqa: PLR2004
+ logger.error(
+ 'ignore_flag must be an integer value and at least zero: ' # noqa: G003
+ + str(ignore_flag)
+ )
+ flag = ctypes.c_int(int(ignore_flag)) # noqa: F841
+ # print('++++++++++++++++++++++')
+ # self.ENlib.ENEXTENDEDsetignoreflag(flag)
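# --- Illustrative usage sketch (editor's note, not part of the patch) ---
# How the ENepanet wrapper above might be instantiated. The file names and the
# import path are hypothetical placeholders.
from EnhancedWNTR.epanet.toolkit import ENepanet  # assumed import path

en_data = ENepanet(
    inpfile='net.inp',
    rptfile='net.rpt',
    binfile='net.bin',
    changed_epanet=True,  # load one of the patched epanet22_*_mod shared libraries
    version=2.2,          # the only version accepted when changed_epanet is True
)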
diff --git a/modules/systemPerformance/REWET/REWET/EnhancedWNTR/morph/link.py b/modules/systemPerformance/REWET/REWET/EnhancedWNTR/morph/link.py
index acc1571bb..ae378486b 100644
--- a/modules/systemPerformance/REWET/REWET/EnhancedWNTR/morph/link.py
+++ b/modules/systemPerformance/REWET/REWET/EnhancedWNTR/morph/link.py
@@ -1,38 +1,45 @@
-"""
-The wntrfr.morph.link module contains functions to split/break pipes.
-"""
-import logging
+"""The wntrfr.morph.link module contains functions to split/break pipes.""" # noqa: CPY001, INP001
+
import copy
-from wntrfr.network.elements import Reservoir, Pipe
+import logging
+
+from wntrfr.network.elements import Pipe, Reservoir
logger = logging.getLogger(__name__)
-def split_pipe(wn, pipe_name_to_split, new_pipe_name, new_junction_name,
- add_pipe_at_end=True, split_at_point=0.5, return_copy=True):
- """
- Split a pipe by adding a junction and one new pipe segment.
-
- This function splits the original pipe into two pipes by adding a new
- junction and new pipe to the model.
- The updated model retains the original length of the pipe section.
- The split occurs at a user specified distance between the
- original start and end nodes of the pipe (in that direction).
- The new pipe can be added to either end of the original pipe.
-
+
+def split_pipe(
+ wn,
+ pipe_name_to_split,
+ new_pipe_name,
+ new_junction_name,
+ add_pipe_at_end=True, # noqa: FBT002
+ split_at_point=0.5,
+ return_copy=True, # noqa: FBT002
+):
+ """Split a pipe by adding a junction and one new pipe segment.
+
+ This function splits the original pipe into two pipes by adding a new
+ junction and new pipe to the model.
+ The updated model retains the original length of the pipe section.
+ The split occurs at a user specified distance between the
+ original start and end nodes of the pipe (in that direction).
+ The new pipe can be added to either end of the original pipe.
+
* The new junction has a base demand of 0 and the default demand pattern.
- The elevation and coordinates of the new junction are based on a linear
+ The elevation and coordinates of the new junction are based on a linear
interpolation between the end points of the original pipe.
-
- * The new pipe has the same diameter, roughness, minor loss,
- and base status of the original pipe.
+
+ * The new pipe has the same diameter, roughness, minor loss,
+ and base status of the original pipe.
* Check valves are not added to the new
pipe. Since the new pipe can be connected at either the start
or the end of the original pipe, the user can control if the split occurs before
- or after a check valve.
-
- * No controls are added to the new pipe; the original pipe keeps any controls.
-
+ or after a check valve.
+
+ * No controls are added to the new pipe; the original pipe keeps any controls.
+
Parameters
----------
wn: wntr WaterNetworkModel
@@ -44,58 +51,72 @@ def split_pipe(wn, pipe_name_to_split, new_pipe_name, new_junction_name,
new_junction_name: string
Name of the new junction to be added.
add_pipe_at_end: bool, optional
- If True, add the new pipe between the new node and the original end node.
+ If True, add the new pipe between the new node and the original end node.
If False, add the new pipe between the original start node and the new node.
split_at_point: float, optional
- Between 0 and 1, the position along the original pipe where the new
+ Between 0 and 1, the position along the original pipe where the new
junction will be located.
return_copy: bool, optional
If True, modify and return a copy of the WaterNetworkModel object.
If False, modify and return the original WaterNetworkModel object.
-
+
Returns
-------
wntr WaterNetworkModel
Water network model with split pipe
- """
- wn2 = _split_or_break_pipe(wn, pipe_name_to_split, new_pipe_name,
- [new_junction_name],
- add_pipe_at_end, split_at_point, 'SPLIT', return_copy)
-
- return wn2
-
-def break_pipe(wn, pipe_name_to_split, new_pipe_name, new_junction_name_old_pipe,
- new_junction_name_new_pipe, add_pipe_at_end=True,
- split_at_point=0.5, return_copy=True):
"""
- Break a pipe by adding a two unconnected junctions and one new pipe segment.
-
- This function splits the original pipe into two disconnected pipes by
- adding two new junctions and new pipe to the model.
- **This provides a true broken pipe -- i.e., there is no longer flow
- possible from one side of the break to the other. This is more likely to
- introduce non-convergable hydraulics than a simple split_pipe with a leak
+ wn2 = _split_or_break_pipe(
+ wn,
+ pipe_name_to_split,
+ new_pipe_name,
+ [new_junction_name],
+ add_pipe_at_end,
+ split_at_point,
+ 'SPLIT',
+ return_copy,
+ )
+
+ return wn2 # noqa: RET504
+
+
+def break_pipe(
+ wn,
+ pipe_name_to_split,
+ new_pipe_name,
+ new_junction_name_old_pipe,
+ new_junction_name_new_pipe,
+ add_pipe_at_end=True, # noqa: FBT002
+ split_at_point=0.5,
+ return_copy=True, # noqa: FBT002
+):
+ """Break a pipe by adding a two unconnected junctions and one new pipe segment.
+
+ This function splits the original pipe into two disconnected pipes by
+ adding two new junctions and new pipe to the model.
+ **This provides a true broken pipe -- i.e., there is no longer flow
+ possible from one side of the break to the other. This is more likely to
+ introduce non-convergable hydraulics than a simple split_pipe with a leak
added.**
- The updated model retains the original length of the pipe section.
- The split occurs at a user specified distance between the
- original start and end nodes of the pipe (in that direction).
- The new pipe can be added to either end of the original pipe.
-
+ The updated model retains the original length of the pipe section.
+ The split occurs at a user specified distance between the
+ original start and end nodes of the pipe (in that direction).
+ The new pipe can be added to either end of the original pipe.
+
* The new junction has a base demand of 0 and the default demand pattern.
- The elevation and coordinates of the new junction are based on a linear
+ The elevation and coordinates of the new junction are based on a linear
interpolation between the end points of the original pipe.
-
- * The new pipe has the same diameter, roughness, minor loss,
- and base status of the original pipe.
+
+ * The new pipe has the same diameter, roughness, minor loss,
+ and base status of the original pipe.
* Check valves are not added to the new
pipe. Since the new pipe can be connected at either the start
or the end of the original pipe, the user can control if the split occurs before
- or after a check valve.
-
- * No controls are added to the new pipe; the original pipe keeps any controls.
-
+ or after a check valve.
+
+ * No controls are added to the new pipe; the original pipe keeps any controls.
+
Parameters
----------
wn: wntr WaterNetworkModel
@@ -109,57 +130,72 @@ def break_pipe(wn, pipe_name_to_split, new_pipe_name, new_junction_name_old_pipe
new_junction_name_new_pipe: string
Name of the new junction to be added to the new pipe
add_pipe_at_end: bool, optional
- If True, add the new pipe at after the new junction. If False, add the
+ If True, add the new pipe after the new junction. If False, add the
new pipe before the new junction
split_at_point: float, optional
- Relative position (value between 0 and 1) along the original pipe
+ Relative position (value between 0 and 1) along the original pipe
where the new junction will be located.
return_copy: bool, optional
If True, modify and return a copy of the WaterNetworkModel object.
If False, modify and return the original WaterNetworkModel object.
-
+
Returns
-------
wntr WaterNetworkModel
Water network model with pipe break
+
"""
- wn2 = _split_or_break_pipe(wn, pipe_name_to_split, new_pipe_name,
- [new_junction_name_old_pipe, new_junction_name_new_pipe],
- add_pipe_at_end, split_at_point, 'BREAK', return_copy)
-
- return wn2
+ wn2 = _split_or_break_pipe(
+ wn,
+ pipe_name_to_split,
+ new_pipe_name,
+ [new_junction_name_old_pipe, new_junction_name_new_pipe],
+ add_pipe_at_end,
+ split_at_point,
+ 'BREAK',
+ return_copy,
+ )
+
+ return wn2 # noqa: RET504
-def _split_or_break_pipe(wn, pipe_name_to_split, new_pipe_name,
- new_junction_names, add_pipe_at_end, split_at_point,
- flag, return_copy):
-
- if return_copy: # Get a copy of the WaterNetworkModel
+
+def _split_or_break_pipe( # noqa: C901
+ wn,
+ pipe_name_to_split,
+ new_pipe_name,
+ new_junction_names,
+ add_pipe_at_end,
+ split_at_point,
+ flag,
+ return_copy,
+):
+ if return_copy: # Get a copy of the WaterNetworkModel
wn2 = copy.deepcopy(wn)
else:
wn2 = wn
-
+
pipe = wn2.get_link(pipe_name_to_split)
-
+
# Do sanity checks
if not isinstance(pipe, Pipe):
- raise ValueError('You can only split pipes.')
+ raise ValueError('You can only split pipes.') # noqa: EM101, TRY003, TRY004
if split_at_point < 0 or split_at_point > 1:
- raise ValueError('split_at_point must be between 0 and 1')
- #Sina edited here
- #node_list = [node_name for node_name, node in wn2.nodes()]
- #link_list = [link_name for link_name, link in wn2.links()]
- #for new_junction_name in new_junction_names:
- #if new_junction_name in wn.node_name_list:
- #raise RuntimeError('The junction name you provided is already \
- #being used for another node.')
- #if new_pipe_name in wn.link_name_list:
- #raise RuntimeError('The new link name you provided is already being \
- #used for another link.')
+ raise ValueError('split_at_point must be between 0 and 1') # noqa: EM101, TRY003
+ # Sina edited here
+ # node_list = [node_name for node_name, node in wn2.nodes()]
+ # link_list = [link_name for link_name, link in wn2.links()]
+ # for new_junction_name in new_junction_names:
+ # if new_junction_name in wn.node_name_list:
+ # raise RuntimeError('The junction name you provided is already \
+ # being used for another node.')
+ # if new_pipe_name in wn.link_name_list:
+ # raise RuntimeError('The new link name you provided is already being \
+ # used for another link.')
# Get start and end node info
start_node = pipe.start_node
end_node = pipe.end_node
-
+
# calculate the new elevation
if isinstance(start_node, Reservoir):
junction_elevation = end_node.elevation
@@ -175,18 +211,25 @@ def _split_or_break_pipe(wn, pipe_name_to_split, new_pipe_name,
dx = pipe.end_node.coordinates[0] - x0
y0 = pipe.start_node.coordinates[1]
dy = pipe.end_node.coordinates[1] - y0
- junction_coordinates = (x0 + dx * split_at_point,
- y0 + dy * split_at_point)
+ junction_coordinates = (x0 + dx * split_at_point, y0 + dy * split_at_point)
# add the new junction
- #for new_junction_name in new_junction_names:
- wn2.add_junction(new_junction_names[0], base_demand=0.0,
- demand_pattern=None, elevation=junction_elevation,
- coordinates=junction_coordinates)
- if len(new_junction_names)==2:
- wn2.add_junction(new_junction_names[1], base_demand=0.0,
- demand_pattern=None, elevation=junction_elevation,
- coordinates=junction_coordinates)
+ # for new_junction_name in new_junction_names:
+ wn2.add_junction(
+ new_junction_names[0],
+ base_demand=0.0,
+ demand_pattern=None,
+ elevation=junction_elevation,
+ coordinates=junction_coordinates,
+ )
+ if len(new_junction_names) == 2: # noqa: PLR2004
+ wn2.add_junction(
+ new_junction_names[1],
+ base_demand=0.0,
+ demand_pattern=None,
+ elevation=junction_elevation,
+ coordinates=junction_coordinates,
+ )
original_length = pipe.length
@@ -196,24 +239,42 @@ def _split_or_break_pipe(wn, pipe_name_to_split, new_pipe_name,
elif flag == 'SPLIT':
j0 = new_junction_names[0]
j1 = new_junction_names[0]
-
+
if add_pipe_at_end:
- pipe.end_node = wn2.get_node(j0)
+ pipe.end_node = wn2.get_node(j0)
# add new pipe and change original length
- wn2.add_pipe(new_pipe_name, j1, end_node.name,
- original_length*(1-split_at_point), pipe.diameter,
- pipe.roughness, pipe.minor_loss, pipe.status, pipe.cv)
- pipe.length = original_length*split_at_point
- else: # add pipe at start
- pipe.start_node = wn2.get_node(j0)
+ wn2.add_pipe(
+ new_pipe_name,
+ j1,
+ end_node.name,
+ original_length * (1 - split_at_point),
+ pipe.diameter,
+ pipe.roughness,
+ pipe.minor_loss,
+ pipe.status,
+ pipe.cv,
+ )
+ pipe.length = original_length * split_at_point
+ else: # add pipe at start
+ pipe.start_node = wn2.get_node(j0)
# add new pipe and change original length
- wn2.add_pipe(new_pipe_name, start_node.name, j1,
- original_length*split_at_point, pipe.diameter,
- pipe.roughness, pipe.minor_loss, pipe.status, pipe.cv)
- pipe.length = original_length*(1-split_at_point)
-
+ wn2.add_pipe(
+ new_pipe_name,
+ start_node.name,
+ j1,
+ original_length * split_at_point,
+ pipe.diameter,
+ pipe.roughness,
+ pipe.minor_loss,
+ pipe.status,
+ pipe.cv,
+ )
+ pipe.length = original_length * (1 - split_at_point)
+
if pipe.cv:
- logger.warn('You are splitting a pipe with a check valve. The new \
- pipe will not have a check valve.')
-
- return wn2
+ logger.warning(
+ 'You are splitting a pipe with a check valve. The new \
+ pipe will not have a check valve.'
+ )
+
+ return wn2
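# --- Illustrative usage sketch (editor's note, not part of the patch) ---
# Splitting a pipe at its midpoint with the reformatted split_pipe above.
# The model file, element names, and import path are hypothetical placeholders.
from wntrfr.network import WaterNetworkModel

from EnhancedWNTR.morph.link import split_pipe  # assumed import path

wn = WaterNetworkModel('net.inp')
wn2 = split_pipe(
    wn,
    pipe_name_to_split='P1',
    new_pipe_name='P1_B',
    new_junction_name='J_new',
    add_pipe_at_end=True,  # new segment sits between the new node and the original end node
    split_at_point=0.5,    # halfway along the original pipe
    return_copy=True,      # leave the original model untouched
)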
diff --git a/modules/systemPerformance/REWET/REWET/EnhancedWNTR/network/__init__.py b/modules/systemPerformance/REWET/REWET/EnhancedWNTR/network/__init__.py
index e69de29bb..b5142d879 100644
--- a/modules/systemPerformance/REWET/REWET/EnhancedWNTR/network/__init__.py
+++ b/modules/systemPerformance/REWET/REWET/EnhancedWNTR/network/__init__.py
@@ -0,0 +1 @@
+# noqa: CPY001, D104, N999
diff --git a/modules/systemPerformance/REWET/REWET/EnhancedWNTR/network/model.py b/modules/systemPerformance/REWET/REWET/EnhancedWNTR/network/model.py
index c653401de..c669aedc9 100644
--- a/modules/systemPerformance/REWET/REWET/EnhancedWNTR/network/model.py
+++ b/modules/systemPerformance/REWET/REWET/EnhancedWNTR/network/model.py
@@ -1,5 +1,4 @@
-"""
-The wntrfr.network.model module includes methods to build a water network
+"""The wntrfr.network.model module includes methods to build a water network
model.
.. rubric:: Contents
@@ -13,156 +12,157 @@
NodeRegistry
LinkRegistry
-"""
+""" # noqa: CPY001, D205
+
import logging
-import pandas as pd
-import numpy as np
-from collections import OrderedDict
-from wntrfr.network.base import LinkStatus
-from wntrfr.network.elements import Pump
-from wntrfr.network.elements import Valve
-from ..epanet.io import InpFile
+import math
+from collections import OrderedDict
-from wntrfr.network import WaterNetworkModel
+import numpy as np
+from wntrfr.network import WaterNetworkModel
+from wntrfr.network.base import LinkStatus
+from wntrfr.network.elements import Pump, Valve
+from ..epanet.io import InpFile # noqa: TID252
logger = logging.getLogger(__name__)
class WaterNetworkModel(WaterNetworkModel):
- """
- Water network model class.
+ """Water network model class.
Parameters
- -------------------
+ ----------
inp_file_name: string (optional)
Directory and filename of EPANET inp file to load into the
WaterNetworkModel object.
+
"""
def __init__(self, inp_file_name=None):
-
super().__init__(inp_file_name)
self.breakage_link = {}
- self.expicit_leak = []
+ self.expicit_leak = []
-
- def updateWaterNetworkModelWithResult(self, result , registry, latest_simulation_time = None):
- """
- Updates Water Netwrok Model consistent with result model. must be only
+ def updateWaterNetworkModelWithResult( # noqa: C901, N802
+ self,
+ result,
+ registry, # noqa: ARG002
+ latest_simulation_time=None,
+ ):
+ """Updates Water Network Model consistent with result model. must be only
used with EPANET solver or just in case you really know what you are
doing.
Parameters
----------
- result : Water Netwrok Result
-
+ result : Water Network Result
+
latest_simulation_time : int
latest time of simulation(duration of the latest run), to be
 checked with time in result. Default None.
-
+
Raises
------
ValueError
- When simulation object is not of type EpanetSimulator
+ When simulation object is not of type EpanetSimulator
When latest simulation time is provided and is not consistent with
latest time in result.
When Tank level gets less than zero
-
-
+
+
Returns
-------
None.
- """
-
-
+ """ # noqa: D205, D401
max_time = result.node['head'].index.max()
- if latest_simulation_time == None:
+ if latest_simulation_time == None: # noqa: E711
latest_simulation_time = max_time
- else:
- if latest_simulation_time != max_time:
- raise ValueError('Provided LATEST SIMULATION TIME id not consistnt with teh latest time in RESULT')
-
- avilable_tank_name_list = set(self.tank_name_list).intersection(result.node['head'].columns)
+ elif latest_simulation_time != max_time:
+ raise ValueError( # noqa: TRY003
+ 'Provided LATEST SIMULATION TIME is not consistent with the latest time in RESULT' # noqa: EM101
+ )
+
+ avilable_tank_name_list = set(self.tank_name_list).intersection(
+ result.node['head'].columns
+ )
for tank_name in avilable_tank_name_list:
- #if tank_name not in result.node['head'].columns:
- #continue
+ # if tank_name not in result.node['head'].columns:
+ # continue
tank_level = None
- head = None
-
+ head = None
+
cur_node = self.get_node(tank_name)
- if cur_node._is_isolated:
+ if cur_node._is_isolated: # noqa: SLF001
continue
head = result.node['head'].loc[max_time, tank_name]
-
+
tank_level = head - cur_node.elevation
- if tank_level < 0:
- tank_level=0
-
- if tank_level < cur_node.min_level:
- tank_level = cur_node.min_level
-
+ tank_level = max(tank_level, 0)
+
+ tank_level = max(tank_level, cur_node.min_level)
+
if tank_level - cur_node.max_level > 0:
tank_level = cur_node.max_level
-
+
cur_node.init_level = abs(tank_level)
- cur_node._head = cur_node.elevation + tank_level
-
+ cur_node._head = cur_node.elevation + tank_level # noqa: SLF001
+
if tank_level < 0.0:
- logger.error('head= '+ repr(head))
- logger.error('elevation= '+ repr(cur_node.elevation))
- logger.error('tank_level= '+ repr(tank_level))
- raise ValueError('Tank Level for ' + tank_name + ' is less than zero')
-
+ logger.error('head= ' + repr(head)) # noqa: G003
+ logger.error('elevation= ' + repr(cur_node.elevation)) # noqa: G003
+ logger.error('tank_level= ' + repr(tank_level)) # noqa: G003
+ raise ValueError(
+ 'Tank Level for ' + tank_name + ' is less than zero'
+ )
+
for link_name in self.link_name_list:
link = self.get_link(link_name)
setting = None
- status = None
+ status = None
try:
setting = result.link['setting'].loc[max_time, link_name]
- status = result.link['status'].loc[max_time, link_name]
- except:
- #logger.error(link_name + ' exist in WaterNetwork but does not exist in result')
- #raise ValueError(link_name + ' exist in WaterNetwork but does not exist in result')
+ status = result.link['status'].loc[max_time, link_name]
+ except: # noqa: S112, E722
+ # logger.error(link_name + ' exist in WaterNetwork but does not exist in result')
+ # raise ValueError(link_name + ' exist in WaterNetwork but does not exist in result')
continue
-
+
if isinstance(link, Valve):
- link.settings = float(setting)
-
+ link.settings = float(setting)
+
elif isinstance(link, Pump):
link.setting.base_value = float(setting)
-
+
if status == 0:
- link._user_status = LinkStatus.Closed
-
+ link._user_status = LinkStatus.Closed # noqa: SLF001
+
elif status == 1:
- link._user_status = LinkStatus.Open
-
- elif status == 2:
- link._user_status = LinkStatus.Active
-
+ link._user_status = LinkStatus.Open # noqa: SLF001
+
+ elif status == 2: # noqa: PLR2004
+ link._user_status = LinkStatus.Active # noqa: SLF001
+
else:
- logger.error('Element type is: '+repr(type(link)))
- logger.error('Status is : ' + repr(status))
-
+ logger.error('Element type is: ' + repr(type(link))) # noqa: G003
+ logger.error('Status is : ' + repr(status)) # noqa: G003
+
def read_inpfile(self, filename):
- """
- Defines water network model components from an EPANET INP file
+ """Defines water network model components from an EPANET INP file
Parameters
----------
filename : string
Name of the INP file.
- """
+ """ # noqa: D400, D401
inpfile = InpFile()
inpfile.read(filename, wn=self)
self._inpfile = inpfile
def write_inpfile(self, filename, units=None):
- """
- Writes the current water network model to an EPANET INP file
+ """Writes the current water network model to an EPANET INP file
Parameters
----------
@@ -171,122 +171,178 @@ def write_inpfile(self, filename, units=None):
units : str, int or FlowUnits
Name of the units being written to the inp file.
- """
+ """ # noqa: D400, D401
if self._inpfile is None:
- logger.warning('Writing a minimal INP file without saved non-WNTR options (energy, etc.)')
+ logger.warning(
+ 'Writing a minimal INP file without saved non-WNTR options (energy, etc.)'
+ )
self._inpfile = InpFile()
if units is None:
units = self._options.hydraulic.en2_units
self._inpfile.write(filename, self, units=units)
-
- def implicitLeakToExplicitEMitter(self, registry):
+
+ def implicitLeakToExplicitEMitter(self, registry): # noqa: N802, D102
if len(self.expicit_leak) > 0:
- raise ValueError("Explicit leak is not reset")
-
+ raise ValueError('Explicit leak is not reset') # noqa: EM101, TRY003
+
registry.active_pipe_damages = OrderedDict()
for node_name in self.node_name_list:
node = self.get_node(node_name)
-
- if node._leak:
-
+
+ if node._leak: # noqa: SLF001
if node_name in self.expicit_leak:
- raise ValueError('The node name in already in leak memory: '+node_name)
-
- new_node_name = node_name+'-nn'
- new_coord = (node.coordinates[0]+1,node.coordinates[1]+1)
- self.add_junction(new_node_name, elevation=node.elevation ,coordinates=new_coord)
- new_node = self.get_node(new_node_name)
-
-
- new_pipe_name = node_name+'-elk'
- self.add_pipe(new_pipe_name, node_name, new_node_name, diameter=100, length=1, roughness=1000000, check_valve=True)
-
- cd = node.leak_area*(2)**0.5 #(m^3ps/(KPa^0.5))
- cd = cd/(0.145038**0.5) #(gpm/(Psi^0.5))
+ raise ValueError(
+ 'The node name is already in leak memory: ' + node_name
+ )
+
+ new_node_name = node_name + '-nn'
+ new_coord = (node.coordinates[0] + 1, node.coordinates[1] + 1)
+ self.add_junction(
+ new_node_name, elevation=node.elevation, coordinates=new_coord
+ )
+ new_node = self.get_node(new_node_name)
+
+ new_pipe_name = node_name + '-elk'
+ self.add_pipe(
+ new_pipe_name,
+ node_name,
+ new_node_name,
+ diameter=100,
+ length=1,
+ roughness=1000000,
+ check_valve=True,
+ )
+
+ cd = node.leak_area * (2) ** 0.5 # (m^3ps/(KPa^0.5))
+ cd = cd / (0.145038**0.5) # (gpm/(Psi^0.5)) # noqa: PLR6104
# When writing to emitter, function from_si changes m^3ps to GPM
-
- new_node._emitter_coefficient = cd
-
- if node.demand_timeseries_list[0].base_value > 0.001:
- raise ValueError('leak node has demand: '+node_name)
- temp={'node_name':node_name, 'method':'emitter', 'element1':new_pipe_name, 'element2':new_node_name, 'attr1':cd}
+
+ new_node._emitter_coefficient = cd # noqa: SLF001
+
+ if node.demand_timeseries_list[0].base_value > 0.001: # noqa: PLR2004
+ raise ValueError('leak node has demand: ' + node_name)
+ temp = {
+ 'node_name': node_name,
+ 'method': 'emitter',
+ 'element1': new_pipe_name,
+ 'element2': new_node_name,
+ 'attr1': cd,
+ }
self.expicit_leak.append(temp)
registry.explicit_leak_node[node_name] = new_node_name
- registry.active_pipe_damages.update({new_node_name:node_name})
-
- def implicitLeakToExplicitReservoir(self, registry):
+ registry.active_pipe_damages.update({new_node_name: node_name})
+
+ def implicitLeakToExplicitReservoir(self, registry): # noqa: N802, D102
if len(self.expicit_leak) > 0:
- raise ValueError("Explicit leak is not reset")
+ raise ValueError('Explicit leak is not reset') # noqa: EM101, TRY003
registry.active_pipe_damages = OrderedDict()
for node_name in self.node_name_list:
node = self.get_node(node_name)
-
- if node._leak:
-
+
+ if node._leak: # noqa: SLF001
if node_name in self.expicit_leak:
- raise ValueError('The node name in already in leak memory: '+node_name)
-
- new_node_name = node_name+'_nn'
- new_coord = (node.coordinates[0]+1,node.coordinates[1]+1)
- self.add_reservoir(new_node_name, base_head = node.elevation ,coordinates=new_coord)
-
- new_pipe_name = node_name+'-rlk'
- diameter = np.sqrt(node.leak_area*4/3.14)
- self.add_pipe(new_pipe_name, node_name, new_node_name, diameter=diameter, length=1, roughness=1000000, minor_loss = 1, check_valve=True)
-
- if node.demand_timeseries_list[0].base_value>0.001:
- raise ValueError('leak node has demand: '+node_name)
- temp={'node_name':node_name, 'method':'reservoir', 'element1':new_pipe_name, 'element2':new_node_name}
+ raise ValueError(
+ 'The node name is already in leak memory: ' + node_name
+ )
+
+ new_node_name = node_name + '_nn'
+ new_coord = (node.coordinates[0] + 1, node.coordinates[1] + 1)
+ self.add_reservoir(
+ new_node_name, base_head=node.elevation, coordinates=new_coord
+ )
+
+ new_pipe_name = node_name + '-rlk'
+ diameter = np.sqrt(node.leak_area * 4 / math.pi)
+ self.add_pipe(
+ new_pipe_name,
+ node_name,
+ new_node_name,
+ diameter=diameter,
+ length=1,
+ roughness=1000000,
+ minor_loss=1,
+ check_valve=True,
+ )
+
+ if node.demand_timeseries_list[0].base_value > 0.001: # noqa: PLR2004
+ raise ValueError('leak node has demand: ' + node_name)
+ temp = {
+ 'node_name': node_name,
+ 'method': 'reservoir',
+ 'element1': new_pipe_name,
+ 'element2': new_node_name,
+ }
self.expicit_leak.append(temp)
registry.explicit_leak_node[node_name] = new_node_name
- registry.active_pipe_damages.update({new_node_name:node_name})
-
- def resetExplicitLeak(self):
-
- for data in self.expicit_leak:
+ registry.active_pipe_damages.update({new_node_name: node_name})
+
+ def resetExplicitLeak(self): # noqa: N802, D102
+ for data in self.expicit_leak:
new_pipe_name = data['element1']
new_node_name = data['element2']
-
+
self.remove_link(new_pipe_name, force=True)
- self.get_node(new_node_name)._emitter_coefficient=None
+ self.get_node(new_node_name)._emitter_coefficient = None # noqa: SLF001
self.remove_node(new_node_name, force=True)
-
- self.expicit_leak = []
-
- def linkBreackage(self, registry):
+
+ self.expicit_leak = []
+
+ def linkBreackage(self, registry): # noqa: N802, D102
if len(self.breakage_link) > 0:
- raise ValueError("Breakckage is not unliked")
-
+ raise ValueError('Breakage is not unlinked') # noqa: EM101, TRY003
+
self.breakage_link = {}
pipe_damage_table = registry.getDamageData('PIPE')
- broken_pipe_damage_table = pipe_damage_table[pipe_damage_table['damage_type']=='break']
-
- for damage_node, row in broken_pipe_damage_table.iterrows():
- if registry.getPipeDamageAttribute('repair',damage_node)==True:
+ broken_pipe_damage_table = pipe_damage_table[
+ pipe_damage_table['damage_type'] == 'break'
+ ]
+
+ for damage_node, row in broken_pipe_damage_table.iterrows(): # noqa: B007
+ if registry.getPipeDamageAttribute('repair', damage_node) == True: # noqa: E712
continue
- pipe_A, pipe_B, orginal_pipe, node_A, node_B = registry.getBreakData(damage_node)
-
- pipe_name_list = self.pipe_name_list
+ pipe_A, pipe_B, orginal_pipe, node_A, node_B = registry.getBreakData( # noqa: F841, N806
+ damage_node
+ )
+
+ pipe_name_list = self.pipe_name_list
junction_name_list = self.junction_name_list
-
- iPipe_A_in = pipe_A in pipe_name_list
- iPipe_B_in = pipe_B in pipe_name_list
- iNode_A_in = node_A in junction_name_list
- iNode_B_in = node_B in junction_name_list
-
+
+ iPipe_A_in = pipe_A in pipe_name_list # noqa: N806
+ iPipe_B_in = pipe_B in pipe_name_list # noqa: N806
+ iNode_A_in = node_A in junction_name_list # noqa: N806
+ iNode_B_in = node_B in junction_name_list # noqa: N806
+
if not iPipe_A_in or not iPipe_B_in or not iNode_A_in or not iNode_B_in:
if iPipe_A_in or iPipe_B_in or iNode_A_in or iNode_B_in:
- raise ValueError('The damage is partially removed?: '+repr(iPipe_A_in)+', '+repr(iPipe_B_in)+', '+repr(iNode_A_in)+', '+repr(iNode_B_in)+', '+repr(damage_node))
+ raise ValueError(
+ 'The damage is partially removed?: '
+ + repr(iPipe_A_in)
+ + ', '
+ + repr(iPipe_B_in)
+ + ', '
+ + repr(iNode_A_in)
+ + ', '
+ + repr(iNode_B_in)
+ + ', '
+ + repr(damage_node)
+ )
else:
node1 = self.get_link(pipe_A).start_node
node2 = self.get_link(pipe_B).end_node
-
- new_pipe_name = damage_node+'_BLP'
- self.add_pipe(new_pipe_name, node1.name, node2.name, length=1, diameter=1*2.54/100, roughness=100)
+
+ new_pipe_name = damage_node + '_BLP'
+ self.add_pipe(
+ new_pipe_name,
+ node1.name,
+ node2.name,
+ length=1,
+ diameter=1 * 2.54 / 100,
+ roughness=100,
+ )
self.breakage_link[damage_node] = new_pipe_name
-
- def unlinkBreackage(self):
- for damage_node, link_pipe_name in self.breakage_link.items():
+
+ def unlinkBreackage(self): # noqa: N802, D102
+ for damage_node, link_pipe_name in self.breakage_link.items(): # noqa: B007, PERF102
self.remove_link(link_pipe_name, force=True)
- self.breakage_link = {}
\ No newline at end of file
+ self.breakage_link = {}
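# --- Illustrative usage sketch (editor's note, not part of the patch) ---
# Pushing the latest EPANET results back into the extended WaterNetworkModel
# above. `wn` is an instance of that class; `results` and `registry` are
# hypothetical objects produced elsewhere by the REWET pipeline.
wn.updateWaterNetworkModelWithResult(results, registry)

# Temporarily expose implicit leaks as emitters, run a simulation, then revert.
wn.implicitLeakToExplicitEMitter(registry)
# ... run the EPANET simulation here ...
wn.resetExplicitLeak()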
diff --git a/modules/systemPerformance/REWET/REWET/EnhancedWNTR/sim/__init__.py b/modules/systemPerformance/REWET/REWET/EnhancedWNTR/sim/__init__.py
index e69de29bb..b5142d879 100644
--- a/modules/systemPerformance/REWET/REWET/EnhancedWNTR/sim/__init__.py
+++ b/modules/systemPerformance/REWET/REWET/EnhancedWNTR/sim/__init__.py
@@ -0,0 +1 @@
+# noqa: CPY001, D104, N999
diff --git a/modules/systemPerformance/REWET/REWET/EnhancedWNTR/sim/epanet.py b/modules/systemPerformance/REWET/REWET/EnhancedWNTR/sim/epanet.py
index 4d40a5906..c02c0ef18 100644
--- a/modules/systemPerformance/REWET/REWET/EnhancedWNTR/sim/epanet.py
+++ b/modules/systemPerformance/REWET/REWET/EnhancedWNTR/sim/epanet.py
@@ -1,30 +1,31 @@
-# -*- coding: utf-8 -*-
-"""
-Created on Tue Jun 1 17:09:25 2021
+"""Created on Tue Jun 1 17:09:25 2021
@author: snaeimi
-"""
-import logging
+""" # noqa: CPY001, D400
+
import itertools
+import logging
+import math
+from collections import OrderedDict
+
import numpy as np
import scipy.sparse.csr
-from collections import OrderedDict
-from wntrfr.sim.core import WaterNetworkSimulator
import wntrfr.epanet.io
-from ..epanet import toolkit
+from Report_Reading import Report_Reading
from wntrfr.network.io import write_inpfile
+from wntrfr.network.model import LinkStatus
+from wntrfr.sim.core import _get_csr_data_index # noqa: PLC2701
from wntrfr.sim.epanet import EpanetSimulator
from wntrfr.sim.network_isolation import check_for_isolated_junctions, get_long_size
-from wntrfr.sim.core import _get_csr_data_index
from wntrfr.utils.ordered_set import OrderedSet
-from wntrfr.network.model import LinkStatus
-from Report_Reading import Report_Reading
-logger = logging.getLogger(__name__)
+from ..epanet import toolkit # noqa: TID252
+
+logger = logging.getLogger(__name__)
+
class EpanetSimulator(EpanetSimulator):
- """
- Fast EPANET simulator class.
+ """Fast EPANET simulator class.
Use the EPANET DLL to run an INP file as-is, and read the results from the
binary output file. Multiple water quality simulations are still possible
@@ -41,8 +42,8 @@ class EpanetSimulator(EpanetSimulator):
wn : WaterNetworkModel
Water network model
mode: DD or PDD with default None value(read mode from InpFile, if there is no mode
- provided in inpdile wither, it will be DD) If there is a condlict between mode in
- the class aregument and inpfile, the agument will supperseed the InpFile
+ provided in inpfile either, it will be DD) If there is a conflict between mode in
+ the class argument and inpfile, the argument will supersede the InpFile
reader : wntrfr.epanet.io.BinFile derived object
Defaults to None, which will create a new wntrfr.epanet.io.BinFile object with
the results_types specified as an init option. Otherwise, a fully
@@ -56,70 +57,88 @@ class EpanetSimulator(EpanetSimulator):
wntrfr.epanet.io.BinFile
"""
+
def __init__(self, wn):
+ super(EpanetSimulator, self).__init__(wn) # noqa: UP008
+
+ # Sina added this for time manipulate function
- super(EpanetSimulator, self).__init__(wn)
-
- #Sina added this for time manipulate function
-
self._initial_hydraulic_timestep = wn.options.time.hydraulic_timestep
self._initial_report_timestep = wn.options.time.hydraulic_timestep
-
-
- #Sina added this for isolation init
+
+ # Sina added this for isolation init
long_size = get_long_size()
- if long_size == 4:
+ if long_size == 4: # noqa: PLR2004
self._int_dtype = np.int32
else:
- assert long_size == 8
+ assert long_size == 8 # noqa: PLR2004
self._int_dtype = np.int64
self._link_name_to_id = OrderedDict()
self._link_id_to_name = OrderedDict()
self._node_name_to_id = OrderedDict()
self._node_id_to_name = OrderedDict()
self._initialize_name_id_maps()
- #sina end
-
-
- def manipulateTimeOrder(self, begin_time, end_time, change_time_step=False, min_correction_time_step=None):
+ # sina end
+
+ def manipulateTimeOrder( # noqa: N802, D102
+ self,
+ begin_time,
+ end_time,
+ change_time_step=False, # noqa: FBT002
+ min_correction_time_step=None,
+ ):
time_dif = end_time - begin_time
min_step_time = min_correction_time_step
- self._wn.options.time.duration = time_dif
+ self._wn.options.time.duration = time_dif
if time_dif <= 1:
self._wn.options.time.report_timestep = time_dif
- self._wn.options.time.pattern_start = begin_time
+ self._wn.options.time.pattern_start = begin_time
self._wn.options.time.start_clocktime = begin_time
if change_time_step:
- if min_correction_time_step == None:
- raise ValueError('if change_time_step is True, then min_correction_time_step must be provided')
-
- self._wn.options.time.hydraulic_timestep = self._initial_hydraulic_timestep
- self._wn.options.time.report_timestep = self._initial_report_timestep
- time_step = min(self._wn.options.time.hydraulic_timestep, self._wn.options.time.report_timestep)
- if min_step_time > time_step:
- min_step_time = time_step
- iFinished = False
- i=1
- logger.debug("time_dif= " + repr(time_dif))
-
+ if min_correction_time_step == None: # noqa: E711
+ raise ValueError( # noqa: TRY003
+ 'if change_time_step is True, then min_correction_time_step must be provided' # noqa: EM101
+ )
+
+ self._wn.options.time.hydraulic_timestep = (
+ self._initial_hydraulic_timestep
+ )
+ self._wn.options.time.report_timestep = self._initial_report_timestep
+ time_step = min(
+ self._wn.options.time.hydraulic_timestep,
+ self._wn.options.time.report_timestep,
+ )
+ min_step_time = min(min_step_time, time_step)
+ iFinished = False # noqa: N806
+ i = 1
+ logger.debug('time_dif= ' + repr(time_dif)) # noqa: G003
+
time_step_list = list(range(min_step_time, time_step, min_step_time))
time_step_list.append(time_step)
while i <= len(time_step_list):
if time_dif % time_step_list[-i] == 0:
new_time_step = time_step_list[-i]
- iFinished = True
+ iFinished = True # noqa: N806
break
- elif i == len(time_step_list):
- raise("There was no time check when creating time event?")
+ elif i == len(time_step_list): # noqa: RET508
+ raise RuntimeError('There was no time check when creating time event?') # noqa: EM101, TRY003
i += 1
- if iFinished==False:
- raise RuntimeError("no timestep is found")
+ if iFinished == False: # noqa: E712
+ raise RuntimeError('no timestep is found') # noqa: EM101, TRY003
self._wn.options.time.report_timestep = new_time_step
-
- def run_sim(self, file_prefix='temp', save_hyd=False, use_hyd=False, hydfile=None,
- version=2.2, convergence_error=False, start_time=None, iModified=True):
- """
- Run the EPANET simulator.
+
+ def run_sim( # noqa: C901
+ self,
+ file_prefix='temp',
+ save_hyd=False, # noqa: FBT002
+ use_hyd=False, # noqa: FBT002
+ hydfile=None,
+ version=2.2,
+ convergence_error=False, # noqa: FBT002, ARG002
+ start_time=None,
+ iModified=True, # noqa: FBT002, N803
+ ):
+ """Run the EPANET simulator.
Runs the EPANET simulator through the compiled toolkit DLL. Can use/save hydraulics
to allow for separate WQ runs.
@@ -136,23 +155,28 @@ def run_sim(self, file_prefix='temp', save_hyd=False, use_hyd=False, hydfile=Non
Optionally specify a filename for the hydraulics file other than the `file_prefix`
"""
- solver_parameters_list = [(1,10, 0), (10, 100, 0), (10,100, 0.01)]
- #solver_parameters_list = [(10,100, 0.01), (10, 100, 0), (1,10, 0)]
- #balanced_system = False
- run_successful= False
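+ # Each tuple below is (checkfreq, maxcheck, damplimit); they are tried in order until a run converges.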
+ solver_parameters_list = [(1, 10, 0), (10, 100, 0), (10, 100, 0.01)]
+ # solver_parameters_list = [(10,100, 0.01), (10, 100, 0), (1,10, 0)]
+ # balanced_system = False
+ run_successful = False
i = 0
for solver_parameter in solver_parameters_list:
i += 1
- print(solver_parameter)
+ print(solver_parameter) # noqa: T201
self._wn.options.hydraulic.checkfreq = solver_parameter[0]
- self._wn.options.hydraulic.maxcheck = solver_parameter[1]
+ self._wn.options.hydraulic.maxcheck = solver_parameter[1]
self._wn.options.hydraulic.damplimit = solver_parameter[2]
self._wn.options.hydraulic.unbalanced_value = 100
inpfile = file_prefix + '.inp'
- write_inpfile(self._wn, inpfile, units=self._wn.options.hydraulic.inpfile_units, version=version)
+ write_inpfile(
+ self._wn,
+ inpfile,
+ units=self._wn.options.hydraulic.inpfile_units,
+ version=version,
+ )
- enData = toolkit.ENepanet(changed_epanet=iModified, version=version)
+ enData = toolkit.ENepanet(changed_epanet=iModified, version=version) # noqa: N806
rptfile = file_prefix + '.rpt'
outfile = file_prefix + '.bin'
if hydfile is None:
@@ -162,27 +186,26 @@ def run_sim(self, file_prefix='temp', save_hyd=False, use_hyd=False, hydfile=Non
enData.ENusehydfile(hydfile)
logger.debug('Loaded hydraulics')
else:
-
try:
enData.ENsolveH()
except Exception as err:
enData.ENclose()
if err.args[0] == 'EPANET Error 110':
- print(enData.errcode)
- run_successful= False
+ print(enData.errcode) # noqa: T201
+ run_successful = False
if i < len(solver_parameters_list):
continue
- else:
- raise err
+ else: # noqa: RET507
+ raise err # noqa: TRY201
else:
- raise err
+ raise err # noqa: TRY201
else:
run_successful = True
logger.debug('Solved hydraulics')
if save_hyd:
enData.ENsavehydfile(hydfile)
logger.debug('Saved hydraulics')
-
+
try:
enData.ENsolveQ()
logger.debug('Solved quality')
@@ -190,37 +213,41 @@ def run_sim(self, file_prefix='temp', save_hyd=False, use_hyd=False, hydfile=Non
logger.debug('Ran quality')
except Exception as err:
enData.ENclose()
- raise err
+ raise err # noqa: TRY201
enData.ENclose()
logger.debug('Completed run')
result_data = self.reader.read(outfile)
-
+
self._updateResultStartTime(result_data, start_time)
-
+
report_data = Report_Reading(rptfile)
-
+
result_data.maximum_trial_time = []
-
+
for time in report_data.maximum_trial_time:
result_data.maximum_trial_time.append(time + start_time)
if run_successful:
break
return result_data, run_successful
-
- def _updateResultStartTime(self, result_data, start_time):
- for res_type, res in result_data.link.items():
- #result_data.link[res_type].index = res
- res.index = res.index + start_time
-
- for res_type, res in result_data.node.items():
- #result_data.link[res_type].index = res
- res.index = res.index + start_time
-
- def _get_isolated_junctions_and_links(self, prev_isolated_junctions, prev_isolated_links):
- self._prev_isolated_junctions=prev_isolated_junctions
- self._prev_isolated_links=prev_isolated_links
-
+
+ def _updateResultStartTime(self, result_data, start_time): # noqa: N802, PLR6301
+ for res_type, res in result_data.link.items(): # noqa: B007, PERF102
+ # result_data.link[res_type].index = res
+ res.index = res.index + start_time # noqa: PLR6104
+
+ for res_type, res in result_data.node.items(): # noqa: B007, PERF102
+ # result_data.link[res_type].index = res
+ res.index = res.index + start_time # noqa: PLR6104
+
+ def _get_isolated_junctions_and_links(
+ self,
+ prev_isolated_junctions,
+ prev_isolated_links,
+ ):
+ self._prev_isolated_junctions = prev_isolated_junctions
+ self._prev_isolated_links = prev_isolated_links
+
self._initialize_internal_graph()
logger_level = logger.getEffectiveLevel()
@@ -229,49 +256,58 @@ def _get_isolated_junctions_and_links(self, prev_isolated_junctions, prev_isolat
for j in self._prev_isolated_junctions:
try:
junction = self._wn.get_node(j)
- junction._is_isolated = False
- except:
+ junction._is_isolated = False # noqa: SLF001
+ except: # noqa: S110, PERF203, E722
pass
- for l in self._prev_isolated_links:
+ for l in self._prev_isolated_links: # noqa: E741
try:
link = self._wn.get_link(l)
- link._is_isolated = False
- except:
+ link._is_isolated = False # noqa: SLF001
+ except: # noqa: S110, PERF203, E722
pass
node_indicator = np.ones(self._wn.num_nodes, dtype=self._int_dtype)
- check_for_isolated_junctions(self._source_ids, node_indicator, self._internal_graph.indptr,
- self._internal_graph.indices, self._internal_graph.data,
- self._number_of_connections)
+ check_for_isolated_junctions(
+ self._source_ids,
+ node_indicator,
+ self._internal_graph.indptr,
+ self._internal_graph.indices,
+ self._internal_graph.data,
+ self._number_of_connections,
+ )
- isolated_junction_ids = [i for i in range(len(node_indicator)) if node_indicator[i] == 1]
+ isolated_junction_ids = [
+ i for i in range(len(node_indicator)) if node_indicator[i] == 1
+ ]
isolated_junctions = OrderedSet()
isolated_links = OrderedSet()
for j_id in isolated_junction_ids:
j = self._node_id_to_name[j_id]
junction = self._wn.get_node(j)
- junction._is_isolated = True
+ junction._is_isolated = True # noqa: SLF001
isolated_junctions.add(j)
connected_links = self._wn.get_links_for_node(j)
- for l in connected_links:
+ for l in connected_links: # noqa: E741
link = self._wn.get_link(l)
- link._is_isolated = True
+ link._is_isolated = True # noqa: SLF001
isolated_links.add(l)
if logger_level <= logging.DEBUG:
if len(isolated_junctions) > 0 or len(isolated_links) > 0:
- raise ValueError('isolated junctions: {0}'.format(isolated_junctions))
- logger.debug('isolated links: {0}'.format(isolated_links))
+ raise ValueError(f'isolated junctions: {isolated_junctions}') # noqa: EM102, TRY003
+ logger.debug(f'isolated links: {isolated_links}')
self._prev_isolated_junctions = isolated_junctions
self._prev_isolated_links = isolated_links
return isolated_junctions, isolated_links
-
- def _initialize_internal_graph(self):
+
+ def _initialize_internal_graph(self): # noqa: C901
n_links = OrderedDict()
rows = []
cols = []
vals = []
- for link_name, link in itertools.chain(self._wn.pipes(), self._wn.pumps(), self._wn.valves()):
+ for link_name, link in itertools.chain( # noqa: B007
+ self._wn.pipes(), self._wn.pumps(), self._wn.valves()
+ ):
from_node_name = link.start_node_name
to_node_name = link.end_node_name
from_node_id = self._node_name_to_id[from_node_name]
@@ -281,30 +317,34 @@ def _initialize_internal_graph(self):
n_links[(to_node_id, from_node_id)] = 0
n_links[(from_node_id, to_node_id)] += 1
n_links[(to_node_id, from_node_id)] += 1
- rows.append(from_node_id)
- cols.append(to_node_id)
+ rows.append(from_node_id) # noqa: FURB113
+ cols.append(to_node_id) # noqa: FURB113
rows.append(to_node_id)
cols.append(from_node_id)
if link.initial_status == wntrfr.network.LinkStatus.closed:
+ vals.append(0) # noqa: FURB113
vals.append(0)
- vals.append(0)
- #sina remove comment amrks
+ # sina removed comment marks
elif link.link_type == 'Pipe':
if link.cv:
- vals.append(1)
+ vals.append(1) # noqa: FURB113
vals.append(0)
else:
- vals.append(1)
+ vals.append(1) # noqa: FURB113
vals.append(1)
elif link.link_type == 'Valve':
- if link.valve_type == 'PRV' or link.valve_type == 'PSV' or link.valve_type == 'FCV':
- vals.append(1)
+ if (
+ link.valve_type == 'PRV' # noqa: PLR1714
+ or link.valve_type == 'PSV'
+ or link.valve_type == 'FCV'
+ ):
+ vals.append(1) # noqa: FURB113
vals.append(0)
else:
- vals.append(1)
+ vals.append(1) # noqa: FURB113
vals.append(1)
else:
- vals.append(1)
+ vals.append(1) # noqa: FURB113
vals.append(1)
rows = np.array(rows, dtype=self._int_dtype)
@@ -313,34 +353,51 @@ def _initialize_internal_graph(self):
self._internal_graph = scipy.sparse.csr_matrix((vals, (rows, cols)))
ndx_map = OrderedDict()
- for link_name, link in self._wn.links():
+ for link_name, link in self._wn.links(): # noqa: B007
from_node_name = link.start_node_name
to_node_name = link.end_node_name
from_node_id = self._node_name_to_id[from_node_name]
to_node_id = self._node_name_to_id[to_node_name]
- ndx1 = _get_csr_data_index(self._internal_graph, from_node_id, to_node_id)
- ndx2 = _get_csr_data_index(self._internal_graph, to_node_id, from_node_id)
+ ndx1 = _get_csr_data_index(
+ self._internal_graph, from_node_id, to_node_id
+ )
+ ndx2 = _get_csr_data_index(
+ self._internal_graph, to_node_id, from_node_id
+ )
ndx_map[link] = (ndx1, ndx2)
self._map_link_to_internal_graph_data_ndx = ndx_map
self._number_of_connections = [0 for i in range(self._wn.num_nodes)]
- for node_id in self._node_id_to_name.keys():
- self._number_of_connections[node_id] = self._internal_graph.indptr[node_id+1] - self._internal_graph.indptr[node_id]
- self._number_of_connections = np.array(self._number_of_connections, dtype=self._int_dtype)
+ for node_id in self._node_id_to_name.keys(): # noqa: SIM118
+ self._number_of_connections[node_id] = (
+ self._internal_graph.indptr[node_id + 1]
+ - self._internal_graph.indptr[node_id]
+ )
+ self._number_of_connections = np.array(
+ self._number_of_connections, dtype=self._int_dtype
+ )
self._node_pairs_with_multiple_links = OrderedDict()
- for from_node_id, to_node_id in n_links.keys():
+ for from_node_id, to_node_id in n_links.keys(): # noqa: SIM118
if n_links[(from_node_id, to_node_id)] > 1:
- if (to_node_id, from_node_id) in self._node_pairs_with_multiple_links:
+ if (
+ to_node_id,
+ from_node_id,
+ ) in self._node_pairs_with_multiple_links:
continue
self._internal_graph[from_node_id, to_node_id] = 0
self._internal_graph[to_node_id, from_node_id] = 0
from_node_name = self._node_id_to_name[from_node_id]
to_node_name = self._node_id_to_name[to_node_id]
- tmp_list = self._node_pairs_with_multiple_links[(from_node_id, to_node_id)] = []
+ tmp_list = self._node_pairs_with_multiple_links[
+ (from_node_id, to_node_id)
+ ] = []
for link_name in self._wn.get_links_for_node(from_node_name):
link = self._wn.get_link(link_name)
- if link.start_node_name == to_node_name or link.end_node_name == to_node_name:
+ if (
+ link.start_node_name == to_node_name # noqa: PLR1714
+ or link.end_node_name == to_node_name
+ ):
tmp_list.append(link)
if link.initial_status != wntrfr.network.LinkStatus.closed:
ndx1, ndx2 = ndx_map[link]
@@ -349,29 +406,46 @@ def _initialize_internal_graph(self):
self._source_ids = []
for node_name, node in self._wn.tanks():
- if node.init_level - node.min_level < 0.01:
+ if node.init_level - node.min_level < 0.01: # noqa: PLR2004
continue
node_id = self._node_name_to_id[node_name]
self._source_ids.append(node_id)
-
- for node_name, node in self._wn.reservoirs():
- connected_link_name_list = self._wn.get_links_for_node(node_name) #this is to exclude the reservoirs that are for leak only
- out_going_link_list_name = [link_name for link_name in connected_link_name_list if self._wn.get_link(link_name).link_type != 'Pipe']
- out_going_pipe_list_name = [self._wn.get_link(pipe_name) for pipe_name in connected_link_name_list if self._wn.get_link(pipe_name).link_type == 'Pipe']
- out_going_pipe_list_name = [link.name for link in out_going_pipe_list_name if ( (link.cv == False and link.initial_status != LinkStatus.Closed) or (link.cv == True and link.end_node_name != node_name))]
+
+ for node_name, node in self._wn.reservoirs(): # noqa: B007
+ connected_link_name_list = self._wn.get_links_for_node(
+ node_name
+ ) # this is to exclude the reservoirs that are for leak only
+ out_going_link_list_name = [
+ link_name
+ for link_name in connected_link_name_list
+ if self._wn.get_link(link_name).link_type != 'Pipe'
+ ]
+ out_going_pipe_list_name = [
+ self._wn.get_link(pipe_name)
+ for pipe_name in connected_link_name_list
+ if self._wn.get_link(pipe_name).link_type == 'Pipe'
+ ]
+ out_going_pipe_list_name = [
+ link.name
+ for link in out_going_pipe_list_name
+ if (
+ (link.cv == False and link.initial_status != LinkStatus.Closed) # noqa: E712
+ or (link.cv == True and link.end_node_name != node_name) # noqa: E712
+ )
+ ]
out_going_link_list_name.extend(out_going_pipe_list_name)
if len(out_going_link_list_name) < 1:
continue
node_id = self._node_name_to_id[node_name]
self._source_ids.append(node_id)
self._source_ids = np.array(self._source_ids, dtype=self._int_dtype)
-
+
def _update_internal_graph(self):
data = self._internal_graph.data
ndx_map = self._map_link_to_internal_graph_data_ndx
for mgr in [self._presolve_controls, self._rules, self._postsolve_controls]:
for obj, attr in mgr.get_changes():
- if 'status' == attr:
+ if attr == 'status':
if obj.status == wntrfr.network.LinkStatus.closed:
ndx1, ndx2 = ndx_map[obj]
data[ndx1] = 0
@@ -381,7 +455,7 @@ def _update_internal_graph(self):
data[ndx1] = 1
data[ndx2] = 1
- for key, link_list in self._node_pairs_with_multiple_links.items():
+ for key, link_list in self._node_pairs_with_multiple_links.items(): # noqa: B007, PERF102
first_link = link_list[0]
ndx1, ndx2 = ndx_map[first_link]
data[ndx1] = 0
@@ -391,232 +465,321 @@ def _update_internal_graph(self):
ndx1, ndx2 = ndx_map[link]
data[ndx1] = 1
data[ndx2] = 1
-
+
def _initialize_name_id_maps(self):
n = 0
- for link_name, link in self._wn.links():
+ for link_name, link in self._wn.links(): # noqa: B007
self._link_name_to_id[link_name] = n
self._link_id_to_name[n] = link_name
- n += 1
+ n += 1 # noqa: SIM113
n = 0
- for node_name, node in self._wn.nodes():
+ for node_name, node in self._wn.nodes(): # noqa: B007
self._node_name_to_id[node_name] = n
self._node_id_to_name[n] = node_name
n += 1
-
- def now_temp(self, rr, isolated_link_list, alread_closed_pipes, _prev_isolated_junctions, already_done_nodes):
- check_nodes = [node_name for node_name in self._wn.junction_name_list if node_name not in _prev_isolated_junctions and node_name not in already_done_nodes]
+
+ def now_temp( # noqa: D102
+ self,
+ rr,
+ isolated_link_list,
+ alread_closed_pipes,
+ _prev_isolated_junctions,
+ already_done_nodes,
+ ):
+ check_nodes = [
+ node_name
+ for node_name in self._wn.junction_name_list
+ if node_name not in _prev_isolated_junctions
+ and node_name not in already_done_nodes
+ ]
junctions_pressure = (rr.node['pressure'][check_nodes]).iloc[-1]
- negative_junctions_pressure = (junctions_pressure[(junctions_pressure < -10)])
- negative_junctions_pressure = negative_junctions_pressure.sort_values(ascending = False)
+ negative_junctions_pressure = junctions_pressure[(junctions_pressure < -10)] # noqa: PLR2004
+ negative_junctions_pressure = negative_junctions_pressure.sort_values(
+ ascending=False
+ )
negative_junctions_name_list = negative_junctions_pressure.index.to_list()
- print('size= ' + repr(len(negative_junctions_name_list)) )
-
+ print('size= ' + repr(len(negative_junctions_name_list))) # noqa: T201
+
pipes_to_be_closed = []
closed_pipes = []
- #closed_nodes = []
+ # closed_nodes = []
ifinish = False
-
+
if len(negative_junctions_name_list) > 0:
i = 0
c = 0
- while i < np.ceil(len(negative_junctions_name_list)/len(negative_junctions_name_list)):
- #for i in np.arange(0, ,1 ):
- if i+c >= len(negative_junctions_name_list):
+ while i < np.ceil(
+ len(negative_junctions_name_list) / len(negative_junctions_name_list)
+ ):
+ # for i in np.arange(0, ,1 ):
+ if i + c >= len(negative_junctions_name_list):
break
- node_name = negative_junctions_name_list[i+c]
+ node_name = negative_junctions_name_list[i + c]
already_done_nodes.append(node_name)
- #for node_name in negative_junctions_name_list:
+ # for node_name in negative_junctions_name_list:
pipe_linked_to_node = self._wn.get_links_for_node(node_name)
- #get_checked_pipe_bool = self.check_pipes_sin(self, pipe_linked_to_node)
- checked_pipe_list = [checked_pipe for checked_pipe in pipe_linked_to_node if self._wn.get_link(checked_pipe).link_type == 'Pipe' and checked_pipe not in isolated_link_list and self._wn.get_link(checked_pipe).cv == False and self._wn.get_link(checked_pipe).initial_status == 1 and self._wn.get_link(checked_pipe).start_node.node_type == 'Junction' and self._wn.get_link(checked_pipe).end_node.node_type == 'Junction' and checked_pipe not in alread_closed_pipes]
+ # get_checked_pipe_bool = self.check_pipes_sin(self, pipe_linked_to_node)
+ checked_pipe_list = [
+ checked_pipe
+ for checked_pipe in pipe_linked_to_node
+ if self._wn.get_link(checked_pipe).link_type == 'Pipe'
+ and checked_pipe not in isolated_link_list
+ and self._wn.get_link(checked_pipe).cv == False # noqa: E712
+ and self._wn.get_link(checked_pipe).initial_status == 1
+ and self._wn.get_link(checked_pipe).start_node.node_type
+ == 'Junction'
+ and self._wn.get_link(checked_pipe).end_node.node_type
+ == 'Junction'
+ and checked_pipe not in alread_closed_pipes
+ ]
pipes_to_be_closed.extend(checked_pipe_list)
-
+
flag = False
for pipe_name in pipes_to_be_closed:
- #pipe = self.wn.get_link(pipe_name)
+ # pipe = self.wn.get_link(pipe_name)
flow = rr.link['flowrate'][pipe_name].iloc[-1]
-
- if abs(flow) > 0.01:
+
+ if abs(flow) > 0.01: # noqa: PLR2004
flag = True
- #pipe.initial_status = LinkStatus(0)
+ # pipe.initial_status = LinkStatus(0)
closed_pipes.append(pipe_name)
if not flag:
- i = i - 1
- c = c + 1
- i = i + 1
+ i = i - 1 # noqa: PLR6104
+ c = c + 1 # noqa: PLR6104
+ i = i + 1 # noqa: PLR6104
else:
- ifinish = True
+ ifinish = True
return closed_pipes, already_done_nodes, ifinish
-
- def alterPipeKmNNN(self, rr, isolated_link_list, _prev_isolated_junctions, flow_criteria, negative_pressure_limit):
- #t1 = time.time()
-
- closed_pipes={}
-
- check_nodes = [node_name for node_name in self._wn.junction_name_list if node_name not in _prev_isolated_junctions] #not isolated junctions
- junctions_pressure = (rr.node['pressure'][check_nodes]).iloc[-1] #latest pressure result for not-isolated junctions
- negative_junctions_pressure = (junctions_pressure[(junctions_pressure < negative_pressure_limit)]) #not-isolated junctions that have pressure less than specified amount
-
- negative_junctions_pressure = negative_junctions_pressure.sort_values(ascending = False)
+
+ def alterPipeKmNNN( # noqa: N802, D102
+ self,
+ rr,
+ isolated_link_list,
+ _prev_isolated_junctions,
+ flow_criteria,
+ negative_pressure_limit,
+ ):
+ # t1 = time.time()
+
+ closed_pipes = {}
+
+ check_nodes = [
+ node_name
+ for node_name in self._wn.junction_name_list
+ if node_name not in _prev_isolated_junctions
+ ] # not isolated junctions
+ junctions_pressure = (rr.node['pressure'][check_nodes]).iloc[
+ -1
+ ] # latest pressure result for not-isolated junctions
+ negative_junctions_pressure = junctions_pressure[
+ (junctions_pressure < negative_pressure_limit)
+ ] # not-isolated junctions that have pressure less than specified amount
+
+ negative_junctions_pressure = negative_junctions_pressure.sort_values(
+ ascending=False
+ )
negative_junctions_name_list = negative_junctions_pressure.index.to_list()
-
+
last_flow_row = rr.link['flowrate'].iloc[-1]
-
+
pipe_found = False
- while pipe_found == False:
+ while pipe_found == False: # noqa: E712
if len(negative_junctions_name_list) == 0:
ifinish = True
return closed_pipes, ifinish
-
+
pipe_name_list = []
- pipe_name_list_temp = self._wn.get_links_for_node(negative_junctions_name_list[-1]) #new: the most negative
+ pipe_name_list_temp = self._wn.get_links_for_node(
+ negative_junctions_name_list[-1]
+ ) # new: the most negative
pipe_name_list.extend(pipe_name_list_temp)
-
+
pipe_name_list = set(pipe_name_list) - set(isolated_link_list)
- pipe_name_list = [pipe_name for pipe_name in pipe_name_list if pipe_name in self._wn.pipe_name_list]
- most_recent_flow_for_pipes = last_flow_row[pipe_name_list]
+ pipe_name_list = [
+ pipe_name
+ for pipe_name in pipe_name_list
+ if pipe_name in self._wn.pipe_name_list
+ ]
+ most_recent_flow_for_pipes = last_flow_row[pipe_name_list]
abs_most_recent_flow_for_pipes = most_recent_flow_for_pipes.abs()
- abs_most_recent_flow_for_pipes = abs_most_recent_flow_for_pipes[abs_most_recent_flow_for_pipes >= flow_criteria]
-
+ abs_most_recent_flow_for_pipes = abs_most_recent_flow_for_pipes[
+ abs_most_recent_flow_for_pipes >= flow_criteria
+ ]
+
if len(abs_most_recent_flow_for_pipes) == 0:
- negative_junctions_pressure.drop(negative_junctions_name_list[-1], inplace=True)
- negative_junctions_name_list = negative_junctions_pressure.index.to_list()
+ negative_junctions_pressure.drop(
+ negative_junctions_name_list[-1],
+ inplace=True, # noqa: PD002
+ )
+ negative_junctions_name_list = (
+ negative_junctions_pressure.index.to_list()
+ )
else:
pipe_found = True
ifinish = False
- abs_most_recent_flow_for_pipes = abs_most_recent_flow_for_pipes.sort_values(ascending = False)
- biggest_flow_pipe_name = abs_most_recent_flow_for_pipes.index[0]
- biggest_flow_pipe_abs_flow = abs_most_recent_flow_for_pipes.iloc[0]
+ abs_most_recent_flow_for_pipes = abs_most_recent_flow_for_pipes.sort_values(
+ ascending=False
+ )
+ biggest_flow_pipe_name = abs_most_recent_flow_for_pipes.index[0]
+ biggest_flow_pipe_abs_flow = abs_most_recent_flow_for_pipes.iloc[0]
pipe = self._wn.get_link(biggest_flow_pipe_name)
- #n1 = pipe.start_node_name
- #n2 = pipe.end_node_name
- #n1_pressure = rr.node['pressure'][n1].iloc[-1]
- #n2_pressure = rr.node['pressure'][n2].iloc[-1]
- already_C = pipe.minor_loss
- #if already_C < 0.001:
- #already_C = 1
- new_C = (1000 * 2* 9.81 * (pipe.diameter**2*3.14/4)**2) / ((biggest_flow_pipe_abs_flow)**2) + already_C #the last of 100 is to magnify the c choosing
+ # n1 = pipe.start_node_name
+ # n2 = pipe.end_node_name
+ # n1_pressure = rr.node['pressure'][n1].iloc[-1]
+ # n2_pressure = rr.node['pressure'][n2].iloc[-1]
+ already_C = pipe.minor_loss # noqa: N806
+ # if already_C < 0.001:
+ # already_C = 1
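+            # Added note: the expression below appears to size the minor-loss
+            # coefficient from h = K * v**2 / (2 * g) with v = Q / A and
+            # A = pi * d**2 / 4, i.e. K = 2 * 9.81 * A**2 * h_target / Q**2,
+            # where 1000 acts as a large target head loss (in metres) that
+            # throttles the pipe; the existing coefficient is kept on top.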
+ new_C = (1000 * 2 * 9.81 * (pipe.diameter**2 * math.pi / 4) ** 2) / ( # noqa: N806
+ (biggest_flow_pipe_abs_flow) ** 2
+            ) + already_C  # the large leading constant magnifies the chosen coefficient
pipe.minor_loss = new_C
closed_pipes[biggest_flow_pipe_name] = already_C
-
- #t2 = time.time()
- #print(t2-t1)
- #print('size closed: '+repr(len(closed_pipes)) )
+ # t2 = time.time()
+ # print(t2-t1)
+ # print('size closed: '+repr(len(closed_pipes)) )
return closed_pipes, ifinish
-
-
- #if pipe.cv == True:
- #continue
- #if pipe._is_isolated == True:
- #continue
- #node_A = pipe.start_node
- #node_B = pipe.end_node
-
- #if node_A.node_type != "Junction" or node_B.node_type != "Junction":
- #continue
-
- #if node_A.name in already_nodes or node_B.name in already_nodes:
- #continue
-
- #if pipe.initial_status != 1:
- #continue
-
- #for
- #flow = rr.link['flowrate']
-
- #i_possitive_rate = True
-
- #if flow > 0.01:
- #i_possitive_rate = True
- #chosen_node = node_A
- #elif flow < 0.01:
- #i_possitive_rate = False
- #chosen_node = node_B
- #else:
- #continue
-
- #def check_pipes_sin(self, pipe_list):
- #for pipe_name in pipe_list:
- def closePipeNNN(self, rr, isolated_link_list, _prev_isolated_junctions, flow_criteria, negative_pressure_limit):
- closed_pipes={}
-
- check_nodes = [node_name for node_name in self._wn.junction_name_list if node_name not in _prev_isolated_junctions] #not isolated junctions
- junctions_pressure = (rr.node['pressure'][check_nodes]).iloc[-1] #latest pressure result for not-isolated junctions
- negative_junctions_pressure = (junctions_pressure[(junctions_pressure < negative_pressure_limit)]) #not-isolated junctions that have pressure less than specified amount
-
- negative_junctions_pressure = negative_junctions_pressure.sort_values(ascending = False)
+
+ # if pipe.cv == True:
+ # continue
+ # if pipe._is_isolated == True:
+ # continue
+ # node_A = pipe.start_node
+ # node_B = pipe.end_node
+
+ # if node_A.node_type != "Junction" or node_B.node_type != "Junction":
+ # continue
+
+ # if node_A.name in already_nodes or node_B.name in already_nodes:
+ # continue
+
+ # if pipe.initial_status != 1:
+ # continue
+
+ # for
+ # flow = rr.link['flowrate']
+
+ # i_possitive_rate = True
+
+ # if flow > 0.01:
+ # i_possitive_rate = True
+ # chosen_node = node_A
+ # elif flow < 0.01:
+ # i_possitive_rate = False
+ # chosen_node = node_B
+ # else:
+ # continue
+
+ # def check_pipes_sin(self, pipe_list):
+ # for pipe_name in pipe_list:
+ def closePipeNNN( # noqa: N802, D102
+ self,
+ rr,
+ isolated_link_list,
+ _prev_isolated_junctions,
+ flow_criteria,
+ negative_pressure_limit,
+ ):
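+        # Added note: same junction/pipe search as alterPipeKmNNN, but instead
+        # of raising the minor-loss coefficient it closes the selected pipe,
+        # recording its previous initial_status in closed_pipes.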
+ closed_pipes = {}
+
+ check_nodes = [
+ node_name
+ for node_name in self._wn.junction_name_list
+ if node_name not in _prev_isolated_junctions
+ ] # not isolated junctions
+ junctions_pressure = (rr.node['pressure'][check_nodes]).iloc[
+ -1
+ ] # latest pressure result for not-isolated junctions
+ negative_junctions_pressure = junctions_pressure[
+ (junctions_pressure < negative_pressure_limit)
+ ] # not-isolated junctions that have pressure less than specified amount
+
+ negative_junctions_pressure = negative_junctions_pressure.sort_values(
+ ascending=False
+ )
negative_junctions_name_list = negative_junctions_pressure.index.to_list()
-
+
last_flow_row = rr.link['flowrate'].iloc[-1]
-
+
pipe_found = False
- while pipe_found == False:
+ while pipe_found == False: # noqa: E712
if len(negative_junctions_name_list) == 0:
ifinish = True
return closed_pipes, ifinish
-
+
pipe_name_list = []
- pipe_name_list_temp = self._wn.get_links_for_node(negative_junctions_name_list[-1]) #new: the most negative
+ pipe_name_list_temp = self._wn.get_links_for_node(
+ negative_junctions_name_list[-1]
+ ) # new: the most negative
pipe_name_list.extend(pipe_name_list_temp)
-
+
pipe_name_list = set(pipe_name_list) - set(isolated_link_list)
- pipe_name_list = [pipe_name for pipe_name in pipe_name_list if pipe_name in self._wn.pipe_name_list]
- most_recent_flow_for_pipes = last_flow_row[pipe_name_list]
+ pipe_name_list = [
+ pipe_name
+ for pipe_name in pipe_name_list
+ if pipe_name in self._wn.pipe_name_list
+ ]
+ most_recent_flow_for_pipes = last_flow_row[pipe_name_list]
abs_most_recent_flow_for_pipes = most_recent_flow_for_pipes.abs()
- abs_most_recent_flow_for_pipes = abs_most_recent_flow_for_pipes[abs_most_recent_flow_for_pipes >= flow_criteria]
-
+ abs_most_recent_flow_for_pipes = abs_most_recent_flow_for_pipes[
+ abs_most_recent_flow_for_pipes >= flow_criteria
+ ]
+
if len(abs_most_recent_flow_for_pipes) == 0:
- negative_junctions_pressure.drop(negative_junctions_name_list[-1], inplace=True)
- negative_junctions_name_list = negative_junctions_pressure.index.to_list()
+ negative_junctions_pressure.drop(
+ negative_junctions_name_list[-1],
+ inplace=True, # noqa: PD002
+ )
+ negative_junctions_name_list = (
+ negative_junctions_pressure.index.to_list()
+ )
else:
pipe_found = True
ifinish = False
- abs_most_recent_flow_for_pipes = abs_most_recent_flow_for_pipes.sort_values(ascending = False)
- biggest_flow_pipe_name = abs_most_recent_flow_for_pipes.index[0]
- biggest_flow_pipe_abs_flow = abs_most_recent_flow_for_pipes.iloc[0]
+ abs_most_recent_flow_for_pipes = abs_most_recent_flow_for_pipes.sort_values(
+ ascending=False
+ )
+ biggest_flow_pipe_name = abs_most_recent_flow_for_pipes.index[0]
+ biggest_flow_pipe_abs_flow = abs_most_recent_flow_for_pipes.iloc[0] # noqa: F841
pipe = self._wn.get_link(biggest_flow_pipe_name)
-
- already_C = pipe.minor_loss
- initial_status = pipe.initial_status
+
+ already_C = pipe.minor_loss # noqa: N806, F841
+ initial_status = pipe.initial_status
closed_pipes[biggest_flow_pipe_name] = initial_status
pipe.initial_status = LinkStatus.Closed
-
return closed_pipes, ifinish
-
-
- #if pipe.cv == True:
- #continue
- #if pipe._is_isolated == True:
- #continue
- #node_A = pipe.start_node
- #node_B = pipe.end_node
-
- #if node_A.node_type != "Junction" or node_B.node_type != "Junction":
- #continue
-
- #if node_A.name in already_nodes or node_B.name in already_nodes:
- #continue
-
- #if pipe.initial_status != 1:
- #continue
-
- #for
- #flow = rr.link['flowrate']
-
- #i_possitive_rate = True
-
- #if flow > 0.01:
- #i_possitive_rate = True
- #chosen_node = node_A
- #elif flow < 0.01:
- #i_possitive_rate = False
- #chosen_node = node_B
- #else:
- #continue
-
- #def check_pipes_sin(self, pipe_list):
- #for pipe_name in pipe_list:
-
\ No newline at end of file
+
+ # if pipe.cv == True:
+ # continue
+ # if pipe._is_isolated == True:
+ # continue
+ # node_A = pipe.start_node
+ # node_B = pipe.end_node
+
+ # if node_A.node_type != "Junction" or node_B.node_type != "Junction":
+ # continue
+
+ # if node_A.name in already_nodes or node_B.name in already_nodes:
+ # continue
+
+ # if pipe.initial_status != 1:
+ # continue
+
+ # for
+ # flow = rr.link['flowrate']
+
+ # i_possitive_rate = True
+
+ # if flow > 0.01:
+ # i_possitive_rate = True
+ # chosen_node = node_A
+ # elif flow < 0.01:
+ # i_possitive_rate = False
+ # chosen_node = node_B
+ # else:
+ # continue
+
+ # def check_pipes_sin(self, pipe_list):
+ # for pipe_name in pipe_list:
diff --git a/modules/systemPerformance/REWET/REWET/EnhancedWNTR/sim/io.py b/modules/systemPerformance/REWET/REWET/EnhancedWNTR/sim/io.py
index 24ae46922..2f0128ca0 100644
--- a/modules/systemPerformance/REWET/REWET/EnhancedWNTR/sim/io.py
+++ b/modules/systemPerformance/REWET/REWET/EnhancedWNTR/sim/io.py
@@ -1,5 +1,4 @@
-"""
-The wntrfr.epanet.io module contains methods for reading/writing EPANET input and output files.
+"""The wntrfr.epanet.io module contains methods for reading/writing EPANET input and output files.
.. rubric:: Contents
@@ -11,47 +10,75 @@
----
-"""
-from __future__ import absolute_import
+""" # noqa: CPY001
-import datetime
-import re
-import io
-import os, sys
import logging
-import six
-import warnings
+import re
+
import numpy as np
import pandas as pd
-import difflib
-from collections import OrderedDict
-
-#from .time_utils import run_lineprofile
-import wntr
+# from .time_utils import run_lineprofile
import wntrfr.network
import wntrfr.sim
from wntrfr.network.base import Link
-from wntrfr.network.model import WaterNetworkModel
-from wntrfr.network.elements import Junction, Reservoir, Tank, Pipe, Pump, Valve
-from wntrfr.network.options import WaterNetworkOptions
-from wntrfr.network.model import Pattern, LinkStatus, Curve, Demands, Source
-from wntrfr.network.controls import TimeOfDayCondition, SimTimeCondition, ValueCondition, Comparison
-from wntrfr.network.controls import OrCondition, AndCondition, Control, ControlAction, _ControlType, Rule
-
-from .util import FlowUnits, MassUnits, HydParam, QualParam, MixType, ResultType, EN
-from .util import to_si, from_si
-from .util import StatisticsType, QualType, PressureUnits
+from wntrfr.network.controls import (
+ Comparison,
+ Control,
+ SimTimeCondition,
+ TimeOfDayCondition,
+ ValueCondition,
+ _ControlType, # noqa: PLC2701
+)
+from wntrfr.network.elements import Junction, Pipe, Pump, Tank, Valve
+from wntrfr.network.model import LinkStatus
+
+from .util import (
+ EN,
+ FlowUnits,
+ HydParam,
+ MassUnits,
+ MixType,
+ PressureUnits,
+ QualParam,
+ QualType,
+ ResultType,
+ StatisticsType,
+ from_si,
+ to_si,
+)
logger = logging.getLogger(__name__)
-_INP_SECTIONS = ['[OPTIONS]', '[TITLE]', '[JUNCTIONS]', '[RESERVOIRS]',
- '[TANKS]', '[PIPES]', '[PUMPS]', '[VALVES]', '[EMITTERS]',
- '[CURVES]', '[PATTERNS]', '[ENERGY]', '[STATUS]',
- '[CONTROLS]', '[RULES]', '[DEMANDS]', '[QUALITY]',
- '[REACTIONS]', '[SOURCES]', '[MIXING]',
- '[TIMES]', '[REPORT]', '[COORDINATES]', '[VERTICES]',
- '[LABELS]', '[BACKDROP]', '[TAGS]']
+_INP_SECTIONS = [
+ '[OPTIONS]',
+ '[TITLE]',
+ '[JUNCTIONS]',
+ '[RESERVOIRS]',
+ '[TANKS]',
+ '[PIPES]',
+ '[PUMPS]',
+ '[VALVES]',
+ '[EMITTERS]',
+ '[CURVES]',
+ '[PATTERNS]',
+ '[ENERGY]',
+ '[STATUS]',
+ '[CONTROLS]',
+ '[RULES]',
+ '[DEMANDS]',
+ '[QUALITY]',
+ '[REACTIONS]',
+ '[SOURCES]',
+ '[MIXING]',
+ '[TIMES]',
+ '[REPORT]',
+ '[COORDINATES]',
+ '[VERTICES]',
+ '[LABELS]',
+ '[BACKDROP]',
+ '[TAGS]',
+]
_JUNC_ENTRY = ' {name:20} {elev:15.11g} {dem:15.11g} {pat:24} {com:>3s}\n'
_JUNC_LABEL = '{:21} {:>12s} {:>12s} {:24}\n'
@@ -65,7 +92,9 @@
_PIPE_ENTRY = ' {name:20s} {node1:20s} {node2:20s} {len:15.11g} {diam:15.11g} {rough:15.11g} {mloss:15.11g} {status:>20s} {com:>3s}\n'
_PIPE_LABEL = '{:21s} {:20s} {:20s} {:>20s} {:>20s} {:>20s} {:>20s} {:>20s}\n'
-_PUMP_ENTRY = ' {name:20s} {node1:20s} {node2:20s} {ptype:8s} {params:20s} {com:>3s}\n'
+_PUMP_ENTRY = (
+ ' {name:20s} {node1:20s} {node2:20s} {ptype:8s} {params:20s} {com:>3s}\n'
+)
_PUMP_LABEL = '{:21s} {:20s} {:20s} {:20s}\n'
_VALVE_ENTRY = ' {name:20s} {node1:20s} {node2:20s} {diam:15.11g} {vtype:4s} {set:15.11g} {mloss:15.11g} {com:>3s}\n'
@@ -75,6 +104,7 @@
_CURVE_ENTRY = ' {name:10s} {x:12f} {y:12f} {com:>3s}\n'
_CURVE_LABEL = '{:11s} {:12s} {:12s}\n'
+
def _split_line(line):
_vc = line.split(';', 1)
_cmnt = None
@@ -83,35 +113,31 @@ def _split_line(line):
pass
elif len(_vc) == 1:
_vals = _vc[0].split()
- elif _vc[0] == '':
+ elif _vc[0] == '': # noqa: PLC1901
_cmnt = _vc[1]
else:
_vals = _vc[0].split()
_cmnt = _vc[1]
return _vals, _cmnt
-def _is_number(s):
- """
- Checks if input is a number
+def _is_number(s):
+ """Checks if input is a number
Parameters
----------
s : anything
- """
-
+ """ # noqa: D400, D401
try:
float(s)
- return True
+ return True # noqa: TRY300
except ValueError:
return False
def _str_time_to_sec(s):
- """
- Converts EPANET time format to seconds.
-
+ """Converts EPANET time format to seconds.
Parameters
----------
@@ -122,33 +148,35 @@ def _str_time_to_sec(s):
Returns
-------
Integer value of time in seconds.
- """
+
+ """ # noqa: D401
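+    # e.g. '1:30:30' -> 1*3600 + 30*60 + 30 = 5430 seconds (added example)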
pattern1 = re.compile(r'^(\d+):(\d+):(\d+)$')
time_tuple = pattern1.search(s)
if bool(time_tuple):
- return (int(time_tuple.groups()[0])*60*60 +
- int(time_tuple.groups()[1])*60 +
- int(round(float(time_tuple.groups()[2]))))
- else:
+ return (
+ int(time_tuple.groups()[0]) * 60 * 60
+ + int(time_tuple.groups()[1]) * 60
+ + int(round(float(time_tuple.groups()[2])))
+ )
+ else: # noqa: RET505
pattern2 = re.compile(r'^(\d+):(\d+)$')
time_tuple = pattern2.search(s)
if bool(time_tuple):
- return (int(time_tuple.groups()[0])*60*60 +
- int(time_tuple.groups()[1])*60)
- else:
+ return (
+ int(time_tuple.groups()[0]) * 60 * 60
+ + int(time_tuple.groups()[1]) * 60
+ )
+ else: # noqa: RET505
pattern3 = re.compile(r'^(\d+)$')
time_tuple = pattern3.search(s)
if bool(time_tuple):
- return int(time_tuple.groups()[0])*60*60
- else:
- raise RuntimeError("Time format in "
- "INP file not recognized. ")
-
+ return int(time_tuple.groups()[0]) * 60 * 60
+ else: # noqa: RET505
+ raise RuntimeError('Time format in ' 'INP file not recognized. ') # noqa: DOC501, EM101, TRY003
-def _clock_time_to_sec(s, am_pm):
- """
- Converts EPANET clocktime format to seconds.
+def _clock_time_to_sec(s, am_pm): # noqa: C901
+ """Converts EPANET clocktime format to seconds.
Parameters
----------
@@ -163,83 +191,97 @@ def _clock_time_to_sec(s, am_pm):
-------
Integer value of time in seconds
- """
+ """ # noqa: D401
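+    # e.g. ('1:30:00', 'PM') -> 1*3600 + 30*60 + 12*3600 = 48600 seconds (added example)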
if am_pm.upper() == 'AM':
am = True
elif am_pm.upper() == 'PM':
am = False
else:
- raise RuntimeError('am_pm option not recognized; options are AM or PM')
+ raise RuntimeError('am_pm option not recognized; options are AM or PM') # noqa: DOC501, EM101, TRY003
pattern1 = re.compile(r'^(\d+):(\d+):(\d+)$')
time_tuple = pattern1.search(s)
if bool(time_tuple):
- time_sec = (int(time_tuple.groups()[0])*60*60 +
- int(time_tuple.groups()[1])*60 +
- int(round(float(time_tuple.groups()[2]))))
+ time_sec = (
+ int(time_tuple.groups()[0]) * 60 * 60
+ + int(time_tuple.groups()[1]) * 60
+ + int(round(float(time_tuple.groups()[2])))
+ )
if s.startswith('12'):
- time_sec -= 3600*12
+ time_sec -= 3600 * 12
if not am:
- if time_sec >= 3600*12:
- raise RuntimeError('Cannot specify am/pm for times greater than 12:00:00')
- time_sec += 3600*12
+ if time_sec >= 3600 * 12:
+ raise RuntimeError( # noqa: DOC501, TRY003
+ 'Cannot specify am/pm for times greater than 12:00:00' # noqa: EM101
+ )
+ time_sec += 3600 * 12
return time_sec
- else:
+ else: # noqa: RET505
pattern2 = re.compile(r'^(\d+):(\d+)$')
time_tuple = pattern2.search(s)
if bool(time_tuple):
- time_sec = (int(time_tuple.groups()[0])*60*60 +
- int(time_tuple.groups()[1])*60)
+ time_sec = (
+ int(time_tuple.groups()[0]) * 60 * 60
+ + int(time_tuple.groups()[1]) * 60
+ )
if s.startswith('12'):
- time_sec -= 3600*12
+ time_sec -= 3600 * 12
if not am:
if time_sec >= 3600 * 12:
- raise RuntimeError('Cannot specify am/pm for times greater than 12:00:00')
- time_sec += 3600*12
+ raise RuntimeError( # noqa: DOC501, TRY003
+ 'Cannot specify am/pm for times greater than 12:00:00' # noqa: EM101
+ )
+ time_sec += 3600 * 12
return time_sec
- else:
+ else: # noqa: RET505
pattern3 = re.compile(r'^(\d+)$')
time_tuple = pattern3.search(s)
if bool(time_tuple):
- time_sec = int(time_tuple.groups()[0])*60*60
+ time_sec = int(time_tuple.groups()[0]) * 60 * 60
if s.startswith('12'):
- time_sec -= 3600*12
+ time_sec -= 3600 * 12
if not am:
if time_sec >= 3600 * 12:
- raise RuntimeError('Cannot specify am/pm for times greater than 12:00:00')
- time_sec += 3600*12
+ raise RuntimeError( # noqa: DOC501, TRY003
+ 'Cannot specify am/pm for times greater than 12:00:00' # noqa: EM101
+ )
+ time_sec += 3600 * 12
return time_sec
- else:
- raise RuntimeError("Time format in "
- "INP file not recognized. ")
+ else: # noqa: RET505
+ raise RuntimeError('Time format in ' 'INP file not recognized. ') # noqa: DOC501, EM101, TRY003
def _sec_to_string(sec):
- hours = int(sec/3600.)
- sec -= hours*3600
- mm = int(sec/60.)
- sec -= mm*60
+ hours = int(sec / 3600.0)
+ sec -= hours * 3600
+ mm = int(sec / 60.0)
+ sec -= mm * 60
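+    # e.g. 5430 s -> (1, 30, 30) (added example)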
return (hours, mm, int(sec))
class InpFile(wntrfr.epanet.InpFile):
- """
- EPANET INP file reader and writer class.
+ """EPANET INP file reader and writer class.
This class provides read and write functionality for EPANET INP files.
The EPANET Users Manual provides full documentation for the INP file format.
"""
+
def __init__(self):
super().__init__()
def _write_junctions(self, f, wn):
f.write('[JUNCTIONS]\n'.encode('ascii'))
- f.write(_JUNC_LABEL.format(';ID', 'Elevation', 'Demand', 'Pattern').encode('ascii'))
+ f.write(
+ _JUNC_LABEL.format(';ID', 'Elevation', 'Demand', 'Pattern').encode(
+ 'ascii'
+ )
+ )
nnames = list(wn.junction_name_list)
# nnames.sort()
for junction_name in nnames:
junction = wn.nodes[junction_name]
- if junction._is_isolated==True: #sina added this
+ # sina added this
+ if junction._is_isolated == True: # noqa: SLF001, E712
continue
if junction.demand_timeseries_list:
base_demands = junction.demand_timeseries_list.base_demand_list()
@@ -258,11 +300,15 @@ def _write_junctions(self, f, wn):
else:
base_demand = 0.0
demand_pattern = None
- E = {'name': junction_name,
- 'elev': from_si(self.flow_units, junction.elevation, HydParam.Elevation),
- 'dem': from_si(self.flow_units, base_demand, HydParam.Demand),
- 'pat': '',
- 'com': ';'}
+ E = { # noqa: N806
+ 'name': junction_name,
+ 'elev': from_si(
+ self.flow_units, junction.elevation, HydParam.Elevation
+ ),
+ 'dem': from_si(self.flow_units, base_demand, HydParam.Demand),
+ 'pat': '',
+ 'com': ';',
+ }
if demand_pattern is not None:
E['pat'] = str(demand_pattern)
f.write(_JUNC_ENTRY.format(**E).encode('ascii'))
@@ -275,11 +321,18 @@ def _write_reservoirs(self, f, wn):
# nnames.sort()
for reservoir_name in nnames:
reservoir = wn.nodes[reservoir_name]
- if reservoir._is_isolated==True: #sina added this
+ # sina added this
+ if reservoir._is_isolated == True: # noqa: SLF001, E712
continue
- E = {'name': reservoir_name,
- 'head': from_si(self.flow_units, reservoir.head_timeseries.base_value, HydParam.HydraulicHead),
- 'com': ';'}
+ E = { # noqa: N806
+ 'name': reservoir_name,
+ 'head': from_si(
+ self.flow_units,
+ reservoir.head_timeseries.base_value,
+ HydParam.HydraulicHead,
+ ),
+ 'com': ';',
+ }
if reservoir.head_timeseries.pattern is None:
E['pat'] = ''
else:
@@ -289,23 +342,43 @@ def _write_reservoirs(self, f, wn):
def _write_tanks(self, f, wn):
f.write('[TANKS]\n'.encode('ascii'))
- f.write(_TANK_LABEL.format(';ID', 'Elevation', 'Init Level', 'Min Level', 'Max Level',
- 'Diameter', 'Min Volume', 'Volume Curve').encode('ascii'))
+ f.write(
+ _TANK_LABEL.format(
+ ';ID',
+ 'Elevation',
+ 'Init Level',
+ 'Min Level',
+ 'Max Level',
+ 'Diameter',
+ 'Min Volume',
+ 'Volume Curve',
+ ).encode('ascii')
+ )
nnames = list(wn.tank_name_list)
# nnames.sort()
for tank_name in nnames:
tank = wn.nodes[tank_name]
- if tank._is_isolated==True: #sina added this
+ if tank._is_isolated == True: # sina added this # noqa: SLF001, E712
continue
- E = {'name': tank_name,
- 'elev': from_si(self.flow_units, tank.elevation, HydParam.Elevation),
- 'initlev': from_si(self.flow_units, tank.init_level, HydParam.HydraulicHead),
- 'minlev': from_si(self.flow_units, tank.min_level, HydParam.HydraulicHead),
- 'maxlev': from_si(self.flow_units, tank.max_level, HydParam.HydraulicHead),
- 'diam': from_si(self.flow_units, tank.diameter, HydParam.TankDiameter),
- 'minvol': from_si(self.flow_units, tank.min_vol, HydParam.Volume),
- 'curve': '',
- 'com': ';'}
+ E = { # noqa: N806
+ 'name': tank_name,
+ 'elev': from_si(self.flow_units, tank.elevation, HydParam.Elevation),
+ 'initlev': from_si(
+ self.flow_units, tank.init_level, HydParam.HydraulicHead
+ ),
+ 'minlev': from_si(
+ self.flow_units, tank.min_level, HydParam.HydraulicHead
+ ),
+ 'maxlev': from_si(
+ self.flow_units, tank.max_level, HydParam.HydraulicHead
+ ),
+ 'diam': from_si(
+ self.flow_units, tank.diameter, HydParam.TankDiameter
+ ),
+ 'minvol': from_si(self.flow_units, tank.min_vol, HydParam.Volume),
+ 'curve': '',
+ 'com': ';',
+ }
if tank.vol_curve is not None:
E['curve'] = tank.vol_curve.name
f.write(_TANK_ENTRY.format(**E).encode('ascii'))
@@ -313,23 +386,37 @@ def _write_tanks(self, f, wn):
def _write_pipes(self, f, wn):
f.write('[PIPES]\n'.encode('ascii'))
- f.write(_PIPE_LABEL.format(';ID', 'Node1', 'Node2', 'Length', 'Diameter',
- 'Roughness', 'Minor Loss', 'Status').encode('ascii'))
+ f.write(
+ _PIPE_LABEL.format(
+ ';ID',
+ 'Node1',
+ 'Node2',
+ 'Length',
+ 'Diameter',
+ 'Roughness',
+ 'Minor Loss',
+ 'Status',
+ ).encode('ascii')
+ )
lnames = list(wn.pipe_name_list)
# lnames.sort()
for pipe_name in lnames:
pipe = wn.links[pipe_name]
- if pipe._is_isolated == True: #Sina added this
+ if pipe._is_isolated == True: # Sina added this # noqa: SLF001, E712
continue
- E = {'name': pipe_name,
- 'node1': pipe.start_node_name,
- 'node2': pipe.end_node_name,
- 'len': from_si(self.flow_units, pipe.length, HydParam.Length),
- 'diam': from_si(self.flow_units, pipe.diameter, HydParam.PipeDiameter),
- 'rough': pipe.roughness,
- 'mloss': pipe.minor_loss,
- 'status': str(pipe.initial_status),
- 'com': ';'}
+ E = { # noqa: N806
+ 'name': pipe_name,
+ 'node1': pipe.start_node_name,
+ 'node2': pipe.end_node_name,
+ 'len': from_si(self.flow_units, pipe.length, HydParam.Length),
+ 'diam': from_si(
+ self.flow_units, pipe.diameter, HydParam.PipeDiameter
+ ),
+ 'rough': pipe.roughness,
+ 'mloss': pipe.minor_loss,
+ 'status': str(pipe.initial_status),
+ 'com': ';',
+ }
if pipe.cv:
E['status'] = 'CV'
f.write(_PIPE_ENTRY.format(**E).encode('ascii'))
@@ -337,36 +424,46 @@ def _write_pipes(self, f, wn):
def _write_pumps(self, f, wn):
f.write('[PUMPS]\n'.encode('ascii'))
- f.write(_PUMP_LABEL.format(';ID', 'Node1', 'Node2', 'Properties').encode('ascii'))
+ f.write(
+ _PUMP_LABEL.format(';ID', 'Node1', 'Node2', 'Properties').encode('ascii')
+ )
lnames = list(wn.pump_name_list)
# lnames.sort()
for pump_name in lnames:
pump = wn.links[pump_name]
- if pump._is_isolated == True: #Sina added this
+ if pump._is_isolated == True: # Sina added this # noqa: SLF001, E712
continue
- E = {'name': pump_name,
- 'node1': pump.start_node_name,
- 'node2': pump.end_node_name,
- 'ptype': pump.pump_type,
- 'params': '',
-# 'speed_keyword': 'SPEED',
-# 'speed': pump.speed_timeseries.base_value,
- 'com': ';'}
+ E = { # noqa: N806
+ 'name': pump_name,
+ 'node1': pump.start_node_name,
+ 'node2': pump.end_node_name,
+ 'ptype': pump.pump_type,
+ 'params': '',
+ # 'speed_keyword': 'SPEED',
+ # 'speed': pump.speed_timeseries.base_value,
+ 'com': ';',
+ }
if pump.pump_type == 'HEAD':
E['params'] = pump.pump_curve_name
elif pump.pump_type == 'POWER':
- E['params'] = str(from_si(self.flow_units, pump.power, HydParam.Power))
+ E['params'] = str(
+ from_si(self.flow_units, pump.power, HydParam.Power)
+ )
else:
- raise RuntimeError('Only head or power info is supported of pumps.')
+                raise RuntimeError('Only head or power info is supported for pumps.')  # noqa: EM101, TRY003
tmp_entry = _PUMP_ENTRY
if pump.speed_timeseries.base_value != 1:
E['speed_keyword'] = 'SPEED'
E['speed'] = pump.speed_timeseries.base_value
- tmp_entry = (tmp_entry.rstrip('\n').rstrip('}').rstrip('com:>3s').rstrip(' {') +
- ' {speed_keyword:8s} {speed:15.11g} {com:>3s}\n')
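+                # Added note: strip the trailing ' {com:>3s}\n' placeholder so
+                # the SPEED keyword and value can be appended ahead of it.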
+ tmp_entry = (
+ tmp_entry.rstrip('\n').rstrip('}').rstrip('com:>3s').rstrip(' {')
+ + ' {speed_keyword:8s} {speed:15.11g} {com:>3s}\n'
+ )
if pump.speed_timeseries.pattern is not None:
- tmp_entry = (tmp_entry.rstrip('\n').rstrip('}').rstrip('com:>3s').rstrip(' {') +
- ' {pattern_keyword:10s} {pattern:20s} {com:>3s}\n')
+ tmp_entry = (
+ tmp_entry.rstrip('\n').rstrip('}').rstrip('com:>3s').rstrip(' {')
+ + ' {pattern_keyword:10s} {pattern:20s} {com:>3s}\n'
+ )
E['pattern_keyword'] = 'PATTERN'
E['pattern'] = pump.speed_timeseries.pattern.name
f.write(tmp_entry.format(**E).encode('ascii'))
@@ -374,29 +471,45 @@ def _write_pumps(self, f, wn):
def _write_valves(self, f, wn):
f.write('[VALVES]\n'.encode('ascii'))
- f.write(_VALVE_LABEL.format(';ID', 'Node1', 'Node2', 'Diameter', 'Type', 'Setting', 'Minor Loss').encode('ascii'))
+ f.write(
+ _VALVE_LABEL.format(
+ ';ID', 'Node1', 'Node2', 'Diameter', 'Type', 'Setting', 'Minor Loss'
+ ).encode('ascii')
+ )
lnames = list(wn.valve_name_list)
# lnames.sort()
for valve_name in lnames:
valve = wn.links[valve_name]
- if valve._is_isolated == True: #Sina added this
+ if valve._is_isolated == True: # Sina added this # noqa: SLF001, E712
continue
- E = {'name': valve_name,
- 'node1': valve.start_node_name,
- 'node2': valve.end_node_name,
- 'diam': from_si(self.flow_units, valve.diameter, HydParam.PipeDiameter),
- 'vtype': valve.valve_type,
- 'set': valve._initial_setting,
- 'mloss': valve.minor_loss,
- 'com': ';'}
+ E = { # noqa: N806
+ 'name': valve_name,
+ 'node1': valve.start_node_name,
+ 'node2': valve.end_node_name,
+ 'diam': from_si(
+ self.flow_units, valve.diameter, HydParam.PipeDiameter
+ ),
+ 'vtype': valve.valve_type,
+ 'set': valve._initial_setting, # noqa: SLF001
+ 'mloss': valve.minor_loss,
+ 'com': ';',
+ }
valve_type = valve.valve_type
formatter = _VALVE_ENTRY
- if valve_type in ['PRV', 'PSV', 'PBV']:
- valve_set = from_si(self.flow_units, valve._initial_setting, HydParam.Pressure)
+ if valve_type in ['PRV', 'PSV', 'PBV']: # noqa: PLR6201
+ valve_set = from_si(
+ self.flow_units,
+ valve._initial_setting, # noqa: SLF001
+ HydParam.Pressure,
+ )
elif valve_type == 'FCV':
- valve_set = from_si(self.flow_units, valve._initial_setting, HydParam.Flow)
+ valve_set = from_si(
+ self.flow_units,
+ valve._initial_setting, # noqa: SLF001
+ HydParam.Flow,
+ )
elif valve_type == 'TCV':
- valve_set = valve._initial_setting
+ valve_set = valve._initial_setting # noqa: SLF001
elif valve_type == 'GPV':
valve_set = valve.headloss_curve_name
formatter = _GPV_ENTRY
@@ -413,65 +526,82 @@ def _write_emitters(self, f, wn):
# njunctions.sort()
for junction_name in njunctions:
junction = wn.nodes[junction_name]
- if junction._is_isolated == True: #Sina added this
+ # Sina added this
+ if junction._is_isolated == True: # noqa: SLF001, E712
continue
- if junction._emitter_coefficient:
- val = from_si(self.flow_units, junction._emitter_coefficient, HydParam.Flow)
+ if junction._emitter_coefficient: # noqa: SLF001
+ val = from_si(
+ self.flow_units,
+ junction._emitter_coefficient, # noqa: SLF001
+ HydParam.Flow,
+ )
f.write(entry.format(junction_name, str(val)).encode('ascii'))
f.write('\n'.encode('ascii'))
- ### System Operation
-
- def _write_status(self, f, wn):
+ # System Operation
+
+ def _write_status(self, f, wn): # noqa: PLR6301
f.write('[STATUS]\n'.encode('ascii'))
- f.write( '{:10s} {:10s}\n'.format(';ID', 'Setting').encode('ascii'))
+ f.write('{:10s} {:10s}\n'.format(';ID', 'Setting').encode('ascii'))
for link_name, link in wn.links():
- if link._is_isolated == True: #Sina added this
+ if link._is_isolated == True: # Sina added this # noqa: SLF001, E712
continue
if isinstance(link, Pipe):
continue
if isinstance(link, Pump):
setting = link.initial_setting
if type(setting) is float and setting != 1.0:
- f.write('{:10s} {:10.10g}\n'.format(link_name,
- setting).encode('ascii'))
- if link.initial_status in (LinkStatus.Closed,):
- f.write('{:10s} {:10s}\n'.format(link_name,
- LinkStatus(link.initial_status).name).encode('ascii'))
- if isinstance(link, wntrfr.network.Valve) and link.initial_status in (LinkStatus.Open, LinkStatus.Opened):
-# if link.initial_status in (LinkStatus.Closed,):
- f.write('{:10s} {:10s}\n'.format(link_name,
- LinkStatus(link.initial_status).name).encode('ascii'))
-# if link.initial_status is LinkStatus.Active:
-# valve_type = link.valve_type
-# if valve_type in ['PRV', 'PSV', 'PBV']:
-# setting = from_si(self.flow_units, link.initial_setting, HydParam.Pressure)
-# elif valve_type == 'FCV':
-# setting = from_si(self.flow_units, link.initial_setting, HydParam.Flow)
-# elif valve_type == 'TCV':
-# setting = link.initial_setting
-# else:
-# continue
-# continue
-# elif isinstance(link, wntrfr.network.Pump):
-# setting = link.initial_setting
-# else: continue
-# f.write('{:10s} {:10.10g}\n'.format(link_name,
-# setting).encode('ascii'))
-# f.write('\n'.encode('ascii'))
-
- def _write_controls(self, f, wn):
+ f.write(f'{link_name:10s} {setting:10.10g}\n'.encode('ascii'))
+ if link.initial_status == LinkStatus.Closed:
+ f.write(
+ f'{link_name:10s} {LinkStatus(link.initial_status).name:10s}\n'.encode(
+ 'ascii'
+ )
+ )
+ if isinstance(link, wntrfr.network.Valve) and link.initial_status in ( # noqa: PLR6201
+ LinkStatus.Open,
+ LinkStatus.Opened,
+ ):
+ # if link.initial_status in (LinkStatus.Closed,):
+ f.write(
+ f'{link_name:10s} {LinkStatus(link.initial_status).name:10s}\n'.encode(
+ 'ascii'
+ )
+ )
+
+ # if link.initial_status is LinkStatus.Active:
+ # valve_type = link.valve_type
+ # if valve_type in ['PRV', 'PSV', 'PBV']:
+ # setting = from_si(self.flow_units, link.initial_setting, HydParam.Pressure)
+ # elif valve_type == 'FCV':
+ # setting = from_si(self.flow_units, link.initial_setting, HydParam.Flow)
+ # elif valve_type == 'TCV':
+ # setting = link.initial_setting
+ # else:
+ # continue
+ # continue
+ # elif isinstance(link, wntrfr.network.Pump):
+ # setting = link.initial_setting
+ # else: continue
+ # f.write('{:10s} {:10.10g}\n'.format(link_name,
+ # setting).encode('ascii'))
+ # f.write('\n'.encode('ascii'))
+
+ def _write_controls(self, f, wn): # noqa: C901
def get_setting(control_action, control_name):
- value = control_action._value
- attribute = control_action._attribute.lower()
+ value = control_action._value # noqa: SLF001
+ attribute = control_action._attribute.lower() # noqa: SLF001
if attribute == 'status':
setting = LinkStatus(value).name
elif attribute == 'base_speed':
setting = str(value)
- elif attribute == 'setting' and isinstance(control_action._target_obj, Valve):
- valve = control_action._target_obj
+ elif attribute == 'setting' and isinstance(
+ control_action._target_obj, # noqa: SLF001
+ Valve,
+ ):
+ valve = control_action._target_obj # noqa: SLF001
valve_type = valve.valve_type
- if valve_type == 'PRV' or valve_type == 'PSV' or valve_type == 'PBV':
+ if valve_type == 'PRV' or valve_type == 'PSV' or valve_type == 'PBV': # noqa: PLR1714
setting = str(from_si(self.flow_units, value, HydParam.Pressure))
elif valve_type == 'FCV':
setting = str(from_si(self.flow_units, value, HydParam.Flow))
@@ -485,81 +615,111 @@ def get_setting(control_action, control_name):
setting = value
else:
setting = None
- logger.warning('Could not write control '+str(control_name)+' - skipping')
+ logger.warning(
+ 'Could not write control ' + str(control_name) + ' - skipping' # noqa: G003
+ )
return setting
f.write('[CONTROLS]\n'.encode('ascii'))
# Time controls and conditional controls only
for text, all_control in wn.controls():
- control_action = all_control._then_actions[0]
- if control_action._target_obj._is_isolated==True: #Sina added this
+ control_action = all_control._then_actions[0] # noqa: SLF001
+ # Sina added this
+ if control_action._target_obj._is_isolated == True: # noqa: SLF001, E712
continue
if all_control.epanet_control_type is not _ControlType.rule:
- if len(all_control._then_actions) != 1 or len(all_control._else_actions) != 0:
- logger.error('Too many actions on CONTROL "%s"'%text)
- raise RuntimeError('Too many actions on CONTROL "%s"'%text)
+ if (
+ len(all_control._then_actions) != 1 # noqa: SLF001
+ or len(all_control._else_actions) != 0 # noqa: SLF001
+ ):
+ logger.error('Too many actions on CONTROL "%s"' % text) # noqa: G002, UP031
+ raise RuntimeError('Too many actions on CONTROL "%s"' % text) # noqa: UP031
if not isinstance(control_action.target()[0], Link):
continue
- if isinstance(all_control._condition, (SimTimeCondition, TimeOfDayCondition)):
+ if isinstance(
+ all_control._condition, # noqa: SLF001
+ (SimTimeCondition, TimeOfDayCondition),
+ ):
entry = '{ltype} {link} {setting} AT {compare} {time:g}\n'
- vals = {'ltype': control_action._target_obj.link_type,
- 'link': control_action._target_obj.name,
- 'setting': get_setting(control_action, text),
- 'compare': 'TIME',
- 'time': all_control._condition._threshold / 3600.0}
+ vals = {
+ 'ltype': control_action._target_obj.link_type, # noqa: SLF001
+ 'link': control_action._target_obj.name, # noqa: SLF001
+ 'setting': get_setting(control_action, text),
+ 'compare': 'TIME',
+ 'time': all_control._condition._threshold / 3600.0, # noqa: SLF001
+ }
if vals['setting'] is None:
continue
- if isinstance(all_control._condition, TimeOfDayCondition):
+ if isinstance(all_control._condition, TimeOfDayCondition): # noqa: SLF001
vals['compare'] = 'CLOCKTIME'
f.write(entry.format(**vals).encode('ascii'))
- elif all_control._condition._source_obj._is_isolated == True: #Sina added this
+ elif (
+ all_control._condition._source_obj._is_isolated == True # noqa: SLF001, E712
+ ): # Sina added this
continue
- elif isinstance(all_control._condition, (ValueCondition)):
+ elif isinstance(all_control._condition, (ValueCondition)): # noqa: SLF001
entry = '{ltype} {link} {setting} IF {ntype} {node} {compare} {thresh}\n'
- vals = {'ltype': control_action._target_obj.link_type,
- 'link': control_action._target_obj.name,
- 'setting': get_setting(control_action, text),
- 'ntype': all_control._condition._source_obj.node_type,
- 'node': all_control._condition._source_obj.name,
- 'compare': 'above',
- 'thresh': 0.0}
+ vals = {
+ 'ltype': control_action._target_obj.link_type, # noqa: SLF001
+ 'link': control_action._target_obj.name, # noqa: SLF001
+ 'setting': get_setting(control_action, text),
+ 'ntype': all_control._condition._source_obj.node_type, # noqa: SLF001
+ 'node': all_control._condition._source_obj.name, # noqa: SLF001
+ 'compare': 'above',
+ 'thresh': 0.0,
+ }
if vals['setting'] is None:
continue
- if all_control._condition._relation in [np.less, np.less_equal, Comparison.le, Comparison.lt]:
+ if all_control._condition._relation in [ # noqa: PLR6201, SLF001
+ np.less,
+ np.less_equal,
+ Comparison.le,
+ Comparison.lt,
+ ]:
vals['compare'] = 'below'
- threshold = all_control._condition._threshold
- if isinstance(all_control._condition._source_obj, Tank):
- vals['thresh'] = from_si(self.flow_units, threshold, HydParam.HydraulicHead)
- elif isinstance(all_control._condition._source_obj, Junction):
- vals['thresh'] = from_si(self.flow_units, threshold, HydParam.Pressure)
- else:
- raise RuntimeError('Unknown control for EPANET INP files: %s' %type(all_control))
+ threshold = all_control._condition._threshold # noqa: SLF001
+ if isinstance(all_control._condition._source_obj, Tank): # noqa: SLF001
+ vals['thresh'] = from_si(
+ self.flow_units, threshold, HydParam.HydraulicHead
+ )
+ elif isinstance(all_control._condition._source_obj, Junction): # noqa: SLF001
+ vals['thresh'] = from_si(
+ self.flow_units, threshold, HydParam.Pressure
+ )
+ else:
+ raise RuntimeError( # noqa: TRY004
+ 'Unknown control for EPANET INP files: %s' # noqa: UP031
+ % type(all_control)
+ )
f.write(entry.format(**vals).encode('ascii'))
elif not isinstance(all_control, Control):
- raise RuntimeError('Unknown control for EPANET INP files: %s' % type(all_control))
+ raise RuntimeError(
+ 'Unknown control for EPANET INP files: %s' # noqa: UP031
+ % type(all_control)
+ )
f.write('\n'.encode('ascii'))
def _write_rules(self, f, wn):
f.write('[RULES]\n'.encode('ascii'))
- for text, all_control in wn.controls():
+ for text, all_control in wn.controls(): # noqa: B007
entry = '{}\n'
if all_control.epanet_control_type == _ControlType.rule:
- #Sina added thsi begin
+ # Sina added this begin
try:
- if all_control._then_actions[0]._target_obj._is_isolated==True:
+ if all_control._then_actions[0]._target_obj._is_isolated == True: # noqa: SLF001, E712
continue
- except:
+ except: # noqa: S110, E722
pass
-
+
try:
- if all_control.condition._source_obj._is_isolated==True:
+ if all_control.condition._source_obj._is_isolated == True: # noqa: SLF001, E712
continue
- except:
+ except: # noqa: S110, E722
pass
-
- #Sina added thsi end
- rule = _EpanetRule('blah', self.flow_units, self.mass_units)
+
+ # Sina added this end
+ rule = _EpanetRule('blah', self.flow_units, self.mass_units) # noqa: F821
rule.from_if_then_else(all_control)
f.write(entry.format(str(rule)).encode('ascii'))
f.write('\n'.encode('ascii'))
@@ -572,103 +732,176 @@ def _write_demands(self, f, wn):
nodes = list(wn.junction_name_list)
# nodes.sort()
for node in nodes:
- if wn.get_node(node)._is_isolated == True: #Sina added this
+ # Sina added this
+ if wn.get_node(node)._is_isolated == True: # noqa: SLF001, E712
continue
demands = wn.get_node(node).demand_timeseries_list
- #leak =
+ # leak =
if len(demands) > 1:
- for ct, demand in enumerate(demands):
+ for ct, demand in enumerate(demands): # noqa: B007, FURB148
cat = str(demand.category)
- #if cat == 'EN2 base':
+ # if cat == 'EN2 base':
# cat = ''
if cat.lower() == 'none':
cat = ''
else:
cat = ' ;' + demand.category
- E = {'node': node,
- 'base': from_si(self.flow_units, demand.base_value, HydParam.Demand),
- 'pat': '',
- 'cat': cat }
+ E = { # noqa: N806
+ 'node': node,
+ 'base': from_si(
+ self.flow_units, demand.base_value, HydParam.Demand
+ ),
+ 'pat': '',
+ 'cat': cat,
+ }
if demand.pattern_name in wn.pattern_name_list:
E['pat'] = demand.pattern_name
- f.write(entry.format(E['node'], str(E['base']), E['pat'], E['cat']).encode('ascii'))
+ f.write(
+ entry.format(
+ E['node'], str(E['base']), E['pat'], E['cat']
+ ).encode('ascii')
+ )
f.write('\n'.encode('ascii'))
- ### Water Quality
+ # Water Quality
def _write_quality(self, f, wn):
f.write('[QUALITY]\n'.encode('ascii'))
entry = '{:10s} {:10s}\n'
- label = '{:10s} {:10s}\n'
+ label = '{:10s} {:10s}\n' # noqa: F841
nnodes = list(wn.nodes.keys())
# nnodes.sort()
for node_name in nnodes:
node = wn.nodes[node_name]
- if node._is_isolated==True: #Sina added this
+ if node._is_isolated == True: # Sina added this # noqa: SLF001, E712
continue
if node.initial_quality:
if wn.options.quality.mode == 'CHEMICAL':
- quality = from_si(self.flow_units, node.initial_quality, QualParam.Concentration, mass_units=self.mass_units)
+ quality = from_si(
+ self.flow_units,
+ node.initial_quality,
+ QualParam.Concentration,
+ mass_units=self.mass_units,
+ )
elif wn.options.quality.mode == 'AGE':
- quality = from_si(self.flow_units, node.initial_quality, QualParam.WaterAge)
+ quality = from_si(
+ self.flow_units, node.initial_quality, QualParam.WaterAge
+ )
else:
quality = node.initial_quality
f.write(entry.format(node_name, str(quality)).encode('ascii'))
f.write('\n'.encode('ascii'))
def _write_reactions(self, f, wn):
- f.write( '[REACTIONS]\n'.encode('ascii'))
- f.write(';Type Pipe/Tank Coefficient\n'.encode('ascii'))
+ f.write('[REACTIONS]\n'.encode('ascii'))
+ f.write(
+ ';Type Pipe/Tank Coefficient\n'.encode('ascii')
+ )
entry_int = ' {:s} {:s} {:d}\n'
entry_float = ' {:s} {:s} {:<10.4f}\n'
for tank_name, tank in wn.nodes(Tank):
- if tank._is_isolated==True: #Sina added this
+ if tank._is_isolated == True: # Sina added this # noqa: SLF001, E712
continue
if tank.bulk_rxn_coeff is not None:
- f.write(entry_float.format('TANK',tank_name,
- from_si(self.flow_units,
- tank.bulk_rxn_coeff,
- QualParam.BulkReactionCoeff,
- mass_units=self.mass_units,
- reaction_order=wn.options.quality.bulk_rxn_order)).encode('ascii'))
+ f.write(
+ entry_float.format(
+ 'TANK',
+ tank_name,
+ from_si(
+ self.flow_units,
+ tank.bulk_rxn_coeff,
+ QualParam.BulkReactionCoeff,
+ mass_units=self.mass_units,
+ reaction_order=wn.options.quality.bulk_rxn_order,
+ ),
+ ).encode('ascii')
+ )
for pipe_name, pipe in wn.links(Pipe):
- if pipe._is_isolated==True: #Sina added this
+ if pipe._is_isolated == True: # Sina added this # noqa: SLF001, E712
continue
if pipe.bulk_rxn_coeff is not None:
- f.write(entry_float.format('BULK',pipe_name,
- from_si(self.flow_units,
- pipe.bulk_rxn_coeff,
- QualParam.BulkReactionCoeff,
- mass_units=self.mass_units,
- reaction_order=wn.options.quality.bulk_rxn_order)).encode('ascii'))
+ f.write(
+ entry_float.format(
+ 'BULK',
+ pipe_name,
+ from_si(
+ self.flow_units,
+ pipe.bulk_rxn_coeff,
+ QualParam.BulkReactionCoeff,
+ mass_units=self.mass_units,
+ reaction_order=wn.options.quality.bulk_rxn_order,
+ ),
+ ).encode('ascii')
+ )
if pipe.wall_rxn_coeff is not None:
- f.write(entry_float.format('WALL',pipe_name,
- from_si(self.flow_units,
- pipe.wall_rxn_coeff,
- QualParam.WallReactionCoeff,
- mass_units=self.mass_units,
- reaction_order=wn.options.quality.wall_rxn_order)).encode('ascii'))
+ f.write(
+ entry_float.format(
+ 'WALL',
+ pipe_name,
+ from_si(
+ self.flow_units,
+ pipe.wall_rxn_coeff,
+ QualParam.WallReactionCoeff,
+ mass_units=self.mass_units,
+ reaction_order=wn.options.quality.wall_rxn_order,
+ ),
+ ).encode('ascii')
+ )
f.write('\n'.encode('ascii'))
-# f.write('[REACTIONS]\n'.encode('ascii')) # EPANET GUI puts this line in here
- f.write(entry_int.format('ORDER', 'BULK', int(wn.options.quality.bulk_rxn_order)).encode('ascii'))
- f.write(entry_int.format('ORDER', 'TANK', int(wn.options.quality.tank_rxn_order)).encode('ascii'))
- f.write(entry_int.format('ORDER', 'WALL', int(wn.options.quality.wall_rxn_order)).encode('ascii'))
- f.write(entry_float.format('GLOBAL','BULK',
- from_si(self.flow_units,
- wn.options.quality.bulk_rxn_coeff,
- QualParam.BulkReactionCoeff,
- mass_units=self.mass_units,
- reaction_order=wn.options.quality.bulk_rxn_order)).encode('ascii'))
- f.write(entry_float.format('GLOBAL','WALL',
- from_si(self.flow_units,
- wn.options.quality.wall_rxn_coeff,
- QualParam.WallReactionCoeff,
- mass_units=self.mass_units,
- reaction_order=wn.options.quality.wall_rxn_order)).encode('ascii'))
+ # f.write('[REACTIONS]\n'.encode('ascii')) # EPANET GUI puts this line in here
+ f.write(
+ entry_int.format(
+ 'ORDER', 'BULK', int(wn.options.quality.bulk_rxn_order)
+ ).encode('ascii')
+ )
+ f.write(
+ entry_int.format(
+ 'ORDER', 'TANK', int(wn.options.quality.tank_rxn_order)
+ ).encode('ascii')
+ )
+ f.write(
+ entry_int.format(
+ 'ORDER', 'WALL', int(wn.options.quality.wall_rxn_order)
+ ).encode('ascii')
+ )
+ f.write(
+ entry_float.format(
+ 'GLOBAL',
+ 'BULK',
+ from_si(
+ self.flow_units,
+ wn.options.quality.bulk_rxn_coeff,
+ QualParam.BulkReactionCoeff,
+ mass_units=self.mass_units,
+ reaction_order=wn.options.quality.bulk_rxn_order,
+ ),
+ ).encode('ascii')
+ )
+ f.write(
+ entry_float.format(
+ 'GLOBAL',
+ 'WALL',
+ from_si(
+ self.flow_units,
+ wn.options.quality.wall_rxn_coeff,
+ QualParam.WallReactionCoeff,
+ mass_units=self.mass_units,
+ reaction_order=wn.options.quality.wall_rxn_order,
+ ),
+ ).encode('ascii')
+ )
if wn.options.quality.limiting_potential is not None:
- f.write(entry_float.format('LIMITING','POTENTIAL',wn.options.quality.limiting_potential).encode('ascii'))
+ f.write(
+ entry_float.format(
+ 'LIMITING', 'POTENTIAL', wn.options.quality.limiting_potential
+ ).encode('ascii')
+ )
if wn.options.quality.roughness_correl is not None:
- f.write(entry_float.format('ROUGHNESS','CORRELATION',wn.options.quality.roughness_correl).encode('ascii'))
+ f.write(
+ entry_float.format(
+ 'ROUGHNESS', 'CORRELATION', wn.options.quality.roughness_correl
+ ).encode('ascii')
+ )
f.write('\n'.encode('ascii'))
def _write_sources(self, f, wn):
@@ -676,54 +909,86 @@ def _write_sources(self, f, wn):
entry = '{:10s} {:10s} {:10s} {:10s}\n'
label = '{:10s} {:10s} {:10s} {:10s}\n'
f.write(label.format(';Node', 'Type', 'Quality', 'Pattern').encode('ascii'))
- nsources = list(wn._sources.keys())
+ nsources = list(wn._sources.keys()) # noqa: SLF001
# nsources.sort()
for source_name in nsources:
- source = wn._sources[source_name]
- if source._is_isolated==True: #Sina added this
+ source = wn._sources[source_name] # noqa: SLF001
+ if source._is_isolated == True: # Sina added this # noqa: SLF001, E712
continue
if source.source_type.upper() == 'MASS':
- strength = from_si(self.flow_units, source.strength_timeseries.base_value, QualParam.SourceMassInject, self.mass_units)
- else: # CONC, SETPOINT, FLOWPACED
- strength = from_si(self.flow_units, source.strength_timeseries.base_value, QualParam.Concentration, self.mass_units)
-
- E = {'node': source.node_name,
- 'type': source.source_type,
- 'quality': str(strength),
- 'pat': ''}
+ strength = from_si(
+ self.flow_units,
+ source.strength_timeseries.base_value,
+ QualParam.SourceMassInject,
+ self.mass_units,
+ )
+ else: # CONC, SETPOINT, FLOWPACED
+ strength = from_si(
+ self.flow_units,
+ source.strength_timeseries.base_value,
+ QualParam.Concentration,
+ self.mass_units,
+ )
+
+ E = { # noqa: N806
+ 'node': source.node_name,
+ 'type': source.source_type,
+ 'quality': str(strength),
+ 'pat': '',
+ }
if source.strength_timeseries.pattern_name is not None:
E['pat'] = source.strength_timeseries.pattern_name
- f.write(entry.format(E['node'], E['type'], str(E['quality']), E['pat']).encode('ascii'))
+ f.write(
+ entry.format(
+ E['node'], E['type'], str(E['quality']), E['pat']
+ ).encode('ascii')
+ )
f.write('\n'.encode('ascii'))
- def _write_mixing(self, f, wn):
+ def _write_mixing(self, f, wn): # noqa: PLR6301
f.write('[MIXING]\n'.encode('ascii'))
- f.write('{:20s} {:5s} {}\n'.format(';Tank ID', 'Model', 'Fraction').encode('ascii'))
+ f.write(
+ '{:20s} {:5s} {}\n'.format(';Tank ID', 'Model', 'Fraction').encode(
+ 'ascii'
+ )
+ )
lnames = list(wn.tank_name_list)
# lnames.sort()
for tank_name in lnames:
tank = wn.nodes[tank_name]
- if tank._is_isolated == True: #Sina added this
+ if tank._is_isolated == True: # Sina added this # noqa: SLF001, E712
continue
- if tank._mix_model is not None:
- if tank._mix_model in [MixType.Mixed, MixType.Mix1, 0]:
- f.write(' {:19s} MIXED\n'.format(tank_name).encode('ascii'))
- elif tank._mix_model in [MixType.TwoComp, MixType.Mix2, '2comp', '2COMP', 1]:
- f.write(' {:19s} 2COMP {}\n'.format(tank_name, tank._mix_frac).encode('ascii'))
- elif tank._mix_model in [MixType.FIFO, 2]:
- f.write(' {:19s} FIFO\n'.format(tank_name).encode('ascii'))
- elif tank._mix_model in [MixType.LIFO, 3]:
- f.write(' {:19s} LIFO\n'.format(tank_name).encode('ascii'))
- elif isinstance(tank._mix_model, str) and tank._mix_frac is not None:
- f.write(' {:19s} {} {}\n'.format(tank_name, tank._mix_model, tank._mix_frac).encode('ascii'))
- elif isinstance(tank._mix_model, str):
- f.write(' {:19s} {}\n'.format(tank_name, tank._mix_model).encode('ascii'))
+ if tank._mix_model is not None: # noqa: SLF001
+ if tank._mix_model in [MixType.Mixed, MixType.Mix1, 0]: # noqa: PLR6201, SLF001
+ f.write(f' {tank_name:19s} MIXED\n'.encode('ascii'))
+ elif tank._mix_model in [ # noqa: PLR6201, SLF001
+ MixType.TwoComp,
+ MixType.Mix2,
+ '2comp',
+ '2COMP',
+ 1,
+ ]:
+ f.write(
+ f' {tank_name:19s} 2COMP {tank._mix_frac}\n'.encode('ascii') # noqa: SLF001
+ )
+ elif tank._mix_model in [MixType.FIFO, 2]: # noqa: PLR6201, SLF001
+ f.write(f' {tank_name:19s} FIFO\n'.encode('ascii'))
+ elif tank._mix_model in [MixType.LIFO, 3]: # noqa: PLR6201, SLF001
+ f.write(f' {tank_name:19s} LIFO\n'.encode('ascii'))
+ elif isinstance(tank._mix_model, str) and tank._mix_frac is not None: # noqa: SLF001
+ f.write(
+ f' {tank_name:19s} {tank._mix_model} {tank._mix_frac}\n'.encode( # noqa: SLF001
+ 'ascii'
+ )
+ )
+ elif isinstance(tank._mix_model, str): # noqa: SLF001
+ f.write(f' {tank_name:19s} {tank._mix_model}\n'.encode('ascii')) # noqa: SLF001
else:
- logger.warning('Unknown mixing model: %s', tank._mix_model)
+ logger.warning('Unknown mixing model: %s', tank._mix_model) # noqa: SLF001
f.write('\n'.encode('ascii'))
- ### Options and Reporting
+ # Options and Reporting
def _write_options(self, f, wn):
f.write('[OPTIONS]\n'.encode('ascii'))
@@ -731,65 +996,165 @@ def _write_options(self, f, wn):
entry_float = '{:20s} {:.11g}\n'
f.write(entry_string.format('UNITS', self.flow_units.name).encode('ascii'))
- f.write(entry_string.format('HEADLOSS', wn.options.hydraulic.headloss).encode('ascii'))
-
- f.write(entry_float.format('SPECIFIC GRAVITY', wn.options.hydraulic.specific_gravity).encode('ascii'))
-
- f.write(entry_float.format('VISCOSITY', wn.options.hydraulic.viscosity).encode('ascii'))
-
- f.write(entry_float.format('TRIALS', wn.options.solver.trials).encode('ascii'))
-
- f.write(entry_float.format('ACCURACY', wn.options.solver.accuracy).encode('ascii'))
-
- f.write(entry_float.format('CHECKFREQ', wn.options.solver.checkfreq).encode('ascii'))
-
- f.write(entry_float.format('MAXCHECK', wn.options.solver.maxcheck).encode('ascii'))
+ f.write(
+ entry_string.format('HEADLOSS', wn.options.hydraulic.headloss).encode(
+ 'ascii'
+ )
+ )
+
+ f.write(
+ entry_float.format(
+ 'SPECIFIC GRAVITY', wn.options.hydraulic.specific_gravity
+ ).encode('ascii')
+ )
+
+ f.write(
+ entry_float.format('VISCOSITY', wn.options.hydraulic.viscosity).encode(
+ 'ascii'
+ )
+ )
+
+ f.write(
+ entry_float.format('TRIALS', wn.options.solver.trials).encode('ascii')
+ )
+
+ f.write(
+ entry_float.format('ACCURACY', wn.options.solver.accuracy).encode(
+ 'ascii'
+ )
+ )
+
+ f.write(
+ entry_float.format('CHECKFREQ', wn.options.solver.checkfreq).encode(
+ 'ascii'
+ )
+ )
+
+ f.write(
+ entry_float.format('MAXCHECK', wn.options.solver.maxcheck).encode(
+ 'ascii'
+ )
+ )
if wn.options.solver.damplimit != 0:
- f.write(entry_float.format('DAMPLIMIT', wn.options.solver.damplimit).encode('ascii'))
+ f.write(
+ entry_float.format('DAMPLIMIT', wn.options.solver.damplimit).encode(
+ 'ascii'
+ )
+ )
if wn.options.solver.unbalanced_value is None:
- f.write(entry_string.format('UNBALANCED', wn.options.solver.unbalanced).encode('ascii'))
+ f.write(
+ entry_string.format(
+ 'UNBALANCED', wn.options.solver.unbalanced
+ ).encode('ascii')
+ )
else:
- f.write('{:20s} {:s} {:d}\n'.format('UNBALANCED', wn.options.solver.unbalanced, wn.options.solver.unbalanced_value).encode('ascii'))
-
- #Sina Added here
+ f.write(
+ '{:20s} {:s} {:d}\n'.format(
+ 'UNBALANCED',
+ wn.options.solver.unbalanced,
+ wn.options.solver.unbalanced_value,
+ ).encode('ascii')
+ )
+
+ # Sina Added here
if wn.options.hydraulic.pattern is not None:
- f.write(entry_string.format('PATTERN', wn.options.hydraulic.pattern).encode('ascii'))
+ f.write(
+ entry_string.format('PATTERN', wn.options.hydraulic.pattern).encode(
+ 'ascii'
+ )
+ )
else:
f.write(entry_string.format('PATTERN', '1').encode('ascii'))
- f.write(entry_float.format('DEMAND MULTIPLIER', wn.options.hydraulic.demand_multiplier).encode('ascii'))
-
- f.write(entry_string.format('DEMAND MODEL', wn.options.hydraulic.demand_model).encode('ascii'))
-
- f.write(entry_float.format('MINIMUM PRESSURE', wn.options.hydraulic.minimum_pressure).encode('ascii'))
-
- f.write(entry_float.format('REQUIRED PRESSURE', wn.options.hydraulic.required_pressure).encode('ascii'))
-
- f.write(entry_float.format('PRESSURE EXPONENT', wn.options.hydraulic.pressure_exponent).encode('ascii'))
-
- f.write(entry_float.format('EMITTER EXPONENT', wn.options.hydraulic.emitter_exponent).encode('ascii'))
-
- if wn.options.quality.mode.upper() in ['NONE', 'AGE']:
- f.write(entry_string.format('QUALITY', wn.options.quality.mode).encode('ascii'))
- elif wn.options.quality.mode.upper() in ['TRACE']:
- f.write('{:20s} {} {}\n'.format('QUALITY', wn.options.quality.mode, wn.options.quality.trace_node).encode('ascii'))
+ f.write(
+ entry_float.format(
+ 'DEMAND MULTIPLIER', wn.options.hydraulic.demand_multiplier
+ ).encode('ascii')
+ )
+
+ f.write(
+ entry_string.format(
+ 'DEMAND MODEL', wn.options.hydraulic.demand_model
+ ).encode('ascii')
+ )
+
+ f.write(
+ entry_float.format(
+ 'MINIMUM PRESSURE', wn.options.hydraulic.minimum_pressure
+ ).encode('ascii')
+ )
+
+ f.write(
+ entry_float.format(
+ 'REQUIRED PRESSURE', wn.options.hydraulic.required_pressure
+ ).encode('ascii')
+ )
+
+ f.write(
+ entry_float.format(
+ 'PRESSURE EXPONENT', wn.options.hydraulic.pressure_exponent
+ ).encode('ascii')
+ )
+
+ f.write(
+ entry_float.format(
+ 'EMITTER EXPONENT', wn.options.hydraulic.emitter_exponent
+ ).encode('ascii')
+ )
+
+ if wn.options.quality.mode.upper() in ['NONE', 'AGE']: # noqa: PLR6201
+ f.write(
+ entry_string.format('QUALITY', wn.options.quality.mode).encode(
+ 'ascii'
+ )
+ )
+ elif wn.options.quality.mode.upper() == 'TRACE':
+ f.write(
+ '{:20s} {} {}\n'.format(
+ 'QUALITY', wn.options.quality.mode, wn.options.quality.trace_node
+ ).encode('ascii')
+ )
else:
- f.write('{:20s} {} {}\n'.format('QUALITY', wn.options.quality.chemical_name, wn.options.quality.wq_units).encode('ascii'))
-
- f.write(entry_float.format('DIFFUSIVITY', wn.options.quality.diffusivity).encode('ascii'))
-
- f.write(entry_float.format('TOLERANCE', wn.options.solver.tolerance).encode('ascii'))
+ f.write(
+ '{:20s} {} {}\n'.format(
+ 'QUALITY',
+ wn.options.quality.chemical_name,
+ wn.options.quality.wq_units,
+ ).encode('ascii')
+ )
+
+ f.write(
+ entry_float.format('DIFFUSIVITY', wn.options.quality.diffusivity).encode(
+ 'ascii'
+ )
+ )
+
+ f.write(
+ entry_float.format('TOLERANCE', wn.options.solver.tolerance).encode(
+ 'ascii'
+ )
+ )
if wn.options.hydraulic.hydraulics is not None:
- f.write('{:20s} {:s} {:<30s}\n'.format('HYDRAULICS', wn.options.hydraulic.hydraulics, wn.options.hydraulic.hydraulics_filename).encode('ascii'))
+ f.write(
+ '{:20s} {:s} {:<30s}\n'.format(
+ 'HYDRAULICS',
+ wn.options.hydraulic.hydraulics,
+ wn.options.hydraulic.hydraulics_filename,
+ ).encode('ascii')
+ )
if wn.options.graphics.map_filename is not None:
- f.write(entry_string.format('MAP', wn.options.graphics.map_filename).encode('ascii'))
+ f.write(
+ entry_string.format('MAP', wn.options.graphics.map_filename).encode(
+ 'ascii'
+ )
+ )
f.write('\n'.encode('ascii'))
- def _write_times(self, f, wn):
+ def _write_times(self, f, wn): # noqa: PLR6301
f.write('[TIMES]\n'.encode('ascii'))
entry = '{:20s} {:10s}\n'
time_entry = '{:20s} {:02d}:{:02d}:{:02d}\n'
@@ -798,7 +1163,9 @@ def _write_times(self, f, wn):
f.write(time_entry.format('DURATION', hrs, mm, sec).encode('ascii'))
hrs, mm, sec = _sec_to_string(wn.options.time.hydraulic_timestep)
- f.write(time_entry.format('HYDRAULIC TIMESTEP', hrs, mm, sec).encode('ascii'))
+ f.write(
+ time_entry.format('HYDRAULIC TIMESTEP', hrs, mm, sec).encode('ascii')
+ )
hrs, mm, sec = _sec_to_string(wn.options.time.quality_timestep)
f.write(time_entry.format('QUALITY TIMESTEP', hrs, mm, sec).encode('ascii'))
@@ -816,38 +1183,44 @@ def _write_times(self, f, wn):
f.write(time_entry.format('REPORT START', hrs, mm, sec).encode('ascii'))
hrs, mm, sec = _sec_to_string(wn.options.time.start_clocktime)
-
- #Sina
- day = int(hrs/24)
- hrs -=day*24
-
- if hrs < 12:
+
+ # Sina
+ day = int(hrs / 24)
+ hrs -= day * 24
+
+ if hrs < 12: # noqa: PLR2004
time_format = ' AM'
else:
hrs -= 12
time_format = ' PM'
- f.write('{:20s} {:02d}:{:02d}:{:02d}{:s}\n'.format('START CLOCKTIME', hrs, mm, sec, time_format).encode('ascii'))
+ f.write(
+ '{:20s} {:02d}:{:02d}:{:02d}{:s}\n'.format(
+ 'START CLOCKTIME', hrs, mm, sec, time_format
+ ).encode('ascii')
+ )
hrs, mm, sec = _sec_to_string(wn.options.time.rule_timestep)
- ### TODO: RULE TIMESTEP is not written?!
+ # TODO: RULE TIMESTEP is not written?! # noqa: TD002
# f.write(time_entry.format('RULE TIMESTEP', hrs, mm, int(sec)).encode('ascii'))
- f.write(entry.format('STATISTIC', wn.options.results.statistic).encode('ascii'))
+ f.write(
+ entry.format('STATISTIC', wn.options.results.statistic).encode('ascii')
+ )
f.write('\n'.encode('ascii'))
- def _write_coordinates(self, f, wn):
+ def _write_coordinates(self, f, wn): # noqa: PLR6301
f.write('[COORDINATES]\n'.encode('ascii'))
entry = '{:10s} {:20.9f} {:20.9f}\n'
label = '{:10s} {:10s} {:10s}\n'
f.write(label.format(';Node', 'X-Coord', 'Y-Coord').encode('ascii'))
for name, node in wn.nodes():
- if node._is_isolated == True: #Sina added this
+ if node._is_isolated == True: # Sina added this # noqa: SLF001, E712
continue
val = node.coordinates
f.write(entry.format(name, val[0], val[1]).encode('ascii'))
f.write('\n'.encode('ascii'))
- def _write_vertices(self, f, wn):
+ def _write_vertices(self, f, wn): # noqa: PLR6301
f.write('[VERTICES]\n'.encode('ascii'))
entry = '{:10s} {:20.9f} {:20.9f}\n'
label = '{:10s} {:10s} {:10s}\n'
@@ -856,13 +1229,13 @@ def _write_vertices(self, f, wn):
# lnames.sort()
for pipe_name in lnames:
pipe = wn.links[pipe_name]
- if pipe._is_isolated == True: #Sina added this
+ if pipe._is_isolated == True: # Sina added this # noqa: SLF001, E712
continue
- for vert in pipe._vertices:
+ for vert in pipe._vertices: # noqa: SLF001
f.write(entry.format(pipe_name, vert[0], vert[1]).encode('ascii'))
f.write('\n'.encode('ascii'))
- def _write_tags(self, f, wn):
+ def _write_tags(self, f, wn): # noqa: PLR6301
f.write('[TAGS]\n'.encode('ascii'))
entry = '{:10s} {:10s} {:10s}\n'
label = '{:10s} {:10s} {:10s}\n'
@@ -871,7 +1244,7 @@ def _write_tags(self, f, wn):
# nnodes.sort()
for node_name in nnodes:
node = wn.nodes[node_name]
- if node._is_isolated == True: #Sina added this
+ if node._is_isolated == True: # Sina added this # noqa: SLF001, E712
continue
if node.tag:
f.write(entry.format('NODE', node_name, node.tag).encode('ascii'))
@@ -879,25 +1252,25 @@ def _write_tags(self, f, wn):
nlinks.sort()
for link_name in nlinks:
link = wn.links[link_name]
- if link._is_isolated == True: #Sina added this
+ if link._is_isolated == True: # Sina added this # noqa: SLF001, E712
continue
if link.tag:
f.write(entry.format('LINK', link_name, link.tag).encode('ascii'))
f.write('\n'.encode('ascii'))
- ### End of File
+ # End of File
+
class BinFile(wntrfr.epanet.io.BinFile):
- """
- EPANET binary output file reader class.
-
+ """EPANET binary output file reader class.
+
This class provides read functionality for EPANET binary output files.
-
+
Parameters
----------
results_type : list of :class:`~wntrfr.epanet.util.ResultType`, default=None
This parameter is *only* active when using a subclass of the BinFile that implements
- a custom reader or writer.
+ a custom reader or writer.
If ``None``, then all results will be saved (node quality, demand, link flow, etc.).
Otherwise, a list of result types can be passed to limit the memory used.
network : bool, default=False
@@ -909,21 +1282,28 @@ class BinFile(wntrfr.epanet.io.BinFile):
Save the statistics lines (different from the stats flag in the inp file) that are
automatically calculated regarding hydraulic conditions.
convert_status : bool, default=True
- Convert the EPANET link status (8 values) to simpler WNTR status (3 values). By
+ Convert the EPANET link status (8 values) to simpler WNTR status (3 values). By
        default, this is done, and the encoded-cause status values are converted to simple
        state values instead.
Returns
- ----------
+ -------
:class:`~wntrfr.sim.results.SimulationResults`
A WNTR results object will be created and added to the instance after read.
"""
- def __init__(self, result_types=None, network=False, energy=False, statistics=False,
- convert_status=True):
+
+ def __init__(
+ self,
+ result_types=None, # noqa: ARG002
+ network=False, # noqa: FBT002, ARG002
+ energy=False, # noqa: FBT002, ARG002
+ statistics=False, # noqa: FBT002, ARG002
+ convert_status=True, # noqa: FBT002, ARG002
+ ):
super().__init__()
- def read(self, filename, custom_handlers=False, start_time = None):
+ def read(self, filename, custom_handlers=False, start_time=None): # noqa: C901, FBT002, PLR0914, PLR0915
"""Read a binary file and create a results object.
Parameters
@@ -931,12 +1311,13 @@ def read(self, filename, custom_handlers=False, start_time = None):
filename : str
An EPANET BIN output file
custom_handlers : bool, optional
- If true, then the the custom, by-line handlers will be used. (:func:`~save_ep_line`,
+            If true, then the custom, by-line handlers will be used. (:func:`~save_ep_line`,
:func:`~setup_ep_results`, :func:`~finalize_save`, etc.) Otherwise read will use
a faster, all-at-once reader that reads all results.
start_time : int
If the simulation is interval based, then start_time can identify the time passed after
            the last simulation. Start_time will be added to all the timings in the result.
+
Returns
-------
object
@@ -947,17 +1328,17 @@ def read(self, filename, custom_handlers=False, start_time = None):
to change how it saves the results. Specifically, overload :func:`~setup_ep_results`,
:func:`~save_ep_line` and :func:`~finalize_save` to change how extended period
simulation results in a different format (such as directly to a file or database).
-
+
"""
self.results = wntrfr.sim.SimulationResults()
logger.debug(start_time)
- if start_time == None:
+ if start_time == None: # noqa: E711
start_time = 0
- logger.debug('Read binary EPANET data from %s',filename)
- dt_str = '|S{}'.format(self.idlen)
- with open(filename, 'rb') as fin:
+ logger.debug('Read binary EPANET data from %s', filename)
+ dt_str = f'|S{self.idlen}'
+ with open(filename, 'rb') as fin: # noqa: PTH123
ftype = self.ftype
- idlen = self.idlen
+ idlen = self.idlen # noqa: F841
logger.debug('... read prolog information ...')
prolog = np.fromfile(fin, dtype=np.int32, count=15)
magic1 = prolog[0]
@@ -975,26 +1356,42 @@ def read(self, filename, custom_handlers=False, start_time = None):
reportstart = prolog[12]
reportstep = prolog[13]
duration = prolog[14]
- logger.debug('EPANET/Toolkit version %d',version)
- logger.debug('Nodes: %d; Tanks/Resrv: %d Links: %d; Pumps: %d; Valves: %d',
- nnodes, ntanks, nlinks, npumps, nvalve)
- logger.debug('WQ opt: %s; Trace Node: %s; Flow Units %s; Pressure Units %s',
- wqopt, srctrace, flowunits, presunits)
- logger.debug('Statistics: %s; Report Start %d, step %d; Duration=%d sec',
- statsflag, reportstart, reportstep, duration)
+ logger.debug('EPANET/Toolkit version %d', version)
+ logger.debug(
+ 'Nodes: %d; Tanks/Resrv: %d Links: %d; Pumps: %d; Valves: %d',
+ nnodes,
+ ntanks,
+ nlinks,
+ npumps,
+ nvalve,
+ )
+ logger.debug(
+ 'WQ opt: %s; Trace Node: %s; Flow Units %s; Pressure Units %s',
+ wqopt,
+ srctrace,
+ flowunits,
+ presunits,
+ )
+ logger.debug(
+ 'Statistics: %s; Report Start %d, step %d; Duration=%d sec',
+ statsflag,
+ reportstart,
+ reportstep,
+ duration,
+ )
# Ignore the title lines
np.fromfile(fin, dtype=np.uint8, count=240)
inpfile = np.fromfile(fin, dtype=np.uint8, count=260)
rptfile = np.fromfile(fin, dtype=np.uint8, count=260)
chemical = str(np.fromfile(fin, dtype=dt_str, count=1)[0])
-# wqunits = ''.join([chr(f) for f in np.fromfile(fin, dtype=np.uint8, count=idlen) if f!=0 ])
+ # wqunits = ''.join([chr(f) for f in np.fromfile(fin, dtype=np.uint8, count=idlen) if f!=0 ])
wqunits = str(np.fromfile(fin, dtype=dt_str, count=1)[0])
- mass = wqunits.split('/',1)[0]
- if mass in ['mg', 'ug', u'mg', u'ug']:
+ mass = wqunits.split('/', 1)[0]
+                if mass in ['mg', 'ug']:  # noqa: PLR6201
massunits = MassUnits[mass]
else:
- massunits = MassUnits.mg
+ massunits = MassUnits.mg
self.flow_units = flowunits
self.pres_units = presunits
self.quality_type = wqopt
@@ -1013,18 +1410,26 @@ def read(self, filename, custom_handlers=False, start_time = None):
self.rpt_file = rptfile
nodenames = []
linknames = []
- nodenames = np.array(np.fromfile(fin, dtype=dt_str, count=nnodes), dtype=str).tolist()
- linknames = np.array(np.fromfile(fin, dtype=dt_str, count=nlinks), dtype=str).tolist()
+ nodenames = np.array(
+ np.fromfile(fin, dtype=dt_str, count=nnodes), dtype=str
+ ).tolist()
+ linknames = np.array(
+ np.fromfile(fin, dtype=dt_str, count=nlinks), dtype=str
+ ).tolist()
self.node_names = nodenames
self.link_names = linknames
- linkstart = np.array(np.fromfile(fin, dtype=np.int32, count=nlinks), dtype=int)
- linkend = np.array(np.fromfile(fin, dtype=np.int32, count=nlinks), dtype=int)
+ linkstart = np.array( # noqa: F841
+ np.fromfile(fin, dtype=np.int32, count=nlinks), dtype=int
+ )
+ linkend = np.array( # noqa: F841
+ np.fromfile(fin, dtype=np.int32, count=nlinks), dtype=int
+ )
linktype = np.fromfile(fin, dtype=np.int32, count=nlinks)
- tankidxs = np.fromfile(fin, dtype=np.int32, count=ntanks)
- tankarea = np.fromfile(fin, dtype=np.dtype(ftype), count=ntanks)
- elevation = np.fromfile(fin, dtype=np.dtype(ftype), count=nnodes)
- linklen = np.fromfile(fin, dtype=np.dtype(ftype), count=nlinks)
- diameter = np.fromfile(fin, dtype=np.dtype(ftype), count=nlinks)
+ tankidxs = np.fromfile(fin, dtype=np.int32, count=ntanks) # noqa: F841
+ tankarea = np.fromfile(fin, dtype=np.dtype(ftype), count=ntanks) # noqa: F841
+ elevation = np.fromfile(fin, dtype=np.dtype(ftype), count=nnodes) # noqa: F841
+ linklen = np.fromfile(fin, dtype=np.dtype(ftype), count=nlinks) # noqa: F841
+ diameter = np.fromfile(fin, dtype=np.dtype(ftype), count=nlinks) # noqa: F841
"""
self.save_network_desc_line('link_start', linkstart)
self.save_network_desc_line('link_end', linkend)
@@ -1036,18 +1441,25 @@ def read(self, filename, custom_handlers=False, start_time = None):
self.save_network_desc_line('link_diameter', diameter)
"""
logger.debug('... read energy data ...')
- for i in range(npumps):
- pidx = int(np.fromfile(fin,dtype=np.int32, count=1))
+ for i in range(npumps): # noqa: B007
+ pidx = int(np.fromfile(fin, dtype=np.int32, count=1))
energy = np.fromfile(fin, dtype=np.dtype(ftype), count=6)
- self.save_energy_line(pidx, linknames[pidx-1], energy)
+ self.save_energy_line(pidx, linknames[pidx - 1], energy)
peakenergy = np.fromfile(fin, dtype=np.dtype(ftype), count=1)
self.peak_energy = peakenergy
logger.debug('... read EP simulation data ...')
- reporttimes = np.arange(reportstart, duration+reportstep, reportstep) + start_time
+ reporttimes = (
+ np.arange(reportstart, duration + reportstep, reportstep)
+ + start_time
+ )
nrptsteps = len(reporttimes)
- statsN = nrptsteps
- if statsflag in [StatisticsType.Maximum, StatisticsType.Minimum, StatisticsType.Range]:
+ statsN = nrptsteps # noqa: N806, F841
+ if statsflag in [ # noqa: PLR6201
+ StatisticsType.Maximum,
+ StatisticsType.Minimum,
+ StatisticsType.Range,
+ ]:
nrptsteps = 1
reporttimes = [reportstart + reportstep]
self.num_periods = nrptsteps
@@ -1097,23 +1509,43 @@ def read(self, filename, custom_handlers=False, start_time = None):
"""
if custom_handlers is True:
logger.debug('... set up results object ...')
- #self.setup_ep_results(reporttimes, nodenames, linknames)
- #print(nodenames[5712]+' '+nodenames[5717]+' '+nodenames[5718]+' ')
+ # self.setup_ep_results(reporttimes, nodenames, linknames)
+ # print(nodenames[5712]+' '+nodenames[5717]+' '+nodenames[5718]+' ')
for ts in range(nrptsteps):
try:
- demand = np.fromfile(fin, dtype=np.dtype(ftype), count=nnodes)
- #print(repr(demand[5712])+' '+repr(demand[5717])+' '+repr(demand[5718]))
+ demand = np.fromfile(
+ fin, dtype=np.dtype(ftype), count=nnodes
+ )
+ # print(repr(demand[5712])+' '+repr(demand[5717])+' '+repr(demand[5718]))
head = np.fromfile(fin, dtype=np.dtype(ftype), count=nnodes)
- pressure = np.fromfile(fin, dtype=np.dtype(ftype), count=nnodes)
- quality = np.fromfile(fin, dtype=np.dtype(ftype), count=nnodes)
+ pressure = np.fromfile(
+ fin, dtype=np.dtype(ftype), count=nnodes
+ )
+ quality = np.fromfile(
+ fin, dtype=np.dtype(ftype), count=nnodes
+ )
flow = np.fromfile(fin, dtype=np.dtype(ftype), count=nlinks)
- velocity = np.fromfile(fin, dtype=np.dtype(ftype), count=nlinks)
- headloss = np.fromfile(fin, dtype=np.dtype(ftype), count=nlinks)
- linkquality = np.fromfile(fin, dtype=np.dtype(ftype), count=nlinks)
- linkstatus = np.fromfile(fin, dtype=np.dtype(ftype), count=nlinks)
- linksetting = np.fromfile(fin, dtype=np.dtype(ftype), count=nlinks)
- reactionrate = np.fromfile(fin, dtype=np.dtype(ftype), count=nlinks)
- frictionfactor = np.fromfile(fin, dtype=np.dtype(ftype), count=nlinks)
+ velocity = np.fromfile(
+ fin, dtype=np.dtype(ftype), count=nlinks
+ )
+ headloss = np.fromfile(
+ fin, dtype=np.dtype(ftype), count=nlinks
+ )
+ linkquality = np.fromfile(
+ fin, dtype=np.dtype(ftype), count=nlinks
+ )
+ linkstatus = np.fromfile(
+ fin, dtype=np.dtype(ftype), count=nlinks
+ )
+ linksetting = np.fromfile(
+ fin, dtype=np.dtype(ftype), count=nlinks
+ )
+ reactionrate = np.fromfile(
+ fin, dtype=np.dtype(ftype), count=nlinks
+ )
+ frictionfactor = np.fromfile(
+ fin, dtype=np.dtype(ftype), count=nlinks
+ )
self.save_ep_line(ts, ResultType.demand, demand)
self.save_ep_line(ts, ResultType.head, head)
self.save_ep_line(ts, ResultType.pressure, pressure)
@@ -1125,71 +1557,132 @@ def read(self, filename, custom_handlers=False, start_time = None):
self.save_ep_line(ts, ResultType.status, linkstatus)
self.save_ep_line(ts, ResultType.setting, linksetting)
self.save_ep_line(ts, ResultType.rxnrate, reactionrate)
- self.save_ep_line(ts, ResultType.frictionfact, frictionfactor)
- except Exception as e:
- logger.exception('Error reading or writing EP line: %s', e)
- logger.warning('Missing results from report period %d',ts)
+ self.save_ep_line(
+ ts, ResultType.frictionfact, frictionfactor
+ )
+ except Exception as e: # noqa: PERF203
+ logger.exception('Error reading or writing EP line: %s', e) # noqa: TRY401
+ logger.warning('Missing results from report period %d', ts)
else:
-# type_list = 4*nnodes*['node'] + 8*nlinks*['link']
- name_list = nodenames*4 + linknames*8
- valuetype = nnodes*['demand']+nnodes*['head']+nnodes*['pressure']+nnodes*['quality'] + nlinks*['flow']+nlinks*['velocity']+nlinks*['headloss']+nlinks*['linkquality']+nlinks*['linkstatus']+nlinks*['linksetting']+nlinks*['reactionrate']+nlinks*['frictionfactor']
-
-# tuples = zip(type_list, valuetype, name_list)
+ # type_list = 4*nnodes*['node'] + 8*nlinks*['link']
+ name_list = nodenames * 4 + linknames * 8
+ valuetype = (
+ nnodes * ['demand']
+ + nnodes * ['head']
+ + nnodes * ['pressure']
+ + nnodes * ['quality']
+ + nlinks * ['flow']
+ + nlinks * ['velocity']
+ + nlinks * ['headloss']
+ + nlinks * ['linkquality']
+ + nlinks * ['linkstatus']
+ + nlinks * ['linksetting']
+ + nlinks * ['reactionrate']
+ + nlinks * ['frictionfactor']
+ )
+
+ # tuples = zip(type_list, valuetype, name_list)
tuples = list(zip(valuetype, name_list))
-# tuples = [(valuetype[i], v) for i, v in enumerate(name_list)]
- index = pd.MultiIndex.from_tuples(tuples, names=['value','name'])
+ # tuples = [(valuetype[i], v) for i, v in enumerate(name_list)]
+ index = pd.MultiIndex.from_tuples(tuples, names=['value', 'name'])
try:
- data = np.fromfile(fin, dtype = np.dtype(ftype), count = (4*nnodes+8*nlinks)*nrptsteps)
- data = np.reshape(data, (nrptsteps, (4*nnodes+8*nlinks)))
+ data = np.fromfile(
+ fin,
+ dtype=np.dtype(ftype),
+ count=(4 * nnodes + 8 * nlinks) * nrptsteps,
+ )
+ data = np.reshape(data, (nrptsteps, (4 * nnodes + 8 * nlinks)))
except Exception as e:
- logger.exception('Failed to process file: %s', e)
-
- df = pd.DataFrame(data.transpose(), index =index, columns = reporttimes)
- df = df.transpose()
-
+ logger.exception('Failed to process file: %s', e) # noqa: TRY401
+
+ df = pd.DataFrame(data.transpose(), index=index, columns=reporttimes) # noqa: PD901
+ df = df.transpose() # noqa: PD901
+
self.results.node = {}
self.results.link = {}
self.results.network_name = self.inp_file
-
+
# Node Results
- self.results.node['demand'] = HydParam.Demand._to_si(self.flow_units, df['demand'])
- self.results.node['head'] = HydParam.HydraulicHead._to_si(self.flow_units, df['head'])
- self.results.node['pressure'] = HydParam.Pressure._to_si(self.flow_units, df['pressure'])
+ self.results.node['demand'] = HydParam.Demand._to_si( # noqa: SLF001
+ self.flow_units, df['demand']
+ )
+ self.results.node['head'] = HydParam.HydraulicHead._to_si( # noqa: SLF001
+ self.flow_units, df['head']
+ )
+ self.results.node['pressure'] = HydParam.Pressure._to_si( # noqa: SLF001
+ self.flow_units, df['pressure']
+ )
# Water Quality Results (node and link)
if self.quality_type is QualType.Chem:
- self.results.node['quality'] = QualParam.Concentration._to_si(self.flow_units, df['quality'], mass_units=self.mass_units)
- self.results.link['linkquality'] = QualParam.Concentration._to_si(self.flow_units, df['linkquality'], mass_units=self.mass_units)
+ self.results.node['quality'] = QualParam.Concentration._to_si( # noqa: SLF001
+ self.flow_units, df['quality'], mass_units=self.mass_units
+ )
+ self.results.link['linkquality'] = (
+ QualParam.Concentration._to_si( # noqa: SLF001
+ self.flow_units,
+ df['linkquality'],
+ mass_units=self.mass_units,
+ )
+ )
elif self.quality_type is QualType.Age:
- self.results.node['quality'] = QualParam.WaterAge._to_si(self.flow_units, df['quality'], mass_units=self.mass_units)
- self.results.link['linkquality'] = QualParam.WaterAge._to_si(self.flow_units, df['linkquality'], mass_units=self.mass_units)
+ self.results.node['quality'] = QualParam.WaterAge._to_si( # noqa: SLF001
+ self.flow_units, df['quality'], mass_units=self.mass_units
+ )
+ self.results.link['linkquality'] = QualParam.WaterAge._to_si( # noqa: SLF001
+ self.flow_units,
+ df['linkquality'],
+ mass_units=self.mass_units,
+ )
else:
self.results.node['quality'] = df['quality']
self.results.link['linkquality'] = df['linkquality']
# Link Results
- self.results.link['flowrate'] = HydParam.Flow._to_si(self.flow_units, df['flow'])
+ self.results.link['flowrate'] = HydParam.Flow._to_si( # noqa: SLF001
+ self.flow_units, df['flow']
+ )
self.results.link['headloss'] = df['headloss'] # Unit is per 1000
- self.results.link['velocity'] = HydParam.Velocity._to_si(self.flow_units, df['velocity'])
-
-# self.results.link['status'] = df['linkstatus']
+ self.results.link['velocity'] = HydParam.Velocity._to_si( # noqa: SLF001
+ self.flow_units, df['velocity']
+ )
+
+ # self.results.link['status'] = df['linkstatus']
status = np.array(df['linkstatus'])
if self.convert_status:
- status[status <= 2] = 0
- status[status == 3] = 1
- status[status >= 5] = 1
- status[status == 4] = 2
- self.results.link['status'] = pd.DataFrame(data=status, columns=linknames, index=reporttimes)
-
+ status[status <= 2] = 0 # noqa: PLR2004
+ status[status == 3] = 1 # noqa: PLR2004
+ status[status >= 5] = 1 # noqa: PLR2004
+ status[status == 4] = 2 # noqa: PLR2004
+ self.results.link['status'] = pd.DataFrame(
+ data=status, columns=linknames, index=reporttimes
+ )
+
settings = np.array(df['linksetting'])
- settings[:, linktype == EN.PRV] = to_si(self.flow_units, settings[:, linktype == EN.PRV], HydParam.Pressure)
- settings[:, linktype == EN.PSV] = to_si(self.flow_units, settings[:, linktype == EN.PSV], HydParam.Pressure)
- settings[:, linktype == EN.PBV] = to_si(self.flow_units, settings[:, linktype == EN.PBV], HydParam.Pressure)
- settings[:, linktype == EN.FCV] = to_si(self.flow_units, settings[:, linktype == EN.FCV], HydParam.Flow)
- self.results.link['setting'] = pd.DataFrame(data=settings, columns=linknames, index=reporttimes)
+ settings[:, linktype == EN.PRV] = to_si(
+ self.flow_units,
+ settings[:, linktype == EN.PRV],
+ HydParam.Pressure,
+ )
+ settings[:, linktype == EN.PSV] = to_si(
+ self.flow_units,
+ settings[:, linktype == EN.PSV],
+ HydParam.Pressure,
+ )
+ settings[:, linktype == EN.PBV] = to_si(
+ self.flow_units,
+ settings[:, linktype == EN.PBV],
+ HydParam.Pressure,
+ )
+ settings[:, linktype == EN.FCV] = to_si(
+ self.flow_units, settings[:, linktype == EN.FCV], HydParam.Flow
+ )
+ self.results.link['setting'] = pd.DataFrame(
+ data=settings, columns=linknames, index=reporttimes
+ )
self.results.link['frictionfact'] = df['frictionfactor']
self.results.link['rxnrate'] = df['reactionrate']
-
+
logger.debug('... read epilog ...')
# Read the averages and then the number of periods for checks
averages = np.fromfile(fin, dtype=np.dtype(ftype), count=4)
@@ -1198,11 +1691,13 @@ def read(self, filename, custom_handlers=False, start_time = None):
warnflag = np.fromfile(fin, dtype=np.int32, count=1)
magic2 = np.fromfile(fin, dtype=np.int32, count=1)
if magic1 != magic2:
- logger.critical('The magic number did not match -- binary incomplete or incorrectly read. If you believe this file IS complete, please try a different float type. Current type is "%s"',ftype)
- #print numperiods, warnflag, magic
+ logger.critical(
+ 'The magic number did not match -- binary incomplete or incorrectly read. If you believe this file IS complete, please try a different float type. Current type is "%s"',
+ ftype,
+ )
+ # print numperiods, warnflag, magic
if warnflag != 0:
logger.warning('Warnings were issued during simulation')
- self.finalize_save(magic1==magic2, warnflag)
-
- return self.results
+ self.finalize_save(magic1 == magic2, warnflag)
+ return self.results
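
A note on the reader above: the status-conversion block collapses EPANET's eight encoded link-status codes into the three WNTR states, and start_time simply shifts every report time when results from interval-based runs are stitched together. The sketch below reproduces that mapping on a toy array and shows a hedged call to the enhanced reader; the output file name and the surrounding driver are illustrative assumptions, not part of this patch.

import numpy as np

# Same collapse as in BinFile.read(): codes 0-2 -> 0 (closed),
# 3 and >= 5 -> 1 (open), 4 -> 2 (active).
raw = np.array([0, 1, 2, 3, 4, 5, 6, 7], dtype=float)
status = raw.copy()
status[status <= 2] = 0
status[status == 3] = 1
status[status >= 5] = 1
status[status == 4] = 2
print(status)  # [0. 0. 0. 1. 2. 1. 1. 1.]

# Hedged usage, assuming an EPANET binary output produced by a previous interval:
# reader = BinFile()
# results = reader.read('scenario.bin', custom_handlers=False, start_time=3600)
# results.link['status']  # report times are offset by start_time seconds
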
diff --git a/modules/systemPerformance/REWET/REWET/EnhancedWNTR/sim/results.py b/modules/systemPerformance/REWET/REWET/EnhancedWNTR/sim/results.py
index 02baee04a..2884ebb3b 100644
--- a/modules/systemPerformance/REWET/REWET/EnhancedWNTR/sim/results.py
+++ b/modules/systemPerformance/REWET/REWET/EnhancedWNTR/sim/results.py
@@ -1,10 +1,8 @@
-from wntrfr.sim.results import SimulationResults
+from wntrfr.sim.results import SimulationResults # noqa: CPY001, D100
class SimulationResults(SimulationResults):
- """
- Water network simulation results class.
- """
+ """Water network simulation results class."""
def __init__(self):
super().__init__()
diff --git a/modules/systemPerformance/REWET/REWET/EnhancedWNTR/sim/toolkit.py b/modules/systemPerformance/REWET/REWET/EnhancedWNTR/sim/toolkit.py
index 7433a570b..192375a10 100644
--- a/modules/systemPerformance/REWET/REWET/EnhancedWNTR/sim/toolkit.py
+++ b/modules/systemPerformance/REWET/REWET/EnhancedWNTR/sim/toolkit.py
@@ -1,78 +1,89 @@
-# -*- coding: utf-8 -*-
-"""
-Created on Wed May 26 16:11:36 2021
+"""Created on Wed May 26 16:11:36 2021
@author: snaeimi
-"""
-import wntrfr.epanet.toolkit
-import numpy as np
+""" # noqa: CPY001, D400
+
import ctypes
-import os, sys
-from pkg_resources import resource_filename
+import logging
+import os
import platform
+import sys
+
+import numpy as np
+import wntrfr.epanet.toolkit
+from pkg_resources import resource_filename
-import logging
logger = logging.getLogger(__name__)
-class EpanetException(Exception):
+
+class EpanetException(Exception): # noqa: N818, D101
pass
-class ENepanet(wntrfr.epanet.toolkit.ENepanet):
- def __init__(self, inpfile='', rptfile='', binfile='', changed_epanet=False, version=2.2):
- if changed_epanet==False or changed_epanet==True:
- self.changed_epanet=changed_epanet
+
+class ENepanet(wntrfr.epanet.toolkit.ENepanet): # noqa: D101
+ def __init__( # noqa: C901
+ self,
+ inpfile='',
+ rptfile='',
+ binfile='',
+ changed_epanet=False, # noqa: FBT002
+ version=2.2,
+ ):
+ if changed_epanet == False or changed_epanet == True: # noqa: E712, PLR1714
+ self.changed_epanet = changed_epanet
else:
- raise ValueError("changed_epanet must be a boolean value")
-
- if changed_epanet==False:
- try:
+ raise ValueError('changed_epanet must be a boolean value') # noqa: EM101, TRY003
+
+ if changed_epanet == False: # noqa: E712
+ try: # noqa: SIM105
super().__init__(inpfile, rptfile, binfile, version=version)
- except:
- pass # to add robustness for the time when for the WNTR
- #cannot load the umodified DLLs for any reason
+ except: # noqa: S110, E722
+                pass  # adds robustness for cases when WNTR
+                # cannot load the unmodified DLLs for any reason
else:
-
- if float(version) != 2.2:
- raise ValueError("EPANET version must be 2.2 when using tegh changed version")
-
- elif float(version) == 2.2:
- libnames = ["epanet22_mod", "epanet22_win32_mod"]
- if "64" in platform.machine():
- libnames.insert(0, "epanet22_amd64_mod")
+ if float(version) != 2.2: # noqa: PLR2004
+ raise ValueError( # noqa: TRY003
+                    'EPANET version must be 2.2 when using the changed version'  # noqa: EM101
+ )
+
+ elif float(version) == 2.2: # noqa: RET506, PLR2004
+ libnames = ['epanet22_mod', 'epanet22_win32_mod']
+ if '64' in platform.machine():
+ libnames.insert(0, 'epanet22_amd64_mod')
for lib in libnames:
try:
- if os.name in ["nt", "dos"]:
+ if os.name in ['nt', 'dos']: # noqa: PLR6201
libepanet = resource_filename(
- __name__, "Windows/%s.dll" % lib
+ __name__,
+ 'Windows/%s.dll' % lib, # noqa: UP031
)
self.ENlib = ctypes.windll.LoadLibrary(libepanet)
- elif sys.platform in ["darwin"]:
+ elif sys.platform == 'darwin':
libepanet = resource_filename(
- __name__, "Darwin/lib%s.dylib" % lib
+ __name__,
+ 'Darwin/lib%s.dylib' % lib, # noqa: UP031
)
self.ENlib = ctypes.cdll.LoadLibrary(libepanet)
else:
libepanet = resource_filename(
- __name__, "Linux/lib%s.so" % lib
+ __name__,
+ 'Linux/lib%s.so' % lib, # noqa: UP031
)
self.ENlib = ctypes.cdll.LoadLibrary(libepanet)
- return
- except Exception as E1:
+ return # noqa: TRY300
+ except Exception as E1: # noqa: PERF203
if lib == libnames[-1]:
- raise E1
- pass
+ raise E1 # noqa: TRY201
finally:
- if version >= 2.2 and '32' not in lib:
+ if version >= 2.2 and '32' not in lib: # noqa: PLR2004
self._project = ctypes.c_uint64()
- elif version >= 2.2:
+ elif version >= 2.2: # noqa: PLR2004
self._project = ctypes.c_uint32()
else:
- self._project = None
-
-
- def ENn(self, inpfile=None, rptfile=None, binfile=None):
- """
- Opens an EPANET input file and reads in network data
+ self._project = None
+
+ def ENn(self, inpfile=None, rptfile=None, binfile=None): # noqa: N802
+ """Opens an EPANET input file and reads in network data
Parameters
----------
@@ -82,23 +93,23 @@ def ENn(self, inpfile=None, rptfile=None, binfile=None):
Output file to create (default to constructor value)
binfile : str
Binary output file to create (default to constructor value)
-
- """
+
+ """ # noqa: D400, D401
inpfile = inpfile.encode('ascii')
- rptfile = rptfile.encode('ascii') #''.encode('ascii')
+ rptfile = rptfile.encode('ascii') # ''.encode('ascii')
binfile = binfile.encode('ascii')
- s = "s"
+ s = 's'
self.errcode = self.ENlib.EN_runproject(inpfile, rptfile, binfile, s)
self._error()
- if self.errcode < 100:
+ if self.errcode < 100: # noqa: PLR2004
self.fileLoaded = True
- return
-
-
-
- def ENSetIgnoreFlag(self, ignore_flag=0):
- if abs(ignore_flag - np.round(ignore_flag))>0.00001 or ignore_flag<0:
- logger.error('ignore_flag must be int value and bigger than zero'+str(ignore_flag))
- flag=ctypes.c_int(int(ignore_flag))
- #print('++++++++++++++++++++++')
- #self.ENlib.ENEXTENDEDsetignoreflag(flag)
\ No newline at end of file
+
+ def ENSetIgnoreFlag(self, ignore_flag=0): # noqa: D102, N802, PLR6301
+ if abs(ignore_flag - np.round(ignore_flag)) > 0.00001 or ignore_flag < 0: # noqa: PLR2004
+ logger.error(
+                'ignore_flag must be a non-negative integer value: '  # noqa: G003
+ + str(ignore_flag)
+ )
+ flag = ctypes.c_int(int(ignore_flag)) # noqa: F841
+ # print('++++++++++++++++++++++')
+ # self.ENlib.ENEXTENDEDsetignoreflag(flag)
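
The constructor above walks a list of candidate modified EPANET 2.2 libraries, resolves a per-platform path for each, and only re-raises once the last candidate fails to load. Below is a minimal sketch of that load-with-fallback pattern under assumed library names and paths; the real code resolves the packaged DLLs through pkg_resources.resource_filename, which is not reproduced here.

import ctypes
import os
import platform


def load_first_available(candidate_paths):
    # Try each shared library in order; surface the last error if none load.
    last_error = None
    for path in candidate_paths:
        try:
            if os.name in ('nt', 'dos'):
                return ctypes.windll.LoadLibrary(path)
            return ctypes.cdll.LoadLibrary(path)
        except OSError as err:
            last_error = err
    if last_error is None:
        raise FileNotFoundError('no candidate libraries given')
    raise last_error


# Hypothetical candidate order, mirroring the 64-bit preference above.
names = ['epanet22_mod', 'epanet22_win32_mod']
if '64' in platform.machine():
    names.insert(0, 'epanet22_amd64_mod')
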
diff --git a/modules/systemPerformance/REWET/REWET/Example/exampe_config.txt b/modules/systemPerformance/REWET/REWET/Example/exampe_config.txt
index a1a5d126c..17fbac57b 100644
--- a/modules/systemPerformance/REWET/REWET/Example/exampe_config.txt
+++ b/modules/systemPerformance/REWET/REWET/Example/exampe_config.txt
@@ -3,7 +3,7 @@ Net3RA Crew_Data/Net3RA.txt
Net3DA Crew_Data/Net3DA.txt
[SHIFTS]
-;name begining end
+;name beginning end
D 8 20
N 20 8
@@ -40,7 +40,7 @@ DistrAgent 1 inspect:distr isolate:distr repair:distr
DistrAgent 2 EPICENTERDIST EPICENTERDIST EPICENTERDIST
[JOBS]
-;agent_type action:entity time EDN
+;agent_type action:entity time END
;---------- Inspection Phase ----------
RepairAgent inspect:trunk FIXED:1800 CHECK
RepairAgent inspect:distr FIXED:1800 CHECK
@@ -63,7 +63,7 @@ DistrAgent repair:distr FIXED:144000 DN_repair
[DEFINE]
-;EDN
+;END
;---------- Reroute for Major-Trunkline Definition ----------
MJTRreroute DEFAULT METHOD_PROBABILITY:1:1
MJTRreroute 1 RECONNECT:PIPESIZEFACTOR:0.05 REMOVE_LEAK:LEAKFACTOR:1
@@ -84,4 +84,4 @@ trunk_leak_repair 1 REPAIR
;---------- Reroute for Distribution Node Definition ----------
DN_repair DEFAULT METHOD_PROBABILITY:1:1
-DN_repair 1 REPAIR
\ No newline at end of file
+DN_repair 1 REPAIR
diff --git a/modules/systemPerformance/REWET/REWET/GUI/Damage_Discovery_Designer.py b/modules/systemPerformance/REWET/REWET/GUI/Damage_Discovery_Designer.py
index 27cbdd2db..9bb31e5b6 100644
--- a/modules/systemPerformance/REWET/REWET/GUI/Damage_Discovery_Designer.py
+++ b/modules/systemPerformance/REWET/REWET/GUI/Damage_Discovery_Designer.py
@@ -1,209 +1,239 @@
-# -*- coding: utf-8 -*-
-"""
-Created on Tue Nov 1 23:25:30 2022
+"""Created on Tue Nov 1 23:25:30 2022
@author: snaeimi
-"""
+""" # noqa: CPY001, D400, N999
+import pandas as pd
from PyQt5 import QtCore, QtGui, QtWidgets
+
from .Damage_Discovery_Window import Ui_damage_discovery
-import pandas as pd
-class Damage_Discovery_Designer(Ui_damage_discovery):
+
+class Damage_Discovery_Designer(Ui_damage_discovery): # noqa: D101
def __init__(self, damage_discovery_model):
self._window = QtWidgets.QDialog()
self.setupUi(self._window)
self.damage_discovery_model = damage_discovery_model.copy()
-
"""
Field Validators
"""
- self.leak_amount_line.setValidator(QtGui.QDoubleValidator(0, 1000000, 20, notation=QtGui.QDoubleValidator.StandardNotation) )
- self.leak_time_line.setValidator(QtGui.QIntValidator(0, 10000*3600))
- self.time_line.setValidator(QtGui.QIntValidator(0, 10000*3600))
+ self.leak_amount_line.setValidator(
+ QtGui.QDoubleValidator(
+ 0, 1000000, 20, notation=QtGui.QDoubleValidator.StandardNotation
+ )
+ )
+ self.leak_time_line.setValidator(QtGui.QIntValidator(0, 10000 * 3600))
+ self.time_line.setValidator(QtGui.QIntValidator(0, 10000 * 3600))
-
if self.damage_discovery_model['method'] == 'leak_based':
self.enableLeakBased()
leak_amount = self.damage_discovery_model['leak_amount']
- leak_time = self.damage_discovery_model['leak_time']
+ leak_time = self.damage_discovery_model['leak_time']
self.leak_amount_line.setText(str(leak_amount))
self.leak_time_line.setText(str(leak_time))
-
-
+
elif self.damage_discovery_model['method'] == 'time_based':
self.enableTimeBased()
- time_discovery_ratio = self.damage_discovery_model['time_discovery_ratio']
+ time_discovery_ratio = self.damage_discovery_model[
+ 'time_discovery_ratio'
+ ]
self.populateTimeDiscoveryRatioTable(time_discovery_ratio)
-
+
"""
Signal Connections
"""
- self.discovery_ratio_line.setValidator(QtGui.QDoubleValidator(0, 1, 20, notation=QtGui.QDoubleValidator.StandardNotation) )
- self.discovery_ratio_line.textChanged.connect(self.discoveryRatioValidatorHelper)
+ self.discovery_ratio_line.setValidator(
+ QtGui.QDoubleValidator(
+ 0, 1, 20, notation=QtGui.QDoubleValidator.StandardNotation
+ )
+ )
+ self.discovery_ratio_line.textChanged.connect(
+ self.discoveryRatioValidatorHelper
+ )
self.leak_based_radio.toggled.connect(self.methodRadioButtonToggled)
self.time_based_radio.toggled.connect(self.methodRadioButtonToggled)
self.add_button.clicked.connect(self.addTimeDiscoveryRatioByButton)
self.remove_button.clicked.connect(self.removeTimeDiscoveryRatioByButton)
self.buttonBox.accepted.connect(self.okButtonPressed)
-
-
- def discoveryRatioValidatorHelper(self, x):
- discovery_ratio = float( self.discovery_ratio_line.text())
-
+
+ def discoveryRatioValidatorHelper(self, x): # noqa: ARG002, N802, D102
+ discovery_ratio = float(self.discovery_ratio_line.text())
+
if discovery_ratio > 1:
self.discovery_ratio_line.setText(str(1.0))
- def enableLeakBased(self):
+ def enableLeakBased(self): # noqa: N802, D102
self.leak_based_radio.setChecked(True)
-
+
self.leak_anount_label.setEnabled(True)
self.leak_amount_line.setEnabled(True)
self.leak_time_label.setEnabled(True)
self.leak_time_line.setEnabled(True)
-
+
self.time_line.setEnabled(False)
self.discovery_ratio_line.setEnabled(False)
self.time_discovery_ratio_table.setEnabled(False)
self.add_button.setEnabled(False)
self.remove_button.setEnabled(False)
-
- def enableTimeBased(self):
+
+ def enableTimeBased(self): # noqa: N802, D102
self.time_based_radio.setChecked(True)
self.leak_anount_label.setEnabled(False)
self.leak_amount_line.setEnabled(False)
self.leak_time_label.setEnabled(False)
self.leak_time_line.setEnabled(False)
-
+
self.time_line.setEnabled(True)
self.discovery_ratio_line.setEnabled(True)
self.time_discovery_ratio_table.setEnabled(True)
self.add_button.setEnabled(True)
self.remove_button.setEnabled(True)
-
- def clearTimeDiscoveryRatioTable(self):
- for i in range(self.time_discovery_ratio_table.rowCount()):
+
+ def clearTimeDiscoveryRatioTable(self): # noqa: N802, D102
+ for i in range(self.time_discovery_ratio_table.rowCount()): # noqa: B007
self.time_discovery_ratio_table.removeRow(0)
-
- def okButtonPressed(self):
-
+
+ def okButtonPressed(self): # noqa: C901, N802, D102
if self.leak_based_radio.isChecked():
leak_amount = self.leak_amount_line.text()
- leak_time = self.leak_time_line.text()
-
- if leak_amount == '':
- self.errorMSG("Empty Vlaue", "Please fill the 'Leak Amont' field.")
+ leak_time = self.leak_time_line.text()
+
+ if leak_amount == '': # noqa: PLC1901
+                self.errorMSG('Empty Value', "Please fill the 'Leak Amount' field.")
return
- elif leak_time == '':
- self.errorMSG("Empty Vlaue", "Please fill the 'Leak Time' field.")
+ elif leak_time == '': # noqa: PLC1901, RET505
+                self.errorMSG('Empty Value', "Please fill the 'Leak Time' field.")
return
-
+
leak_amount = float(leak_amount)
- leak_time = int(float(leak_amount))
-
+                leak_time = int(float(leak_time))  # read from the leak-time field, not the amount
+
self.damage_discovery_model['leak_amount'] = leak_amount
- self.damage_discovery_model['leak_time' ] = leak_time
-
+ self.damage_discovery_model['leak_time'] = leak_time
+
elif self.time_based_radio.isChecked():
if 'time_discovery_ratio' not in self.damage_discovery_model:
- self.errorMSG("Discovery Ratio Error", "Discovery Ratio Table is empty")
+ self.errorMSG(
+ 'Discovery Ratio Error', 'Discovery Ratio Table is empty'
+ )
return
-
+
if self.damage_discovery_model['time_discovery_ratio'].empty:
- self.errorMSG("Discovery Ratio Error", "Discovery Ratio Table is empty")
+ self.errorMSG(
+ 'Discovery Ratio Error', 'Discovery Ratio Table is empty'
+ )
return
-
- if self.damage_discovery_model['time_discovery_ratio'].is_monotonic_increasing == False:
- self.errorMSG("Discovery Ratio Error", "Discovery Ratio data must be monotonic through time")
+
+ if (
+ self.damage_discovery_model[ # noqa: E712
+ 'time_discovery_ratio'
+ ].is_monotonic_increasing
+ == False
+ ):
+ self.errorMSG(
+ 'Discovery Ratio Error',
+ 'Discovery Ratio data must be monotonic through time',
+ )
return
if self.leak_based_radio.isChecked():
if 'time_discovery_ratio' in self.damage_discovery_model:
self.damage_discovery_model.pop('time_discovery_ratio')
-
+
self.damage_discovery_model['method'] = 'leak_based'
elif self.time_based_radio.isChecked():
if 'leak_amount' in self.damage_discovery_model:
self.damage_discovery_model.pop('leak_amount')
-
+
if 'leak_time' in self.damage_discovery_model:
self.damage_discovery_model.pop('leak_time')
-
+
self.damage_discovery_model['method'] = 'time_based'
self._window.accept()
-
- def populateTimeDiscoveryRatioTable(self, time_discovery_ratio):
+
+ def populateTimeDiscoveryRatioTable(self, time_discovery_ratio): # noqa: N802, D102
for time, discovery_ratio in time_discovery_ratio.iteritems():
number_of_rows = self.time_discovery_ratio_table.rowCount()
self.time_discovery_ratio_table.insertRow(number_of_rows)
-
- time_item = QtWidgets.QTableWidgetItem(str(time) )
- discovery_ratio_item = QtWidgets.QTableWidgetItem(str(discovery_ratio) )
-
- time_item.setFlags( QtCore.Qt.ItemIsSelectable | QtCore.Qt.ItemIsEnabled )
- discovery_ratio_item.setFlags( QtCore.Qt.ItemIsSelectable | QtCore.Qt.ItemIsEnabled )
-
- self.time_discovery_ratio_table.setItem(number_of_rows, 0, time_item )
- self.time_discovery_ratio_table.setItem(number_of_rows, 1, discovery_ratio_item )
-
- def addTimeDiscoveryRatioByButton(self):
- time = self.time_line.text()
+
+ time_item = QtWidgets.QTableWidgetItem(str(time))
+ discovery_ratio_item = QtWidgets.QTableWidgetItem(str(discovery_ratio))
+
+ time_item.setFlags(QtCore.Qt.ItemIsSelectable | QtCore.Qt.ItemIsEnabled)
+ discovery_ratio_item.setFlags(
+ QtCore.Qt.ItemIsSelectable | QtCore.Qt.ItemIsEnabled
+ )
+
+ self.time_discovery_ratio_table.setItem(number_of_rows, 0, time_item)
+ self.time_discovery_ratio_table.setItem(
+ number_of_rows, 1, discovery_ratio_item
+ )
+
+ def addTimeDiscoveryRatioByButton(self): # noqa: N802, D102
+ time = self.time_line.text()
discovery_ratio = self.discovery_ratio_line.text()
-
- if time == '' or discovery_ratio == '':
+
+ if time == '' or discovery_ratio == '': # noqa: PLC1901
return
if 'time_discovery_ratio' not in self.damage_discovery_model:
self.damage_discovery_model['time_discovery_ratio'] = pd.Series()
time_discovery_ratio = self.damage_discovery_model['time_discovery_ratio']
-
+
if int(float(time)) in time_discovery_ratio.index:
- self.errorMSG("Duplicate Time", "There is a duplicate time. Please remove the time first")
+ self.errorMSG(
+ 'Duplicate Time',
+ 'There is a duplicate time. Please remove the time first',
+ )
return
-
- time = int(float(time) )
+
+ time = int(float(time))
discovery_ratio = float(discovery_ratio)
-
+
time_discovery_ratio.loc[time] = discovery_ratio
- self.damage_discovery_model['time_discovery_ratio'] = time_discovery_ratio.sort_index()
+ self.damage_discovery_model['time_discovery_ratio'] = (
+ time_discovery_ratio.sort_index()
+ )
self.clearTimeDiscoveryRatioTable()
- self.populateTimeDiscoveryRatioTable(self.damage_discovery_model['time_discovery_ratio'])
-
- def removeTimeDiscoveryRatioByButton(self):
+ self.populateTimeDiscoveryRatioTable(
+ self.damage_discovery_model['time_discovery_ratio']
+ )
+
+ def removeTimeDiscoveryRatioByButton(self): # noqa: N802, D102
items = self.time_discovery_ratio_table.selectedItems()
if len(items) < 1:
return
-
+
row_number = []
- for i in range (0, len(items)):
+ for i in range(len(items)):
selected_row = items[i].row()
row_number.append(selected_row)
-
+
row_number = list(set(row_number))
-
+
time_discovery_ratio = self.damage_discovery_model['time_discovery_ratio']
-
+
for selected_row in row_number:
time = self.time_discovery_ratio_table.item(selected_row, 0).text()
time_discovery_ratio = time_discovery_ratio.drop(time)
- self.damage_discovery_model['time_discovery_ratio'] = time_discovery_ratio
+ self.damage_discovery_model['time_discovery_ratio'] = time_discovery_ratio
self.clearTimeDiscoveryRatioTable()
- self.populateTimeDiscoveryRatioTable
-
- def methodRadioButtonToggled(self):
+        self.populateTimeDiscoveryRatioTable(time_discovery_ratio)
+
+ def methodRadioButtonToggled(self): # noqa: N802, D102
if self.leak_based_radio.isChecked():
self.enableLeakBased()
elif self.time_based_radio.isChecked():
self.enableTimeBased()
-
- def errorMSG(self, error_title, error_msg, error_more_msg=None):
+
+ def errorMSG(self, error_title, error_msg, error_more_msg=None): # noqa: D102, N802, PLR6301
error_widget = QtWidgets.QMessageBox()
error_widget.setIcon(QtWidgets.QMessageBox.Critical)
error_widget.setText(error_msg)
error_widget.setWindowTitle(error_title)
error_widget.setStandardButtons(QtWidgets.QMessageBox.Ok)
- if error_more_msg!=None:
+ if error_more_msg != None: # noqa: E711
error_widget.setInformativeText(error_more_msg)
- error_widget.exec_()
\ No newline at end of file
+ error_widget.exec_()
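
The time-based branch of this dialog stores the discovery model as a pandas Series keyed by time in seconds, with cumulative discovery ratios that okButtonPressed requires to be monotonically increasing; the input validator separately caps each ratio at 1.0. A small sketch of building and validating such a series outside the GUI; the sample times and ratios are made up for illustration.

import pandas as pd

# Hypothetical discovery curve: half of the damage is discovered after 1 h,
# 80% after 6 h, and everything after 24 h.
time_discovery_ratio = pd.Series({3600: 0.5, 21600: 0.8, 86400: 1.0}).sort_index()

# Monotonicity is what okButtonPressed checks before accepting a time-based model.
assert time_discovery_ratio.is_monotonic_increasing
# Ratios above 1.0 are clamped by the dialog's line-edit validator.
assert time_discovery_ratio.le(1.0).all()

damage_discovery_model = {
    'method': 'time_based',
    'time_discovery_ratio': time_discovery_ratio,
}
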
diff --git a/modules/systemPerformance/REWET/REWET/GUI/Damage_Discovery_Window.py b/modules/systemPerformance/REWET/REWET/GUI/Damage_Discovery_Window.py
index 2a2bee99c..286e75a78 100644
--- a/modules/systemPerformance/REWET/REWET/GUI/Damage_Discovery_Window.py
+++ b/modules/systemPerformance/REWET/REWET/GUI/Damage_Discovery_Window.py
@@ -1,96 +1,105 @@
-# -*- coding: utf-8 -*-
-
-# Form implementation generated from reading ui file 'Damage_Discovery_Window.ui'
+# Form implementation generated from reading ui file 'Damage_Discovery_Window.ui' # noqa: CPY001, D100, N999
#
# Created by: PyQt5 UI code generator 5.12.3
#
# WARNING! All changes made in this file will be lost!
-from PyQt5 import QtCore, QtGui, QtWidgets
+from PyQt5 import QtCore, QtWidgets
-class Ui_damage_discovery(object):
- def setupUi(self, damage_discovery):
- damage_discovery.setObjectName("damage_discovery")
+class Ui_damage_discovery: # noqa: D101
+ def setupUi(self, damage_discovery): # noqa: N802, D102
+ damage_discovery.setObjectName('damage_discovery')
damage_discovery.resize(450, 400)
damage_discovery.setMinimumSize(QtCore.QSize(450, 400))
damage_discovery.setMaximumSize(QtCore.QSize(450, 400))
self.buttonBox = QtWidgets.QDialogButtonBox(damage_discovery)
self.buttonBox.setGeometry(QtCore.QRect(350, 20, 81, 61))
self.buttonBox.setOrientation(QtCore.Qt.Vertical)
- self.buttonBox.setStandardButtons(QtWidgets.QDialogButtonBox.Cancel|QtWidgets.QDialogButtonBox.Ok)
- self.buttonBox.setObjectName("buttonBox")
+ self.buttonBox.setStandardButtons(
+ QtWidgets.QDialogButtonBox.Cancel | QtWidgets.QDialogButtonBox.Ok
+ )
+ self.buttonBox.setObjectName('buttonBox')
self.groupBox = QtWidgets.QGroupBox(damage_discovery)
self.groupBox.setGeometry(QtCore.QRect(19, 19, 311, 351))
- self.groupBox.setObjectName("groupBox")
+ self.groupBox.setObjectName('groupBox')
self.leak_amount_line = QtWidgets.QLineEdit(self.groupBox)
self.leak_amount_line.setGeometry(QtCore.QRect(80, 50, 51, 20))
- self.leak_amount_line.setObjectName("leak_amount_line")
+ self.leak_amount_line.setObjectName('leak_amount_line')
self.leak_anount_label = QtWidgets.QLabel(self.groupBox)
self.leak_anount_label.setGeometry(QtCore.QRect(10, 50, 71, 16))
- self.leak_anount_label.setObjectName("leak_anount_label")
+ self.leak_anount_label.setObjectName('leak_anount_label')
self.leak_time_line = QtWidgets.QLineEdit(self.groupBox)
self.leak_time_line.setGeometry(QtCore.QRect(210, 50, 81, 20))
- self.leak_time_line.setObjectName("leak_time_line")
+ self.leak_time_line.setObjectName('leak_time_line')
self.time_discovery_ratio_table = QtWidgets.QTableWidget(self.groupBox)
self.time_discovery_ratio_table.setGeometry(QtCore.QRect(10, 141, 211, 191))
- self.time_discovery_ratio_table.setSelectionMode(QtWidgets.QAbstractItemView.ExtendedSelection)
- self.time_discovery_ratio_table.setSelectionBehavior(QtWidgets.QAbstractItemView.SelectRows)
+ self.time_discovery_ratio_table.setSelectionMode(
+ QtWidgets.QAbstractItemView.ExtendedSelection
+ )
+ self.time_discovery_ratio_table.setSelectionBehavior(
+ QtWidgets.QAbstractItemView.SelectRows
+ )
self.time_discovery_ratio_table.setRowCount(0)
- self.time_discovery_ratio_table.setObjectName("time_discovery_ratio_table")
+ self.time_discovery_ratio_table.setObjectName('time_discovery_ratio_table')
self.time_discovery_ratio_table.setColumnCount(2)
item = QtWidgets.QTableWidgetItem()
self.time_discovery_ratio_table.setHorizontalHeaderItem(0, item)
item = QtWidgets.QTableWidgetItem()
self.time_discovery_ratio_table.setHorizontalHeaderItem(1, item)
- self.time_discovery_ratio_table.horizontalHeader().setStretchLastSection(True)
+ self.time_discovery_ratio_table.horizontalHeader().setStretchLastSection(
+ True
+ )
self.time_discovery_ratio_table.verticalHeader().setVisible(False)
self.time_discovery_ratio_table.verticalHeader().setStretchLastSection(False)
self.leak_time_label = QtWidgets.QLabel(self.groupBox)
self.leak_time_label.setGeometry(QtCore.QRect(150, 50, 61, 16))
- self.leak_time_label.setObjectName("leak_time_label")
+ self.leak_time_label.setObjectName('leak_time_label')
self.leak_based_radio = QtWidgets.QRadioButton(self.groupBox)
self.leak_based_radio.setGeometry(QtCore.QRect(10, 20, 111, 17))
- self.leak_based_radio.setObjectName("leak_based_radio")
+ self.leak_based_radio.setObjectName('leak_based_radio')
self.time_based_radio = QtWidgets.QRadioButton(self.groupBox)
self.time_based_radio.setGeometry(QtCore.QRect(10, 90, 111, 17))
- self.time_based_radio.setObjectName("time_based_radio")
+ self.time_based_radio.setObjectName('time_based_radio')
self.time_line = QtWidgets.QLineEdit(self.groupBox)
self.time_line.setGeometry(QtCore.QRect(10, 120, 101, 20))
- self.time_line.setObjectName("time_line")
+ self.time_line.setObjectName('time_line')
self.discovery_ratio_line = QtWidgets.QLineEdit(self.groupBox)
self.discovery_ratio_line.setGeometry(QtCore.QRect(110, 120, 111, 20))
- self.discovery_ratio_line.setObjectName("discovery_ratio_line")
+ self.discovery_ratio_line.setObjectName('discovery_ratio_line')
self.add_button = QtWidgets.QPushButton(self.groupBox)
self.add_button.setGeometry(QtCore.QRect(230, 120, 51, 23))
- self.add_button.setObjectName("add_button")
+ self.add_button.setObjectName('add_button')
self.remove_button = QtWidgets.QPushButton(self.groupBox)
self.remove_button.setGeometry(QtCore.QRect(230, 150, 51, 23))
- self.remove_button.setObjectName("remove_button")
+ self.remove_button.setObjectName('remove_button')
self.retranslateUi(damage_discovery)
self.buttonBox.rejected.connect(damage_discovery.reject)
QtCore.QMetaObject.connectSlotsByName(damage_discovery)
- def retranslateUi(self, damage_discovery):
+ def retranslateUi(self, damage_discovery): # noqa: N802, D102
_translate = QtCore.QCoreApplication.translate
- damage_discovery.setWindowTitle(_translate("damage_discovery", "Damage Discovery"))
- self.groupBox.setTitle(_translate("damage_discovery", "Leak Model"))
- self.leak_anount_label.setText(_translate("damage_discovery", "Leak Amount"))
+ damage_discovery.setWindowTitle(
+ _translate('damage_discovery', 'Damage Discovery')
+ )
+ self.groupBox.setTitle(_translate('damage_discovery', 'Leak Model'))
+ self.leak_anount_label.setText(_translate('damage_discovery', 'Leak Amount'))
item = self.time_discovery_ratio_table.horizontalHeaderItem(0)
- item.setText(_translate("damage_discovery", "Time"))
+ item.setText(_translate('damage_discovery', 'Time'))
item = self.time_discovery_ratio_table.horizontalHeaderItem(1)
- item.setText(_translate("damage_discovery", "Discovery Ratio"))
- self.leak_time_label.setText(_translate("damage_discovery", "leak time"))
- self.leak_based_radio.setText(_translate("damage_discovery", "Leak Based"))
- self.time_based_radio.setText(_translate("damage_discovery", "Time Based"))
- self.add_button.setText(_translate("damage_discovery", "add"))
- self.remove_button.setText(_translate("damage_discovery", "Remove"))
+ item.setText(_translate('damage_discovery', 'Discovery Ratio'))
+ self.leak_time_label.setText(_translate('damage_discovery', 'leak time'))
+ self.leak_based_radio.setText(_translate('damage_discovery', 'Leak Based'))
+ self.time_based_radio.setText(_translate('damage_discovery', 'Time Based'))
+ self.add_button.setText(_translate('damage_discovery', 'add'))
+ self.remove_button.setText(_translate('damage_discovery', 'Remove'))
-if __name__ == "__main__":
+if __name__ == '__main__':
import sys
+
app = QtWidgets.QApplication(sys.argv)
damage_discovery = QtWidgets.QDialog()
ui = Ui_damage_discovery()
diff --git a/modules/systemPerformance/REWET/REWET/GUI/Damage_Tab_Designer.py b/modules/systemPerformance/REWET/REWET/GUI/Damage_Tab_Designer.py
index 7c556dc49..f5a966dda 100644
--- a/modules/systemPerformance/REWET/REWET/GUI/Damage_Tab_Designer.py
+++ b/modules/systemPerformance/REWET/REWET/GUI/Damage_Tab_Designer.py
@@ -1,418 +1,578 @@
-# -*- coding: utf-8 -*-
-"""
-Created on Fri Oct 28 12:50:24 2022
+"""Created on Fri Oct 28 12:50:24 2022
@author: snaeimi
-"""
+""" # noqa: CPY001, D400, N999
import os
-import pickle
-from PyQt5 import QtCore, QtGui, QtWidgets
+import pickle # noqa: S403
+
import pandas as pd
-from .Scenario_Dialog_Designer import Scenario_Dialog_Designer
-from .Pipe_Damage_Model_Designer import Pipe_Damage_Model_Designer
+from PyQt5 import QtCore, QtWidgets
+
from .Node_Damage_Model_Designer import Node_Damage_Model_Designer
+from .Pipe_Damage_Model_Designer import Pipe_Damage_Model_Designer
+from .Scenario_Dialog_Designer import Scenario_Dialog_Designer
-class Damage_Tab_Designer():
+class Damage_Tab_Designer: # noqa: D101
def __init__(self):
- #self.pipe_damage_model = {"CI":{"alpha":-0.0038, "beta":0.1096, "gamma":0.0196, "a":2, "b":1 }, "DI":{"alpha":-0.0038, "beta":0.05, "gamma":0.04, "a":2, "b":1 } }
- #self.node_damage_model = {'a':0.0036, 'aa':1, 'b':0, 'bb':0, 'c':-0.877, 'cc':1, 'd':0, 'dd':0, 'e':0.0248, 'ee1':1, 'ee2':1, 'f':0, 'ff1':0, 'ff2':0, "damage_node_model": "equal_diameter_emitter"}
-
- """
- These are variables that are shared between ui and settings.
- """
+ # self.pipe_damage_model = {"CI":{"alpha":-0.0038, "beta":0.1096, "gamma":0.0196, "a":2, "b":1 }, "DI":{"alpha":-0.0038, "beta":0.05, "gamma":0.04, "a":2, "b":1 } }
+ # self.node_damage_model = {'a':0.0036, 'aa':1, 'b':0, 'bb':0, 'c':-0.877, 'cc':1, 'd':0, 'dd':0, 'e':0.0248, 'ee1':1, 'ee2':1, 'f':0, 'ff1':0, 'ff2':0, "damage_node_model": "equal_diameter_emitter"}
+ """These are variables that are shared between ui and settings.""" # noqa: D401
self.setDamageSettings(self.settings, self.scenario_list)
-
+
"""
Reassignment of shared variables.
"""
- self.damage_input_directory = os.getcwd()
- self.current_xlsx_directory = os.getcwd()
- if self.scenario_list == None:
- self.scenario_list = pd.DataFrame(columns=["Scenario Name", "Pipe Damage", "Nodal Damage", "Pump Damage", "Tank Damage", "Probability"])
- self.scneraio_validated = False
+ self.damage_input_directory = os.getcwd() # noqa: PTH109
+ self.current_xlsx_directory = os.getcwd() # noqa: PTH109
+ if self.scenario_list == None: # noqa: E711
+ self.scenario_list = pd.DataFrame(
+ columns=[
+ 'Scenario Name',
+ 'Pipe Damage',
+ 'Nodal Damage',
+ 'Pump Damage',
+ 'Tank Damage',
+ 'Probability',
+ ]
+ )
+ self.scneraio_validated = False
self.damage_pipe_model_reviewed = False
-
+
"""
- ui value assigments.
+ ui value assignments.
"""
self.setDamageUI()
-
+
"""
Signals connection.
"""
self.add_scenario_button.clicked.connect(self.addNewScenarioByButton)
- self.damage_directory_browse_button.clicked.connect(self.browseDamageDirectoryByButton)
+ self.damage_directory_browse_button.clicked.connect(
+ self.browseDamageDirectoryByButton
+ )
self.remove_scenario_button.clicked.connect(self.removeScenarioByButton)
self.load_scenario_button.clicked.connect(self.loadScenarioByButton)
self.save_scenario_button.clicked.connect(self.saveScenarioByButton)
self.validate_scenario_button.clicked.connect(self.validateScenarioByButton)
- self.pipe_damage_modeling_button.clicked.connect(self.pipeDamageSettingByButton)
- self.node_damage_modeling_button.clicked.connect(self.nodeDamageSettingByButton)
+ self.pipe_damage_modeling_button.clicked.connect(
+ self.pipeDamageSettingByButton
+ )
+ self.node_damage_modeling_button.clicked.connect(
+ self.nodeDamageSettingByButton
+ )
self.file_type_excel_radio.toggled.connect(self.fileTypeChanged)
self.file_type_pickle_radio.toggled.connect(self.fileTypeChanged)
-
- def getDamageSettings(self):
-
+
+ def getDamageSettings(self): # noqa: N802, D102
if len(self.scenario_list) < 1:
- self.errorMSG("REWET", "Damage scenario list is empty.")
+ self.errorMSG('REWET', 'Damage scenario list is empty.')
return False
- if self.damage_input_directory == '':
- self.errorMSG("REWET", "No Damage Input Directory is selected.")
+ if self.damage_input_directory == '': # noqa: PLC1901
+ self.errorMSG('REWET', 'No Damage Input Directory is selected.')
return False
-
- self.settings.scenario['pipe_damage_model' ] = self.pipe_damage_model
- self.settings.scenario['node_damage_model' ] = self.node_damage_model
- self.settings.scenario['Pipe_damage_input_method' ] = self.pipe_damage_input_method
- self.settings.process['pipe_damage_file_directory'] = self.damage_input_directory
- #self.scenario_list -- already set
+
+ self.settings.scenario['pipe_damage_model'] = self.pipe_damage_model
+ self.settings.scenario['node_damage_model'] = self.node_damage_model
+ self.settings.scenario['Pipe_damage_input_method'] = (
+ self.pipe_damage_input_method
+ )
+ self.settings.process['pipe_damage_file_directory'] = (
+ self.damage_input_directory
+ )
+ # self.scenario_list -- already set
return True
-
- def setDamageUI(self):
- self.damage_direcotry_line.setText(self.damage_input_directory )
+
+ def setDamageUI(self): # noqa: N802, D102
+ self.damage_direcotry_line.setText(self.damage_input_directory)
self.clearScnearioTable()
self.populateScenarioTable()
-
- def setDamageSettings(self, settings, scenario_list):
- self.pipe_damage_model = settings.scenario['pipe_damage_model' ]
- self.node_damage_model = settings.scenario['node_damage_model' ]
- self.pipe_damage_input_method = settings.scenario['Pipe_damage_input_method' ]
- self.damage_input_directory = settings.process['pipe_damage_file_directory']
- self.scenario_list = scenario_list
-
- def addNewScenarioByButton(self):
+
+ def setDamageSettings(self, settings, scenario_list): # noqa: N802, D102
+ self.pipe_damage_model = settings.scenario['pipe_damage_model']
+ self.node_damage_model = settings.scenario['node_damage_model']
+ self.pipe_damage_input_method = settings.scenario['Pipe_damage_input_method']
+ self.damage_input_directory = settings.process['pipe_damage_file_directory']
+ self.scenario_list = scenario_list
+
+ def addNewScenarioByButton(self): # noqa: N802, D102
new_scenario_dialoge = Scenario_Dialog_Designer()
-
+
error = True
- while(error):
+ while error:
error = False
- return_value = new_scenario_dialoge._window.exec_()
-
+ return_value = new_scenario_dialoge._window.exec_() # noqa: SLF001
+
if return_value == 0:
return
-
+
scenario_name = new_scenario_dialoge.scenario_name_line.text()
pipe_damage_name = new_scenario_dialoge.pipe_damage_line.text()
node_damage_name = new_scenario_dialoge.node_damage_line.text()
pump_damage_name = new_scenario_dialoge.pump_damage_line.text()
tank_damage_name = new_scenario_dialoge.tank_damage_line.text()
- probability = new_scenario_dialoge.probability_line.text()
- probability = float(probability)
-
+ probability = new_scenario_dialoge.probability_line.text()
+ probability = float(probability)
+
if len(scenario_name) < 1:
- self.errorMSG("Empty Scneario Name", "Please enter a scenario name")
+ self.errorMSG('Empty Scenario Name', 'Please enter a scenario name')
error = True
-
+
if len(pipe_damage_name) < 1:
- self.errorMSG("Empty Pipe Damage Name", "Please enter a pipe damage name")
+ self.errorMSG(
+ 'Empty Pipe Damage Name', 'Please enter a pipe damage name'
+ )
error = True
-
+
if len(node_damage_name) < 1:
- self.errorMSG("Empty Node Damage Name", "Please enter a node damage name")
+ self.errorMSG(
+ 'Empty Node Damage Name', 'Please enter a node damage name'
+ )
error = True
-
+
if len(pump_damage_name) < 1:
- self.errorMSG("Empty Pump Damage Name", "Please enter a pump damage name")
+ self.errorMSG(
+ 'Empty Pump Damage Name', 'Please enter a pump damage name'
+ )
error = True
-
+
if len(tank_damage_name) < 1:
- self.errorMSG("Empty Tank Damage Name", "Please enter a tank damage name")
+ self.errorMSG(
+ 'Empty Tank Damage Name', 'Please enter a tank damage name'
+ )
error = True
-
- if (self.scenario_list["Scenario Name"]==scenario_name).any():
- self.errorMSG("Duplicate Scneario Name", "Please Have the Scenario Name changed")
+
+ if (self.scenario_list['Scenario Name'] == scenario_name).any():
+ self.errorMSG(
+ 'Duplicate Scenario Name',
+                    'Please change the scenario name',
+ )
error = True
- new_row = {"Scenario Name":scenario_name, "Pipe Damage":pipe_damage_name, "Nodal Damage":node_damage_name, "Pump Damage":pump_damage_name, "Tank Damage":tank_damage_name, "Probability":probability}
- self.scenario_list = self.scenario_list.append(new_row, ignore_index = True)
+ new_row = {
+ 'Scenario Name': scenario_name,
+ 'Pipe Damage': pipe_damage_name,
+ 'Nodal Damage': node_damage_name,
+ 'Pump Damage': pump_damage_name,
+ 'Tank Damage': tank_damage_name,
+ 'Probability': probability,
+ }
+ self.scenario_list = self.scenario_list.append(new_row, ignore_index=True)
self.clearScnearioTable()
self.populateScenarioTable()
- self.scneraio_validated = False
+ self.scneraio_validated = False
self.damage_pipe_model_reviewed = False
-
- def fileTypeChanged(self, checked):
+
+ def fileTypeChanged(self, checked): # noqa: ARG002, N802, D102
if self.file_type_excel_radio.isChecked():
self.pipe_damage_input_method = 'excel'
else:
self.pipe_damage_input_method = 'pickle'
-
- def removeScenarioByButton(self):
+
+ def removeScenarioByButton(self): # noqa: N802, D102
items = self.scenario_table.selectedItems()
if len(items) < 1:
return
-
+
row_number = []
- for i in range (0, len(items)):
+ for i in range(len(items)):
selected_row = items[i].row()
row_number.append(selected_row)
-
+
row_number = list(set(row_number))
-
+
for selected_row in row_number:
scenario_name = self.scenario_table.item(selected_row, 0).text()
- to_be_removed_index = (self.scenario_list[self.scenario_list == scenario_name]).index[0]
+ to_be_removed_index = (
+ self.scenario_list[self.scenario_list == scenario_name]
+ ).index[0]
self.scenario_list = self.scenario_list.drop(to_be_removed_index)
self.scenario_list = self.scenario_list.reset_index(drop=True)
self.clearScnearioTable()
self.populateScenarioTable()
- self.scneraio_validated = False
+ self.scneraio_validated = False
self.damage_pipe_model_reviewed = False
-
- def loadScenarioByButton(self):
- file = QtWidgets.QFileDialog.getOpenFileName(self.asli_MainWindow, 'Open file',
- self.current_xlsx_directory,"scenrario file (*.xlsx)")
- if file[0] == '':
+
+ def loadScenarioByButton(self): # noqa: N802, D102
+ file = QtWidgets.QFileDialog.getOpenFileName(
+ self.asli_MainWindow,
+ 'Open file',
+ self.current_xlsx_directory,
+            'scenario file (*.xlsx)',
+ )
+ if file[0] == '': # noqa: PLC1901
return
split_addr = os.path.split(file[0])
-
+
temp = self.getScnearioListFromXLSX(file[0])
- if type(temp) == type(None):
+ if temp is None:
return
self.scenario_list = temp
-
+
self.current_xlsx_directory = split_addr[0]
self.wdn_addr_line.setText(file[0])
self.clearScnearioTable()
self.populateScenarioTable()
- self.scneraio_validated = False
+ self.scneraio_validated = False
self.damage_pipe_model_reviewed = False
-
- def saveScenarioByButton(self):
-
- file = QtWidgets.QFileDialog.getSaveFileName(self.asli_MainWindow, 'Save file',
- self.current_xlsx_directory,"Excel file (*.xlsx)")
+
+ def saveScenarioByButton(self): # noqa: N802, D102
+ file = QtWidgets.QFileDialog.getSaveFileName(
+ self.asli_MainWindow,
+ 'Save file',
+ self.current_xlsx_directory,
+ 'Excel file (*.xlsx)',
+ )
split_addr = os.path.split(file[0])
- self.current_xlsx_directory = split_addr[0]
-
+ self.current_xlsx_directory = split_addr[0]
+
self.scenario_list.to_excel(file[0])
-
- def validateScenarioByButton(self):
+
+ def validateScenarioByButton(self): # noqa: C901, N802, D102
self.status_text.setText('Validating Damage Scnearios')
if_validate_successful = True
- text_output = ""
+ text_output = ''
scneario_list = self.scenario_list
-
+
all_pipe_material = set()
-
- damage_pipe_not_exist_List = []
- damage_nodal_not_exist_List = []
- damage_pump_not_exist_List = []
- damage_tank_not_exist_List = []
-
- for index, row in scneario_list.iterrows():
- damage_pipe_name = row["Pipe Damage"]
- damage_pipe_addr = os.path.join(self.damage_input_directory, damage_pipe_name)
- if not os.path.exists(damage_pipe_addr):
+
+ damage_pipe_not_exist_List = [] # noqa: N806
+ damage_nodal_not_exist_List = [] # noqa: N806
+ damage_pump_not_exist_List = [] # noqa: N806
+ damage_tank_not_exist_List = [] # noqa: N806
+
+ for index, row in scneario_list.iterrows(): # noqa: B007
+ damage_pipe_name = row['Pipe Damage']
+ damage_pipe_addr = os.path.join( # noqa: PTH118
+ self.damage_input_directory, damage_pipe_name
+ )
+ if not os.path.exists(damage_pipe_addr): # noqa: PTH110
damage_pipe_not_exist_List.append(damage_pipe_name)
-
- damage_node_name = row["Nodal Damage"]
- damage_nodal_addr = os.path.join(self.damage_input_directory, damage_node_name)
- if not os.path.exists(damage_nodal_addr):
+
+ damage_node_name = row['Nodal Damage']
+ damage_nodal_addr = os.path.join( # noqa: PTH118
+ self.damage_input_directory, damage_node_name
+ )
+ if not os.path.exists(damage_nodal_addr): # noqa: PTH110
damage_nodal_not_exist_List.append(damage_node_name)
-
- damage_pump_name = row["Pump Damage"]
- damage_pump_addr = os.path.join(self.damage_input_directory, damage_pump_name)
- if not os.path.exists(damage_pump_addr):
+
+ damage_pump_name = row['Pump Damage']
+ damage_pump_addr = os.path.join( # noqa: PTH118
+ self.damage_input_directory, damage_pump_name
+ )
+ if not os.path.exists(damage_pump_addr): # noqa: PTH110
damage_pump_not_exist_List.append(damage_pump_name)
-
- damage_tank_name = row["Tank Damage"]
- damage_tank_addr = os.path.join(self.damage_input_directory, damage_tank_name)
- if not os.path.exists(damage_tank_addr):
+
+ damage_tank_name = row['Tank Damage']
+ damage_tank_addr = os.path.join( # noqa: PTH118
+ self.damage_input_directory, damage_tank_name
+ )
+ if not os.path.exists(damage_tank_addr): # noqa: PTH110
damage_tank_not_exist_List.append(damage_tank_name)
-
+
if len(damage_pipe_not_exist_List) > 0:
- text_output += "The follwing pipe damage files could not be found.\n"+repr(damage_pipe_not_exist_List) +"\n"
+ text_output += (
+ 'The following pipe damage files could not be found.\n'
+ + repr(damage_pipe_not_exist_List)
+ + '\n'
+ )
if_validate_successful = False
if len(damage_nodal_not_exist_List) > 0:
- text_output += "The follwing node damage files could not be found.\n"+repr(damage_nodal_not_exist_List) +"\n"
+ text_output += (
+ 'The following node damage files could not be found.\n'
+ + repr(damage_nodal_not_exist_List)
+ + '\n'
+ )
if_validate_successful = False
if len(damage_pump_not_exist_List) > 0:
- text_output += "The follwing pump damage files could not be found.\n"+repr(damage_pump_not_exist_List) +"\n"
+ text_output += (
+ 'The following pump damage files could not be found.\n'
+ + repr(damage_pump_not_exist_List)
+ + '\n'
+ )
if_validate_successful = False
if len(damage_tank_not_exist_List) > 0:
- text_output += "The follwing tank damage files could not be found.\n"+repr(damage_tank_not_exist_List) +"\n"
+ text_output += (
+ 'The following tank damage files could not be found.\n'
+ + repr(damage_tank_not_exist_List)
+ + '\n'
+ )
if_validate_successful = False
-
+
try:
- must_have_pipe_columns = set(["time", "pipe_id", "damage_loc", "type", "Material"])
- for index, row in scneario_list.iterrows():
- damage_pipe_name = row["Pipe Damage"]
+ must_have_pipe_columns = set( # noqa: C405
+ ['time', 'pipe_id', 'damage_loc', 'type', 'Material']
+ )
+ for index, row in scneario_list.iterrows(): # noqa: B007
+ damage_pipe_name = row['Pipe Damage']
if self.pipe_damage_input_method == 'excel':
- pipe_damage = pd.read_excel(os.path.join(self.damage_input_directory, damage_pipe_name) )
- elif self.pipe_damage_input_method == "pickle":
- with open(os.path.join(self.damage_input_directory, damage_pipe_name), "rb") as f:
- pipe_damage = pickle.load(f)
- index_list = pipe_damage.index
+ pipe_damage = pd.read_excel(
+ os.path.join(self.damage_input_directory, damage_pipe_name) # noqa: PTH118
+ )
+ elif self.pipe_damage_input_method == 'pickle':
+ with open( # noqa: PTH123
+ os.path.join(self.damage_input_directory, damage_pipe_name), # noqa: PTH118
+ 'rb',
+ ) as f:
+ pipe_damage = pickle.load(f) # noqa: S301
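+ # the pickled pipe damage is a Series of damage-record dicts indexed by time; flatten it into a DataFrame and keep the index as a 'time' column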
+ index_list = pipe_damage.index
pipe_damage = pd.DataFrame.from_dict(pipe_damage.to_list())
- pipe_damage.loc[:,'time'] = index_list
+ pipe_damage.loc[:, 'time'] = index_list
if len(index_list) == 0:
available_columns = set(pipe_damage.columns)
- not_available_columns = must_have_pipe_columns - available_columns
+ not_available_columns = (
+ must_have_pipe_columns - available_columns
+ )
pipe_damage.loc[:, not_available_columns] = None
- #print(pipe_damage)
- #pipe_damage = pd.DataFrame.from_dict( )
- #pipe_damage.index.name = 'time'
- #pipe_damage = pipe_damage.reset_index(drop=False)
- # print(pipe_damage)
+ # print(pipe_damage)
+ # pipe_damage = pd.DataFrame.from_dict( )
+ # pipe_damage.index.name = 'time'
+ # pipe_damage = pipe_damage.reset_index(drop=False)
+ # print(pipe_damage)
available_columns = set(pipe_damage.columns)
not_available_columns = must_have_pipe_columns - available_columns
if len(not_available_columns) > 0:
- text_output += "In pipe damage file= "+repr(damage_pipe_name) + "the following headers are missing: " + repr(not_available_columns) + "\n"
+ text_output += (
+ 'In pipe damage file= '
+ + repr(damage_pipe_name)
+ + ' the following headers are missing: '
+ + repr(not_available_columns)
+ + '\n'
+ )
if_validate_successful = False
-
- new_material_set = set(pipe_damage['Material'].unique() )
+
+ new_material_set = set(pipe_damage['Material'].unique())
all_pipe_material = all_pipe_material.union(new_material_set)
-
- must_have_node_columns = set(["time", "node_name", "Number_of_damages", "node_Pipe_Length"])
- for index, row in scneario_list.iterrows():
- damage_node_name = row["Nodal Damage"]
+
+ must_have_node_columns = set( # noqa: C405
+ ['time', 'node_name', 'Number_of_damages', 'node_Pipe_Length']
+ )
+ for index, row in scneario_list.iterrows(): # noqa: B007
+ damage_node_name = row['Nodal Damage']
if self.pipe_damage_input_method == 'excel':
- node_damage = pd.read_excel(os.path.join(self.damage_input_directory, damage_node_name) )
- elif self.pipe_damage_input_method == "pickle":
- with open(os.path.join(self.damage_input_directory, damage_node_name), "rb") as f:
- node_damage = pickle.load(f)
- index_list = node_damage.index
+ node_damage = pd.read_excel(
+ os.path.join(self.damage_input_directory, damage_node_name) # noqa: PTH118
+ )
+ elif self.pipe_damage_input_method == 'pickle':
+ with open( # noqa: PTH123
+ os.path.join(self.damage_input_directory, damage_node_name), # noqa: PTH118
+ 'rb',
+ ) as f:
+ node_damage = pickle.load(f) # noqa: S301
+ index_list = node_damage.index
node_damage = pd.DataFrame.from_dict(node_damage.to_list())
- node_damage.loc[:,'time'] = index_list
+ node_damage.loc[:, 'time'] = index_list
if len(index_list) == 0:
available_columns = set(node_damage.columns)
- not_available_columns = must_have_node_columns - available_columns
+ not_available_columns = (
+ must_have_node_columns - available_columns
+ )
pipe_damage.loc[:, not_available_columns] = None
available_columns = set(node_damage.columns)
not_available_columns = must_have_node_columns - available_columns
if len(not_available_columns) > 0:
- text_output += "In node damage file= "+repr(damage_node_name) + "the following headers are missing: " + repr(not_available_columns) + "\n"
+ text_output += (
+ 'In node damage file= '
+ + repr(damage_node_name)
+ + ' the following headers are missing: '
+ + repr(not_available_columns)
+ + '\n'
+ )
if_validate_successful = False
-
- must_have_pump_columns = set(["time", "Pump_ID", "Restore_time"])
- for index, row in scneario_list.iterrows():
- damage_pump_name = row["Pump Damage"]
+
+ must_have_pump_columns = set(['time', 'Pump_ID', 'Restore_time']) # noqa: C405
+ for index, row in scneario_list.iterrows(): # noqa: B007
+ damage_pump_name = row['Pump Damage']
if self.pipe_damage_input_method == 'excel':
- pump_damage = pd.read_excel(os.path.join(self.damage_input_directory, damage_pump_name) )
- elif self.pipe_damage_input_method == "pickle":
- with open(os.path.join(self.damage_input_directory, damage_pump_name), "rb") as f:
- pump_damage = pickle.load(f)
+ pump_damage = pd.read_excel(
+ os.path.join(self.damage_input_directory, damage_pump_name) # noqa: PTH118
+ )
+ elif self.pipe_damage_input_method == 'pickle':
+ with open( # noqa: PTH123
+ os.path.join(self.damage_input_directory, damage_pump_name), # noqa: PTH118
+ 'rb',
+ ) as f:
+ pump_damage = pickle.load(f) # noqa: S301
pump_damage = pump_damage.reset_index(drop=False)
available_columns = set(pump_damage.columns)
- not_available_columns = must_have_pump_columns - available_columns
+ not_available_columns = (
+ must_have_pump_columns - available_columns
+ )
pump_damage.loc[:, not_available_columns] = None
- #index_list = pump_damage.index
- #pump_damage = pd.DataFrame.from_dict(pump_damage.to_list() )
-
+ # index_list = pump_damage.index
+ # pump_damage = pd.DataFrame.from_dict(pump_damage.to_list() )
+
available_columns = set(pump_damage.columns)
not_available_columns = must_have_pump_columns - available_columns
-
+
if len(not_available_columns) > 0 and len(pump_damage) > 0:
- text_output += "In pump damage file= "+repr(damage_pump_name) + "the following headers are missing: " + repr(not_available_columns) + "\n"
+ text_output += (
+ 'In pump damage file= '
+ + repr(damage_pump_name)
+ + ' the following headers are missing: '
+ + repr(not_available_columns)
+ + '\n'
+ )
if_validate_successful = False
-
- must_have_tank_columns = set(["time", "Tank_ID", "Restore_time"])
- for index, row in scneario_list.iterrows():
- damage_tank_name = row["Tank Damage"]
+
+ must_have_tank_columns = set(['time', 'Tank_ID', 'Restore_time']) # noqa: C405
+ for index, row in scneario_list.iterrows(): # noqa: B007
+ damage_tank_name = row['Tank Damage']
if self.pipe_damage_input_method == 'excel':
- tank_damage = pd.read_excel(os.path.join(self.damage_input_directory, damage_tank_name) )
- elif self.pipe_damage_input_method == "pickle":
- with open(os.path.join(self.damage_input_directory, damage_tank_name), "rb") as f:
- tank_damage = pickle.load(f)
+ tank_damage = pd.read_excel(
+ os.path.join(self.damage_input_directory, damage_tank_name) # noqa: PTH118
+ )
+ elif self.pipe_damage_input_method == 'pickle':
+ with open( # noqa: PTH123
+ os.path.join(self.damage_input_directory, damage_tank_name), # noqa: PTH118
+ 'rb',
+ ) as f:
+ tank_damage = pickle.load(f) # noqa: S301
tank_damage = tank_damage.reset_index(drop=False)
available_columns = set(tank_damage.columns)
- not_available_columns = must_have_tank_columns - available_columns
+ not_available_columns = (
+ must_have_tank_columns - available_columns
+ )
tank_damage.loc[:, not_available_columns] = None
-
+
available_columns = set(tank_damage.columns)
not_available_columns = must_have_tank_columns - available_columns
if len(not_available_columns) > 0 and len(damage_tank_name) > 0:
- text_output += "In tank damage file= "+repr(damage_tank_name) + "the following headers are missing: " + repr(not_available_columns) + "\n"
+ text_output += (
+ 'In tank damage file= '
+ + repr(damage_tank_name)
+ + ' the following headers are missing: '
+ + repr(not_available_columns)
+ + '\n'
+ )
if_validate_successful = False
- except Exception as exp:
- raise exp
+ except Exception as exp: # noqa: TRY302
+ raise exp # noqa: TRY201
if_validate_successful = False
- text_output += "An error happened. File type might be wrong in addition to other problems. More information:\n"+ repr(exp)
-
- if if_validate_successful == True:
- text_output += "Damage Scenario List Validated Sucessfully"
- not_defined_materials = all_pipe_material - set(self.pipe_damage_model.keys() )
+ text_output += (
+ 'An error occurred. The file type might be wrong, among other possible problems. More information:\n'
+ + repr(exp)
+ )
+
+ if if_validate_successful == True: # noqa: E712
+ text_output += 'Damage Scenario List Validated Successfully'
+ not_defined_materials = all_pipe_material - set(
+ self.pipe_damage_model.keys()
+ )
if len(not_defined_materials) > 0:
- default_material_model = self.settings.scenario['default_pipe_damage_model']
- new_material_model = dict(zip(not_defined_materials, [default_material_model for i in range(len(not_defined_materials)) ] ) )
+ default_material_model = self.settings.scenario[
+ 'default_pipe_damage_model'
+ ]
+ new_material_model = dict(
+ zip(
+ not_defined_materials,
+ [
+ default_material_model
+ for i in range(len(not_defined_materials))
+ ],
+ )
+ )
self.pipe_damage_model.update(new_material_model)
self.scneraio_validated = True
-
+
self.status_text.setText(text_output)
-
- def pipeDamageSettingByButton(self):
- if self.scneraio_validated == False:
- self.errorMSG('REWET', 'You must validate damage scenarios sucessfully before reviewing pipe damage models.')
+
+ def pipeDamageSettingByButton(self): # noqa: N802, D102
+ if self.scneraio_validated == False: # noqa: E712
+ self.errorMSG(
+ 'REWET',
+ 'You must validate damage scenarios successfully before reviewing pipe damage models.',
+ )
return
pipe_designer = Pipe_Damage_Model_Designer(self.pipe_damage_model)
- return_value = pipe_designer._window.exec_()
-
+ return_value = pipe_designer._window.exec_() # noqa: SLF001
+
if return_value == 1:
self.pipe_damage_model = pipe_designer.pipe_damage_model
self.damage_pipe_model_reviewed = True
-
-
- def nodeDamageSettingByButton(self):
+
+ def nodeDamageSettingByButton(self): # noqa: N802, D102
node_designer = Node_Damage_Model_Designer(self.node_damage_model)
- return_value = node_designer._window.exec_()
-
+ return_value = node_designer._window.exec_() # noqa: SLF001
+
if return_value == 1:
self.node_damage_model = node_designer.node_damage_model
-
- def browseDamageDirectoryByButton(self):
- directory = QtWidgets.QFileDialog.getExistingDirectory(self.asli_MainWindow, "Select Directory", self.current_xlsx_directory)
- if directory == '':
+
+ def browseDamageDirectoryByButton(self): # noqa: N802, D102
+ directory = QtWidgets.QFileDialog.getExistingDirectory(
+ self.asli_MainWindow, 'Select Directory', self.current_xlsx_directory
+ )
+ if directory == '': # noqa: PLC1901
return
self.current_xlsx_directory = self.current_xlsx_directory
self.damage_input_directory = directory
self.damage_direcotry_line.setText(directory)
-
- def getScnearioListFromXLSX(self, scenario_file_addr):
+
+ def getScnearioListFromXLSX(self, scenario_file_addr): # noqa: N802, D102
scn_list = pd.read_excel(scenario_file_addr)
-
- must_be_headers = ["Scenario Name", "Pipe Damage", "Nodal Damage", "Pump Damage", "Tank Damage", "Probability"]
+
+ must_be_headers = [
+ 'Scenario Name',
+ 'Pipe Damage',
+ 'Nodal Damage',
+ 'Pump Damage',
+ 'Tank Damage',
+ 'Probability',
+ ]
available_headers = scn_list.columns.tolist()
-
+
not_available_headers = set(must_be_headers) - set(available_headers)
- if len(not_available_headers)> 1:
- self.status_text.setText("failed to open the scenario file. the folowing columns are missing and need to be in teh file: "+ repr(not_available_headers))
+ if len(not_available_headers) > 0:
+ self.status_text.setText(
+ 'Failed to open the scenario file. The following columns are missing and need to be in the file: '
+ + repr(not_available_headers)
+ )
return None
- else:
- self.status_text.setText("Opened file Sucessfully.")
+ else: # noqa: RET505
+ self.status_text.setText('Opened file Successfully.')
scn_list = scn_list[must_be_headers]
-
- return scn_list
-
- def populateScenarioTable(self):
-
- for index, row in self.scenario_list.iterrows():
+
+ return scn_list # noqa: RET504
+
+ def populateScenarioTable(self): # noqa: N802, D102
+ for index, row in self.scenario_list.iterrows(): # noqa: B007
number_of_rows = self.scenario_table.rowCount()
self.scenario_table.insertRow(number_of_rows)
-
- scenario_item = QtWidgets.QTableWidgetItem(row["Scenario Name"] )
- pipe_damage_item = QtWidgets.QTableWidgetItem(row["Pipe Damage" ] )
- node_damage_item = QtWidgets.QTableWidgetItem(row["Nodal Damage" ] )
- pump_damage_item = QtWidgets.QTableWidgetItem(row["Pump Damage" ] )
- tank_damage_item = QtWidgets.QTableWidgetItem(row["Tank Damage" ] )
- probability_item = QtWidgets.QTableWidgetItem(str(row["Probability" ] ) )
-
- scenario_item.setFlags( QtCore.Qt.ItemIsSelectable | QtCore.Qt.ItemIsEnabled )
- pipe_damage_item.setFlags( QtCore.Qt.ItemIsSelectable | QtCore.Qt.ItemIsEnabled )
- node_damage_item.setFlags( QtCore.Qt.ItemIsSelectable | QtCore.Qt.ItemIsEnabled )
- pump_damage_item.setFlags( QtCore.Qt.ItemIsSelectable | QtCore.Qt.ItemIsEnabled )
- tank_damage_item.setFlags( QtCore.Qt.ItemIsSelectable | QtCore.Qt.ItemIsEnabled )
- probability_item.setFlags( QtCore.Qt.ItemIsSelectable | QtCore.Qt.ItemIsEnabled )
-
- self.scenario_table.setItem(number_of_rows, 0, scenario_item )
- self.scenario_table.setItem(number_of_rows, 1, pipe_damage_item )
- self.scenario_table.setItem(number_of_rows, 2, node_damage_item )
- self.scenario_table.setItem(number_of_rows, 3, pump_damage_item )
- self.scenario_table.setItem(number_of_rows, 4, tank_damage_item )
- self.scenario_table.setItem(number_of_rows, 5, probability_item )
-
- def clearScnearioTable(self):
- for i in range(self.scenario_table.rowCount()):
+
+ scenario_item = QtWidgets.QTableWidgetItem(row['Scenario Name'])
+ pipe_damage_item = QtWidgets.QTableWidgetItem(row['Pipe Damage'])
+ node_damage_item = QtWidgets.QTableWidgetItem(row['Nodal Damage'])
+ pump_damage_item = QtWidgets.QTableWidgetItem(row['Pump Damage'])
+ tank_damage_item = QtWidgets.QTableWidgetItem(row['Tank Damage'])
+ probability_item = QtWidgets.QTableWidgetItem(str(row['Probability']))
+
+ scenario_item.setFlags(
+ QtCore.Qt.ItemIsSelectable | QtCore.Qt.ItemIsEnabled
+ )
+ pipe_damage_item.setFlags(
+ QtCore.Qt.ItemIsSelectable | QtCore.Qt.ItemIsEnabled
+ )
+ node_damage_item.setFlags(
+ QtCore.Qt.ItemIsSelectable | QtCore.Qt.ItemIsEnabled
+ )
+ pump_damage_item.setFlags(
+ QtCore.Qt.ItemIsSelectable | QtCore.Qt.ItemIsEnabled
+ )
+ tank_damage_item.setFlags(
+ QtCore.Qt.ItemIsSelectable | QtCore.Qt.ItemIsEnabled
+ )
+ probability_item.setFlags(
+ QtCore.Qt.ItemIsSelectable | QtCore.Qt.ItemIsEnabled
+ )
+
+ self.scenario_table.setItem(number_of_rows, 0, scenario_item)
+ self.scenario_table.setItem(number_of_rows, 1, pipe_damage_item)
+ self.scenario_table.setItem(number_of_rows, 2, node_damage_item)
+ self.scenario_table.setItem(number_of_rows, 3, pump_damage_item)
+ self.scenario_table.setItem(number_of_rows, 4, tank_damage_item)
+ self.scenario_table.setItem(number_of_rows, 5, probability_item)
+
+ def clearScnearioTable(self): # noqa: N802, D102
+ for i in range(self.scenario_table.rowCount()): # noqa: B007
self.scenario_table.removeRow(0)
-
-
\ No newline at end of file
diff --git a/modules/systemPerformance/REWET/REWET/GUI/Hydraulic_Tab_Designer.py b/modules/systemPerformance/REWET/REWET/GUI/Hydraulic_Tab_Designer.py
index 80f1a1fa7..a54a60964 100644
--- a/modules/systemPerformance/REWET/REWET/GUI/Hydraulic_Tab_Designer.py
+++ b/modules/systemPerformance/REWET/REWET/GUI/Hydraulic_Tab_Designer.py
@@ -1,112 +1,122 @@
-# -*- coding: utf-8 -*-
-"""
-Created on Thu Oct 27 19:19:02 2022
+"""Created on Thu Oct 27 19:19:02 2022
@author: snaeimi
-"""
+""" # noqa: CPY001, D400, N999
-from PyQt5 import QtGui, QtWidgets
import os
-class Hydraulic_Tab_Designer():
+from PyQt5 import QtGui, QtWidgets
+
+
+class Hydraulic_Tab_Designer: # noqa: D101
def __init__(self):
-
- """
- These are variables that are shared between ui and settings.
- """
+ """These are variables that are shared between ui and settings.""" # noqa: D401
self.setHydraulicSettings(self.settings)
-
+
"""
- ui value assigments.
+ ui value assignments.
"""
self.setHydraulicUI()
"""
Field Validators.
"""
- self.demand_ratio_line.setValidator(QtGui.QDoubleValidator(0.0, 1.0, 3, notation=QtGui.QDoubleValidator.StandardNotation) )
- self.minimum_pressure_line.setValidator(QtGui.QIntValidator(0.0, 2147483647) )
- self.required_pressure_line.setValidator(QtGui.QIntValidator(0.0, 2147483647) )
- self.hydraulic_time_step_line.setValidator(QtGui.QIntValidator(0.0, 2147483647) )
-
+ self.demand_ratio_line.setValidator(
+ QtGui.QDoubleValidator(
+ 0.0, 1.0, 3, notation=QtGui.QDoubleValidator.StandardNotation
+ )
+ )
+ self.minimum_pressure_line.setValidator(QtGui.QIntValidator(0.0, 2147483647))
+ self.required_pressure_line.setValidator(
+ QtGui.QIntValidator(0.0, 2147483647)
+ )
+ self.hydraulic_time_step_line.setValidator(
+ QtGui.QIntValidator(0.0, 2147483647)
+ )
+
"""
Signals connection.
"""
self.demand_ratio_line.textChanged.connect(self.demandRatioValidatorHelper)
self.wdn_browse_button.clicked.connect(self.wdnFileBroweserClicked)
- self.hydraulic_time_step_line.textEdited.connect(self.hydraulicTimeStepValidatorHelper)
- self.current_inp_directory = os.getcwd()
-
- def getHydraulicSettings(self):
-
- if self.wn_inp == '':
- self.errorMSG("REWET", "Water distribution network File must be provided")
+ self.hydraulic_time_step_line.textEdited.connect(
+ self.hydraulicTimeStepValidatorHelper
+ )
+ self.current_inp_directory = os.getcwd() # noqa: PTH109
+
+ def getHydraulicSettings(self): # noqa: N802, D102
+ if self.wn_inp == '': # noqa: PLC1901
+ self.errorMSG(
+ 'REWET', 'A water distribution network file must be provided'
+ )
return False
-
- #self.wn_inp -- already set
- self.demand_ratio = float(self.demand_ratio_line.text() )
- self.minimum_pressure = float(self.minimum_pressure_line.text() )
- self.required_pressure = float(self.required_pressure_line.text() )
- self.hydraulic_time_step = int(float(self.hydraulic_time_step_line.text() ) )
-
- self.settings.process['WN_INP' ] = self.wn_inp
- self.settings.process['demand_ratio' ] = self.demand_ratio
- self.settings.process['solver_type' ] = self.solver
- self.settings.scenario['minimum_pressure' ] = self.minimum_pressure
- self.settings.scenario['required_pressure' ] = self.required_pressure
+
+ # self.wn_inp -- already set
+ self.demand_ratio = float(self.demand_ratio_line.text())
+ self.minimum_pressure = float(self.minimum_pressure_line.text())
+ self.required_pressure = float(self.required_pressure_line.text())
+ self.hydraulic_time_step = int(float(self.hydraulic_time_step_line.text()))
+
+ self.settings.process['WN_INP'] = self.wn_inp
+ self.settings.process['demand_ratio'] = self.demand_ratio
+ self.settings.process['solver_type'] = self.solver
+ self.settings.scenario['minimum_pressure'] = self.minimum_pressure
+ self.settings.scenario['required_pressure'] = self.required_pressure
self.settings.scenario['hydraulic_time_step'] = self.hydraulic_time_step
-
+
return True
-
- def setHydraulicUI(self):
- self.wdn_addr_line.setText( self.wn_inp)
+
+ def setHydraulicUI(self): # noqa: N802, D102
+ self.wdn_addr_line.setText(self.wn_inp)
self.last_demand_ratio_value = self.demand_ratio
- self.demand_ratio_line.setText(str(self.last_demand_ratio_value) )
-
+ self.demand_ratio_line.setText(str(self.last_demand_ratio_value))
+
if self.solver == 'ModifiedEPANETV2.2':
self.modified_epanet_radio.setChecked(True)
elif self.solver == 'WNTR':
self.wntr_solver_radio.setChecked(True)
else:
- raise ValueError("Unknown value for solver: " + repr(self.solver) )
-
- self.minimum_pressure_line.setText(str(self.minimum_pressure) )
- self.required_pressure_line.setText(str(self.required_pressure) )
- self.hydraulic_time_step_line.setText(str(self.hydraulic_time_step) )
-
-
- def setHydraulicSettings(self, settings):
- self.wn_inp = settings.process['WN_INP' ]
- self.demand_ratio = settings.process['demand_ratio' ]
- self.solver = settings.process['solver_type' ]
- self.minimum_pressure = settings.scenario['minimum_pressure' ]
- self.required_pressure = settings.scenario['required_pressure' ]
+ raise ValueError('Unknown value for solver: ' + repr(self.solver))
+
+ self.minimum_pressure_line.setText(str(self.minimum_pressure))
+ self.required_pressure_line.setText(str(self.required_pressure))
+ self.hydraulic_time_step_line.setText(str(self.hydraulic_time_step))
+
+ def setHydraulicSettings(self, settings): # noqa: N802, D102
+ self.wn_inp = settings.process['WN_INP']
+ self.demand_ratio = settings.process['demand_ratio']
+ self.solver = settings.process['solver_type']
+ self.minimum_pressure = settings.scenario['minimum_pressure']
+ self.required_pressure = settings.scenario['required_pressure']
self.hydraulic_time_step = settings.scenario['hydraulic_time_step']
-
- def demandRatioValidatorHelper(self, x):
+
+ def demandRatioValidatorHelper(self, x): # noqa: N802, D102
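+ # keep the demand ratio at or below 1: restore the last accepted value when the typed value exceeds 1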
if float(x) > 1:
self.demand_ratio_line.setText(self.last_demand_ratio_value)
else:
self.last_demand_ratio_value = x
- #print(x)
-
- def hydraulicTimeStepValidatorHelper(self, x):
+ # print(x)
+
+ def hydraulicTimeStepValidatorHelper(self, x): # noqa: ARG002, N802, D102
try:
- hydraulic_time_step = int(float( self.hydraulic_time_step_line.text()) )
- except:
+ hydraulic_time_step = int(float(self.hydraulic_time_step_line.text()))
+ except: # noqa: E722
hydraulic_time_step = 0
- simulation_time_step = int(float( self.simulation_time_step_line.text()) )
-
+ simulation_time_step = int(float(self.simulation_time_step_line.text()))
+
if hydraulic_time_step > simulation_time_step:
self.hydraulic_time_step_line.setText(str(simulation_time_step))
-
- def wdnFileBroweserClicked(self):
- file = QtWidgets.QFileDialog.getOpenFileName(self.asli_MainWindow, 'Open file',
- self.current_inp_directory,"inp file (*.inp)")
- if file[0] == '':
+
+ def wdnFileBroweserClicked(self): # noqa: N802, D102
+ file = QtWidgets.QFileDialog.getOpenFileName(
+ self.asli_MainWindow,
+ 'Open file',
+ self.current_inp_directory,
+ 'inp file (*.inp)',
+ )
+ if file[0] == '': # noqa: PLC1901
return
split_addr = os.path.split(file[0])
- self.current_inp_directory = split_addr[0]
+ self.current_inp_directory = split_addr[0]
self.wn_inp = file[0]
-
+
self.wdn_addr_line.setText(file[0])
-
\ No newline at end of file
diff --git a/modules/systemPerformance/REWET/REWET/GUI/Input_IO.py b/modules/systemPerformance/REWET/REWET/GUI/Input_IO.py
index 8fe08dfa5..3d1e55ee7 100644
--- a/modules/systemPerformance/REWET/REWET/GUI/Input_IO.py
+++ b/modules/systemPerformance/REWET/REWET/GUI/Input_IO.py
@@ -1,102 +1,124 @@
-import os
+import os # noqa: CPY001, D100, N999
+import pickle # noqa: S403
+
import pandas as pd
-import pickle
-##################### Read files From Pickle #####################
-def read_pipe_damage_seperate_pickle_file(directory, all_damages_file_name):
- file_dest=os.path.join(directory, all_damages_file_name)
- with open(file_dest, 'rb') as f:
- _all_damages = pickle.load(f)
+# Read files From Pickle #####################
+def read_pipe_damage_seperate_pickle_file(directory, all_damages_file_name): # noqa: D103
+ file_dest = os.path.join(directory, all_damages_file_name) # noqa: PTH118
+ with open(file_dest, 'rb') as f: # noqa: PTH123
+ _all_damages = pickle.load(f) # noqa: S301
+
+ return _all_damages # noqa: RET504
+
+
+def read_node_damage_seperate_pickle_file(directory, all_damages_file_name): # noqa: D103
+ file_dest = os.path.join(directory, all_damages_file_name) # noqa: PTH118
+ with open(file_dest, 'rb') as f: # noqa: PTH123
+ _node_damages = pickle.load(f) # noqa: S301
- return _all_damages
+ return _node_damages # noqa: RET504
-def read_node_damage_seperate_pickle_file(directory, all_damages_file_name):
- file_dest=os.path.join(directory, all_damages_file_name)
- with open(file_dest, 'rb') as f:
- _node_damages = pickle.load(f)
-
- return _node_damages
-def read_tank_damage_seperate_pickle_file(directory, tank_damages_file_name):
- file_dest=os.path.join(directory, tank_damages_file_name)
- with open(file_dest, 'rb') as f:
- _tank_damages = pickle.load(f)
+def read_tank_damage_seperate_pickle_file(directory, tank_damages_file_name): # noqa: D103
+ file_dest = os.path.join(directory, tank_damages_file_name) # noqa: PTH118
+ with open(file_dest, 'rb') as f: # noqa: PTH123
+ _tank_damages = pickle.load(f) # noqa: S301
- return _tank_damages
+ return _tank_damages # noqa: RET504
-def read_pump_damage_seperate_pickle_file(directory, pump_damages_file_name):
- file_dest=os.path.join(directory, pump_damages_file_name)
- with open(file_dest, 'rb') as f:
- _pump_damages = pickle.load(f)
- return _pump_damages
+def read_pump_damage_seperate_pickle_file(directory, pump_damages_file_name): # noqa: D103
+ file_dest = os.path.join(directory, pump_damages_file_name) # noqa: PTH118
+ with open(file_dest, 'rb') as f: # noqa: PTH123
+ _pump_damages = pickle.load(f) # noqa: S301
-##################### Read files From Excel #####################
+ return _pump_damages # noqa: RET504
-def read_pipe_damage_seperate_EXCEL_file(directory, pipe_damages_file_name):
- ss=None
- file_dest=os.path.join(directory, pipe_damages_file_name)
- ss=pd.read_excel(file_dest)
- ss.sort_values(['pipe_id','damage_time','damage_loc'],ascending=[True,True,False], ignore_index=True, inplace=True)
+
+# Read files From Excel #####################
+
+
+def read_pipe_damage_seperate_EXCEL_file(directory, pipe_damages_file_name): # noqa: N802, D103
+ ss = None
+ file_dest = os.path.join(directory, pipe_damages_file_name) # noqa: PTH118
+ ss = pd.read_excel(file_dest)
+ ss.sort_values(
+ ['pipe_id', 'damage_time', 'damage_loc'],
+ ascending=[True, True, False],
+ ignore_index=True,
+ inplace=True, # noqa: PD002
+ )
unique_time = ss.groupby(['pipe_id']).time.unique()
- if 1 in [0 if len(i)<=1 else 1 for i in unique_time]: # checks if there are any pipe id with more than two unqiue time values
- raise ValueError("All damage location for one pipe should happen at the same time")
- ss.set_index('time', inplace=True)
+ if 1 in [
+ 0 if len(i) <= 1 else 1 for i in unique_time
+ ]: # checks if any pipe id has more than one unique damage time
+ raise ValueError( # noqa: TRY003
+ 'All damage location for one pipe should happen at the same time' # noqa: EM101
+ )
+ ss.set_index('time', inplace=True) # noqa: PD002
ss.pipe_id = ss.pipe_id.astype(str)
return pd.Series(ss.to_dict('records'), index=ss.index)
-def read_node_damage_seperate_EXCEL_file(directory, node_damages_file_name):
- ss = None
- file_dest = os.path.join(directory, node_damages_file_name)
- ss = pd.read_excel(file_dest)
- ss.set_index('time', inplace=True)
+
+def read_node_damage_seperate_EXCEL_file(directory, node_damages_file_name): # noqa: N802, D103
+ ss = None
+ file_dest = os.path.join(directory, node_damages_file_name) # noqa: PTH118
+ ss = pd.read_excel(file_dest)
+ ss.set_index('time', inplace=True) # noqa: PD002
ss.node_name = ss.node_name.astype(str)
return pd.Series(ss.to_dict('records'), index=ss.index)
-def read_tank_damage_seperate_EXCEL_file(directory, tank_damages_file_name):
- ss = None
- file_dest = os.path.join(directory, tank_damages_file_name)
- ss = pd.read_excel(file_dest)
-# ss.set_index('Tank_ID', inplace=True)
- ss.set_index('time', inplace=True)
+
+def read_tank_damage_seperate_EXCEL_file(directory, tank_damages_file_name): # noqa: N802, D103
+ ss = None
+ file_dest = os.path.join(directory, tank_damages_file_name) # noqa: PTH118
+ ss = pd.read_excel(file_dest)
+ # ss.set_index('Tank_ID', inplace=True)
+ ss.set_index('time', inplace=True) # noqa: PD002
ss.Tank_ID = ss.Tank_ID.astype(str)
- #ss = ss['Tank_ID']
+ # ss = ss['Tank_ID']
return ss
-def read_pump_damage_seperate_EXCEL_file(directory, pump_damages_file_name):
- ss = None
- file_dest = os.path.join(directory, pump_damages_file_name)
- ss = pd.read_excel(file_dest)
- ss.set_index('time', inplace=True)
+
+def read_pump_damage_seperate_EXCEL_file(directory, pump_damages_file_name): # noqa: N802, D103
+ ss = None
+ file_dest = os.path.join(directory, pump_damages_file_name) # noqa: PTH118
+ ss = pd.read_excel(file_dest)
+ ss.set_index('time', inplace=True) # noqa: PD002
ss.Pump_ID = ss.Pump_ID.astype(str)
return ss
-##################### Save Results #####################
-def save_single(settings, result, name, restoration_data):
+# Save Results #####################
+
+
+def save_single(settings, result, name, restoration_data): # noqa: D103
result_file_directory = settings.process['result_directory']
- result_name = name + '.res'
- settings_name = name + '.xlsx'
-
- file_dest = os.path.join(result_file_directory, result_name)
- print("Saving: "+str(file_dest))
- with open(file_dest, 'wb') as f:
+ result_name = name + '.res'
+ settings_name = name + '.xlsx'
+
+ file_dest = os.path.join(result_file_directory, result_name) # noqa: PTH118
+ print('Saving: ' + str(file_dest)) # noqa: T201
+ with open(file_dest, 'wb') as f: # noqa: PTH123
pickle.dump(result, f)
-
-
- process_set = pd.Series(settings.process.settings)
+
+ process_set = pd.Series(settings.process.settings)
scenario_set = pd.Series(settings.scenario.settings)
- _set = pd.Series(process_set.to_list()+scenario_set.to_list(), index=process_set.index.to_list()+scenario_set.index.to_list())
- file_dest = os.path.join(result_file_directory, settings_name)
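+ # merge process and scenario settings into one Series so both are written to a single Excel sheet next to the pickled results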
+ _set = pd.Series(
+ process_set.to_list() + scenario_set.to_list(),
+ index=process_set.index.to_list() + scenario_set.index.to_list(),
+ )
+ file_dest = os.path.join(result_file_directory, settings_name) # noqa: PTH118
_set.to_excel(file_dest)
-
+
if settings.process['dmg_rst_data_save']:
- #file_dest = os.path.join(result_file_directory, 'restoration_file.pkl')
- #rest_data_out = pd.DataFrame.from_dict(restoration_data)
- #rest_data_out.to_pickle(file_dest)
- file_dest = os.path.join(result_file_directory, name+'_registry.pkl')
- print("Saving: "+str(file_dest))
- with open(file_dest, 'wb') as f:
- pickle.dump(restoration_data, f)
\ No newline at end of file
+ # file_dest = os.path.join(result_file_directory, 'restoration_file.pkl')
+ # rest_data_out = pd.DataFrame.from_dict(restoration_data)
+ # rest_data_out.to_pickle(file_dest)
+ file_dest = os.path.join(result_file_directory, name + '_registry.pkl') # noqa: PTH118
+ print('Saving: ' + str(file_dest)) # noqa: T201
+ with open(file_dest, 'wb') as f: # noqa: PTH123
+ pickle.dump(restoration_data, f)
diff --git a/modules/systemPerformance/REWET/REWET/GUI/Main_Help_Designer.py b/modules/systemPerformance/REWET/REWET/GUI/Main_Help_Designer.py
index 60567bb6c..f546d864a 100644
--- a/modules/systemPerformance/REWET/REWET/GUI/Main_Help_Designer.py
+++ b/modules/systemPerformance/REWET/REWET/GUI/Main_Help_Designer.py
@@ -1,17 +1,16 @@
-# -*- coding: utf-8 -*-
-"""
-Created on Wed Nov 2 13:25:40 2022
+"""Created on Wed Nov 2 13:25:40 2022
@author: snaeimi
-"""
+""" # noqa: CPY001, D400, N999
from PyQt5 import QtWidgets
+
from .Main_Help_Window import Ui_Main_Help_Window
-class Main_Help_Designer(Ui_Main_Help_Window):
+
+class Main_Help_Designer(Ui_Main_Help_Window): # noqa: D101
def __init__(self):
self._window = QtWidgets.QDialog()
self.setupUi(self._window)
-
+
self.buttonBox.rejected.connect(self._window.close)
-
\ No newline at end of file
diff --git a/modules/systemPerformance/REWET/REWET/GUI/Main_Help_Window.py b/modules/systemPerformance/REWET/REWET/GUI/Main_Help_Window.py
index fa1e4be13..3c4b60159 100644
--- a/modules/systemPerformance/REWET/REWET/GUI/Main_Help_Window.py
+++ b/modules/systemPerformance/REWET/REWET/GUI/Main_Help_Window.py
@@ -1,6 +1,4 @@
-# -*- coding: utf-8 -*-
-
-# Form implementation generated from reading ui file 'Main_Help_Window.ui'
+# Form implementation generated from reading ui file 'Main_Help_Window.ui' # noqa: CPY001, D100, N999
#
# Created by: PyQt5 UI code generator 5.12.3
#
@@ -10,64 +8,77 @@
from PyQt5 import QtCore, QtGui, QtWidgets
-class Ui_Main_Help_Window(object):
- def setupUi(self, Main_Help_Window):
- Main_Help_Window.setObjectName("Main_Help_Window")
+class Ui_Main_Help_Window: # noqa: D101
+ def setupUi(self, Main_Help_Window): # noqa: N802, N803, D102
+ Main_Help_Window.setObjectName('Main_Help_Window')
Main_Help_Window.resize(680, 320)
Main_Help_Window.setMinimumSize(QtCore.QSize(680, 320))
Main_Help_Window.setMaximumSize(QtCore.QSize(680, 320))
self.layoutWidget = QtWidgets.QWidget(Main_Help_Window)
self.layoutWidget.setGeometry(QtCore.QRect(20, 20, 641, 281))
- self.layoutWidget.setObjectName("layoutWidget")
+ self.layoutWidget.setObjectName('layoutWidget')
self.main_layout = QtWidgets.QVBoxLayout(self.layoutWidget)
self.main_layout.setContentsMargins(0, 0, 0, 0)
- self.main_layout.setObjectName("main_layout")
+ self.main_layout.setObjectName('main_layout')
self.label = QtWidgets.QLabel(self.layoutWidget)
font = QtGui.QFont()
- font.setFamily("Times New Roman")
+ font.setFamily('Times New Roman')
font.setPointSize(11)
self.label.setFont(font)
self.label.setWordWrap(True)
- self.label.setObjectName("label")
+ self.label.setObjectName('label')
self.main_layout.addWidget(self.label)
self.gridLayout_4 = QtWidgets.QGridLayout()
self.gridLayout_4.setSizeConstraint(QtWidgets.QLayout.SetDefaultConstraint)
- self.gridLayout_4.setObjectName("gridLayout_4")
- spacerItem = QtWidgets.QSpacerItem(50, 0, QtWidgets.QSizePolicy.Expanding, QtWidgets.QSizePolicy.Minimum)
+ self.gridLayout_4.setObjectName('gridLayout_4')
+ spacerItem = QtWidgets.QSpacerItem( # noqa: N806
+ 50, 0, QtWidgets.QSizePolicy.Expanding, QtWidgets.QSizePolicy.Minimum
+ )
self.gridLayout_4.addItem(spacerItem, 1, 1, 1, 1)
- spacerItem1 = QtWidgets.QSpacerItem(40, 20, QtWidgets.QSizePolicy.Expanding, QtWidgets.QSizePolicy.Minimum)
+ spacerItem1 = QtWidgets.QSpacerItem( # noqa: N806
+ 40, 20, QtWidgets.QSizePolicy.Expanding, QtWidgets.QSizePolicy.Minimum
+ )
self.gridLayout_4.addItem(spacerItem1, 0, 0, 1, 1)
self.label_4 = QtWidgets.QLabel(self.layoutWidget)
- self.label_4.setText("")
- self.label_4.setPixmap(QtGui.QPixmap(":/resources/resources/both_logos.jpg"))
- self.label_4.setObjectName("label_4")
+ self.label_4.setText('')
+ self.label_4.setPixmap(QtGui.QPixmap(':/resources/resources/both_logos.jpg'))
+ self.label_4.setObjectName('label_4')
self.gridLayout_4.addWidget(self.label_4, 0, 1, 1, 1)
- spacerItem2 = QtWidgets.QSpacerItem(40, 20, QtWidgets.QSizePolicy.Expanding, QtWidgets.QSizePolicy.Minimum)
+ spacerItem2 = QtWidgets.QSpacerItem( # noqa: N806
+ 40, 20, QtWidgets.QSizePolicy.Expanding, QtWidgets.QSizePolicy.Minimum
+ )
self.gridLayout_4.addItem(spacerItem2, 0, 2, 1, 1)
self.main_layout.addLayout(self.gridLayout_4)
self.horizontalLayout_2 = QtWidgets.QHBoxLayout()
- self.horizontalLayout_2.setObjectName("horizontalLayout_2")
- spacerItem3 = QtWidgets.QSpacerItem(40, 20, QtWidgets.QSizePolicy.Expanding, QtWidgets.QSizePolicy.Minimum)
+ self.horizontalLayout_2.setObjectName('horizontalLayout_2')
+ spacerItem3 = QtWidgets.QSpacerItem( # noqa: N806
+ 40, 20, QtWidgets.QSizePolicy.Expanding, QtWidgets.QSizePolicy.Minimum
+ )
self.horizontalLayout_2.addItem(spacerItem3)
self.buttonBox = QtWidgets.QDialogButtonBox(self.layoutWidget)
self.buttonBox.setOrientation(QtCore.Qt.Vertical)
self.buttonBox.setStandardButtons(QtWidgets.QDialogButtonBox.Close)
- self.buttonBox.setObjectName("buttonBox")
+ self.buttonBox.setObjectName('buttonBox')
self.horizontalLayout_2.addWidget(self.buttonBox)
self.main_layout.addLayout(self.horizontalLayout_2)
self.retranslateUi(Main_Help_Window)
QtCore.QMetaObject.connectSlotsByName(Main_Help_Window)
- def retranslateUi(self, Main_Help_Window):
+ def retranslateUi(self, Main_Help_Window): # noqa: N802, N803, D102
_translate = QtCore.QCoreApplication.translate
- Main_Help_Window.setWindowTitle(_translate("Main_Help_Window", "Help"))
- self.label.setText(_translate("Main_Help_Window", "REstoration of Water after an Event Tool (REWET) is created by Sina Naeimi and Rachel Davidson at University of Delaware. REWET is as it as and developers gurantee neither the usability of the software nor the validity of data it produce in any way."))
-from . import REWET_Resource_rc
+ Main_Help_Window.setWindowTitle(_translate('Main_Help_Window', 'Help'))
+ self.label.setText(
+ _translate(
+ 'Main_Help_Window',
+ 'REstoration of Water after an Event Tool (REWET) is created by Sina Naeimi and Rachel Davidson at the University of Delaware. REWET is provided as is, and the developers guarantee neither the usability of the software nor the validity of the data it produces in any way.',
+ )
+ )
-if __name__ == "__main__":
+if __name__ == '__main__':
import sys
+
app = QtWidgets.QApplication(sys.argv)
Main_Help_Window = QtWidgets.QDialog()
ui = Ui_Main_Help_Window()
diff --git a/modules/systemPerformance/REWET/REWET/GUI/Main_Help_Window.ui b/modules/systemPerformance/REWET/REWET/GUI/Main_Help_Window.ui
index 5b3222025..f1625ce40 100644
--- a/modules/systemPerformance/REWET/REWET/GUI/Main_Help_Window.ui
+++ b/modules/systemPerformance/REWET/REWET/GUI/Main_Help_Window.ui
@@ -44,7 +44,7 @@
- REstoration of Water after an Event Tool (REWET) is created by Sina Naeimi and Rachel Davidson at University of Delaware. REWET is as it as and developers gurantee neither the usability of the software nor the validity of data it produce in any way.
+ REstoration of Water after an Event Tool (REWET) is created by Sina Naeimi and Rachel Davidson at the University of Delaware. REWET is provided as is, and the developers guarantee neither the usability of the software nor the validity of the data it produces in any way.
true
diff --git a/modules/systemPerformance/REWET/REWET/GUI/Map_Designer.py b/modules/systemPerformance/REWET/REWET/GUI/Map_Designer.py
index cef51f43e..08781bba0 100644
--- a/modules/systemPerformance/REWET/REWET/GUI/Map_Designer.py
+++ b/modules/systemPerformance/REWET/REWET/GUI/Map_Designer.py
@@ -1,619 +1,808 @@
-# -*- coding: utf-8 -*-
-"""
-Created on Thu Nov 10 18:29:50 2022
+"""Created on Thu Nov 10 18:29:50 2022
@author: snaeimi
-"""
+""" # noqa: CPY001, D400, N999
-
-from PyQt5 import QtWidgets, QtGui
-import numpy as np
-import pandas as pd
import geopandas as gpd
-from shapely.geometry import Point
import matplotlib.pyplot as plt
+import numpy as np
+import pandas as pd
from GUI.Subsitute_Layer_Designer import Subsitute_Layer_Designer
-from GUI.Symbology_Designer import Symbology_Designer
-
-
-single_scenario_map_options = ['', 'Quantity Return', 'Delivery Return', ]
-multi_scenario_map_options = ['','Quantity Outage vs. Exceedance','Delivery Outage vs. Exceedance', 'Quantity Exceedance vs. Time', 'Delivery Exceedance vs. Time']
-map_settings = { 'Quantity Return':[{"Label":"Time", "Type":"Time", "Default":"seconds"}, {"Label":"LDN leak", "Type":"Yes-No_Combo", "Default":"No"}, {"Label":"leak Criteria", "Type":"Float Line", "Default":"0.75"}, {"Label":"Time Window", "Type":"Int Line", "Default":"7200"}],
- 'Delivery Return':[{"Label":"Time", "Type":"Time", "Default":"seconds"}, {"Label":"LDN leak", "Type":"Yes-No_Combo", "Default":"No"}, {"Label":"leak Criteria", "Type":"Float Line", "Default":"1.25"}, {"Label":"Time Window", "Type":"Int Line", "Default":"7200"}],
- 'Quantity Outage vs. Exceedance':[{"Label":"Time", "Type":"Time", "Default":"seconds"}, {"Label":"LDN leak", "Type":"Yes-No_Combo", "Default":"No"}, {"Label":"leak Criteria", "Type":"Float Line", "Default":"1.25"}, {"Label":"Time Window", "Type":"Int Line", "Default":"7200"}, {"Label":"Ex. Prob.", "Type":"Float Line", "Default":"0.09", "Validator":{"Min":0, "Max":1}}],
- 'Delivery Outage vs. Exceedance':[{"Label":"Time", "Type":"Time", "Default":"seconds"}, {"Label":"LDN leak", "Type":"Yes-No_Combo", "Default":"No"}, {"Label":"leak Criteria", "Type":"Float Line", "Default":"1.25"}, {"Label":"Time Window", "Type":"Int Line", "Default":"7200"}, {"Label":"Ex. Prob.", "Type":"Int Line", "Default":str(24*3600), "Validator":{"Min":0, "Max":1000*24*3600}}],
- 'Quantity Exceedance vs. Time':[{"Label":"Time", "Type":"Time", "Default":"seconds"}, {"Label":"LDN leak", "Type":"Yes-No_Combo", "Default":"No"}, {"Label":"leak Criteria", "Type":"Float Line", "Default":"1.25"}, {"Label":"Time Window", "Type":"Int Line", "Default":"7200"}, {"Label":"Outage Time", "Type":"Float Line", "Default":"0.09", "Validator":{"Min":0, "Max":1}}],
- 'Delivery Exceedance vs. Time':[{"Label":"Time", "Type":"Time", "Default":"seconds"}, {"Label":"LDN leak", "Type":"Yes-No_Combo", "Default":"No"}, {"Label":"leak Criteria", "Type":"Float Line", "Default":"1.25"}, {"Label":"Time Window", "Type":"Int Line", "Default":"7200"}, {"Label":"Outage Time", "Type":"Int Line", "Default":str(24*3600), "Validator":{"Min":0, "Max":1000*24*3600}}]}
-norm = plt.Normalize(1,4)
+from GUI.Symbology_Designer import Symbology_Designer
+from PyQt5 import QtGui, QtWidgets
+from shapely.geometry import Point
+
+single_scenario_map_options = [
+ '',
+ 'Quantity Return',
+ 'Delivery Return',
+]
+multi_scenario_map_options = [
+ '',
+ 'Quantity Outage vs. Exceedance',
+ 'Delivery Outage vs. Exceedance',
+ 'Quantity Exceedance vs. Time',
+ 'Delivery Exceedance vs. Time',
+]
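+# each map type maps to a list of UI field specs: a label, a widget type, a default value, and an optional numeric validator range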
+map_settings = {
+ 'Quantity Return': [
+ {'Label': 'Time', 'Type': 'Time', 'Default': 'seconds'},
+ {'Label': 'LDN leak', 'Type': 'Yes-No_Combo', 'Default': 'No'},
+ {'Label': 'leak Criteria', 'Type': 'Float Line', 'Default': '0.75'},
+ {'Label': 'Time Window', 'Type': 'Int Line', 'Default': '7200'},
+ ],
+ 'Delivery Return': [
+ {'Label': 'Time', 'Type': 'Time', 'Default': 'seconds'},
+ {'Label': 'LDN leak', 'Type': 'Yes-No_Combo', 'Default': 'No'},
+ {'Label': 'leak Criteria', 'Type': 'Float Line', 'Default': '1.25'},
+ {'Label': 'Time Window', 'Type': 'Int Line', 'Default': '7200'},
+ ],
+ 'Quantity Outage vs. Exceedance': [
+ {'Label': 'Time', 'Type': 'Time', 'Default': 'seconds'},
+ {'Label': 'LDN leak', 'Type': 'Yes-No_Combo', 'Default': 'No'},
+ {'Label': 'leak Criteria', 'Type': 'Float Line', 'Default': '1.25'},
+ {'Label': 'Time Window', 'Type': 'Int Line', 'Default': '7200'},
+ {
+ 'Label': 'Ex. Prob.',
+ 'Type': 'Float Line',
+ 'Default': '0.09',
+ 'Validator': {'Min': 0, 'Max': 1},
+ },
+ ],
+ 'Delivery Outage vs. Exceedance': [
+ {'Label': 'Time', 'Type': 'Time', 'Default': 'seconds'},
+ {'Label': 'LDN leak', 'Type': 'Yes-No_Combo', 'Default': 'No'},
+ {'Label': 'leak Criteria', 'Type': 'Float Line', 'Default': '1.25'},
+ {'Label': 'Time Window', 'Type': 'Int Line', 'Default': '7200'},
+ {
+ 'Label': 'Ex. Prob.',
+ 'Type': 'Int Line',
+ 'Default': str(24 * 3600),
+ 'Validator': {'Min': 0, 'Max': 1000 * 24 * 3600},
+ },
+ ],
+ 'Quantity Exceedance vs. Time': [
+ {'Label': 'Time', 'Type': 'Time', 'Default': 'seconds'},
+ {'Label': 'LDN leak', 'Type': 'Yes-No_Combo', 'Default': 'No'},
+ {'Label': 'leak Criteria', 'Type': 'Float Line', 'Default': '1.25'},
+ {'Label': 'Time Window', 'Type': 'Int Line', 'Default': '7200'},
+ {
+ 'Label': 'Outage Time',
+ 'Type': 'Float Line',
+ 'Default': '0.09',
+ 'Validator': {'Min': 0, 'Max': 1},
+ },
+ ],
+ 'Delivery Exceedance vs. Time': [
+ {'Label': 'Time', 'Type': 'Time', 'Default': 'seconds'},
+ {'Label': 'LDN leak', 'Type': 'Yes-No_Combo', 'Default': 'No'},
+ {'Label': 'leak Criteria', 'Type': 'Float Line', 'Default': '1.25'},
+ {'Label': 'Time Window', 'Type': 'Int Line', 'Default': '7200'},
+ {
+ 'Label': 'Outage Time',
+ 'Type': 'Int Line',
+ 'Default': str(24 * 3600),
+ 'Validator': {'Min': 0, 'Max': 1000 * 24 * 3600},
+ },
+ ],
+}
+norm = plt.Normalize(1, 4)
cmap = plt.cm.RdYlGn
-class Time_Unit_Combo(QtWidgets.QComboBox):
+
+class Time_Unit_Combo(QtWidgets.QComboBox): # noqa: D101
def __init__(self):
super().__init__()
- time_units = ["second", "hour", "day"]
+ time_units = ['second', 'hour', 'day']
self.addItems(time_units)
-
- def changeMapTimeUnit(self, raw_time_map, value_columns_name):
-
+
+ def changeMapTimeUnit(self, raw_time_map, value_columns_name): # noqa: N802, D102
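+ # convert the map's time values (stored in seconds) to the unit currently selected in the combo box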
time_justified_map = raw_time_map.copy()
-
+
time_unit = self.currentText()
- data = time_justified_map[value_columns_name]
-
- #time_justified_map = time_justified_map.reset_index()
-
-
- if time_unit == "second":
- return raw_time_map.copy()
- elif time_unit == "hour":
- data = data/3600
- elif time_unit == "day":
- data = data/3600/24
+ data = time_justified_map[value_columns_name]
+
+ # time_justified_map = time_justified_map.reset_index()
+
+ if time_unit == 'second':
+ return raw_time_map.copy()
+ elif time_unit == 'hour': # noqa: RET505
+ data = data / 3600 # noqa: PLR6104
+ elif time_unit == 'day':
+ data = data / 3600 / 24
else:
- raise ValueError("Unknown unit time: "+repr(time_unit) )
-
+ raise ValueError('Unknown unit time: ' + repr(time_unit))
+
for ind in data.index.to_list():
time_justified_map.loc[ind, value_columns_name] = data.loc[ind]
return time_justified_map
-class Yes_No_Combo(QtWidgets.QComboBox):
+
+class Yes_No_Combo(QtWidgets.QComboBox): # noqa: D101
def __init__(self):
super().__init__()
- self.addItems(["No", "Yes"])
+ self.addItems(['No', 'Yes'])
-class Map_Designer():
- def __init__(self):
- self.current_raw_map = None
- self.current_map = None
- self.annotation_map = None
- self.plotted_map = None
- self.subsitute_layer_addr = None
- self.subsitute_layer = None
+class Map_Designer: # noqa: D101, PLR0904
+ def __init__(self):
+ self.current_raw_map = None
+ self.current_map = None
+ self.annotation_map = None
+ self.plotted_map = None
+ self.subsitute_layer_addr = None
+ self.subsitute_layer = None
self.iUse_substitute_layer = False
- self.map_settings_widgets = {}
- self.symbology = {"Method":"FisherJenks", "kw":{"k":5}, "Color":"Blues"}
-
+ self.map_settings_widgets = {}
+ self.symbology = {'Method': 'FisherJenks', 'kw': {'k': 5}, 'Color': 'Blues'}
+
self.main_tab.currentChanged.connect(self.tabChangedMap)
- self.map_all_scenarios_checkbox.stateChanged.connect(self.mapAllScenarioCheckboxChanged)
+ self.map_all_scenarios_checkbox.stateChanged.connect(
+ self.mapAllScenarioCheckboxChanged
+ )
self.save_map_button.clicked.connect(self.saveCurrentMapByButton)
- self.map_scenario_combo.currentTextChanged.connect(self.resultScenarioChanged)
+ self.map_scenario_combo.currentTextChanged.connect(
+ self.resultScenarioChanged
+ )
self.map_type_combo.currentTextChanged.connect(self.mapTypeChanegd)
self.annotation_checkbox.stateChanged.connect(self.AnnotationCheckboxChanged)
- self.annotation_event_combo.currentTextChanged.connect(self.getAnnotationtype)
- self.mpl_map.canvas.fig.canvas.mpl_connect("motion_notify_event", self.mouseHovered)
- self.mpl_map.canvas.fig.canvas.mpl_connect("button_press_event", self.mouseClicked)
-
-
-
+ self.annotation_event_combo.currentTextChanged.connect(
+ self.getAnnotationtype
+ )
+ self.mpl_map.canvas.fig.canvas.mpl_connect(
+ 'motion_notify_event', self.mouseHovered
+ )
+ self.mpl_map.canvas.fig.canvas.mpl_connect(
+ 'button_press_event', self.mouseClicked
+ )
+
"""
Signals
"""
- self.annotation_radius_line.editingFinished.connect(self.annotationRadiusChanegd)
+ self.annotation_radius_line.editingFinished.connect(
+ self.annotationRadiusChanegd
+ )
self.spatial_join_button.clicked.connect(self.openSubsituteLayerWindow)
self.major_tick_size_line.editingFinished.connect(self.majorTickSet)
self.symbology_button.clicked.connect(self.symbologyByButton)
-
+
"""
Validators
"""
- self.annotation_radius_line.setValidator(QtGui.QDoubleValidator(0, 1000000, 20, notation=QtGui.QDoubleValidator.StandardNotation) )
- self.major_tick_size_line.setValidator(QtGui.QIntValidator(0, 64) )
-
+ self.annotation_radius_line.setValidator(
+ QtGui.QDoubleValidator(
+ 0, 1000000, 20, notation=QtGui.QDoubleValidator.StandardNotation
+ )
+ )
+ self.major_tick_size_line.setValidator(QtGui.QIntValidator(0, 64))
+
self.map_value_columns_name = None
- self.anottation_type = "None"
+ self.anottation_type = 'None'
self.annotation_column = None
-
+
self.initializeMap()
-
- def initializeMap(self):
+
+ def initializeMap(self): # noqa: N802, D102
self.setMapAllScenarios(True)
self.map_all_scenarios_checkbox.setChecked(True)
self.map_scenario_combo.clear()
self.map_scenario_combo.addItems(self.result_scenarios)
- #self.current_map_data = None
-
- def symbologyByButton(self):
- sym = Symbology_Designer(self.symbology, self.plotted_map, self.map_value_columns_name)
- val = sym._window.exec_()
-
+ # self.current_map_data = None
+
+ def symbologyByButton(self): # noqa: N802, D102
+ sym = Symbology_Designer(
+ self.symbology, self.plotted_map, self.map_value_columns_name
+ )
+ val = sym._window.exec_() # noqa: SLF001
+
if val == 1:
self.symbology = sym.sym
self.plotMap(self.map_value_columns_name)
-
- def majorTickSet(self):
+
+ def majorTickSet(self): # noqa: N802, D102
major_tick_fond_size = self.major_tick_size_line.text()
major_tick_fond_size = float(major_tick_fond_size)
-
- self.mpl_map.canvas.ax.tick_params(axis='both', which='major', labelsize=major_tick_fond_size)
+
+ self.mpl_map.canvas.ax.tick_params(
+ axis='both', which='major', labelsize=major_tick_fond_size
+ )
self.mpl_map.canvas.fig.canvas.draw_idle()
-
- def openSubsituteLayerWindow(self):
- demand_node_temporary_layer = self.project_result.createGeopandasPointDataFrameForNodes()
- sub_layer = Subsitute_Layer_Designer(self.subsitute_layer_addr, self.subsitute_layer, self.iUse_substitute_layer, demand_node_temporary_layer)
- val = sub_layer._window.exec_()
+
+ def openSubsituteLayerWindow(self): # noqa: N802, D102
+ demand_node_temporary_layer = (
+ self.project_result.createGeopandasPointDataFrameForNodes()
+ )
+ sub_layer = Subsitute_Layer_Designer(
+ self.subsitute_layer_addr,
+ self.subsitute_layer,
+ self.iUse_substitute_layer,
+ demand_node_temporary_layer,
+ )
+ val = sub_layer._window.exec_() # noqa: SLF001
if val == 1:
- self.subsitute_layer_addr = sub_layer.subsitute_layer_addr
- self.subsitute_layer = sub_layer.subsitute_layer
+ self.subsitute_layer_addr = sub_layer.subsitute_layer_addr
+ self.subsitute_layer = sub_layer.subsitute_layer
self.iUse_substitute_layer = sub_layer.iUse_substitute_layer
self.plotMap(self.map_value_columns_name)
-
- def annotationRadiusChanegd(self):
+
+ def annotationRadiusChanegd(self): # noqa: N802, D102
annotation_radius = self.annotation_radius_line.text()
self.annotation_map = self.plotted_map.copy(deep=True)
- if annotation_radius=="":
+ if annotation_radius == '': # noqa: PLC1901
annotation_radius = 0
- self.annotation_radius_line.settext("0")
+ self.annotation_radius_line.setText('0')
annotation_radius = float(annotation_radius)
for ind, val in self.current_map.geometry.iteritems():
self.annotation_map.geometry.loc[ind] = val.buffer(annotation_radius)
-
- def AnnotationCheckboxChanged(self, state):
+
+ def AnnotationCheckboxChanged(self, state): # noqa: N802, D102
if state == 0:
self.annotation_event_combo.setEnabled(False)
self.annotation_radius_line.setEnabled(False)
- self.anottation_type = "None"
+ self.anottation_type = 'None'
self.annot.set_visible(False)
- elif state == 2:
+ elif state == 2: # noqa: PLR2004
self.annotation_event_combo.setEnabled(True)
self.annotation_radius_line.setEnabled(True)
self.getAnnotationtype()
-
- def mapAllScenarioCheckboxChanged(self, state):
+
+ def mapAllScenarioCheckboxChanged(self, state): # noqa: N802, D102
if state == 0:
self.setMapAllScenarios(False)
- elif state == 2:
+ elif state == 2: # noqa: PLR2004
self.setMapAllScenarios(True)
-
- def getAnnotationtype(self, text=None):
+
+ def getAnnotationtype(self, text=None): # noqa: ARG002, N802, D102
combo_value = self.annotation_event_combo.currentText()
- if combo_value == "Mouse hover":
- self.anottation_type = combo_value
- elif combo_value == "Mouse click":
+ if combo_value == 'Mouse hover' or combo_value == 'Mouse click': # noqa: PLR1714
self.anottation_type = combo_value
else:
- raise ValueError("unknown annotation type: "+repr(combo_value))
-
- def mouseHovered(self, event):
- if self.anottation_type != "Mouse hover":
+ raise ValueError('unknown annotation type: ' + repr(combo_value))
+
+ def mouseHovered(self, event): # noqa: N802, D102
+ if self.anottation_type != 'Mouse hover':
return
-
- if type(self.current_map) == type(None):
+
+ if type(self.current_map) == type(None): # noqa: E721
return
self.putAnnotation(event)
-
- def mouseClicked(self, event):
- if self.anottation_type != "Mouse click":
+
+ def mouseClicked(self, event): # noqa: N802, D102
+ if self.anottation_type != 'Mouse click':
return
-
- if type(self.current_map) == type(None):
+
+ if type(self.current_map) == type(None): # noqa: E721
return
-
+
if event.button != 1:
return
-
+
self.putAnnotation(event)
-
- def putAnnotation(self, event):
+
+ def putAnnotation(self, event): # noqa: N802, D102
vis = self.annot.get_visible()
if event.inaxes == self.mpl_map.canvas.ax:
- #print((event.xdata, event.ydata) )
+ # print((event.xdata, event.ydata) )
mouse_point = Point(event.xdata, event.ydata)
s = self.annotation_map.geometry.contains(mouse_point)
- s_index_list = s[s==True].index
-
+ s_index_list = s[s == True].index # noqa: E712
+
if len(s_index_list) >= 1:
cont = True
s_index = s_index_list[0]
elif len(s_index_list) == 0:
cont = False
-
+
if cont:
- #print(len(s_index_list))
+ # print(len(s_index_list))
data = self.annotation_map.loc[s_index, self.map_value_columns_name]
- if type(data) == pd.core.series.Series:
+ if type(data) == pd.core.series.Series: # noqa: E721
data = data.iloc[0]
text = repr(data)
self.update_annot(text, event)
self.annot.set_visible(True)
self.mpl_map.canvas.fig.canvas.draw_idle()
- else:
- if vis:
- self.annot.set_visible(False)
- self.mpl_map.canvas.fig.canvas.draw_idle()
-
-
- def update_annot(self, text, event):
+ elif vis:
+ self.annot.set_visible(False)
+ self.mpl_map.canvas.fig.canvas.draw_idle()
+
+ def update_annot(self, text, event): # noqa: D102
self.annot.xy = (event.xdata, event.ydata)
self.annot.set_text(text)
self.annot.get_bbox_patch().set_facecolor(cmap(norm(1)))
self.annot.get_bbox_patch().set_alpha(0.4)
-
- def clearMapPlot(self):
+
+ def clearMapPlot(self): # noqa: N802, D102
self.mpl_map.canvas.ax.cla()
-
- def plotMap(self, value_columns_name):
+
+ def plotMap(self, value_columns_name): # noqa: N802, D102
self.clearMapPlot()
self.mpl_map.canvas.ax.clear()
- #for ind, val in self.current_map.geometry.iteritems():
- #self.current_map.geometry.loc[ind] = val.buffer(2000)
- #self.mpl_map.canvas.ax.clear()
+ # for ind, val in self.current_map.geometry.iteritems():
+ # self.current_map.geometry.loc[ind] = val.buffer(2000)
+ # self.mpl_map.canvas.ax.clear()
data = self.current_map
- #print(data.head() )
-
- self.annot = self.mpl_map.canvas.ax.annotate("", xy=(0, 0), xytext=(20, 20),textcoords="offset points",
- bbox=dict(boxstyle="round", fc="w"),
- arrowprops=dict(arrowstyle="->"))
+ # print(data.head() )
+
+ self.annot = self.mpl_map.canvas.ax.annotate(
+ '',
+ xy=(0, 0),
+ xytext=(20, 20),
+ textcoords='offset points',
+ bbox=dict(boxstyle='round', fc='w'), # noqa: C408
+ arrowprops=dict(arrowstyle='->'), # noqa: C408
+ )
self.annot.set_visible(False)
- if self.iUse_substitute_layer == True:
+ if self.iUse_substitute_layer == True: # noqa: E712
data = data.set_crs(crs=self.subsitute_layer.crs)
joined_map = gpd.sjoin(self.subsitute_layer, data)
- #joined_map.plot(ax=self.mpl_map.canvas.ax, column=value_columns_name, cmap="Blues", legend=True)
+ # joined_map.plot(ax=self.mpl_map.canvas.ax, column=value_columns_name, cmap="Blues", legend=True)
data = joined_map
else:
pass
self.annotation_map = data.copy(deep=True)
- #data.to_file("Northridge/ss2.shp")
+ # data.to_file("Northridge/ss2.shp")
self.plotted_map = self.prepareForLegend(data, value_columns_name)
- self.plotted_map.plot(ax=self.mpl_map.canvas.ax, column=value_columns_name, cmap=self.symbology["Color"], categorical=True, legend="True", scheme=self.symbology["Method"], classification_kwds=self.symbology["kw"])
+ self.plotted_map.plot(
+ ax=self.mpl_map.canvas.ax,
+ column=value_columns_name,
+ cmap=self.symbology['Color'],
+ categorical=True,
+ legend='True',
+ scheme=self.symbology['Method'],
+ classification_kwds=self.symbology['kw'],
+ )
self.mpl_map.canvas.ax.ticklabel_format(axis='both', style='plain')
- #self.majorTickSet()
-
- #labels = self.mpl_map.canvas.ax.get_xticks()
- #self.mpl_map.canvas.ax.set_xticklabels(labels, rotation=45, ha='right')
- #self.mpl_map.canvas.ax.plot(self.current_map.index, self.current_map.to_list())
-
- self.mpl_map.canvas.draw()
+ # self.majorTickSet()
+
+ # labels = self.mpl_map.canvas.ax.get_xticks()
+ # self.mpl_map.canvas.ax.set_xticklabels(labels, rotation=45, ha='right')
+ # self.mpl_map.canvas.ax.plot(self.current_map.index, self.current_map.to_list())
+
+ self.mpl_map.canvas.draw()
self.mpl_map.canvas.fig.tight_layout()
-
- def prepareForLegend(self, data, value_columns_name):
+
+ def prepareForLegend(self, data, value_columns_name): # noqa: D102, N802, PLR6301
return data.copy(deep=True)
data = data.copy(deep=True)
min_value = data[value_columns_name].min()
max_value = data[value_columns_name].max()
- step = (max_value - min_value)/5
+ step = (max_value - min_value) / 5
-
step_array = np.arange(min_value, max_value, step)
step_array = step_array.tolist()
step_array.append(max_value)
-
+
for i in range(len(step_array) - 1):
- step_max = step_array[i+1]
+ step_max = step_array[i + 1]
step_min = step_array[i]
- index_list = data[(data[value_columns_name] < step_max) & (data[value_columns_name] > step_min)].index
- #print(index_list)
+ index_list = data[
+ (data[value_columns_name] < step_max)
+ & (data[value_columns_name] > step_min)
+ ].index
+ # print(index_list)
for ind in index_list:
- data.loc[ind, value_columns_name]= step_max
-
+ data.loc[ind, value_columns_name] = step_max
+
return data
-
-
- def setMapAllScenarios(self, flag):
- if flag == True:
+
+ def setMapAllScenarios(self, flag): # noqa: N802, D102
+ if flag == True: # noqa: E712
self.map_all_scenarios_checkbox.setChecked(True)
self.map_scenario_combo.setEnabled(False)
self.map_type_combo.clear()
self.map_type_combo.addItems(multi_scenario_map_options)
self.clearMapPlot()
- elif flag == False:
+ elif flag == False: # noqa: E712
self.map_all_scenarios_checkbox.setChecked(False)
self.map_scenario_combo.setEnabled(True)
self.map_type_combo.clear()
self.map_type_combo.addItems(single_scenario_map_options)
self.clearMapPlot()
else:
- raise ValueError("Unknown flag: " + repr(flag))
-
- def resultScenarioChanged(self, text):
- self.map_result_current_scenario = text #self.map_scenario_combo.getText()
-
- def mapTypeChanegd(self, text):
- if self.project_result == None:
+ raise ValueError('Unknown flag: ' + repr(flag))
+
+ def resultScenarioChanged(self, text): # noqa: N802, D102
+ self.map_result_current_scenario = text # self.map_scenario_combo.getText()
+
+ def mapTypeChanegd(self, text): # noqa: N802, D102
+ if self.project_result == None: # noqa: E711
return
- self.current_map_type = text
+ self.current_map_type = text
self.setMapSettingBox(text)
self.calculateCurrentMap()
-
- def calculateCurrentMap(self):
-
+
+ def calculateCurrentMap(self): # noqa: C901, N802, D102
map_type = self.current_map_type
if map_type == 'Quantity Outage vs. Exceedance':
-
- iConsider_leak = self.map_settings_widgets["LDN leak"].currentText()
- leak_ratio = self.map_settings_widgets["leak Criteria"].text()
- time_window = self.map_settings_widgets["Time Window"].text()
- exeedance_probability = self.map_settings_widgets["Ex. Prob."].text()
-
- if iConsider_leak == "Yes":
- iConsider_leak = True
- else:
- iConsider_leak = False
-
- leak_ratio = float(leak_ratio)
- time_window = int(float(time_window) )
- exeedance_probability = float(exeedance_probability)
-
- self.map_value_columns_name = "res"
- map_data = self.project_result.AS_getOutage_4(LOS='QN', iConsider_leak=iConsider_leak, leak_ratio=leak_ratio, consistency_time_window=time_window)
- #print(map_data)
- self.current_raw_map = self.project_result.getDLQNExceedenceProbabilityMap(map_data, ihour=True , param=exeedance_probability)
- #self.current_map = self.current_raw_map.copy()
- self.current_map = self.time_combo.changeMapTimeUnit(self.current_raw_map, self.map_value_columns_name)
-
- #print(exeedance_probability)
- self.plotMap(self.map_value_columns_name)
-
+ iConsider_leak = self.map_settings_widgets['LDN leak'].currentText() # noqa: N806
+ leak_ratio = self.map_settings_widgets['leak Criteria'].text()
+ time_window = self.map_settings_widgets['Time Window'].text()
+ exeedance_probability = self.map_settings_widgets['Ex. Prob.'].text()
+
+ if iConsider_leak == 'Yes':
+ iConsider_leak = True # noqa: N806
+ else:
+ iConsider_leak = False # noqa: N806
+
+ leak_ratio = float(leak_ratio)
+ time_window = int(float(time_window))
+ exeedance_probability = float(exeedance_probability)
+
+ self.map_value_columns_name = 'res'
+ map_data = self.project_result.AS_getOutage_4(
+ LOS='QN',
+ iConsider_leak=iConsider_leak,
+ leak_ratio=leak_ratio,
+ consistency_time_window=time_window,
+ )
+ # print(map_data)
+ self.current_raw_map = (
+ self.project_result.getDLQNExceedenceProbabilityMap(
+ map_data, ihour=True, param=exeedance_probability
+ )
+ )
+ # self.current_map = self.current_raw_map.copy()
+ self.current_map = self.time_combo.changeMapTimeUnit(
+ self.current_raw_map, self.map_value_columns_name
+ )
+
+ # print(exeedance_probability)
+ self.plotMap(self.map_value_columns_name)
+
elif map_type == 'Delivery Outage vs. Exceedance':
-
- iConsider_leak = self.map_settings_widgets["LDN leak"].currentText()
- leak_ratio = self.map_settings_widgets["leak Criteria"].text()
- time_window = self.map_settings_widgets["Time Window"].text()
- exeedance_probability = self.map_settings_widgets["Ex. Prob."].text()
-
- if iConsider_leak == "Yes":
- iConsider_leak = True
- else:
- iConsider_leak = False
-
- leak_ratio = float(leak_ratio)
- time_window = int(float(time_window) )
- exeedance_probability = float(exeedance_probability)
-
- self.map_value_columns_name = "res"
- map_data = self.project_result.AS_getOutage_4(LOS='DL', iConsider_leak=iConsider_leak, leak_ratio=leak_ratio, consistency_time_window=time_window)
- #print(map_data)
- self.current_raw_map = self.project_result.getDLQNExceedenceProbabilityMap(map_data, ihour=True , param=exeedance_probability)
- #self.current_map = self.current_raw_map.copy()
- self.current_map = self.time_combo.changeMapTimeUnit(self.current_raw_map, self.map_value_columns_name)
-
- #print(exeedance_probability)
- self.plotMap(self.map_value_columns_name)
-
+ iConsider_leak = self.map_settings_widgets['LDN leak'].currentText() # noqa: N806
+ leak_ratio = self.map_settings_widgets['leak Criteria'].text()
+ time_window = self.map_settings_widgets['Time Window'].text()
+ exeedance_probability = self.map_settings_widgets['Ex. Prob.'].text()
+
+ if iConsider_leak == 'Yes':
+ iConsider_leak = True # noqa: N806
+ else:
+ iConsider_leak = False # noqa: N806
+
+ leak_ratio = float(leak_ratio)
+ time_window = int(float(time_window))
+ exeedance_probability = float(exeedance_probability)
+
+ self.map_value_columns_name = 'res'
+ map_data = self.project_result.AS_getOutage_4(
+ LOS='DL',
+ iConsider_leak=iConsider_leak,
+ leak_ratio=leak_ratio,
+ consistency_time_window=time_window,
+ )
+ # print(map_data)
+ self.current_raw_map = (
+ self.project_result.getDLQNExceedenceProbabilityMap(
+ map_data, ihour=True, param=exeedance_probability
+ )
+ )
+ # self.current_map = self.current_raw_map.copy()
+ self.current_map = self.time_combo.changeMapTimeUnit(
+ self.current_raw_map, self.map_value_columns_name
+ )
+
+ # print(exeedance_probability)
+ self.plotMap(self.map_value_columns_name)
+
elif map_type == 'Quantity Exceedance vs. Time':
-
- iConsider_leak = self.map_settings_widgets["LDN leak"].currentText()
- leak_ratio = self.map_settings_widgets["leak Criteria"].text()
- time_window = self.map_settings_widgets["Time Window"].text()
- outage_time = self.map_settings_widgets["Outage Time"].text()
-
- if iConsider_leak == "Yes":
- iConsider_leak = True
- else:
- iConsider_leak = False
-
- leak_ratio = float(leak_ratio)
- time_window = int(float(time_window) )
- outage_time = int(float(outage_time) )
-
- self.map_value_columns_name = "res"
- map_data = self.project_result.AS_getOutage_4(LOS='QN', iConsider_leak=iConsider_leak, leak_ratio=leak_ratio, consistency_time_window=time_window)
- #print(map_data)
- self.current_raw_map = self.project_result.getDLQNExceedenceProbabilityMap(map_data, ihour=False , param=outage_time)
- #self.current_map = self.current_raw_map.copy()
- self.current_map = self.time_combo.changeMapTimeUnit(self.current_raw_map, self.map_value_columns_name)
-
- #print(exeedance_probability)
- self.plotMap(self.map_value_columns_name)
-
+ iConsider_leak = self.map_settings_widgets['LDN leak'].currentText() # noqa: N806
+ leak_ratio = self.map_settings_widgets['leak Criteria'].text()
+ time_window = self.map_settings_widgets['Time Window'].text()
+ outage_time = self.map_settings_widgets['Outage Time'].text()
+
+ if iConsider_leak == 'Yes':
+ iConsider_leak = True # noqa: N806
+ else:
+ iConsider_leak = False # noqa: N806
+
+ leak_ratio = float(leak_ratio)
+ time_window = int(float(time_window))
+ outage_time = int(float(outage_time))
+
+ self.map_value_columns_name = 'res'
+ map_data = self.project_result.AS_getOutage_4(
+ LOS='QN',
+ iConsider_leak=iConsider_leak,
+ leak_ratio=leak_ratio,
+ consistency_time_window=time_window,
+ )
+ # print(map_data)
+ self.current_raw_map = (
+ self.project_result.getDLQNExceedenceProbabilityMap(
+ map_data, ihour=False, param=outage_time
+ )
+ )
+ # self.current_map = self.current_raw_map.copy()
+ self.current_map = self.time_combo.changeMapTimeUnit(
+ self.current_raw_map, self.map_value_columns_name
+ )
+
+ # print(exeedance_probability)
+ self.plotMap(self.map_value_columns_name)
+
elif map_type == 'Delivery Exceedance vs. Time':
-
- iConsider_leak = self.map_settings_widgets["LDN leak"].currentText()
- leak_ratio = self.map_settings_widgets["leak Criteria"].text()
- time_window = self.map_settings_widgets["Time Window"].text()
- outage_time = self.map_settings_widgets["Outage Time"].text()
-
- if iConsider_leak == "Yes":
- iConsider_leak = True
- else:
- iConsider_leak = False
-
- leak_ratio = float(leak_ratio)
- time_window = int(float(time_window) )
- outage_time = int(float(outage_time) )
-
- self.map_value_columns_name = "res"
- map_data = self.project_result.AS_getOutage_4(LOS='DL', iConsider_leak=iConsider_leak, leak_ratio=leak_ratio, consistency_time_window=time_window)
- #print(map_data)
- self.current_raw_map = self.project_result.getDLQNExceedenceProbabilityMap(map_data, ihour=False , param=outage_time)
- #self.current_map = self.current_raw_map.copy()
- self.current_map = self.time_combo.changeMapTimeUnit(self.current_raw_map, self.map_value_columns_name)
-
- #print(exeedance_probability)
- self.plotMap(self.map_value_columns_name)
-
+ iConsider_leak = self.map_settings_widgets['LDN leak'].currentText() # noqa: N806
+ leak_ratio = self.map_settings_widgets['leak Criteria'].text()
+ time_window = self.map_settings_widgets['Time Window'].text()
+ outage_time = self.map_settings_widgets['Outage Time'].text()
+
+ if iConsider_leak == 'Yes':
+ iConsider_leak = True # noqa: N806
+ else:
+ iConsider_leak = False # noqa: N806
+
+ leak_ratio = float(leak_ratio)
+ time_window = int(float(time_window))
+ outage_time = int(float(outage_time))
+
+ self.map_value_columns_name = 'res'
+ map_data = self.project_result.AS_getOutage_4(
+ LOS='DL',
+ iConsider_leak=iConsider_leak,
+ leak_ratio=leak_ratio,
+ consistency_time_window=time_window,
+ )
+ # print(map_data)
+ self.current_raw_map = (
+ self.project_result.getDLQNExceedenceProbabilityMap(
+ map_data, ihour=False, param=outage_time
+ )
+ )
+ # self.current_map = self.current_raw_map.copy()
+ self.current_map = self.time_combo.changeMapTimeUnit(
+ self.current_raw_map, self.map_value_columns_name
+ )
+
+ # print(exeedance_probability)
+ self.plotMap(self.map_value_columns_name)
+
elif map_type == 'Quantity Return':
- iConsider_leak = self.map_settings_widgets["LDN leak"].currentText()
- leak_ratio = self.map_settings_widgets["leak Criteria"].text()
- time_window = self.map_settings_widgets["Time Window"].text()
-
- if iConsider_leak == "Yes":
- iConsider_leak = True
+ iConsider_leak = self.map_settings_widgets['LDN leak'].currentText() # noqa: N806
+ leak_ratio = self.map_settings_widgets['leak Criteria'].text()
+ time_window = self.map_settings_widgets['Time Window'].text()
+
+ if iConsider_leak == 'Yes':
+ iConsider_leak = True # noqa: N806
else:
- iConsider_leak = False
-
- leak_ratio = float(leak_ratio)
- time_window = int(float(time_window) )
-
- scn_name = self.map_scenario_combo.currentText()
- self.current_raw_map = self.project_result.getOutageTimeGeoPandas_4(scn_name, LOS='QN', iConsider_leak=iConsider_leak, leak_ratio=leak_ratio, consistency_time_window=time_window)
- value_column_label = "restoration_time"
- self.current_map = self.time_combo.changeMapTimeUnit(self.current_raw_map, value_column_label)
+ iConsider_leak = False # noqa: N806
+
+ leak_ratio = float(leak_ratio)
+ time_window = int(float(time_window))
+
+ scn_name = self.map_scenario_combo.currentText()
+ self.current_raw_map = self.project_result.getOutageTimeGeoPandas_4(
+ scn_name,
+ LOS='QN',
+ iConsider_leak=iConsider_leak,
+ leak_ratio=leak_ratio,
+ consistency_time_window=time_window,
+ )
+ value_column_label = 'restoration_time'
+ self.current_map = self.time_combo.changeMapTimeUnit(
+ self.current_raw_map, value_column_label
+ )
self.plotMap(value_column_label)
-
+
self.map_value_columns_name = value_column_label
-
+
elif map_type == 'Delivery Return':
- iConsider_leak = self.map_settings_widgets["LDN leak"].currentText()
- leak_ratio = self.map_settings_widgets["leak Criteria"].text()
- time_window = self.map_settings_widgets["Time Window"].text()
-
- if iConsider_leak == "Yes":
- iConsider_leak = True
+ iConsider_leak = self.map_settings_widgets['LDN leak'].currentText() # noqa: N806
+ leak_ratio = self.map_settings_widgets['leak Criteria'].text()
+ time_window = self.map_settings_widgets['Time Window'].text()
+
+ if iConsider_leak == 'Yes':
+ iConsider_leak = True # noqa: N806
else:
- iConsider_leak = False
-
- leak_ratio = float(leak_ratio)
- time_window = int(float(time_window) )
-
- scn_name = self.map_scenario_combo.currentText()
- self.current_raw_map = self.project_result.getOutageTimeGeoPandas_4(scn_name, LOS='DL', iConsider_leak=iConsider_leak, leak_ratio=leak_ratio, consistency_time_window=time_window)
- value_column_label = "restoration_time"
- self.current_map = self.time_combo.changeMapTimeUnit(self.current_raw_map, value_column_label)
+ iConsider_leak = False # noqa: N806
+
+ leak_ratio = float(leak_ratio)
+ time_window = int(float(time_window))
+
+ scn_name = self.map_scenario_combo.currentText()
+ self.current_raw_map = self.project_result.getOutageTimeGeoPandas_4(
+ scn_name,
+ LOS='DL',
+ iConsider_leak=iConsider_leak,
+ leak_ratio=leak_ratio,
+ consistency_time_window=time_window,
+ )
+ value_column_label = 'restoration_time'
+ self.current_map = self.time_combo.changeMapTimeUnit(
+ self.current_raw_map, value_column_label
+ )
self.plotMap(value_column_label)
-
+
self.map_value_columns_name = value_column_label
-
+
elif map_type == 'SSI':
return
- #self.current_map_data = (map_type, pd.DataFrame())
- iPopulation = self.map_settings_widgets["Population"].currentText()
- scn_name = self.map_scenario_combo.currentText()
- self.current_raw_map = self.project_result.getSystemServiceabilityIndexMap(scn_name, iPopulation=iPopulation)
- self.current_map = self.time_combo.changeMapTimeUnit(self.current_raw_map)
- self.plotMap("SSI", "Time")
- elif map_type == '':
+ # self.current_map_data = (map_type, pd.DataFrame())
+ iPopulation = self.map_settings_widgets['Population'].currentText() # noqa: N806
+ scn_name = self.map_scenario_combo.currentText()
+ self.current_raw_map = (
+ self.project_result.getSystemServiceabilityIndexMap(
+ scn_name, iPopulation=iPopulation
+ )
+ )
+ self.current_map = self.time_combo.changeMapTimeUnit(
+ self.current_raw_map
+ )
+ self.plotMap('SSI', 'Time')
+ elif map_type == '': # noqa: PLC1901
return
else:
- raise
-
- #self.annotation_map = self.current_raw_map.copy()
+ raise # noqa: PLE0704
+
+ # self.annotation_map = self.current_raw_map.copy()
self.annotationRadiusChanegd()
-
-
- def setMapSettingBox(self, map_type):
- for i in range(self.map_settings_table.rowCount()):
+
+ def setMapSettingBox(self, map_type): # noqa: N802, D102
+ for i in range(self.map_settings_table.rowCount()): # noqa: B007
self.map_settings_table.removeRow(0)
-
+
if map_type in map_settings:
- self.populateMapSettingsTable(map_settings[map_type] )
+ self.populateMapSettingsTable(map_settings[map_type])
else:
pass
- #raise ValueError("Unknown Map type: "+repr(map_type))
-
- def populateMapSettingsTable(self, settings_content):
+ # raise ValueError("Unknown Map type: "+repr(map_type))
+
+ def populateMapSettingsTable(self, settings_content): # noqa: C901, N802, D102
self.map_settings_widgets.clear()
vertical_header = []
- cell_type_list = []
- default_list = []
- content_list = []
- validator_list = []
+ cell_type_list = []
+ default_list = []
+ content_list = []
+ validator_list = []
for row in settings_content:
for k in row:
- if k == "Label":
+ if k == 'Label':
vertical_header.append(row[k])
- elif k == "Type":
+ elif k == 'Type':
cell_type_list.append(row[k])
- elif k == "Default":
+ elif k == 'Default':
default_list.append(row[k])
-
- if "Content" in row:
- content_list.append(row["Content" ])
+
+ if 'Content' in row:
+ content_list.append(row['Content'])
else:
content_list.append(None)
-
- if "Validator" in row:
- validator_list.append(row["Validator" ])
+
+ if 'Validator' in row:
+ validator_list.append(row['Validator'])
else:
validator_list.append(None)
-
- self.map_settings_table.setColumnCount(1 )
+
+ self.map_settings_table.setColumnCount(1)
self.map_settings_table.setRowCount(len(settings_content))
self.map_settings_table.setVerticalHeaderLabels(vertical_header)
-
+
i = 0
for cell_type in cell_type_list:
- if cell_type=="Time":
+ if cell_type == 'Time':
self.time_combo = Time_Unit_Combo()
- self.map_settings_table.setCellWidget(i,0, self.time_combo)
- self.time_combo.currentTextChanged.connect(self.mapTimeSettingsChanged )
-
- elif cell_type=="Yes-No_Combo":
+ self.map_settings_table.setCellWidget(i, 0, self.time_combo)
+ self.time_combo.currentTextChanged.connect(
+ self.mapTimeSettingsChanged
+ )
+
+ elif cell_type == 'Yes-No_Combo':
current_widget = Yes_No_Combo()
- self.map_settings_table.setCellWidget(i,0, current_widget)
- current_widget.currentTextChanged.connect(self.mapSettingChanged )
-
+ self.map_settings_table.setCellWidget(i, 0, current_widget)
+ current_widget.currentTextChanged.connect(self.mapSettingChanged)
+
default_value = default_list[i]
current_widget.setCurrentText(default_value)
-
+
self.map_settings_widgets[vertical_header[i]] = current_widget
-
- elif cell_type=="Custom_Combo":
+
+ elif cell_type == 'Custom_Combo':
current_widget = QtWidgets.QComboBox()
- contents = content_list[i]
+ contents = content_list[i]
current_widget.addItems(contents)
- self.map_settings_table.setCellWidget(i,0, current_widget)
- current_widget.currentTextChanged.connect(self.mapSettingChanged )
-
+ self.map_settings_table.setCellWidget(i, 0, current_widget)
+ current_widget.currentTextChanged.connect(self.mapSettingChanged)
+
default_value = default_list[i]
current_widget.setCurrentText(default_value)
-
+
self.map_settings_widgets[vertical_header[i]] = current_widget
-
- elif cell_type=="Float Line":
+
+ elif cell_type == 'Float Line':
current_widget = QtWidgets.QLineEdit()
- self.map_settings_table.setCellWidget(i,0, current_widget)
- current_widget.editingFinished.connect(self.mapSettingChanged )
- if validator_list[i] == None:
- current_widget.setValidator(QtGui.QDoubleValidator(0, 1000000, 20, notation=QtGui.QDoubleValidator.StandardNotation) )
+ self.map_settings_table.setCellWidget(i, 0, current_widget)
+ current_widget.editingFinished.connect(self.mapSettingChanged)
+ if validator_list[i] == None: # noqa: E711
+ current_widget.setValidator(
+ QtGui.QDoubleValidator(
+ 0,
+ 1000000,
+ 20,
+ notation=QtGui.QDoubleValidator.StandardNotation,
+ )
+ )
else:
- current_widget.setValidator(QtGui.QDoubleValidator(validator_list[i]["Min"], validator_list[i]["Max"], 20, notation=QtGui.QDoubleValidator.StandardNotation) )
-
+ current_widget.setValidator(
+ QtGui.QDoubleValidator(
+ validator_list[i]['Min'],
+ validator_list[i]['Max'],
+ 20,
+ notation=QtGui.QDoubleValidator.StandardNotation,
+ )
+ )
+
default_value = default_list[i]
current_widget.setText(default_value)
self.map_settings_widgets[vertical_header[i]] = current_widget
-
- elif cell_type=="Int Line":
+
+ elif cell_type == 'Int Line':
current_widget = QtWidgets.QLineEdit()
- self.map_settings_table.setCellWidget(i,0, current_widget)
- current_widget.editingFinished.connect(self.mapSettingChanged )
-
- if validator_list[i] == None:
- current_widget.setValidator(QtGui.QIntValidator(0, 3600*24*1000) )
+ self.map_settings_table.setCellWidget(i, 0, current_widget)
+ current_widget.editingFinished.connect(self.mapSettingChanged)
+
+ if validator_list[i] == None: # noqa: E711
+ current_widget.setValidator(
+ QtGui.QIntValidator(0, 3600 * 24 * 1000)
+ )
else:
- current_widget.setValidator(QtGui.QIntValidator(validator_list[i]["Min"], validator_list[i]["Max"]) )
-
+ current_widget.setValidator(
+ QtGui.QIntValidator(
+ validator_list[i]['Min'], validator_list[i]['Max']
+ )
+ )
+
default_value = default_list[i]
current_widget.setText(default_value)
self.map_settings_widgets[vertical_header[i]] = current_widget
else:
- raise ValueError(repr(cell_type) )
-
- i += 1
- #for label in settings_content:
-
- def mapTimeSettingsChanged(self, x):
- self.current_map = self.time_combo.changeMapTimeUnit(self.current_raw_map, self.map_value_columns_name)
+ raise ValueError(repr(cell_type))
+
+ i += 1 # noqa: SIM113
+ # for label in settings_content:
+
+ def mapTimeSettingsChanged(self, x): # noqa: ARG002, N802, D102
+ self.current_map = self.time_combo.changeMapTimeUnit(
+ self.current_raw_map, self.map_value_columns_name
+ )
self.plotMap(self.map_value_columns_name)
-
- def mapSettingChanged(self):
- if "Population" in self.map_settings_widgets:
- new_population_setting = self.map_settings_widgets["Population"].currentText()
- if new_population_setting == "Yes" and type(self.project_result._population_data) == type(None):
- self.errorMSG("Error", "Population data is not loaded")
- self.map_settings_widgets["Population"].setCurrentText("No")
+
+ def mapSettingChanged(self): # noqa: N802, D102
+ if 'Population' in self.map_settings_widgets:
+ new_population_setting = self.map_settings_widgets[
+ 'Population'
+ ].currentText()
+ if new_population_setting == 'Yes' and type( # noqa: E721
+ self.project_result._population_data # noqa: SLF001
+ ) == type(None):
+ self.errorMSG('Error', 'Population data is not loaded')
+ self.map_settings_widgets['Population'].setCurrentText('No')
return
self.calculateCurrentMap()
- def tabChangedMap(self, index):
+
+ def tabChangedMap(self, index): # noqa: N802, D102
if index == 1:
self.initializeMap()
-
- def saveCurrentMapByButton(self):
- #if self.current_map_data == None:
- if type(self.current_map) == type(None):
- self.errorMSG("REWET", 'No map is ploted')
+
+ def saveCurrentMapByButton(self): # noqa: N802, D102
+ # if self.current_map_data == None:
+ if type(self.current_map) == type(None): # noqa: E721
+            self.errorMSG('REWET', 'No map is plotted')
return
-
- file_addr = QtWidgets.QFileDialog.getSaveFileName(self.asli_MainWindow, 'Save File',
- self.project_file_addr,"Shapefile (*.shp)")
- if file_addr[0] == '':
+
+ file_addr = QtWidgets.QFileDialog.getSaveFileName(
+ self.asli_MainWindow,
+ 'Save File',
+ self.project_file_addr,
+ 'Shapefile (*.shp)',
+ )
+ if file_addr[0] == '': # noqa: PLC1901
return
-
- #self.current_map_data[1].to_excel(file_addr[0])
+
+ # self.current_map_data[1].to_excel(file_addr[0])
self.current_map.to_file(file_addr[0])
-
\ No newline at end of file
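
The annotation hit-test in the hunk above (annotationRadiusChanegd / putAnnotation) works by buffering every plotted geometry by the user-supplied radius and then asking which buffered shapes contain the cursor position. A minimal standalone sketch of that idea, assuming geopandas and shapely are installed; the node data, column name, and cursor coordinates below are illustrative only and not part of the patch:

    import geopandas as gpd
    from shapely.geometry import Point

    # Three demand nodes with a result value attached (illustrative data only).
    nodes = gpd.GeoDataFrame(
        {'res': [1.0, 2.0, 3.0]},
        geometry=[Point(0, 0), Point(10, 0), Point(20, 0)],
    )

    annotation_radius = 2.5
    # Buffer the point geometries so a nearby cursor still "hits" a node,
    # mirroring what annotationRadiusChanegd() does per geometry.
    annotation_map = nodes.copy(deep=True)
    annotation_map.geometry = nodes.geometry.buffer(annotation_radius)

    # Simulated cursor position, analogous to the mouse event in putAnnotation().
    cursor = Point(9.2, 0.8)
    hits = annotation_map.geometry.contains(cursor)
    hit_index = hits[hits].index

    if len(hit_index) >= 1:
        print('annotate with:', annotation_map.loc[hit_index[0], 'res'])
    else:
        print('cursor is not over any buffered node')

The sketch buffers the whole GeoSeries in one call instead of iterating row by row; the containment test and index lookup follow the same pattern the widget code uses.
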
diff --git a/modules/systemPerformance/REWET/REWET/GUI/MplWidget.py b/modules/systemPerformance/REWET/REWET/GUI/MplWidget.py
index 200f037f0..41c8cdf2e 100644
--- a/modules/systemPerformance/REWET/REWET/GUI/MplWidget.py
+++ b/modules/systemPerformance/REWET/REWET/GUI/MplWidget.py
@@ -1,33 +1,35 @@
-# -*- coding: utf-8 -*-
-"""
-Created on Thu Nov 10 18:26:02 2022
+"""Created on Thu Nov 10 18:26:02 2022
@author: snaeimi
-"""
+""" # noqa: CPY001, D400, N999
# Imports
-from PyQt5 import QtWidgets
-from matplotlib.figure import Figure
from matplotlib.backends.backend_qt5agg import FigureCanvasQTAgg as Canvas
-from matplotlib.backends.backend_qt5agg import NavigationToolbar2QT as NavigationToolbar
-import matplotlib
+from matplotlib.backends.backend_qt5agg import (
+ NavigationToolbar2QT as NavigationToolbar,
+)
+from matplotlib.figure import Figure
+from PyQt5 import QtWidgets
-class MplCanvas(Canvas):
+class MplCanvas(Canvas): # noqa: D101
def __init__(self):
self.fig = Figure(figsize=(100, 40), dpi=100, tight_layout=True)
self.ax = self.fig.add_subplot(111)
Canvas.__init__(self, self.fig)
- Canvas.setSizePolicy(self, QtWidgets.QSizePolicy.Expanding, QtWidgets.QSizePolicy.Expanding)
+ Canvas.setSizePolicy(
+ self, QtWidgets.QSizePolicy.Expanding, QtWidgets.QSizePolicy.Expanding
+ )
Canvas.updateGeometry(self)
+
# Matplotlib widget
-class MplWidget(QtWidgets.QWidget):
+class MplWidget(QtWidgets.QWidget): # noqa: D101
def __init__(self, parent=None):
- QtWidgets.QWidget.__init__(self, parent) # Inherit from QWidget
- self.canvas = MplCanvas() # Create canvas object
+ QtWidgets.QWidget.__init__(self, parent) # Inherit from QWidget
+ self.canvas = MplCanvas() # Create canvas object
toolbar = NavigationToolbar(self.canvas, self)
- self.vbl = QtWidgets.QVBoxLayout() # Set box for plotting
+ self.vbl = QtWidgets.QVBoxLayout() # Set box for plotting
self.vbl.addWidget(toolbar)
self.vbl.addWidget(self.canvas)
- self.setLayout(self.vbl)
\ No newline at end of file
+ self.setLayout(self.vbl)
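
MplWidget wraps a Matplotlib canvas and its navigation toolbar in a plain QWidget, so the rest of the GUI draws by reaching into widget.canvas.ax. A minimal smoke-test sketch, assuming PyQt5 and matplotlib are installed; the flat import path is an assumption for illustration (inside the repository the module lives under REWET/GUI):

    import sys

    from PyQt5 import QtWidgets

    # Assumed importable for this sketch; adjust the path to the GUI package.
    from MplWidget import MplWidget

    app = QtWidgets.QApplication(sys.argv)
    widget = MplWidget()
    widget.canvas.ax.plot([0, 1, 2, 3], [0, 1, 4, 9])  # draw on the embedded axes
    widget.canvas.ax.set_title('MplWidget smoke test')
    widget.canvas.draw()
    widget.show()
    sys.exit(app.exec_())
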
diff --git a/modules/systemPerformance/REWET/REWET/GUI/Node_Damage_Discovery_Designer.py b/modules/systemPerformance/REWET/REWET/GUI/Node_Damage_Discovery_Designer.py
index cb56e6197..8735ac15b 100644
--- a/modules/systemPerformance/REWET/REWET/GUI/Node_Damage_Discovery_Designer.py
+++ b/modules/systemPerformance/REWET/REWET/GUI/Node_Damage_Discovery_Designer.py
@@ -1,13 +1,12 @@
-# -*- coding: utf-8 -*-
-"""
-Created on Tue Nov 1 23:25:30 2022
+"""Created on Tue Nov 1 23:25:30 2022
@author: snaeimi
-"""
+""" # noqa: CPY001, D400, N999
from .Damage_Discovery_Designer import Damage_Discovery_Designer
-class Node_Damage_Discovery_Designer(Damage_Discovery_Designer):
+
+class Node_Damage_Discovery_Designer(Damage_Discovery_Designer): # noqa: D101
def __init__(self, node_damage_discovery_model):
super().__init__(node_damage_discovery_model)
- self._window.setWindowTitle("Node Damage Discovery")
\ No newline at end of file
+ self._window.setWindowTitle('Node Damage Discovery')
diff --git a/modules/systemPerformance/REWET/REWET/GUI/Node_Damage_Model_Designer.py b/modules/systemPerformance/REWET/REWET/GUI/Node_Damage_Model_Designer.py
index 723422741..179b31858 100644
--- a/modules/systemPerformance/REWET/REWET/GUI/Node_Damage_Model_Designer.py
+++ b/modules/systemPerformance/REWET/REWET/GUI/Node_Damage_Model_Designer.py
@@ -1,63 +1,163 @@
-# -*- coding: utf-8 -*-
-"""
-Created on Tue Nov 1 20:36:29 2022
+"""Created on Tue Nov 1 20:36:29 2022
@author: snaeimi
-"""
+""" # noqa: CPY001, D400, N999
+
+from PyQt5 import QtGui, QtWidgets
-from PyQt5 import QtCore, QtGui, QtWidgets
-from .Node_Damage_Model_Window import Ui_Node_Damage_Model
from .Node_Damage_Model_Help_Designer import Node_Damage_Model_Help_Designer
+from .Node_Damage_Model_Window import Ui_Node_Damage_Model
+
-class Node_Damage_Model_Designer(Ui_Node_Damage_Model):
+class Node_Damage_Model_Designer(Ui_Node_Damage_Model): # noqa: D101
def __init__(self, node_damage_model):
self._window = QtWidgets.QDialog()
self.setupUi(self._window)
self.node_damage_model = node_damage_model.copy()
-
- self.a_line.setValidator(QtGui.QDoubleValidator(-1000000, 1000000, 20, notation=QtGui.QDoubleValidator.StandardNotation) )
- self.b_line.setValidator(QtGui.QDoubleValidator(-1000000, 1000000, 20, notation=QtGui.QDoubleValidator.StandardNotation) )
- self.c_line.setValidator(QtGui.QDoubleValidator(-1000000, 1000000, 20, notation=QtGui.QDoubleValidator.StandardNotation) )
- self.d_line.setValidator(QtGui.QDoubleValidator(-1000000, 1000000, 20, notation=QtGui.QDoubleValidator.StandardNotation) )
- self.e_line.setValidator(QtGui.QDoubleValidator(-1000000, 1000000, 20, notation=QtGui.QDoubleValidator.StandardNotation) )
- self.f_line.setValidator(QtGui.QDoubleValidator(-1000000, 1000000, 20, notation=QtGui.QDoubleValidator.StandardNotation) )
-
- self.aa_line.setValidator(QtGui.QDoubleValidator(-1000000, 1000000, 20, notation=QtGui.QDoubleValidator.StandardNotation) )
- self.bb_line.setValidator(QtGui.QDoubleValidator(-1000000, 1000000, 20, notation=QtGui.QDoubleValidator.StandardNotation) )
- self.cc_line.setValidator(QtGui.QDoubleValidator(-1000000, 1000000, 20, notation=QtGui.QDoubleValidator.StandardNotation) )
- self.dd_line.setValidator(QtGui.QDoubleValidator(-1000000, 1000000, 20, notation=QtGui.QDoubleValidator.StandardNotation) )
- self.ee1_line.setValidator(QtGui.QDoubleValidator(-1000000, 1000000, 20, notation=QtGui.QDoubleValidator.StandardNotation) )
- self.ff1_line.setValidator(QtGui.QDoubleValidator(-1000000, 1000000, 20, notation=QtGui.QDoubleValidator.StandardNotation) )
- self.ee2_line.setValidator(QtGui.QDoubleValidator(-1000000, 1000000, 20, notation=QtGui.QDoubleValidator.StandardNotation) )
- self.ff2_line.setValidator(QtGui.QDoubleValidator(-1000000, 1000000, 20, notation=QtGui.QDoubleValidator.StandardNotation) )
-
- a = self.node_damage_model['a']
- b = self.node_damage_model['b']
- c = self.node_damage_model['c']
- d = self.node_damage_model['d']
- e = self.node_damage_model['e']
- f = self.node_damage_model['f']
- aa = self.node_damage_model['aa']
- bb = self.node_damage_model['bb']
- cc = self.node_damage_model['cc']
- dd = self.node_damage_model['dd']
+
+ self.a_line.setValidator(
+ QtGui.QDoubleValidator(
+ -1000000,
+ 1000000,
+ 20,
+ notation=QtGui.QDoubleValidator.StandardNotation,
+ )
+ )
+ self.b_line.setValidator(
+ QtGui.QDoubleValidator(
+ -1000000,
+ 1000000,
+ 20,
+ notation=QtGui.QDoubleValidator.StandardNotation,
+ )
+ )
+ self.c_line.setValidator(
+ QtGui.QDoubleValidator(
+ -1000000,
+ 1000000,
+ 20,
+ notation=QtGui.QDoubleValidator.StandardNotation,
+ )
+ )
+ self.d_line.setValidator(
+ QtGui.QDoubleValidator(
+ -1000000,
+ 1000000,
+ 20,
+ notation=QtGui.QDoubleValidator.StandardNotation,
+ )
+ )
+ self.e_line.setValidator(
+ QtGui.QDoubleValidator(
+ -1000000,
+ 1000000,
+ 20,
+ notation=QtGui.QDoubleValidator.StandardNotation,
+ )
+ )
+ self.f_line.setValidator(
+ QtGui.QDoubleValidator(
+ -1000000,
+ 1000000,
+ 20,
+ notation=QtGui.QDoubleValidator.StandardNotation,
+ )
+ )
+
+ self.aa_line.setValidator(
+ QtGui.QDoubleValidator(
+ -1000000,
+ 1000000,
+ 20,
+ notation=QtGui.QDoubleValidator.StandardNotation,
+ )
+ )
+ self.bb_line.setValidator(
+ QtGui.QDoubleValidator(
+ -1000000,
+ 1000000,
+ 20,
+ notation=QtGui.QDoubleValidator.StandardNotation,
+ )
+ )
+ self.cc_line.setValidator(
+ QtGui.QDoubleValidator(
+ -1000000,
+ 1000000,
+ 20,
+ notation=QtGui.QDoubleValidator.StandardNotation,
+ )
+ )
+ self.dd_line.setValidator(
+ QtGui.QDoubleValidator(
+ -1000000,
+ 1000000,
+ 20,
+ notation=QtGui.QDoubleValidator.StandardNotation,
+ )
+ )
+ self.ee1_line.setValidator(
+ QtGui.QDoubleValidator(
+ -1000000,
+ 1000000,
+ 20,
+ notation=QtGui.QDoubleValidator.StandardNotation,
+ )
+ )
+ self.ff1_line.setValidator(
+ QtGui.QDoubleValidator(
+ -1000000,
+ 1000000,
+ 20,
+ notation=QtGui.QDoubleValidator.StandardNotation,
+ )
+ )
+ self.ee2_line.setValidator(
+ QtGui.QDoubleValidator(
+ -1000000,
+ 1000000,
+ 20,
+ notation=QtGui.QDoubleValidator.StandardNotation,
+ )
+ )
+ self.ff2_line.setValidator(
+ QtGui.QDoubleValidator(
+ -1000000,
+ 1000000,
+ 20,
+ notation=QtGui.QDoubleValidator.StandardNotation,
+ )
+ )
+
+ a = self.node_damage_model['a']
+ b = self.node_damage_model['b']
+ c = self.node_damage_model['c']
+ d = self.node_damage_model['d']
+ e = self.node_damage_model['e']
+ f = self.node_damage_model['f']
+ aa = self.node_damage_model['aa']
+ bb = self.node_damage_model['bb']
+ cc = self.node_damage_model['cc']
+ dd = self.node_damage_model['dd']
ee1 = self.node_damage_model['ee1']
ff1 = self.node_damage_model['ff1']
ee2 = self.node_damage_model['ee2']
ff2 = self.node_damage_model['ff2']
-
- if self.node_damage_model["damage_node_model"] == "equal_diameter_emitter":
+
+ if self.node_damage_model['damage_node_model'] == 'equal_diameter_emitter':
self.equal_emitter_button.setChecked(True)
- elif self.node_damage_model["damage_node_model"] == "equal_diameter_reservoir":
+ elif (
+ self.node_damage_model['damage_node_model'] == 'equal_diameter_reservoir'
+ ):
self.equal_reservoir_button.setChecked(True)
-
+
self.a_line.setText(str(a))
self.b_line.setText(str(b))
self.c_line.setText(str(c))
self.d_line.setText(str(d))
self.e_line.setText(str(e))
self.f_line.setText(str(f))
-
+
self.aa_line.setText(str(aa))
self.bb_line.setText(str(bb))
self.cc_line.setText(str(cc))
@@ -66,78 +166,78 @@ def __init__(self, node_damage_model):
self.ff1_line.setText(str(ff1))
self.ee2_line.setText(str(ee2))
self.ff2_line.setText(str(ff2))
-
+
self.buttonBox.accepted.connect(self.okButtonPressed)
self.help_button.clicked.connect(self.showHelpByButton)
-
- def showHelpByButton(self):
+
+ def showHelpByButton(self): # noqa: D102, N802, PLR6301
help_dialog_box = Node_Damage_Model_Help_Designer()
- help_dialog_box._window.exec_()
-
- def okButtonPressed(self):
- a = self.a_line.text()
- b = self.b_line.text()
- c = self.c_line.text()
- d = self.d_line.text()
- e = self.e_line.text()
- f = self.f_line.text()
- aa = self.aa_line.text()
- bb = self.bb_line.text()
- cc = self.cc_line.text()
- dd = self.dd_line.text()
+ help_dialog_box._window.exec_() # noqa: SLF001
+
+ def okButtonPressed(self): # noqa: C901, N802, D102
+ a = self.a_line.text()
+ b = self.b_line.text()
+ c = self.c_line.text()
+ d = self.d_line.text()
+ e = self.e_line.text()
+ f = self.f_line.text()
+ aa = self.aa_line.text()
+ bb = self.bb_line.text()
+ cc = self.cc_line.text()
+ dd = self.dd_line.text()
ee1 = self.ee1_line.text()
ff1 = self.ff1_line.text()
ee2 = self.ee2_line.text()
ff2 = self.ff2_line.text()
-
+
if_failed = False
-
- if a == '':
- self.errorMSG("Cannot Save data", "A Field cannot be left empty")
+
+ if a == '': # noqa: PLC1901
+ self.errorMSG('Cannot Save data', 'A Field cannot be left empty')
if_failed = True
- elif aa == '':
- self.errorMSG("Cannot Save data", "AA Field cannot be left empty")
+ elif aa == '': # noqa: PLC1901
+ self.errorMSG('Cannot Save data', 'AA Field cannot be left empty')
if_failed = True
- elif b == '':
- self.errorMSG("Cannot Save data", "B Field cannot be left empty")
+ elif b == '': # noqa: PLC1901
+ self.errorMSG('Cannot Save data', 'B Field cannot be left empty')
if_failed = True
- elif bb == '':
- self.errorMSG("Cannot Save data", "BB Field cannot be left empty")
+ elif bb == '': # noqa: PLC1901
+ self.errorMSG('Cannot Save data', 'BB Field cannot be left empty')
if_failed = True
- elif c == '':
- self.errorMSG("Cannot Save data", "C Field cannot be left empty")
+ elif c == '': # noqa: PLC1901
+ self.errorMSG('Cannot Save data', 'C Field cannot be left empty')
if_failed = True
- elif cc == '':
- self.errorMSG("Cannot Save data", "CC Field cannot be left empty")
+ elif cc == '': # noqa: PLC1901
+ self.errorMSG('Cannot Save data', 'CC Field cannot be left empty')
if_failed = True
- elif d == '':
- self.errorMSG("Cannot Save data", "D Field cannot be left empty")
+ elif d == '': # noqa: PLC1901
+ self.errorMSG('Cannot Save data', 'D Field cannot be left empty')
if_failed = True
- elif dd == '':
- self.errorMSG("Cannot Save data", "DD Field cannot be left empty")
+ elif dd == '': # noqa: PLC1901
+ self.errorMSG('Cannot Save data', 'DD Field cannot be left empty')
if_failed = True
- elif e == '':
- self.errorMSG("Cannot Save data", "E Field cannot be left empty")
+ elif e == '': # noqa: PLC1901
+ self.errorMSG('Cannot Save data', 'E Field cannot be left empty')
if_failed = True
- elif ee1 == '':
- self.errorMSG("Cannot Save data", "EE1 Field cannot be left empty")
+ elif ee1 == '': # noqa: PLC1901
+ self.errorMSG('Cannot Save data', 'EE1 Field cannot be left empty')
if_failed = True
- elif ee2 == '':
- self.errorMSG("Cannot Save data", "EE2 Field cannot be left empty")
+ elif ee2 == '': # noqa: PLC1901
+ self.errorMSG('Cannot Save data', 'EE2 Field cannot be left empty')
if_failed = True
- elif f == '':
- self.errorMSG("Cannot Save data", "F Field cannot be left empty")
+ elif f == '': # noqa: PLC1901
+ self.errorMSG('Cannot Save data', 'F Field cannot be left empty')
if_failed = True
- elif ff1 == '':
- self.errorMSG("Cannot Save data", "FF1 Field cannot be left empty")
+ elif ff1 == '': # noqa: PLC1901
+ self.errorMSG('Cannot Save data', 'FF1 Field cannot be left empty')
if_failed = True
- elif ff2 == '':
- self.errorMSG("Cannot Save data", "FF2 Field cannot be left empty")
+ elif ff2 == '': # noqa: PLC1901
+ self.errorMSG('Cannot Save data', 'FF2 Field cannot be left empty')
if_failed = True
-
+
if if_failed:
return
-
+
self.node_damage_model['a'] = float(a)
self.node_damage_model['b'] = float(b)
self.node_damage_model['c'] = float(c)
@@ -152,22 +252,20 @@ def okButtonPressed(self):
self.node_damage_model['ff1'] = float(ff1)
self.node_damage_model['ee2'] = float(ee2)
self.node_damage_model['ff2'] = float(ff2)
-
+
if self.equal_emitter_button.isChecked():
- self.node_damage_model['damage_node_model'] = "equal_diameter_emitter"
+ self.node_damage_model['damage_node_model'] = 'equal_diameter_emitter'
elif self.equal_reservoir_button.isChecked():
- self.node_damage_model['damage_node_model'] = "equal_diameter_reservoir"
-
+ self.node_damage_model['damage_node_model'] = 'equal_diameter_reservoir'
+
self._window.accept()
-
- def errorMSG(self, error_title, error_msg, error_more_msg=None):
+
+ def errorMSG(self, error_title, error_msg, error_more_msg=None): # noqa: D102, N802, PLR6301
error_widget = QtWidgets.QMessageBox()
error_widget.setIcon(QtWidgets.QMessageBox.Critical)
error_widget.setText(error_msg)
error_widget.setWindowTitle(error_title)
error_widget.setStandardButtons(QtWidgets.QMessageBox.Ok)
- if error_more_msg!=None:
+ if error_more_msg != None: # noqa: E711
error_widget.setInformativeText(error_more_msg)
error_widget.exec_()
-
-
\ No newline at end of file
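
The designer above attaches the same StandardNotation QDoubleValidator to fourteen line edits, one verbatim block each. A hedged sketch of how that repetition could be collapsed into a small helper; apply_standard_double_validator is a hypothetical name used only for illustration and is not part of the patch:

    import sys

    from PyQt5 import QtGui, QtWidgets


    def apply_standard_double_validator(line_edits, low=-1000000, high=1000000, decimals=20):
        """Attach the same StandardNotation QDoubleValidator to every line edit."""
        for line_edit in line_edits:
            line_edit.setValidator(
                QtGui.QDoubleValidator(
                    low,
                    high,
                    decimals,
                    notation=QtGui.QDoubleValidator.StandardNotation,
                )
            )


    app = QtWidgets.QApplication(sys.argv)
    a_line, b_line, c_line = (QtWidgets.QLineEdit() for _ in range(3))
    apply_standard_double_validator([a_line, b_line, c_line])
    a_line.setText('not a number')
    print(a_line.hasAcceptableInput())  # False: the validator rejects non-numeric text
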
diff --git a/modules/systemPerformance/REWET/REWET/GUI/Node_Damage_Model_Help_Designer.py b/modules/systemPerformance/REWET/REWET/GUI/Node_Damage_Model_Help_Designer.py
index 3a5af6962..c2a91034a 100644
--- a/modules/systemPerformance/REWET/REWET/GUI/Node_Damage_Model_Help_Designer.py
+++ b/modules/systemPerformance/REWET/REWET/GUI/Node_Damage_Model_Help_Designer.py
@@ -1,16 +1,15 @@
-# -*- coding: utf-8 -*-
-"""
-Created on Tue Nov 1 21:35:02 2022
+"""Created on Tue Nov 1 21:35:02 2022
@author: snaeimi
-"""
+""" # noqa: CPY001, D400, N999
from PyQt5 import QtWidgets
-from .Node_Damage_Model_Help_Window import Ui_Node_Damage_Model_Help
+from .Node_Damage_Model_Help_Window import Ui_Node_Damage_Model_Help
-class Node_Damage_Model_Help_Designer(Ui_Node_Damage_Model_Help):
+
+class Node_Damage_Model_Help_Designer(Ui_Node_Damage_Model_Help): # noqa: D101
def __init__(self):
self._window = QtWidgets.QDialog()
self.setupUi(self._window)
- self.buttonBox.rejected.connect(self._window.close)
\ No newline at end of file
+ self.buttonBox.rejected.connect(self._window.close)
diff --git a/modules/systemPerformance/REWET/REWET/GUI/Node_Damage_Model_Help_Window.py b/modules/systemPerformance/REWET/REWET/GUI/Node_Damage_Model_Help_Window.py
index 09d65fdd3..79081db99 100644
--- a/modules/systemPerformance/REWET/REWET/GUI/Node_Damage_Model_Help_Window.py
+++ b/modules/systemPerformance/REWET/REWET/GUI/Node_Damage_Model_Help_Window.py
@@ -1,60 +1,67 @@
-# -*- coding: utf-8 -*-
-
-# Form implementation generated from reading ui file 'Node_Damage_Model_Help_Window.ui'
+# Form implementation generated from reading ui file 'Node_Damage_Model_Help_Window.ui' # noqa: CPY001, D100, N999
#
# Created by: PyQt5 UI code generator 5.12.3
#
# WARNING! All changes made in this file will be lost!
-from PyQt5 import QtCore, QtGui, QtWidgets
+from PyQt5 import QtCore, QtWidgets
-class Ui_Node_Damage_Model_Help(object):
- def setupUi(self, Node_Damage_Model_Help):
- Node_Damage_Model_Help.setObjectName("Node_Damage_Model_Help")
+class Ui_Node_Damage_Model_Help: # noqa: D101
+ def setupUi(self, Node_Damage_Model_Help): # noqa: N802, N803, D102
+ Node_Damage_Model_Help.setObjectName('Node_Damage_Model_Help')
Node_Damage_Model_Help.resize(340, 130)
Node_Damage_Model_Help.setMinimumSize(QtCore.QSize(340, 130))
Node_Damage_Model_Help.setMaximumSize(QtCore.QSize(340, 130))
self.layoutWidget = QtWidgets.QWidget(Node_Damage_Model_Help)
self.layoutWidget.setGeometry(QtCore.QRect(20, 20, 291, 101))
- self.layoutWidget.setObjectName("layoutWidget")
+ self.layoutWidget.setObjectName('layoutWidget')
self.verticalLayout_2 = QtWidgets.QVBoxLayout(self.layoutWidget)
self.verticalLayout_2.setContentsMargins(0, 0, 0, 0)
- self.verticalLayout_2.setObjectName("verticalLayout_2")
+ self.verticalLayout_2.setObjectName('verticalLayout_2')
self.label = QtWidgets.QLabel(self.layoutWidget)
- self.label.setObjectName("label")
+ self.label.setObjectName('label')
self.verticalLayout_2.addWidget(self.label)
self.label_2 = QtWidgets.QLabel(self.layoutWidget)
- self.label_2.setObjectName("label_2")
+ self.label_2.setObjectName('label_2')
self.verticalLayout_2.addWidget(self.label_2)
self.label_3 = QtWidgets.QLabel(self.layoutWidget)
- self.label_3.setObjectName("label_3")
+ self.label_3.setObjectName('label_3')
self.verticalLayout_2.addWidget(self.label_3)
self.horizontalLayout_2 = QtWidgets.QHBoxLayout()
- self.horizontalLayout_2.setObjectName("horizontalLayout_2")
- spacerItem = QtWidgets.QSpacerItem(40, 20, QtWidgets.QSizePolicy.Expanding, QtWidgets.QSizePolicy.Minimum)
+ self.horizontalLayout_2.setObjectName('horizontalLayout_2')
+ spacerItem = QtWidgets.QSpacerItem( # noqa: N806
+ 40, 20, QtWidgets.QSizePolicy.Expanding, QtWidgets.QSizePolicy.Minimum
+ )
self.horizontalLayout_2.addItem(spacerItem)
self.buttonBox = QtWidgets.QDialogButtonBox(self.layoutWidget)
self.buttonBox.setOrientation(QtCore.Qt.Vertical)
self.buttonBox.setStandardButtons(QtWidgets.QDialogButtonBox.Close)
- self.buttonBox.setObjectName("buttonBox")
+ self.buttonBox.setObjectName('buttonBox')
self.horizontalLayout_2.addWidget(self.buttonBox)
self.verticalLayout_2.addLayout(self.horizontalLayout_2)
self.retranslateUi(Node_Damage_Model_Help)
QtCore.QMetaObject.connectSlotsByName(Node_Damage_Model_Help)
- def retranslateUi(self, Node_Damage_Model_Help):
+ def retranslateUi(self, Node_Damage_Model_Help): # noqa: N802, N803, D102
_translate = QtCore.QCoreApplication.translate
- Node_Damage_Model_Help.setWindowTitle(_translate("Node_Damage_Model_Help", "Help"))
- self.label.setText(_translate("Node_Damage_Model_Help", "ND = Number of Nodal damage"))
- self.label_2.setText(_translate("Node_Damage_Model_Help", "MP = Pressure at the node"))
- self.label_3.setText(_translate("Node_Damage_Model_Help", "RR =Repair Rate"))
+ Node_Damage_Model_Help.setWindowTitle(
+ _translate('Node_Damage_Model_Help', 'Help')
+ )
+ self.label.setText(
+ _translate('Node_Damage_Model_Help', 'ND = Number of Nodal damage')
+ )
+ self.label_2.setText(
+ _translate('Node_Damage_Model_Help', 'MP = Pressure at the node')
+ )
+        self.label_3.setText(_translate('Node_Damage_Model_Help', 'RR = Repair Rate'))
-if __name__ == "__main__":
+if __name__ == '__main__':
import sys
+
app = QtWidgets.QApplication(sys.argv)
Node_Damage_Model_Help = QtWidgets.QDialog()
ui = Ui_Node_Damage_Model_Help()
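
The pyuic5-generated Ui_ classes above carry no window of their own: callers create a bare QDialog, call setupUi on it, and then show the dialog, which is exactly the pattern the Designer wrapper classes in this patch follow. A minimal launcher sketch under that assumption; the flat import path is illustrative only:

    import sys

    from PyQt5 import QtWidgets

    # Assumed importable for this sketch; in the repository the module sits in REWET/GUI.
    from Node_Damage_Model_Help_Window import Ui_Node_Damage_Model_Help

    app = QtWidgets.QApplication(sys.argv)
    dialog = QtWidgets.QDialog()
    ui = Ui_Node_Damage_Model_Help()
    ui.setupUi(dialog)  # builds the labels and the Close button box on the dialog
    dialog.show()
    sys.exit(app.exec_())
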
diff --git a/modules/systemPerformance/REWET/REWET/GUI/Node_Damage_Model_Window.py b/modules/systemPerformance/REWET/REWET/GUI/Node_Damage_Model_Window.py
index 39e5b2f0e..cfe80424a 100644
--- a/modules/systemPerformance/REWET/REWET/GUI/Node_Damage_Model_Window.py
+++ b/modules/systemPerformance/REWET/REWET/GUI/Node_Damage_Model_Window.py
@@ -1,6 +1,4 @@
-# -*- coding: utf-8 -*-
-
-# Form implementation generated from reading ui file 'Node_Damage_Model_Window.ui'
+# Form implementation generated from reading ui file 'Node_Damage_Model_Window.ui' # noqa: CPY001, D100, N999
#
# Created by: PyQt5 UI code generator 5.12.3
#
@@ -10,9 +8,9 @@
from PyQt5 import QtCore, QtGui, QtWidgets
-class Ui_Node_Damage_Model(object):
- def setupUi(self, Node_Damage_Model):
- Node_Damage_Model.setObjectName("Node_Damage_Model")
+class Ui_Node_Damage_Model: # noqa: D101
+ def setupUi(self, Node_Damage_Model): # noqa: N802, N803, D102, PLR0915
+ Node_Damage_Model.setObjectName('Node_Damage_Model')
Node_Damage_Model.resize(396, 296)
palette = QtGui.QPalette()
brush = QtGui.QBrush(QtGui.QColor(255, 255, 255))
@@ -37,176 +35,188 @@ def setupUi(self, Node_Damage_Model):
self.buttonBox = QtWidgets.QDialogButtonBox(Node_Damage_Model)
self.buttonBox.setGeometry(QtCore.QRect(300, 230, 81, 51))
self.buttonBox.setOrientation(QtCore.Qt.Vertical)
- self.buttonBox.setStandardButtons(QtWidgets.QDialogButtonBox.Cancel|QtWidgets.QDialogButtonBox.Ok)
- self.buttonBox.setObjectName("buttonBox")
+ self.buttonBox.setStandardButtons(
+ QtWidgets.QDialogButtonBox.Cancel | QtWidgets.QDialogButtonBox.Ok
+ )
+ self.buttonBox.setObjectName('buttonBox')
self.groupBox = QtWidgets.QGroupBox(Node_Damage_Model)
self.groupBox.setGeometry(QtCore.QRect(20, 230, 261, 51))
- self.groupBox.setObjectName("groupBox")
+ self.groupBox.setObjectName('groupBox')
self.equal_emitter_button = QtWidgets.QRadioButton(self.groupBox)
self.equal_emitter_button.setGeometry(QtCore.QRect(20, 20, 91, 17))
self.equal_emitter_button.setChecked(True)
- self.equal_emitter_button.setObjectName("equal_emitter_button")
+ self.equal_emitter_button.setObjectName('equal_emitter_button')
self.equal_reservoir_button = QtWidgets.QRadioButton(self.groupBox)
self.equal_reservoir_button.setGeometry(QtCore.QRect(120, 20, 131, 17))
- self.equal_reservoir_button.setObjectName("equal_reservoir_button")
+ self.equal_reservoir_button.setObjectName('equal_reservoir_button')
self.a_line = QtWidgets.QLineEdit(Node_Damage_Model)
self.a_line.setGeometry(QtCore.QRect(40, 80, 41, 20))
- self.a_line.setObjectName("a_line")
+ self.a_line.setObjectName('a_line')
self.label = QtWidgets.QLabel(Node_Damage_Model)
self.label.setGeometry(QtCore.QRect(20, 80, 21, 16))
- self.label.setObjectName("label")
+ self.label.setObjectName('label')
self.label_2 = QtWidgets.QLabel(Node_Damage_Model)
self.label_2.setGeometry(QtCore.QRect(20, 100, 21, 16))
- self.label_2.setObjectName("label_2")
+ self.label_2.setObjectName('label_2')
self.label_3 = QtWidgets.QLabel(Node_Damage_Model)
self.label_3.setGeometry(QtCore.QRect(20, 120, 21, 16))
- self.label_3.setObjectName("label_3")
+ self.label_3.setObjectName('label_3')
self.label_4 = QtWidgets.QLabel(Node_Damage_Model)
self.label_4.setGeometry(QtCore.QRect(20, 140, 21, 16))
- self.label_4.setObjectName("label_4")
+ self.label_4.setObjectName('label_4')
self.label_5 = QtWidgets.QLabel(Node_Damage_Model)
self.label_5.setGeometry(QtCore.QRect(20, 160, 21, 16))
- self.label_5.setObjectName("label_5")
+ self.label_5.setObjectName('label_5')
self.label_6 = QtWidgets.QLabel(Node_Damage_Model)
self.label_6.setGeometry(QtCore.QRect(20, 180, 21, 16))
- self.label_6.setObjectName("label_6")
+ self.label_6.setObjectName('label_6')
self.b_line = QtWidgets.QLineEdit(Node_Damage_Model)
self.b_line.setGeometry(QtCore.QRect(40, 100, 41, 20))
- self.b_line.setObjectName("b_line")
+ self.b_line.setObjectName('b_line')
self.c_line = QtWidgets.QLineEdit(Node_Damage_Model)
self.c_line.setGeometry(QtCore.QRect(40, 120, 41, 20))
- self.c_line.setObjectName("c_line")
+ self.c_line.setObjectName('c_line')
self.d_line = QtWidgets.QLineEdit(Node_Damage_Model)
self.d_line.setGeometry(QtCore.QRect(40, 140, 41, 20))
- self.d_line.setObjectName("d_line")
+ self.d_line.setObjectName('d_line')
self.e_line = QtWidgets.QLineEdit(Node_Damage_Model)
self.e_line.setGeometry(QtCore.QRect(40, 160, 41, 20))
- self.e_line.setObjectName("e_line")
+ self.e_line.setObjectName('e_line')
self.f_line = QtWidgets.QLineEdit(Node_Damage_Model)
self.f_line.setGeometry(QtCore.QRect(40, 180, 41, 20))
- self.f_line.setObjectName("f_line")
+ self.f_line.setObjectName('f_line')
self.label_10 = QtWidgets.QLabel(Node_Damage_Model)
self.label_10.setGeometry(QtCore.QRect(170, 80, 71, 20))
- self.label_10.setText("")
- self.label_10.setPixmap(QtGui.QPixmap(":/resources/resources/node_A.png"))
- self.label_10.setObjectName("label_10")
+ self.label_10.setText('')
+ self.label_10.setPixmap(QtGui.QPixmap(':/resources/resources/node_A.png'))
+ self.label_10.setObjectName('label_10')
self.label_11 = QtWidgets.QLabel(Node_Damage_Model)
self.label_11.setGeometry(QtCore.QRect(170, 100, 71, 20))
- self.label_11.setText("")
- self.label_11.setPixmap(QtGui.QPixmap(":/resources/resources/node_B.png"))
- self.label_11.setObjectName("label_11")
+ self.label_11.setText('')
+ self.label_11.setPixmap(QtGui.QPixmap(':/resources/resources/node_B.png'))
+ self.label_11.setObjectName('label_11')
self.label_12 = QtWidgets.QLabel(Node_Damage_Model)
self.label_12.setGeometry(QtCore.QRect(170, 120, 71, 20))
- self.label_12.setText("")
- self.label_12.setPixmap(QtGui.QPixmap(":/resources/resources/node_C.png"))
- self.label_12.setObjectName("label_12")
+ self.label_12.setText('')
+ self.label_12.setPixmap(QtGui.QPixmap(':/resources/resources/node_C.png'))
+ self.label_12.setObjectName('label_12')
self.label_13 = QtWidgets.QLabel(Node_Damage_Model)
self.label_13.setGeometry(QtCore.QRect(170, 140, 71, 20))
- self.label_13.setText("")
- self.label_13.setPixmap(QtGui.QPixmap(":/resources/resources/node_D.png"))
- self.label_13.setObjectName("label_13")
+ self.label_13.setText('')
+ self.label_13.setPixmap(QtGui.QPixmap(':/resources/resources/node_D.png'))
+ self.label_13.setObjectName('label_13')
self.label_14 = QtWidgets.QLabel(Node_Damage_Model)
self.label_14.setGeometry(QtCore.QRect(170, 160, 131, 20))
- self.label_14.setText("")
- self.label_14.setPixmap(QtGui.QPixmap(":/resources/resources/node_E.png"))
- self.label_14.setObjectName("label_14")
+ self.label_14.setText('')
+ self.label_14.setPixmap(QtGui.QPixmap(':/resources/resources/node_E.png'))
+ self.label_14.setObjectName('label_14')
self.label_15 = QtWidgets.QLabel(Node_Damage_Model)
self.label_15.setGeometry(QtCore.QRect(170, 180, 131, 20))
- self.label_15.setText("")
- self.label_15.setPixmap(QtGui.QPixmap(":/resources/resources/node_F.png"))
- self.label_15.setObjectName("label_15")
+ self.label_15.setText('')
+ self.label_15.setPixmap(QtGui.QPixmap(':/resources/resources/node_F.png'))
+ self.label_15.setObjectName('label_15')
self.label_16 = QtWidgets.QLabel(Node_Damage_Model)
self.label_16.setGeometry(QtCore.QRect(90, 180, 21, 16))
- self.label_16.setObjectName("label_16")
+ self.label_16.setObjectName('label_16')
self.aa_line = QtWidgets.QLineEdit(Node_Damage_Model)
self.aa_line.setGeometry(QtCore.QRect(120, 80, 41, 20))
- self.aa_line.setObjectName("aa_line")
+ self.aa_line.setObjectName('aa_line')
self.label_17 = QtWidgets.QLabel(Node_Damage_Model)
self.label_17.setGeometry(QtCore.QRect(90, 120, 21, 16))
- self.label_17.setObjectName("label_17")
+ self.label_17.setObjectName('label_17')
self.ff1_line = QtWidgets.QLineEdit(Node_Damage_Model)
self.ff1_line.setGeometry(QtCore.QRect(120, 180, 41, 20))
- self.ff1_line.setObjectName("ff1_line")
+ self.ff1_line.setObjectName('ff1_line')
self.cc_line = QtWidgets.QLineEdit(Node_Damage_Model)
self.cc_line.setGeometry(QtCore.QRect(120, 120, 41, 20))
- self.cc_line.setObjectName("cc_line")
+ self.cc_line.setObjectName('cc_line')
self.label_18 = QtWidgets.QLabel(Node_Damage_Model)
self.label_18.setGeometry(QtCore.QRect(90, 80, 21, 16))
- self.label_18.setObjectName("label_18")
+ self.label_18.setObjectName('label_18')
self.label_19 = QtWidgets.QLabel(Node_Damage_Model)
self.label_19.setGeometry(QtCore.QRect(90, 100, 21, 16))
- self.label_19.setObjectName("label_19")
+ self.label_19.setObjectName('label_19')
self.bb_line = QtWidgets.QLineEdit(Node_Damage_Model)
self.bb_line.setGeometry(QtCore.QRect(120, 100, 41, 20))
- self.bb_line.setObjectName("bb_line")
+ self.bb_line.setObjectName('bb_line')
self.ee1_line = QtWidgets.QLineEdit(Node_Damage_Model)
self.ee1_line.setGeometry(QtCore.QRect(120, 160, 41, 20))
- self.ee1_line.setObjectName("ee1_line")
+ self.ee1_line.setObjectName('ee1_line')
self.dd_line = QtWidgets.QLineEdit(Node_Damage_Model)
self.dd_line.setGeometry(QtCore.QRect(120, 140, 41, 20))
- self.dd_line.setObjectName("dd_line")
+ self.dd_line.setObjectName('dd_line')
self.label_20 = QtWidgets.QLabel(Node_Damage_Model)
self.label_20.setGeometry(QtCore.QRect(90, 160, 21, 16))
- self.label_20.setObjectName("label_20")
+ self.label_20.setObjectName('label_20')
self.label_21 = QtWidgets.QLabel(Node_Damage_Model)
self.label_21.setGeometry(QtCore.QRect(90, 140, 21, 16))
- self.label_21.setObjectName("label_21")
+ self.label_21.setObjectName('label_21')
self.ff2_line = QtWidgets.QLineEdit(Node_Damage_Model)
self.ff2_line.setGeometry(QtCore.QRect(340, 180, 41, 20))
- self.ff2_line.setObjectName("ff2_line")
+ self.ff2_line.setObjectName('ff2_line')
self.ee2_line = QtWidgets.QLineEdit(Node_Damage_Model)
self.ee2_line.setGeometry(QtCore.QRect(340, 160, 41, 20))
- self.ee2_line.setObjectName("ee2_line")
+ self.ee2_line.setObjectName('ee2_line')
self.label_22 = QtWidgets.QLabel(Node_Damage_Model)
self.label_22.setGeometry(QtCore.QRect(310, 180, 21, 16))
- self.label_22.setObjectName("label_22")
+ self.label_22.setObjectName('label_22')
self.label_23 = QtWidgets.QLabel(Node_Damage_Model)
self.label_23.setGeometry(QtCore.QRect(310, 160, 21, 16))
- self.label_23.setObjectName("label_23")
+ self.label_23.setObjectName('label_23')
self.first_formula = QtWidgets.QLabel(Node_Damage_Model)
self.first_formula.setGeometry(QtCore.QRect(20, 20, 361, 20))
- self.first_formula.setText("")
- self.first_formula.setPixmap(QtGui.QPixmap(":/resources/resources/first_damage.png"))
- self.first_formula.setObjectName("first_formula")
+ self.first_formula.setText('')
+ self.first_formula.setPixmap(
+ QtGui.QPixmap(':/resources/resources/first_damage.png')
+ )
+ self.first_formula.setObjectName('first_formula')
self.second_formula = QtWidgets.QLabel(Node_Damage_Model)
self.second_formula.setGeometry(QtCore.QRect(90, 40, 291, 20))
- self.second_formula.setText("")
- self.second_formula.setPixmap(QtGui.QPixmap(":/resources/resources/second_damage.png"))
- self.second_formula.setObjectName("second_formula")
+ self.second_formula.setText('')
+ self.second_formula.setPixmap(
+ QtGui.QPixmap(':/resources/resources/second_damage.png')
+ )
+ self.second_formula.setObjectName('second_formula')
self.help_button = QtWidgets.QPushButton(Node_Damage_Model)
self.help_button.setGeometry(QtCore.QRect(310, 100, 75, 23))
- self.help_button.setObjectName("help_button")
+ self.help_button.setObjectName('help_button')
self.retranslateUi(Node_Damage_Model)
self.buttonBox.rejected.connect(Node_Damage_Model.reject)
QtCore.QMetaObject.connectSlotsByName(Node_Damage_Model)
- def retranslateUi(self, Node_Damage_Model):
+ def retranslateUi(self, Node_Damage_Model): # noqa: N802, N803, D102
_translate = QtCore.QCoreApplication.translate
- Node_Damage_Model.setWindowTitle(_translate("Node_Damage_Model", "Node Damage Model"))
- self.groupBox.setTitle(_translate("Node_Damage_Model", "Node Damage Type"))
- self.equal_emitter_button.setText(_translate("Node_Damage_Model", "Equal Emitter"))
- self.equal_reservoir_button.setText(_translate("Node_Damage_Model", "Equal Pipe & Reservoir"))
- self.label.setText(_translate("Node_Damage_Model", "A"))
- self.label_2.setText(_translate("Node_Damage_Model", "B"))
- self.label_3.setText(_translate("Node_Damage_Model", "C"))
- self.label_4.setText(_translate("Node_Damage_Model", "D"))
- self.label_5.setText(_translate("Node_Damage_Model", "E"))
- self.label_6.setText(_translate("Node_Damage_Model", "F"))
- self.label_16.setText(_translate("Node_Damage_Model", "FF1"))
- self.label_17.setText(_translate("Node_Damage_Model", "CC"))
- self.label_18.setText(_translate("Node_Damage_Model", "AA"))
- self.label_19.setText(_translate("Node_Damage_Model", "BB"))
- self.label_20.setText(_translate("Node_Damage_Model", "EE1"))
- self.label_21.setText(_translate("Node_Damage_Model", "DD"))
- self.label_22.setText(_translate("Node_Damage_Model", "FF2"))
- self.label_23.setText(_translate("Node_Damage_Model", "EE2"))
- self.help_button.setText(_translate("Node_Damage_Model", "Help"))
-from . import REWET_Resource_rc
+ Node_Damage_Model.setWindowTitle(
+ _translate('Node_Damage_Model', 'Node Damage Model')
+ )
+ self.groupBox.setTitle(_translate('Node_Damage_Model', 'Node Damage Type'))
+ self.equal_emitter_button.setText(
+ _translate('Node_Damage_Model', 'Equal Emitter')
+ )
+ self.equal_reservoir_button.setText(
+ _translate('Node_Damage_Model', 'Equal Pipe & Reservoir')
+ )
+ self.label.setText(_translate('Node_Damage_Model', 'A'))
+ self.label_2.setText(_translate('Node_Damage_Model', 'B'))
+ self.label_3.setText(_translate('Node_Damage_Model', 'C'))
+ self.label_4.setText(_translate('Node_Damage_Model', 'D'))
+ self.label_5.setText(_translate('Node_Damage_Model', 'E'))
+ self.label_6.setText(_translate('Node_Damage_Model', 'F'))
+ self.label_16.setText(_translate('Node_Damage_Model', 'FF1'))
+ self.label_17.setText(_translate('Node_Damage_Model', 'CC'))
+ self.label_18.setText(_translate('Node_Damage_Model', 'AA'))
+ self.label_19.setText(_translate('Node_Damage_Model', 'BB'))
+ self.label_20.setText(_translate('Node_Damage_Model', 'EE1'))
+ self.label_21.setText(_translate('Node_Damage_Model', 'DD'))
+ self.label_22.setText(_translate('Node_Damage_Model', 'FF2'))
+ self.label_23.setText(_translate('Node_Damage_Model', 'EE2'))
+ self.help_button.setText(_translate('Node_Damage_Model', 'Help'))
-if __name__ == "__main__":
+if __name__ == '__main__':
import sys
+
app = QtWidgets.QApplication(sys.argv)
Node_Damage_Model = QtWidgets.QDialog()
ui = Ui_Node_Damage_Model()
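For orientation, a minimal sketch of how a pyuic5-generated form class such as Ui_Node_Damage_Model is typically driven when run standalone (assumption: PyQt5 is installed and the snippet lives in the module that defines the class; it simply completes the __main__ pattern shown above):

    import sys

    from PyQt5 import QtWidgets

    app = QtWidgets.QApplication(sys.argv)
    dialog = QtWidgets.QDialog()
    ui = Ui_Node_Damage_Model()  # form class generated from the .ui file
    ui.setupUi(dialog)           # builds and lays out the widgets on the dialog
    dialog.show()
    sys.exit(app.exec_())
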
diff --git a/modules/systemPerformance/REWET/REWET/GUI/Opening_Designer.py b/modules/systemPerformance/REWET/REWET/GUI/Opening_Designer.py
index 8239845ca..3b9fa4b2c 100644
--- a/modules/systemPerformance/REWET/REWET/GUI/Opening_Designer.py
+++ b/modules/systemPerformance/REWET/REWET/GUI/Opening_Designer.py
@@ -1,43 +1,53 @@
-# -*- coding: utf-8 -*-
-"""
-Created on Thu Oct 27 18:06:01 2022
+"""Created on Thu Oct 27 18:06:01 2022
@author: snaeimi
-"""
-import sys
+""" # noqa: CPY001, D400, N999
+
import os
+import pickle # noqa: S403
+import sys
+
+from Input.Settings import Settings
+from Project import Project
from PyQt5 import QtWidgets
-from PyQt5.Qt import QUrl, QDesktopServices
-import pickle
-
-from Project import Project
-from .Opening_Window import Ui_Opening_Window
-from .Simulation_Tab_Designer import Simulation_Tab_Designer
-from .Hydraulic_Tab_Designer import Hydraulic_Tab_Designer
-from .Damage_Tab_Designer import Damage_Tab_Designer
+from PyQt5.Qt import QDesktopServices, QUrl
+
+from .Damage_Tab_Designer import Damage_Tab_Designer
+from .Hydraulic_Tab_Designer import Hydraulic_Tab_Designer
+from .Main_Help_Designer import Main_Help_Designer
+from .Map_Designer import Map_Designer
+from .Opening_Window import Ui_Opening_Window
+from .PP_Data_Tab_Designer import PP_Data_Tab
from .Restoration_Tab_Designer import Restoration_Tab_Designer
-from .Run_Tab_Designer import Run_Tab_Designer
-from .Main_Help_Designer import Main_Help_Designer
-from .PP_Data_Tab_Designer import PP_Data_Tab
-from .Result_Designer import Result_Designer
-from .Map_Designer import Map_Designer
+from .Result_Designer import Result_Designer
+from .Run_Tab_Designer import Run_Tab_Designer
+from .Simulation_Tab_Designer import Simulation_Tab_Designer
-from Input.Settings import Settings
-class Opening_Designer(Ui_Opening_Window, Simulation_Tab_Designer, Hydraulic_Tab_Designer, Damage_Tab_Designer, Run_Tab_Designer, Restoration_Tab_Designer, PP_Data_Tab, Result_Designer, Map_Designer):
+class Opening_Designer( # noqa: D101
+ Ui_Opening_Window,
+ Simulation_Tab_Designer,
+ Hydraulic_Tab_Designer,
+ Damage_Tab_Designer,
+ Run_Tab_Designer,
+ Restoration_Tab_Designer,
+ PP_Data_Tab,
+ Result_Designer,
+ Map_Designer,
+):
def __init__(self):
self.project = None
self.scenario_list = None
self.settings = Settings()
self.settings.initializeScenarioSettings(None)
- self.current_project_directory = os.getcwd()
- self.project_file_addr = None
-
+ self.current_project_directory = os.getcwd() # noqa: PTH109
+ self.project_file_addr = None
+
self.asli_app = QtWidgets.QApplication([])
self.asli_MainWindow = QtWidgets.QMainWindow()
self.setupUi(self.asli_MainWindow)
-
+
Simulation_Tab_Designer.__init__(self)
Hydraulic_Tab_Designer.__init__(self)
Damage_Tab_Designer.__init__(self)
@@ -46,58 +56,69 @@ def __init__(self):
PP_Data_Tab.__init__(self, self.project)
Result_Designer.__init__(self)
Map_Designer.__init__(self)
-
-
+
"""
Action Triggers.
"""
self.action_Open_Project.triggered.connect(self.openProject)
self.action_Save.triggered.connect(self.saveProject)
self.action_Save_Project_As.triggered.connect(self.saveProjectAs)
- self.action_REWET_GITHUB.triggered.connect(lambda : QDesktopServices.openUrl(QUrl("https://github.com/snaeimi/REWET")))
+ self.action_REWET_GITHUB.triggered.connect(
+ lambda: QDesktopServices.openUrl(
+ QUrl('https://github.com/snaeimi/REWET')
+ )
+ )
self.action_About.triggered.connect(self.showHelpWindow)
self.action_Exit.triggered.connect(self.asli_MainWindow.close)
-
+
"""
Native signal overwrite
"""
self.asli_MainWindow.closeEvent = self.exitApp
-
- def run(self):
+
+ def run(self): # noqa: D102
self.asli_MainWindow.show()
sys.exit(self.asli_app.exec_())
- def errorMSG(self, error_title, error_msg, error_more_msg=None):
+ def errorMSG(self, error_title, error_msg, error_more_msg=None): # noqa: D102, N802, PLR6301
error_widget = QtWidgets.QMessageBox()
error_widget.setIcon(QtWidgets.QMessageBox.Critical)
error_widget.setText(error_msg)
error_widget.setWindowTitle(error_title)
error_widget.setStandardButtons(QtWidgets.QMessageBox.Ok)
- if error_more_msg!=None:
+ if error_more_msg != None: # noqa: E711
error_widget.setInformativeText(error_more_msg)
error_widget.exec_()
-
- def questionPrompt(self, title, msg, more_msg=None):
+
+ def questionPrompt(self, title, msg, more_msg=None): # noqa: D102, N802, PLR6301
prompt_widget = QtWidgets.QMessageBox()
prompt_widget.setIcon(QtWidgets.QMessageBox.Question)
prompt_widget.setText(msg)
prompt_widget.setWindowTitle(title)
- prompt_widget.setStandardButtons(QtWidgets.QMessageBox.Yes | QtWidgets.QMessageBox.No | QtWidgets.QMessageBox.Cancel)
- if more_msg!=None:
+ prompt_widget.setStandardButtons(
+ QtWidgets.QMessageBox.Yes
+ | QtWidgets.QMessageBox.No
+ | QtWidgets.QMessageBox.Cancel
+ )
+ if more_msg != None: # noqa: E711
prompt_widget.setInformativeText(more_msg)
return prompt_widget.exec_()
-
- def openProject(self):
- file = QtWidgets.QFileDialog.getOpenFileName(self.asli_MainWindow, 'Select project file',
- self.current_project_directory, "REWET Project File (*.prj)")
- if file[0] == '':
+
+ def openProject(self): # noqa: N802, D102
+ file = QtWidgets.QFileDialog.getOpenFileName(
+ self.asli_MainWindow,
+ 'Select project file',
+ self.current_project_directory,
+ 'REWET Project File (*.prj)',
+ )
+ if file[0] == '': # noqa: PLC1901
return
split_addr = os.path.split(file[0])
self.current_project_directory = split_addr
-
+
self.project_file_addr = file[0]
- with open(file[0], 'rb') as f:
- project = pickle.load(f)
+ with open(file[0], 'rb') as f: # noqa: PTH123
+ project = pickle.load(f) # noqa: S301
self.project = project
# sina put a possible check of result version here
self.setSimulationSettings(project.project_settings)
@@ -108,75 +129,83 @@ def openProject(self):
self.setHydraulicUI()
self.setDamageUI()
self.setRestorationUI()
-
- def saveProject(self, save_as=False):
-
+
+ def saveProject(self, save_as=False): # noqa: FBT002, N802, D102
data_retrived = False
if self.getSimulationSettings():
if self.getHydraulicSettings():
if self.getDamageSettings():
if self.getRestorationSettings():
data_retrived = True
-
- if data_retrived == False:
+
+ if data_retrived == False: # noqa: E712
return False
-
- if save_as == False:
- if self.project_file_addr == None:
- file_addr = QtWidgets.QFileDialog.getSaveFileName(self.asli_MainWindow, 'Save project file',
- self.project_file_addr,"Project file (*.prj)")
- if file_addr[0] == '':
+
+ if save_as == False: # noqa: E712
+ if self.project_file_addr == None: # noqa: E711
+ file_addr = QtWidgets.QFileDialog.getSaveFileName(
+ self.asli_MainWindow,
+ 'Save project file',
+ self.project_file_addr,
+ 'Project file (*.prj)',
+ )
+ if file_addr[0] == '': # noqa: PLC1901
return False
split_addr = os.path.split(file_addr[0])
self.current_project_directory = split_addr[0]
self.project_file_addr = file_addr[0]
-
+
project = Project(self.settings, self.scenario_list)
self.project = project
- with open(self.project_file_addr, 'wb') as f:
+ with open(self.project_file_addr, 'wb') as f: # noqa: PTH123
pickle.dump(project, f)
-
+
return True
-
- def saveProjectAs(self):
+
+ def saveProjectAs(self): # noqa: N802, D102
if_saved = self.saveProject(save_as=True)
- if if_saved == False:
+ if if_saved == False: # noqa: E712
return
-
- file_addr = QtWidgets.QFileDialog.getSaveFileName(self.asli_MainWindow, 'Save project file',
- self.project_file_addr,"Project file (*.prj)")
- if file_addr[0] == '':
+
+ file_addr = QtWidgets.QFileDialog.getSaveFileName(
+ self.asli_MainWindow,
+ 'Save project file',
+ self.project_file_addr,
+ 'Project file (*.prj)',
+ )
+ if file_addr[0] == '': # noqa: PLC1901
return
split_addr = os.path.split(file_addr[0])
self.current_project_directory = split_addr[0]
self.project_file_addr = file_addr[0]
-
+
project = Project(self.settings, self.scenario_list)
self.project = project
- with open(self.project_file_addr, 'wb') as f:
+ with open(self.project_file_addr, 'wb') as f: # noqa: PTH123
pickle.dump(project, f)
-
- def showHelpWindow(self):
+
+ def showHelpWindow(self): # noqa: D102, N802, PLR6301
help_window = Main_Help_Designer()
- help_window._window.exec_()
-
- def exitApp(self, event):
- return_value = self.questionPrompt("REWET", "Do you want to save the project before you leave?")
+ help_window._window.exec_() # noqa: SLF001
- if return_value == 16384: #Yes
+ def exitApp(self, event): # noqa: N802, D102
+ return_value = self.questionPrompt(
+ 'REWET', 'Do you want to save the project before you leave?'
+ )
+
+ if return_value == 16384: # Yes # noqa: PLR2004
if_saved = self.saveProject()
if if_saved:
event.accept()
else:
event.ignore()
- elif return_value == 65536: #None
+        elif return_value == 65536:  # No  # noqa: PLR2004
event.accept()
- elif return_value == 4194304: #Cancel
+ elif return_value == 4194304: # Cancel # noqa: PLR2004
event.ignore()
return
-
-
-
-if __name__ == "__main__":
+
+
+if __name__ == '__main__':
opening_designer = Opening_Designer()
- opening_designer.run()
\ No newline at end of file
+ opening_designer.run()
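The PLR2004 suppressions above cover comparisons against raw Qt button codes returned by QMessageBox.exec_(). As a hedged aside (a sketch, not part of this change; the helper name ask_to_save is illustrative only, and it assumes a QApplication is already running, as in Opening_Designer.__init__), the same prompt can compare against the QMessageBox standard-button enum, which is what those literals encode:

    from PyQt5 import QtWidgets

    def ask_to_save(parent=None):
        box = QtWidgets.QMessageBox(parent)
        box.setIcon(QtWidgets.QMessageBox.Question)
        box.setText('Do you want to save the project before you leave?')
        box.setStandardButtons(
            QtWidgets.QMessageBox.Yes       # 16384
            | QtWidgets.QMessageBox.No      # 65536
            | QtWidgets.QMessageBox.Cancel  # 4194304
        )
        # exec_() returns the value of the pressed standard button
        return box.exec_() == QtWidgets.QMessageBox.Yes
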
diff --git a/modules/systemPerformance/REWET/REWET/GUI/Opening_Window.py b/modules/systemPerformance/REWET/REWET/GUI/Opening_Window.py
index 2eccb3c67..c96aa4bbd 100644
--- a/modules/systemPerformance/REWET/REWET/GUI/Opening_Window.py
+++ b/modules/systemPerformance/REWET/REWET/GUI/Opening_Window.py
@@ -1,6 +1,4 @@
-# -*- coding: utf-8 -*-
-
-# Form implementation generated from reading ui file 'Opening.ui'
+# Form implementation generated from reading ui file 'Opening.ui' # noqa: CPY001, D100, N999
#
# Created by: PyQt5 UI code generator 5.12.3
#
@@ -10,17 +8,17 @@
from PyQt5 import QtCore, QtGui, QtWidgets
-class Ui_Opening_Window(object):
- def setupUi(self, Opening_Window):
- Opening_Window.setObjectName("Opening_Window")
+class Ui_Opening_Window: # noqa: D101
+ def setupUi(self, Opening_Window): # noqa: N802, N803, D102, PLR0915
+ Opening_Window.setObjectName('Opening_Window')
Opening_Window.resize(830, 780)
self.centralwidget = QtWidgets.QWidget(Opening_Window)
- self.centralwidget.setObjectName("centralwidget")
+ self.centralwidget.setObjectName('centralwidget')
self.main_tab = QtWidgets.QTabWidget(self.centralwidget)
self.main_tab.setGeometry(QtCore.QRect(0, 0, 801, 741))
- self.main_tab.setObjectName("main_tab")
+ self.main_tab.setObjectName('main_tab')
self.main_process = QtWidgets.QWidget()
- self.main_process.setObjectName("main_process")
+ self.main_process.setObjectName('main_process')
self.main_process1 = QtWidgets.QTabWidget(self.main_process)
self.main_process1.setGeometry(QtCore.QRect(0, 0, 811, 681))
font = QtGui.QFont()
@@ -31,17 +29,17 @@ def setupUi(self, Opening_Window):
self.main_process1.setTabPosition(QtWidgets.QTabWidget.West)
self.main_process1.setTabShape(QtWidgets.QTabWidget.Triangular)
self.main_process1.setIconSize(QtCore.QSize(16, 16))
- self.main_process1.setObjectName("main_process1")
+ self.main_process1.setObjectName('main_process1')
self.simulation_tab = QtWidgets.QWidget()
font = QtGui.QFont()
font.setPointSize(8)
font.setBold(False)
font.setWeight(50)
self.simulation_tab.setFont(font)
- self.simulation_tab.setObjectName("simulation_tab")
+ self.simulation_tab.setObjectName('simulation_tab')
self.groupBox_4 = QtWidgets.QGroupBox(self.simulation_tab)
self.groupBox_4.setGeometry(QtCore.QRect(50, 220, 711, 101))
- self.groupBox_4.setObjectName("groupBox_4")
+ self.groupBox_4.setObjectName('groupBox_4')
self.label_29 = QtWidgets.QLabel(self.groupBox_4)
self.label_29.setGeometry(QtCore.QRect(20, 70, 101, 16))
font = QtGui.QFont()
@@ -49,11 +47,11 @@ def setupUi(self, Opening_Window):
font.setBold(True)
font.setWeight(75)
self.label_29.setFont(font)
- self.label_29.setObjectName("label_29")
+ self.label_29.setObjectName('label_29')
self.save_time_step_yes_radio = QtWidgets.QRadioButton(self.groupBox_4)
self.save_time_step_yes_radio.setGeometry(QtCore.QRect(120, 70, 41, 17))
self.save_time_step_yes_radio.setChecked(False)
- self.save_time_step_yes_radio.setObjectName("save_time_step_yes_radio")
+ self.save_time_step_yes_radio.setObjectName('save_time_step_yes_radio')
self.label_30 = QtWidgets.QLabel(self.groupBox_4)
self.label_30.setGeometry(QtCore.QRect(250, 60, 301, 31))
palette = QtGui.QPalette()
@@ -65,7 +63,9 @@ def setupUi(self, Opening_Window):
palette.setBrush(QtGui.QPalette.Active, QtGui.QPalette.Text, brush)
brush = QtGui.QBrush(QtGui.QColor(170, 85, 0, 128))
brush.setStyle(QtCore.Qt.SolidPattern)
- palette.setBrush(QtGui.QPalette.Active, QtGui.QPalette.PlaceholderText, brush)
+ palette.setBrush(
+ QtGui.QPalette.Active, QtGui.QPalette.PlaceholderText, brush
+ )
brush = QtGui.QBrush(QtGui.QColor(170, 85, 0))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Inactive, QtGui.QPalette.WindowText, brush)
@@ -74,7 +74,9 @@ def setupUi(self, Opening_Window):
palette.setBrush(QtGui.QPalette.Inactive, QtGui.QPalette.Text, brush)
brush = QtGui.QBrush(QtGui.QColor(170, 85, 0, 128))
brush.setStyle(QtCore.Qt.SolidPattern)
- palette.setBrush(QtGui.QPalette.Inactive, QtGui.QPalette.PlaceholderText, brush)
+ palette.setBrush(
+ QtGui.QPalette.Inactive, QtGui.QPalette.PlaceholderText, brush
+ )
brush = QtGui.QBrush(QtGui.QColor(120, 120, 120))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Disabled, QtGui.QPalette.WindowText, brush)
@@ -83,14 +85,16 @@ def setupUi(self, Opening_Window):
palette.setBrush(QtGui.QPalette.Disabled, QtGui.QPalette.Text, brush)
brush = QtGui.QBrush(QtGui.QColor(0, 0, 0, 128))
brush.setStyle(QtCore.Qt.SolidPattern)
- palette.setBrush(QtGui.QPalette.Disabled, QtGui.QPalette.PlaceholderText, brush)
+ palette.setBrush(
+ QtGui.QPalette.Disabled, QtGui.QPalette.PlaceholderText, brush
+ )
self.label_30.setPalette(palette)
self.label_30.setWordWrap(True)
- self.label_30.setObjectName("label_30")
+ self.label_30.setObjectName('label_30')
self.save_time_step_no_radio = QtWidgets.QRadioButton(self.groupBox_4)
self.save_time_step_no_radio.setGeometry(QtCore.QRect(190, 70, 41, 17))
self.save_time_step_no_radio.setChecked(True)
- self.save_time_step_no_radio.setObjectName("save_time_step_no_radio")
+ self.save_time_step_no_radio.setObjectName('save_time_step_no_radio')
self.label_15 = QtWidgets.QLabel(self.groupBox_4)
self.label_15.setGeometry(QtCore.QRect(20, 32, 91, 16))
font = QtGui.QFont()
@@ -98,21 +102,23 @@ def setupUi(self, Opening_Window):
font.setBold(True)
font.setWeight(75)
self.label_15.setFont(font)
- self.label_15.setObjectName("label_15")
+ self.label_15.setObjectName('label_15')
self.temp_folder_addr_line = QtWidgets.QLineEdit(self.groupBox_4)
self.temp_folder_addr_line.setGeometry(QtCore.QRect(120, 30, 331, 20))
self.temp_folder_addr_line.setReadOnly(True)
- self.temp_folder_addr_line.setObjectName("temp_folder_addr_line")
+ self.temp_folder_addr_line.setObjectName('temp_folder_addr_line')
self.temp_browser_button = QtWidgets.QPushButton(self.groupBox_4)
self.temp_browser_button.setGeometry(QtCore.QRect(460, 28, 75, 23))
- self.temp_browser_button.setObjectName("temp_browser_button")
+ self.temp_browser_button.setObjectName('temp_browser_button')
self.groupBox_5 = QtWidgets.QGroupBox(self.simulation_tab)
self.groupBox_5.setGeometry(QtCore.QRect(50, 40, 711, 171))
- self.groupBox_5.setObjectName("groupBox_5")
+ self.groupBox_5.setObjectName('groupBox_5')
self.simulation_time_line = QtWidgets.QLineEdit(self.groupBox_5)
self.simulation_time_line.setGeometry(QtCore.QRect(150, 20, 81, 20))
- self.simulation_time_line.setAlignment(QtCore.Qt.AlignRight|QtCore.Qt.AlignTrailing|QtCore.Qt.AlignVCenter)
- self.simulation_time_line.setObjectName("simulation_time_line")
+ self.simulation_time_line.setAlignment(
+ QtCore.Qt.AlignRight | QtCore.Qt.AlignTrailing | QtCore.Qt.AlignVCenter
+ )
+ self.simulation_time_line.setObjectName('simulation_time_line')
self.label = QtWidgets.QLabel(self.groupBox_5)
self.label.setGeometry(QtCore.QRect(20, 22, 101, 16))
font = QtGui.QFont()
@@ -120,7 +126,7 @@ def setupUi(self, Opening_Window):
font.setBold(True)
font.setWeight(75)
self.label.setFont(font)
- self.label.setObjectName("label")
+ self.label.setObjectName('label')
self.label_32 = QtWidgets.QLabel(self.groupBox_5)
self.label_32.setGeometry(QtCore.QRect(20, 100, 101, 16))
font = QtGui.QFont()
@@ -128,14 +134,14 @@ def setupUi(self, Opening_Window):
font.setBold(True)
font.setWeight(75)
self.label_32.setFont(font)
- self.label_32.setObjectName("label_32")
+ self.label_32.setObjectName('label_32')
self.multiple_radio = QtWidgets.QRadioButton(self.groupBox_5)
self.multiple_radio.setGeometry(QtCore.QRect(190, 100, 101, 17))
- self.multiple_radio.setObjectName("multiple_radio")
+ self.multiple_radio.setObjectName('multiple_radio')
self.single_radio = QtWidgets.QRadioButton(self.groupBox_5)
self.single_radio.setGeometry(QtCore.QRect(120, 100, 51, 17))
self.single_radio.setChecked(True)
- self.single_radio.setObjectName("single_radio")
+ self.single_radio.setObjectName('single_radio')
self.label_3 = QtWidgets.QLabel(self.groupBox_5)
self.label_3.setGeometry(QtCore.QRect(20, 52, 121, 16))
font = QtGui.QFont()
@@ -143,11 +149,13 @@ def setupUi(self, Opening_Window):
font.setBold(True)
font.setWeight(75)
self.label_3.setFont(font)
- self.label_3.setObjectName("label_3")
+ self.label_3.setObjectName('label_3')
self.simulation_time_step_line = QtWidgets.QLineEdit(self.groupBox_5)
self.simulation_time_step_line.setGeometry(QtCore.QRect(150, 50, 81, 20))
- self.simulation_time_step_line.setAlignment(QtCore.Qt.AlignRight|QtCore.Qt.AlignTrailing|QtCore.Qt.AlignVCenter)
- self.simulation_time_step_line.setObjectName("simulation_time_step_line")
+ self.simulation_time_step_line.setAlignment(
+ QtCore.Qt.AlignRight | QtCore.Qt.AlignTrailing | QtCore.Qt.AlignVCenter
+ )
+ self.simulation_time_step_line.setObjectName('simulation_time_step_line')
self.label_17 = QtWidgets.QLabel(self.groupBox_5)
self.label_17.setGeometry(QtCore.QRect(240, 20, 61, 16))
font = QtGui.QFont()
@@ -156,7 +164,7 @@ def setupUi(self, Opening_Window):
font.setWeight(50)
self.label_17.setFont(font)
self.label_17.setTextFormat(QtCore.Qt.PlainText)
- self.label_17.setObjectName("label_17")
+ self.label_17.setObjectName('label_17')
self.label_18 = QtWidgets.QLabel(self.groupBox_5)
self.label_18.setGeometry(QtCore.QRect(240, 50, 61, 16))
font = QtGui.QFont()
@@ -165,14 +173,18 @@ def setupUi(self, Opening_Window):
font.setWeight(50)
self.label_18.setFont(font)
self.label_18.setTextFormat(QtCore.Qt.PlainText)
- self.label_18.setObjectName("label_18")
+ self.label_18.setObjectName('label_18')
self.result_folder_addr_line = QtWidgets.QLineEdit(self.groupBox_5)
self.result_folder_addr_line.setGeometry(QtCore.QRect(120, 130, 331, 20))
self.result_folder_addr_line.setReadOnly(True)
- self.result_folder_addr_line.setObjectName("result_folder_addr_line")
+ self.result_folder_addr_line.setObjectName('result_folder_addr_line')
self.result_directory_browser_button = QtWidgets.QPushButton(self.groupBox_5)
- self.result_directory_browser_button.setGeometry(QtCore.QRect(460, 130, 75, 23))
- self.result_directory_browser_button.setObjectName("result_directory_browser_button")
+ self.result_directory_browser_button.setGeometry(
+ QtCore.QRect(460, 130, 75, 23)
+ )
+ self.result_directory_browser_button.setObjectName(
+ 'result_directory_browser_button'
+ )
self.label_22 = QtWidgets.QLabel(self.groupBox_5)
self.label_22.setGeometry(QtCore.QRect(20, 130, 91, 16))
font = QtGui.QFont()
@@ -180,13 +192,13 @@ def setupUi(self, Opening_Window):
font.setBold(True)
font.setWeight(75)
self.label_22.setFont(font)
- self.label_22.setObjectName("label_22")
- self.main_process1.addTab(self.simulation_tab, "")
+ self.label_22.setObjectName('label_22')
+ self.main_process1.addTab(self.simulation_tab, '')
self.hudrauics_tab = QtWidgets.QWidget()
- self.hudrauics_tab.setObjectName("hudrauics_tab")
+ self.hudrauics_tab.setObjectName('hudrauics_tab')
self.groupBox_3 = QtWidgets.QGroupBox(self.hudrauics_tab)
self.groupBox_3.setGeometry(QtCore.QRect(10, 210, 761, 241))
- self.groupBox_3.setObjectName("groupBox_3")
+ self.groupBox_3.setObjectName('groupBox_3')
self.label_9 = QtWidgets.QLabel(self.groupBox_3)
self.label_9.setGeometry(QtCore.QRect(20, 50, 47, 13))
font = QtGui.QFont()
@@ -195,24 +207,28 @@ def setupUi(self, Opening_Window):
font.setWeight(75)
self.label_9.setFont(font)
self.label_9.setTextFormat(QtCore.Qt.PlainText)
- self.label_9.setObjectName("label_9")
+ self.label_9.setObjectName('label_9')
self.wntr_solver_radio = QtWidgets.QRadioButton(self.groupBox_3)
self.wntr_solver_radio.setEnabled(False)
self.wntr_solver_radio.setGeometry(QtCore.QRect(240, 50, 82, 17))
- self.wntr_solver_radio.setObjectName("wntr_solver_radio")
+ self.wntr_solver_radio.setObjectName('wntr_solver_radio')
self.modified_epanet_radio = QtWidgets.QRadioButton(self.groupBox_3)
self.modified_epanet_radio.setEnabled(True)
self.modified_epanet_radio.setGeometry(QtCore.QRect(80, 50, 151, 17))
self.modified_epanet_radio.setChecked(True)
- self.modified_epanet_radio.setObjectName("modified_epanet_radio")
+ self.modified_epanet_radio.setObjectName('modified_epanet_radio')
self.minimum_pressure_line = QtWidgets.QLineEdit(self.groupBox_3)
self.minimum_pressure_line.setGeometry(QtCore.QRect(80, 90, 61, 20))
- self.minimum_pressure_line.setAlignment(QtCore.Qt.AlignRight|QtCore.Qt.AlignTrailing|QtCore.Qt.AlignVCenter)
- self.minimum_pressure_line.setObjectName("minimum_pressure_line")
+ self.minimum_pressure_line.setAlignment(
+ QtCore.Qt.AlignRight | QtCore.Qt.AlignTrailing | QtCore.Qt.AlignVCenter
+ )
+ self.minimum_pressure_line.setObjectName('minimum_pressure_line')
self.required_pressure_line = QtWidgets.QLineEdit(self.groupBox_3)
self.required_pressure_line.setGeometry(QtCore.QRect(80, 130, 61, 20))
- self.required_pressure_line.setAlignment(QtCore.Qt.AlignRight|QtCore.Qt.AlignTrailing|QtCore.Qt.AlignVCenter)
- self.required_pressure_line.setObjectName("required_pressure_line")
+ self.required_pressure_line.setAlignment(
+ QtCore.Qt.AlignRight | QtCore.Qt.AlignTrailing | QtCore.Qt.AlignVCenter
+ )
+ self.required_pressure_line.setObjectName('required_pressure_line')
self.label_10 = QtWidgets.QLabel(self.groupBox_3)
self.label_10.setGeometry(QtCore.QRect(20, 90, 47, 13))
font = QtGui.QFont()
@@ -221,7 +237,7 @@ def setupUi(self, Opening_Window):
font.setWeight(75)
self.label_10.setFont(font)
self.label_10.setTextFormat(QtCore.Qt.PlainText)
- self.label_10.setObjectName("label_10")
+ self.label_10.setObjectName('label_10')
self.label_11 = QtWidgets.QLabel(self.groupBox_3)
self.label_11.setGeometry(QtCore.QRect(150, 90, 61, 16))
font = QtGui.QFont()
@@ -230,7 +246,7 @@ def setupUi(self, Opening_Window):
font.setWeight(50)
self.label_11.setFont(font)
self.label_11.setTextFormat(QtCore.Qt.PlainText)
- self.label_11.setObjectName("label_11")
+ self.label_11.setObjectName('label_11')
self.label_12 = QtWidgets.QLabel(self.groupBox_3)
self.label_12.setGeometry(QtCore.QRect(150, 130, 61, 16))
font = QtGui.QFont()
@@ -239,7 +255,7 @@ def setupUi(self, Opening_Window):
font.setWeight(50)
self.label_12.setFont(font)
self.label_12.setTextFormat(QtCore.Qt.PlainText)
- self.label_12.setObjectName("label_12")
+ self.label_12.setObjectName('label_12')
self.label_13 = QtWidgets.QLabel(self.groupBox_3)
self.label_13.setGeometry(QtCore.QRect(20, 130, 47, 13))
font = QtGui.QFont()
@@ -248,7 +264,7 @@ def setupUi(self, Opening_Window):
font.setWeight(75)
self.label_13.setFont(font)
self.label_13.setTextFormat(QtCore.Qt.PlainText)
- self.label_13.setObjectName("label_13")
+ self.label_13.setObjectName('label_13')
self.label_4 = QtWidgets.QLabel(self.groupBox_3)
self.label_4.setGeometry(QtCore.QRect(10, 145, 61, 41))
font = QtGui.QFont()
@@ -257,11 +273,13 @@ def setupUi(self, Opening_Window):
font.setWeight(75)
self.label_4.setFont(font)
self.label_4.setWordWrap(True)
- self.label_4.setObjectName("label_4")
+ self.label_4.setObjectName('label_4')
self.hydraulic_time_step_line = QtWidgets.QLineEdit(self.groupBox_3)
self.hydraulic_time_step_line.setGeometry(QtCore.QRect(80, 160, 61, 20))
- self.hydraulic_time_step_line.setAlignment(QtCore.Qt.AlignRight|QtCore.Qt.AlignTrailing|QtCore.Qt.AlignVCenter)
- self.hydraulic_time_step_line.setObjectName("hydraulic_time_step_line")
+ self.hydraulic_time_step_line.setAlignment(
+ QtCore.Qt.AlignRight | QtCore.Qt.AlignTrailing | QtCore.Qt.AlignVCenter
+ )
+ self.hydraulic_time_step_line.setObjectName('hydraulic_time_step_line')
self.label_16 = QtWidgets.QLabel(self.groupBox_3)
self.label_16.setGeometry(QtCore.QRect(150, 160, 61, 16))
font = QtGui.QFont()
@@ -270,17 +288,17 @@ def setupUi(self, Opening_Window):
font.setWeight(50)
self.label_16.setFont(font)
self.label_16.setTextFormat(QtCore.Qt.PlainText)
- self.label_16.setObjectName("label_16")
+ self.label_16.setObjectName('label_16')
self.groupBox_6 = QtWidgets.QGroupBox(self.hudrauics_tab)
self.groupBox_6.setGeometry(QtCore.QRect(10, 30, 761, 171))
- self.groupBox_6.setObjectName("groupBox_6")
+ self.groupBox_6.setObjectName('groupBox_6')
self.wdn_addr_line = QtWidgets.QLineEdit(self.groupBox_6)
self.wdn_addr_line.setGeometry(QtCore.QRect(130, 30, 481, 20))
self.wdn_addr_line.setReadOnly(True)
- self.wdn_addr_line.setObjectName("wdn_addr_line")
+ self.wdn_addr_line.setObjectName('wdn_addr_line')
self.wdn_browse_button = QtWidgets.QPushButton(self.groupBox_6)
self.wdn_browse_button.setGeometry(QtCore.QRect(630, 30, 75, 23))
- self.wdn_browse_button.setObjectName("wdn_browse_button")
+ self.wdn_browse_button.setObjectName('wdn_browse_button')
self.label_2 = QtWidgets.QLabel(self.groupBox_6)
self.label_2.setGeometry(QtCore.QRect(10, 30, 101, 16))
font = QtGui.QFont()
@@ -288,11 +306,13 @@ def setupUi(self, Opening_Window):
font.setBold(True)
font.setWeight(75)
self.label_2.setFont(font)
- self.label_2.setObjectName("label_2")
+ self.label_2.setObjectName('label_2')
self.demand_ratio_line = QtWidgets.QLineEdit(self.groupBox_6)
self.demand_ratio_line.setGeometry(QtCore.QRect(130, 70, 41, 21))
- self.demand_ratio_line.setAlignment(QtCore.Qt.AlignRight|QtCore.Qt.AlignTrailing|QtCore.Qt.AlignVCenter)
- self.demand_ratio_line.setObjectName("demand_ratio_line")
+ self.demand_ratio_line.setAlignment(
+ QtCore.Qt.AlignRight | QtCore.Qt.AlignTrailing | QtCore.Qt.AlignVCenter
+ )
+ self.demand_ratio_line.setObjectName('demand_ratio_line')
self.label_7 = QtWidgets.QLabel(self.groupBox_6)
self.label_7.setGeometry(QtCore.QRect(10, 70, 111, 16))
font = QtGui.QFont()
@@ -300,20 +320,24 @@ def setupUi(self, Opening_Window):
font.setBold(True)
font.setWeight(75)
self.label_7.setFont(font)
- self.label_7.setObjectName("label_7")
+ self.label_7.setObjectName('label_7')
self.label_8 = QtWidgets.QLabel(self.groupBox_6)
self.label_8.setGeometry(QtCore.QRect(180, 70, 91, 21))
self.label_8.setFrameShape(QtWidgets.QFrame.Box)
self.label_8.setFrameShadow(QtWidgets.QFrame.Raised)
- self.label_8.setObjectName("label_8")
- self.main_process1.addTab(self.hudrauics_tab, "")
+ self.label_8.setObjectName('label_8')
+ self.main_process1.addTab(self.hudrauics_tab, '')
self.damage_tab = QtWidgets.QWidget()
- self.damage_tab.setObjectName("damage_tab")
+ self.damage_tab.setObjectName('damage_tab')
self.scenario_table = QtWidgets.QTableWidget(self.damage_tab)
self.scenario_table.setGeometry(QtCore.QRect(80, 100, 691, 192))
- self.scenario_table.setSelectionMode(QtWidgets.QAbstractItemView.ExtendedSelection)
- self.scenario_table.setSelectionBehavior(QtWidgets.QAbstractItemView.SelectRows)
- self.scenario_table.setObjectName("scenario_table")
+ self.scenario_table.setSelectionMode(
+ QtWidgets.QAbstractItemView.ExtendedSelection
+ )
+ self.scenario_table.setSelectionBehavior(
+ QtWidgets.QAbstractItemView.SelectRows
+ )
+ self.scenario_table.setObjectName('scenario_table')
self.scenario_table.setColumnCount(6)
self.scenario_table.setRowCount(0)
item = QtWidgets.QTableWidgetItem()
@@ -337,45 +361,45 @@ def setupUi(self, Opening_Window):
font.setBold(True)
font.setWeight(75)
self.label_31.setFont(font)
- self.label_31.setObjectName("label_31")
+ self.label_31.setObjectName('label_31')
self.add_scenario_button = QtWidgets.QPushButton(self.damage_tab)
self.add_scenario_button.setGeometry(QtCore.QRect(20, 100, 51, 23))
- self.add_scenario_button.setObjectName("add_scenario_button")
+ self.add_scenario_button.setObjectName('add_scenario_button')
self.status_text = QtWidgets.QTextBrowser(self.damage_tab)
self.status_text.setEnabled(True)
self.status_text.setGeometry(QtCore.QRect(0, 550, 791, 121))
- self.status_text.setObjectName("status_text")
+ self.status_text.setObjectName('status_text')
self.remove_scenario_button = QtWidgets.QPushButton(self.damage_tab)
self.remove_scenario_button.setGeometry(QtCore.QRect(20, 130, 51, 23))
- self.remove_scenario_button.setObjectName("remove_scenario_button")
+ self.remove_scenario_button.setObjectName('remove_scenario_button')
self.groupBox_7 = QtWidgets.QGroupBox(self.damage_tab)
self.groupBox_7.setGeometry(QtCore.QRect(80, 310, 481, 80))
- self.groupBox_7.setObjectName("groupBox_7")
+ self.groupBox_7.setObjectName('groupBox_7')
self.pushButton_13 = QtWidgets.QPushButton(self.groupBox_7)
self.pushButton_13.setEnabled(False)
self.pushButton_13.setGeometry(QtCore.QRect(30, 30, 91, 23))
- self.pushButton_13.setObjectName("pushButton_13")
+ self.pushButton_13.setObjectName('pushButton_13')
self.pushButton_17 = QtWidgets.QPushButton(self.groupBox_7)
self.pushButton_17.setEnabled(False)
self.pushButton_17.setGeometry(QtCore.QRect(140, 30, 91, 23))
- self.pushButton_17.setObjectName("pushButton_17")
+ self.pushButton_17.setObjectName('pushButton_17')
self.pushButton_16 = QtWidgets.QPushButton(self.groupBox_7)
self.pushButton_16.setEnabled(False)
self.pushButton_16.setGeometry(QtCore.QRect(360, 30, 91, 23))
- self.pushButton_16.setObjectName("pushButton_16")
+ self.pushButton_16.setObjectName('pushButton_16')
self.pushButton_15 = QtWidgets.QPushButton(self.groupBox_7)
self.pushButton_15.setEnabled(False)
self.pushButton_15.setGeometry(QtCore.QRect(250, 30, 91, 23))
- self.pushButton_15.setObjectName("pushButton_15")
+ self.pushButton_15.setObjectName('pushButton_15')
self.groupBox_8 = QtWidgets.QGroupBox(self.damage_tab)
self.groupBox_8.setGeometry(QtCore.QRect(80, 420, 691, 80))
- self.groupBox_8.setObjectName("groupBox_8")
+ self.groupBox_8.setObjectName('groupBox_8')
self.pipe_damage_modeling_button = QtWidgets.QPushButton(self.groupBox_8)
self.pipe_damage_modeling_button.setGeometry(QtCore.QRect(30, 40, 101, 23))
- self.pipe_damage_modeling_button.setObjectName("pipe_damage_modeling_button")
+ self.pipe_damage_modeling_button.setObjectName('pipe_damage_modeling_button')
self.node_damage_modeling_button = QtWidgets.QPushButton(self.groupBox_8)
self.node_damage_modeling_button.setGeometry(QtCore.QRect(150, 40, 101, 23))
- self.node_damage_modeling_button.setObjectName("node_damage_modeling_button")
+ self.node_damage_modeling_button.setObjectName('node_damage_modeling_button')
self.label_20 = QtWidgets.QLabel(self.damage_tab)
self.label_20.setGeometry(QtCore.QRect(200, 50, 141, 16))
font = QtGui.QFont()
@@ -383,59 +407,63 @@ def setupUi(self, Opening_Window):
font.setBold(True)
font.setWeight(75)
self.label_20.setFont(font)
- self.label_20.setObjectName("label_20")
+ self.label_20.setObjectName('label_20')
self.damage_directory_browse_button = QtWidgets.QPushButton(self.damage_tab)
- self.damage_directory_browse_button.setGeometry(QtCore.QRect(580, 70, 75, 23))
- self.damage_directory_browse_button.setObjectName("damage_directory_browse_button")
+ self.damage_directory_browse_button.setGeometry(
+ QtCore.QRect(580, 70, 75, 23)
+ )
+ self.damage_directory_browse_button.setObjectName(
+ 'damage_directory_browse_button'
+ )
self.damage_direcotry_line = QtWidgets.QLineEdit(self.damage_tab)
self.damage_direcotry_line.setGeometry(QtCore.QRect(200, 70, 371, 20))
self.damage_direcotry_line.setReadOnly(True)
- self.damage_direcotry_line.setObjectName("damage_direcotry_line")
+ self.damage_direcotry_line.setObjectName('damage_direcotry_line')
self.load_scenario_button = QtWidgets.QPushButton(self.damage_tab)
self.load_scenario_button.setGeometry(QtCore.QRect(20, 230, 51, 23))
- self.load_scenario_button.setObjectName("load_scenario_button")
+ self.load_scenario_button.setObjectName('load_scenario_button')
self.save_scenario_button = QtWidgets.QPushButton(self.damage_tab)
self.save_scenario_button.setGeometry(QtCore.QRect(20, 260, 51, 23))
- self.save_scenario_button.setObjectName("save_scenario_button")
+ self.save_scenario_button.setObjectName('save_scenario_button')
self.validate_scenario_button = QtWidgets.QPushButton(self.damage_tab)
self.validate_scenario_button.setGeometry(QtCore.QRect(570, 350, 171, 41))
- self.validate_scenario_button.setObjectName("validate_scenario_button")
+ self.validate_scenario_button.setObjectName('validate_scenario_button')
self.groupBox_11 = QtWidgets.QGroupBox(self.damage_tab)
self.groupBox_11.setGeometry(QtCore.QRect(560, 300, 171, 51))
- self.groupBox_11.setObjectName("groupBox_11")
+ self.groupBox_11.setObjectName('groupBox_11')
self.file_type_excel_radio = QtWidgets.QRadioButton(self.groupBox_11)
self.file_type_excel_radio.setGeometry(QtCore.QRect(10, 20, 82, 17))
self.file_type_excel_radio.setChecked(True)
- self.file_type_excel_radio.setObjectName("file_type_excel_radio")
+ self.file_type_excel_radio.setObjectName('file_type_excel_radio')
self.file_type_pickle_radio = QtWidgets.QRadioButton(self.groupBox_11)
self.file_type_pickle_radio.setGeometry(QtCore.QRect(90, 20, 82, 17))
- self.file_type_pickle_radio.setObjectName("file_type_pickle_radio")
- self.main_process1.addTab(self.damage_tab, "")
+ self.file_type_pickle_radio.setObjectName('file_type_pickle_radio')
+ self.main_process1.addTab(self.damage_tab, '')
self.restoration_tab = QtWidgets.QWidget()
- self.restoration_tab.setObjectName("restoration_tab")
+ self.restoration_tab.setObjectName('restoration_tab')
self.groupBox = QtWidgets.QGroupBox(self.restoration_tab)
self.groupBox.setGeometry(QtCore.QRect(30, 40, 191, 80))
- self.groupBox.setObjectName("groupBox")
+ self.groupBox.setObjectName('groupBox')
self.restoration_on_radio = QtWidgets.QRadioButton(self.groupBox)
self.restoration_on_radio.setGeometry(QtCore.QRect(20, 40, 61, 17))
self.restoration_on_radio.setChecked(True)
- self.restoration_on_radio.setObjectName("restoration_on_radio")
+ self.restoration_on_radio.setObjectName('restoration_on_radio')
self.restoration_off_radio = QtWidgets.QRadioButton(self.groupBox)
self.restoration_off_radio.setGeometry(QtCore.QRect(90, 40, 82, 17))
- self.restoration_off_radio.setObjectName("restoration_off_radio")
+ self.restoration_off_radio.setObjectName('restoration_off_radio')
self.groupBox_2 = QtWidgets.QGroupBox(self.restoration_tab)
self.groupBox_2.setGeometry(QtCore.QRect(30, 120, 731, 231))
- self.groupBox_2.setObjectName("groupBox_2")
+ self.groupBox_2.setObjectName('groupBox_2')
self.policy_browse_button = QtWidgets.QPushButton(self.groupBox_2)
self.policy_browse_button.setGeometry(QtCore.QRect(460, 100, 91, 23))
- self.policy_browse_button.setObjectName("policy_browse_button")
+ self.policy_browse_button.setObjectName('policy_browse_button')
self.script_rrp_radio = QtWidgets.QRadioButton(self.groupBox_2)
self.script_rrp_radio.setGeometry(QtCore.QRect(290, 60, 111, 17))
- self.script_rrp_radio.setObjectName("script_rrp_radio")
+ self.script_rrp_radio.setObjectName('script_rrp_radio')
self.script_txt_radio = QtWidgets.QRadioButton(self.groupBox_2)
self.script_txt_radio.setGeometry(QtCore.QRect(180, 60, 101, 17))
self.script_txt_radio.setChecked(True)
- self.script_txt_radio.setObjectName("script_txt_radio")
+ self.script_txt_radio.setObjectName('script_txt_radio')
self.label_5 = QtWidgets.QLabel(self.groupBox_2)
self.label_5.setGeometry(QtCore.QRect(30, 100, 131, 16))
font = QtGui.QFont()
@@ -443,15 +471,15 @@ def setupUi(self, Opening_Window):
font.setBold(True)
font.setWeight(75)
self.label_5.setFont(font)
- self.label_5.setObjectName("label_5")
+ self.label_5.setObjectName('label_5')
self.policy_definition_addr_line = QtWidgets.QLineEdit(self.groupBox_2)
self.policy_definition_addr_line.setGeometry(QtCore.QRect(170, 100, 281, 20))
- self.policy_definition_addr_line.setText("")
+ self.policy_definition_addr_line.setText('')
self.policy_definition_addr_line.setReadOnly(True)
- self.policy_definition_addr_line.setObjectName("policy_definition_addr_line")
+ self.policy_definition_addr_line.setObjectName('policy_definition_addr_line')
self.policy_designer = QtWidgets.QPushButton(self.groupBox_2)
self.policy_designer.setGeometry(QtCore.QRect(460, 60, 91, 23))
- self.policy_designer.setObjectName("policy_designer")
+ self.policy_designer.setObjectName('policy_designer')
self.label_6 = QtWidgets.QLabel(self.groupBox_2)
self.label_6.setGeometry(QtCore.QRect(30, 180, 131, 16))
font = QtGui.QFont()
@@ -459,11 +487,13 @@ def setupUi(self, Opening_Window):
font.setBold(True)
font.setWeight(75)
self.label_6.setFont(font)
- self.label_6.setObjectName("label_6")
+ self.label_6.setObjectName('label_6')
self.minimum_job_time_line = QtWidgets.QLineEdit(self.groupBox_2)
self.minimum_job_time_line.setGeometry(QtCore.QRect(170, 180, 51, 20))
- self.minimum_job_time_line.setAlignment(QtCore.Qt.AlignRight|QtCore.Qt.AlignTrailing|QtCore.Qt.AlignVCenter)
- self.minimum_job_time_line.setObjectName("minimum_job_time_line")
+ self.minimum_job_time_line.setAlignment(
+ QtCore.Qt.AlignRight | QtCore.Qt.AlignTrailing | QtCore.Qt.AlignVCenter
+ )
+ self.minimum_job_time_line.setObjectName('minimum_job_time_line')
self.label_14 = QtWidgets.QLabel(self.groupBox_2)
self.label_14.setEnabled(False)
self.label_14.setGeometry(QtCore.QRect(30, 140, 131, 16))
@@ -472,16 +502,20 @@ def setupUi(self, Opening_Window):
font.setBold(True)
font.setWeight(75)
self.label_14.setFont(font)
- self.label_14.setObjectName("label_14")
+ self.label_14.setObjectName('label_14')
self.crew_availability_browse_button = QtWidgets.QPushButton(self.groupBox_2)
self.crew_availability_browse_button.setEnabled(False)
- self.crew_availability_browse_button.setGeometry(QtCore.QRect(460, 140, 91, 23))
- self.crew_availability_browse_button.setObjectName("crew_availability_browse_button")
+ self.crew_availability_browse_button.setGeometry(
+ QtCore.QRect(460, 140, 91, 23)
+ )
+ self.crew_availability_browse_button.setObjectName(
+ 'crew_availability_browse_button'
+ )
self.lineEdit_8 = QtWidgets.QLineEdit(self.groupBox_2)
self.lineEdit_8.setEnabled(False)
self.lineEdit_8.setGeometry(QtCore.QRect(170, 140, 281, 20))
- self.lineEdit_8.setText("")
- self.lineEdit_8.setObjectName("lineEdit_8")
+ self.lineEdit_8.setText('')
+ self.lineEdit_8.setObjectName('lineEdit_8')
self.label_19 = QtWidgets.QLabel(self.groupBox_2)
self.label_19.setGeometry(QtCore.QRect(230, 180, 61, 16))
font = QtGui.QFont()
@@ -490,7 +524,7 @@ def setupUi(self, Opening_Window):
font.setWeight(50)
self.label_19.setFont(font)
self.label_19.setTextFormat(QtCore.Qt.PlainText)
- self.label_19.setObjectName("label_19")
+ self.label_19.setObjectName('label_19')
self.label_21 = QtWidgets.QLabel(self.groupBox_2)
self.label_21.setGeometry(QtCore.QRect(30, 60, 131, 16))
font = QtGui.QFont()
@@ -498,35 +532,43 @@ def setupUi(self, Opening_Window):
font.setBold(True)
font.setWeight(75)
self.label_21.setFont(font)
- self.label_21.setObjectName("label_21")
+ self.label_21.setObjectName('label_21')
self.groupBox_9 = QtWidgets.QGroupBox(self.restoration_tab)
self.groupBox_9.setGeometry(QtCore.QRect(30, 370, 721, 91))
- self.groupBox_9.setObjectName("groupBox_9")
+ self.groupBox_9.setObjectName('groupBox_9')
self.pipe_damage_discovery_button = QtWidgets.QPushButton(self.groupBox_9)
self.pipe_damage_discovery_button.setGeometry(QtCore.QRect(30, 40, 75, 23))
- self.pipe_damage_discovery_button.setObjectName("pipe_damage_discovery_button")
+ self.pipe_damage_discovery_button.setObjectName(
+ 'pipe_damage_discovery_button'
+ )
self.node_damage_discovery_button = QtWidgets.QPushButton(self.groupBox_9)
self.node_damage_discovery_button.setGeometry(QtCore.QRect(120, 40, 75, 23))
- self.node_damage_discovery_button.setObjectName("node_damage_discovery_button")
+ self.node_damage_discovery_button.setObjectName(
+ 'node_damage_discovery_button'
+ )
self.pump_damage_discovery_button = QtWidgets.QPushButton(self.groupBox_9)
self.pump_damage_discovery_button.setGeometry(QtCore.QRect(210, 40, 75, 23))
- self.pump_damage_discovery_button.setObjectName("pump_damage_discovery_button")
+ self.pump_damage_discovery_button.setObjectName(
+ 'pump_damage_discovery_button'
+ )
self.tank_damage_discovery_button = QtWidgets.QPushButton(self.groupBox_9)
self.tank_damage_discovery_button.setGeometry(QtCore.QRect(290, 40, 75, 23))
- self.tank_damage_discovery_button.setObjectName("tank_damage_discovery_button")
+ self.tank_damage_discovery_button.setObjectName(
+ 'tank_damage_discovery_button'
+ )
self.groupBox_10 = QtWidgets.QGroupBox(self.restoration_tab)
self.groupBox_10.setGeometry(QtCore.QRect(30, 470, 211, 51))
- self.groupBox_10.setObjectName("groupBox_10")
+ self.groupBox_10.setObjectName('groupBox_10')
self.out_of_zone_travel_yes = QtWidgets.QRadioButton(self.groupBox_10)
self.out_of_zone_travel_yes.setGeometry(QtCore.QRect(10, 20, 82, 17))
- self.out_of_zone_travel_yes.setObjectName("out_of_zone_travel_yes")
+ self.out_of_zone_travel_yes.setObjectName('out_of_zone_travel_yes')
self.out_of_zone_travel_no = QtWidgets.QRadioButton(self.groupBox_10)
self.out_of_zone_travel_no.setGeometry(QtCore.QRect(90, 20, 101, 17))
self.out_of_zone_travel_no.setChecked(True)
- self.out_of_zone_travel_no.setObjectName("out_of_zone_travel_no")
- self.main_process1.addTab(self.restoration_tab, "")
+ self.out_of_zone_travel_no.setObjectName('out_of_zone_travel_no')
+ self.main_process1.addTab(self.restoration_tab, '')
self.run_tab = QtWidgets.QWidget()
- self.run_tab.setObjectName("run_tab")
+ self.run_tab.setObjectName('run_tab')
self.run_button = QtWidgets.QPushButton(self.run_tab)
self.run_button.setGeometry(QtCore.QRect(50, 50, 181, 31))
font = QtGui.QFont()
@@ -534,7 +576,7 @@ def setupUi(self, Opening_Window):
font.setBold(False)
font.setWeight(50)
self.run_button.setFont(font)
- self.run_button.setObjectName("run_button")
+ self.run_button.setObjectName('run_button')
self.stop_button = QtWidgets.QPushButton(self.run_tab)
self.stop_button.setGeometry(QtCore.QRect(50, 520, 181, 31))
font = QtGui.QFont()
@@ -542,21 +584,21 @@ def setupUi(self, Opening_Window):
font.setBold(False)
font.setWeight(50)
self.stop_button.setFont(font)
- self.stop_button.setObjectName("stop_button")
+ self.stop_button.setObjectName('stop_button')
self.ouput_textedit = QtWidgets.QPlainTextEdit(self.run_tab)
self.ouput_textedit.setGeometry(QtCore.QRect(50, 110, 651, 391))
self.ouput_textedit.setReadOnly(True)
- self.ouput_textedit.setObjectName("ouput_textedit")
+ self.ouput_textedit.setObjectName('ouput_textedit')
self.label_42 = QtWidgets.QLabel(self.run_tab)
self.label_42.setGeometry(QtCore.QRect(50, 80, 51, 31))
font = QtGui.QFont()
font.setPointSize(11)
self.label_42.setFont(font)
- self.label_42.setObjectName("label_42")
- self.main_process1.addTab(self.run_tab, "")
- self.main_tab.addTab(self.main_process, "")
+ self.label_42.setObjectName('label_42')
+ self.main_process1.addTab(self.run_tab, '')
+ self.main_tab.addTab(self.main_process, '')
self.post_process = QtWidgets.QWidget()
- self.post_process.setObjectName("post_process")
+ self.post_process.setObjectName('post_process')
self.results_tabs_widget = QtWidgets.QTabWidget(self.post_process)
self.results_tabs_widget.setGeometry(QtCore.QRect(0, 0, 801, 721))
font = QtGui.QFont()
@@ -564,14 +606,16 @@ def setupUi(self, Opening_Window):
self.results_tabs_widget.setFont(font)
self.results_tabs_widget.setTabPosition(QtWidgets.QTabWidget.West)
self.results_tabs_widget.setTabShape(QtWidgets.QTabWidget.Triangular)
- self.results_tabs_widget.setObjectName("results_tabs_widget")
+ self.results_tabs_widget.setObjectName('results_tabs_widget')
self.data_tab = QtWidgets.QWidget()
- self.data_tab.setObjectName("data_tab")
+ self.data_tab.setObjectName('data_tab')
self.result_file_status_table = QtWidgets.QTableWidget(self.data_tab)
self.result_file_status_table.setEnabled(True)
self.result_file_status_table.setGeometry(QtCore.QRect(130, 70, 331, 421))
- self.result_file_status_table.setSelectionMode(QtWidgets.QAbstractItemView.NoSelection)
- self.result_file_status_table.setObjectName("result_file_status_table")
+ self.result_file_status_table.setSelectionMode(
+ QtWidgets.QAbstractItemView.NoSelection
+ )
+ self.result_file_status_table.setObjectName('result_file_status_table')
self.result_file_status_table.setColumnCount(2)
self.result_file_status_table.setRowCount(0)
item = QtWidgets.QTableWidgetItem()
@@ -582,7 +626,7 @@ def setupUi(self, Opening_Window):
self.result_file_status_table.horizontalHeader().setStretchLastSection(False)
self.load_results_button = QtWidgets.QPushButton(self.data_tab)
self.load_results_button.setGeometry(QtCore.QRect(130, 30, 91, 23))
- self.load_results_button.setObjectName("load_results_button")
+ self.load_results_button.setObjectName('load_results_button')
self.label_27 = QtWidgets.QLabel(self.data_tab)
self.label_27.setGeometry(QtCore.QRect(30, 30, 101, 16))
font = QtGui.QFont()
@@ -590,14 +634,14 @@ def setupUi(self, Opening_Window):
font.setBold(True)
font.setWeight(75)
self.label_27.setFont(font)
- self.label_27.setObjectName("label_27")
+ self.label_27.setObjectName('label_27')
self.read_files_check_box = QtWidgets.QCheckBox(self.data_tab)
self.read_files_check_box.setGeometry(QtCore.QRect(250, 30, 121, 17))
self.read_files_check_box.setChecked(True)
- self.read_files_check_box.setObjectName("read_files_check_box")
+ self.read_files_check_box.setObjectName('read_files_check_box')
self.population_browser_button = QtWidgets.QPushButton(self.data_tab)
self.population_browser_button.setGeometry(QtCore.QRect(470, 550, 91, 23))
- self.population_browser_button.setObjectName("population_browser_button")
+ self.population_browser_button.setObjectName('population_browser_button')
self.label_28 = QtWidgets.QLabel(self.data_tab)
self.label_28.setGeometry(QtCore.QRect(130, 530, 141, 16))
font = QtGui.QFont()
@@ -605,20 +649,28 @@ def setupUi(self, Opening_Window):
font.setBold(True)
font.setWeight(75)
self.label_28.setFont(font)
- self.label_28.setObjectName("label_28")
+ self.label_28.setObjectName('label_28')
self.population_addr_line = QtWidgets.QLineEdit(self.data_tab)
self.population_addr_line.setGeometry(QtCore.QRect(130, 550, 331, 20))
self.population_addr_line.setReadOnly(True)
- self.population_addr_line.setObjectName("population_addr_line")
+ self.population_addr_line.setObjectName('population_addr_line')
self.population_node_ID_header_combo = QtWidgets.QComboBox(self.data_tab)
- self.population_node_ID_header_combo.setGeometry(QtCore.QRect(130, 600, 111, 22))
- self.population_node_ID_header_combo.setObjectName("population_node_ID_header_combo")
+ self.population_node_ID_header_combo.setGeometry(
+ QtCore.QRect(130, 600, 111, 22)
+ )
+ self.population_node_ID_header_combo.setObjectName(
+ 'population_node_ID_header_combo'
+ )
self.population_population_header_combo = QtWidgets.QComboBox(self.data_tab)
- self.population_population_header_combo.setGeometry(QtCore.QRect(250, 600, 111, 22))
- self.population_population_header_combo.setObjectName("population_population_header_combo")
+ self.population_population_header_combo.setGeometry(
+ QtCore.QRect(250, 600, 111, 22)
+ )
+ self.population_population_header_combo.setObjectName(
+ 'population_population_header_combo'
+ )
self.population_load_button = QtWidgets.QPushButton(self.data_tab)
self.population_load_button.setGeometry(QtCore.QRect(470, 600, 91, 23))
- self.population_load_button.setObjectName("population_load_button")
+ self.population_load_button.setObjectName('population_load_button')
self.label_33 = QtWidgets.QLabel(self.data_tab)
self.label_33.setGeometry(QtCore.QRect(130, 580, 101, 16))
font = QtGui.QFont()
@@ -626,7 +678,7 @@ def setupUi(self, Opening_Window):
font.setBold(False)
font.setWeight(50)
self.label_33.setFont(font)
- self.label_33.setObjectName("label_33")
+ self.label_33.setObjectName('label_33')
self.label_34 = QtWidgets.QLabel(self.data_tab)
self.label_34.setGeometry(QtCore.QRect(250, 580, 101, 16))
font = QtGui.QFont()
@@ -634,30 +686,34 @@ def setupUi(self, Opening_Window):
font.setBold(False)
font.setWeight(50)
self.label_34.setFont(font)
- self.label_34.setObjectName("label_34")
- self.results_tabs_widget.addTab(self.data_tab, "")
+ self.label_34.setObjectName('label_34')
+ self.results_tabs_widget.addTab(self.data_tab, '')
self.curve_tab = QtWidgets.QWidget()
- self.curve_tab.setObjectName("curve_tab")
+ self.curve_tab.setObjectName('curve_tab')
self.curve_type_combo = QtWidgets.QComboBox(self.curve_tab)
self.curve_type_combo.setGeometry(QtCore.QRect(30, 80, 351, 22))
- self.curve_type_combo.setObjectName("curve_type_combo")
- self.curve_type_combo.addItem("")
- self.curve_type_combo.addItem("")
+ self.curve_type_combo.setObjectName('curve_type_combo')
+ self.curve_type_combo.addItem('')
+ self.curve_type_combo.addItem('')
self.all_scenarios_checkbox = QtWidgets.QCheckBox(self.curve_tab)
self.all_scenarios_checkbox.setGeometry(QtCore.QRect(190, 30, 121, 17))
self.all_scenarios_checkbox.setChecked(False)
- self.all_scenarios_checkbox.setObjectName("all_scenarios_checkbox")
+ self.all_scenarios_checkbox.setObjectName('all_scenarios_checkbox')
self.label_23 = QtWidgets.QLabel(self.curve_tab)
self.label_23.setGeometry(QtCore.QRect(30, 412, 47, 21))
- self.label_23.setObjectName("label_23")
+ self.label_23.setObjectName('label_23')
self.save_curve_button = QtWidgets.QPushButton(self.curve_tab)
self.save_curve_button.setGeometry(QtCore.QRect(380, 440, 91, 23))
- self.save_curve_button.setObjectName("save_curve_button")
+ self.save_curve_button.setObjectName('save_curve_button')
self.curve_settings_table = QtWidgets.QTableWidget(self.curve_tab)
self.curve_settings_table.setGeometry(QtCore.QRect(30, 440, 231, 271))
- self.curve_settings_table.setSelectionMode(QtWidgets.QAbstractItemView.NoSelection)
- self.curve_settings_table.setSelectionBehavior(QtWidgets.QAbstractItemView.SelectItems)
- self.curve_settings_table.setObjectName("curve_settings_table")
+ self.curve_settings_table.setSelectionMode(
+ QtWidgets.QAbstractItemView.NoSelection
+ )
+ self.curve_settings_table.setSelectionBehavior(
+ QtWidgets.QAbstractItemView.SelectItems
+ )
+ self.curve_settings_table.setObjectName('curve_settings_table')
self.curve_settings_table.setColumnCount(1)
self.curve_settings_table.setRowCount(4)
item = QtWidgets.QTableWidgetItem()
@@ -679,36 +735,40 @@ def setupUi(self, Opening_Window):
self.curve_settings_table.horizontalHeader().setStretchLastSection(True)
self.scenario_combo = QtWidgets.QComboBox(self.curve_tab)
self.scenario_combo.setGeometry(QtCore.QRect(30, 30, 141, 22))
- self.scenario_combo.setObjectName("scenario_combo")
+ self.scenario_combo.setObjectName('scenario_combo')
self.mpl_curve = MplWidget(self.curve_tab)
self.mpl_curve.setGeometry(QtCore.QRect(30, 110, 531, 291))
- self.mpl_curve.setObjectName("mpl_curve")
+ self.mpl_curve.setObjectName('mpl_curve')
self.label_25 = QtWidgets.QLabel(self.curve_tab)
self.label_25.setGeometry(QtCore.QRect(30, 10, 81, 16))
- self.label_25.setObjectName("label_25")
+ self.label_25.setObjectName('label_25')
self.label_26 = QtWidgets.QLabel(self.curve_tab)
self.label_26.setGeometry(QtCore.QRect(30, 55, 81, 21))
- self.label_26.setObjectName("label_26")
+ self.label_26.setObjectName('label_26')
self.label_24 = QtWidgets.QLabel(self.curve_tab)
self.label_24.setGeometry(QtCore.QRect(380, 420, 101, 21))
- self.label_24.setObjectName("label_24")
- self.results_tabs_widget.addTab(self.curve_tab, "")
+ self.label_24.setObjectName('label_24')
+ self.results_tabs_widget.addTab(self.curve_tab, '')
self.tab = QtWidgets.QWidget()
- self.tab.setObjectName("tab")
+ self.tab.setObjectName('tab')
self.mpl_map = MplWidget(self.tab)
self.mpl_map.setGeometry(QtCore.QRect(20, 110, 560, 560))
- self.mpl_map.setObjectName("mpl_map")
+ self.mpl_map.setObjectName('mpl_map')
self.label_35 = QtWidgets.QLabel(self.tab)
self.label_35.setGeometry(QtCore.QRect(20, 25, 81, 21))
- self.label_35.setObjectName("label_35")
+ self.label_35.setObjectName('label_35')
self.map_type_combo = QtWidgets.QComboBox(self.tab)
self.map_type_combo.setGeometry(QtCore.QRect(20, 50, 351, 22))
- self.map_type_combo.setObjectName("map_type_combo")
+ self.map_type_combo.setObjectName('map_type_combo')
self.map_settings_table = QtWidgets.QTableWidget(self.tab)
self.map_settings_table.setGeometry(QtCore.QRect(590, 110, 181, 201))
- self.map_settings_table.setSelectionMode(QtWidgets.QAbstractItemView.NoSelection)
- self.map_settings_table.setSelectionBehavior(QtWidgets.QAbstractItemView.SelectItems)
- self.map_settings_table.setObjectName("map_settings_table")
+ self.map_settings_table.setSelectionMode(
+ QtWidgets.QAbstractItemView.NoSelection
+ )
+ self.map_settings_table.setSelectionBehavior(
+ QtWidgets.QAbstractItemView.SelectItems
+ )
+ self.map_settings_table.setObjectName('map_settings_table')
self.map_settings_table.setColumnCount(1)
self.map_settings_table.setRowCount(4)
item = QtWidgets.QTableWidgetItem()
@@ -730,91 +790,91 @@ def setupUi(self, Opening_Window):
self.map_settings_table.horizontalHeader().setStretchLastSection(True)
self.label_36 = QtWidgets.QLabel(self.tab)
self.label_36.setGeometry(QtCore.QRect(590, 80, 47, 21))
- self.label_36.setObjectName("label_36")
+ self.label_36.setObjectName('label_36')
self.label_37 = QtWidgets.QLabel(self.tab)
self.label_37.setGeometry(QtCore.QRect(20, 90, 81, 21))
- self.label_37.setObjectName("label_37")
+ self.label_37.setObjectName('label_37')
self.save_map_button = QtWidgets.QPushButton(self.tab)
self.save_map_button.setGeometry(QtCore.QRect(600, 680, 131, 23))
- self.save_map_button.setObjectName("save_map_button")
+ self.save_map_button.setObjectName('save_map_button')
self.label_38 = QtWidgets.QLabel(self.tab)
self.label_38.setGeometry(QtCore.QRect(600, 660, 131, 21))
self.label_38.setAlignment(QtCore.Qt.AlignCenter)
- self.label_38.setObjectName("label_38")
+ self.label_38.setObjectName('label_38')
self.label_39 = QtWidgets.QLabel(self.tab)
self.label_39.setGeometry(QtCore.QRect(400, 30, 81, 16))
- self.label_39.setObjectName("label_39")
+ self.label_39.setObjectName('label_39')
self.map_all_scenarios_checkbox = QtWidgets.QCheckBox(self.tab)
self.map_all_scenarios_checkbox.setGeometry(QtCore.QRect(400, 10, 121, 17))
self.map_all_scenarios_checkbox.setChecked(False)
- self.map_all_scenarios_checkbox.setObjectName("map_all_scenarios_checkbox")
+ self.map_all_scenarios_checkbox.setObjectName('map_all_scenarios_checkbox')
self.map_scenario_combo = QtWidgets.QComboBox(self.tab)
self.map_scenario_combo.setGeometry(QtCore.QRect(400, 50, 181, 22))
- self.map_scenario_combo.setObjectName("map_scenario_combo")
+ self.map_scenario_combo.setObjectName('map_scenario_combo')
self.groupBox_12 = QtWidgets.QGroupBox(self.tab)
self.groupBox_12.setGeometry(QtCore.QRect(590, 330, 171, 131))
- self.groupBox_12.setObjectName("groupBox_12")
+ self.groupBox_12.setObjectName('groupBox_12')
self.annotation_checkbox = QtWidgets.QCheckBox(self.groupBox_12)
self.annotation_checkbox.setGeometry(QtCore.QRect(10, 20, 91, 31))
- self.annotation_checkbox.setObjectName("annotation_checkbox")
+ self.annotation_checkbox.setObjectName('annotation_checkbox')
self.annotation_event_combo = QtWidgets.QComboBox(self.groupBox_12)
self.annotation_event_combo.setEnabled(False)
self.annotation_event_combo.setGeometry(QtCore.QRect(10, 50, 141, 22))
- self.annotation_event_combo.setObjectName("annotation_event_combo")
- self.annotation_event_combo.addItem("")
- self.annotation_event_combo.addItem("")
+ self.annotation_event_combo.setObjectName('annotation_event_combo')
+ self.annotation_event_combo.addItem('')
+ self.annotation_event_combo.addItem('')
self.label_40 = QtWidgets.QLabel(self.groupBox_12)
self.label_40.setGeometry(QtCore.QRect(10, 80, 81, 21))
- self.label_40.setObjectName("label_40")
+ self.label_40.setObjectName('label_40')
self.annotation_radius_line = QtWidgets.QLineEdit(self.groupBox_12)
self.annotation_radius_line.setEnabled(False)
self.annotation_radius_line.setGeometry(QtCore.QRect(10, 100, 113, 20))
- self.annotation_radius_line.setObjectName("annotation_radius_line")
+ self.annotation_radius_line.setObjectName('annotation_radius_line')
self.groupBox_13 = QtWidgets.QGroupBox(self.tab)
self.groupBox_13.setGeometry(QtCore.QRect(590, 470, 171, 51))
- self.groupBox_13.setObjectName("groupBox_13")
+ self.groupBox_13.setObjectName('groupBox_13')
self.spatial_join_button = QtWidgets.QPushButton(self.groupBox_13)
self.spatial_join_button.setGeometry(QtCore.QRect(20, 20, 121, 21))
- self.spatial_join_button.setObjectName("spatial_join_button")
+ self.spatial_join_button.setObjectName('spatial_join_button')
self.groupBox_14 = QtWidgets.QGroupBox(self.tab)
self.groupBox_14.setGeometry(QtCore.QRect(590, 540, 171, 111))
- self.groupBox_14.setObjectName("groupBox_14")
+ self.groupBox_14.setObjectName('groupBox_14')
self.symbology_button = QtWidgets.QPushButton(self.groupBox_14)
self.symbology_button.setGeometry(QtCore.QRect(20, 20, 121, 31))
- self.symbology_button.setObjectName("symbology_button")
+ self.symbology_button.setObjectName('symbology_button')
self.major_tick_size_line = QtWidgets.QLineEdit(self.groupBox_14)
self.major_tick_size_line.setGeometry(QtCore.QRect(110, 60, 31, 20))
- self.major_tick_size_line.setObjectName("major_tick_size_line")
+ self.major_tick_size_line.setObjectName('major_tick_size_line')
self.label_41 = QtWidgets.QLabel(self.groupBox_14)
self.label_41.setGeometry(QtCore.QRect(20, 60, 81, 21))
self.label_41.setAlignment(QtCore.Qt.AlignCenter)
- self.label_41.setObjectName("label_41")
- self.results_tabs_widget.addTab(self.tab, "")
- self.main_tab.addTab(self.post_process, "")
+ self.label_41.setObjectName('label_41')
+ self.results_tabs_widget.addTab(self.tab, '')
+ self.main_tab.addTab(self.post_process, '')
Opening_Window.setCentralWidget(self.centralwidget)
self.menubar = QtWidgets.QMenuBar(Opening_Window)
self.menubar.setGeometry(QtCore.QRect(0, 0, 830, 21))
- self.menubar.setObjectName("menubar")
+ self.menubar.setObjectName('menubar')
self.menuFile = QtWidgets.QMenu(self.menubar)
- self.menuFile.setObjectName("menuFile")
+ self.menuFile.setObjectName('menuFile')
self.menuHelp = QtWidgets.QMenu(self.menubar)
- self.menuHelp.setObjectName("menuHelp")
+ self.menuHelp.setObjectName('menuHelp')
Opening_Window.setMenuBar(self.menubar)
self.statusbar = QtWidgets.QStatusBar(Opening_Window)
- self.statusbar.setObjectName("statusbar")
+ self.statusbar.setObjectName('statusbar')
Opening_Window.setStatusBar(self.statusbar)
self.action_Open_Project = QtWidgets.QAction(Opening_Window)
- self.action_Open_Project.setObjectName("action_Open_Project")
+ self.action_Open_Project.setObjectName('action_Open_Project')
self.action_Save = QtWidgets.QAction(Opening_Window)
- self.action_Save.setObjectName("action_Save")
+ self.action_Save.setObjectName('action_Save')
self.action_Save_Project_As = QtWidgets.QAction(Opening_Window)
- self.action_Save_Project_As.setObjectName("action_Save_Project_As")
+ self.action_Save_Project_As.setObjectName('action_Save_Project_As')
self.action_Exit = QtWidgets.QAction(Opening_Window)
- self.action_Exit.setObjectName("action_Exit")
+ self.action_Exit.setObjectName('action_Exit')
self.action_About = QtWidgets.QAction(Opening_Window)
- self.action_About.setObjectName("action_About")
+ self.action_About.setObjectName('action_About')
self.action_REWET_GITHUB = QtWidgets.QAction(Opening_Window)
- self.action_REWET_GITHUB.setObjectName("action_REWET_GITHUB")
+ self.action_REWET_GITHUB.setObjectName('action_REWET_GITHUB')
self.menuFile.addAction(self.action_Open_Project)
self.menuFile.addSeparator()
self.menuFile.addAction(self.action_Save)
@@ -832,197 +892,293 @@ def setupUi(self, Opening_Window):
self.results_tabs_widget.setCurrentIndex(0)
QtCore.QMetaObject.connectSlotsByName(Opening_Window)
- def retranslateUi(self, Opening_Window):
+ def retranslateUi(self, Opening_Window): # noqa: N802, N803, D102, PLR0915
_translate = QtCore.QCoreApplication.translate
- Opening_Window.setWindowTitle(_translate("Opening_Window", "REWET"))
- self.groupBox_4.setTitle(_translate("Opening_Window", "Temp File Settings"))
- self.label_29.setText(_translate("Opening_Window", "Save time step"))
- self.save_time_step_yes_radio.setText(_translate("Opening_Window", "Yes"))
- self.label_30.setText(_translate("Opening_Window", "WARNING: Producing INP files for each event time can produce too many files"))
- self.save_time_step_no_radio.setText(_translate("Opening_Window", "No"))
- self.label_15.setText(_translate("Opening_Window", "Temp Folder"))
- self.temp_browser_button.setText(_translate("Opening_Window", "Browse"))
- self.groupBox_5.setTitle(_translate("Opening_Window", "Settings"))
- self.simulation_time_line.setText(_translate("Opening_Window", "36000"))
- self.label.setText(_translate("Opening_Window", "Simulation time"))
- self.label_32.setText(_translate("Opening_Window", "Run Type"))
- self.multiple_radio.setText(_translate("Opening_Window", "Multiple Serial"))
- self.single_radio.setText(_translate("Opening_Window", "Single"))
- self.label_3.setText(_translate("Opening_Window", "Simulation time step"))
- self.simulation_time_step_line.setText(_translate("Opening_Window", "3600"))
- self.label_17.setText(_translate("Opening_Window", "seconds"))
- self.label_18.setText(_translate("Opening_Window", "seconds"))
- self.result_directory_browser_button.setText(_translate("Opening_Window", "Browse"))
- self.label_22.setText(_translate("Opening_Window", "Result Folder"))
- self.main_process1.setTabText(self.main_process1.indexOf(self.simulation_tab), _translate("Opening_Window", "Simulation"))
- self.groupBox_3.setTitle(_translate("Opening_Window", "Solver Options"))
- self.label_9.setText(_translate("Opening_Window", "Solver:"))
- self.wntr_solver_radio.setText(_translate("Opening_Window", "WNTR"))
- self.modified_epanet_radio.setText(_translate("Opening_Window", "Modified EPANET V2.2"))
- self.minimum_pressure_line.setText(_translate("Opening_Window", "8"))
- self.required_pressure_line.setText(_translate("Opening_Window", "25"))
- self.label_10.setText(_translate("Opening_Window", "P(min):"))
- self.label_11.setText(_translate("Opening_Window", "h (water)"))
- self.label_12.setText(_translate("Opening_Window", "h (water)"))
- self.label_13.setText(_translate("Opening_Window", "P(req):"))
- self.label_4.setText(_translate("Opening_Window", "Hydraulic time step"))
- self.hydraulic_time_step_line.setText(_translate("Opening_Window", "3600"))
- self.label_16.setText(_translate("Opening_Window", "seconds"))
- self.groupBox_6.setTitle(_translate("Opening_Window", "Water Distribution Network"))
- self.wdn_browse_button.setText(_translate("Opening_Window", "Browse"))
- self.label_2.setText(_translate("Opening_Window", "System INP File"))
- self.demand_ratio_line.setText(_translate("Opening_Window", "1"))
- self.label_7.setText(_translate("Opening_Window", "Calibrate Demand"))
- self.label_8.setText(_translate("Opening_Window", "0\n"
-"\n"
-"
"))
- self.remove_scenario_button.setText(_translate("Opening_Window", "Remove"))
- self.groupBox_7.setTitle(_translate("Opening_Window", "File Headers Correction"))
- self.pushButton_13.setText(_translate("Opening_Window", "Pipe"))
- self.pushButton_17.setText(_translate("Opening_Window", "Node"))
- self.pushButton_16.setText(_translate("Opening_Window", "Tanks"))
- self.pushButton_15.setText(_translate("Opening_Window", "Pump"))
- self.groupBox_8.setTitle(_translate("Opening_Window", "Damage Modeling"))
- self.pipe_damage_modeling_button.setText(_translate("Opening_Window", "Pipe"))
- self.node_damage_modeling_button.setText(_translate("Opening_Window", "Nodal"))
- self.label_20.setText(_translate("Opening_Window", "Damage File Directory"))
- self.damage_directory_browse_button.setText(_translate("Opening_Window", "Browse"))
- self.load_scenario_button.setText(_translate("Opening_Window", "Load"))
- self.save_scenario_button.setText(_translate("Opening_Window", "Save"))
- self.validate_scenario_button.setText(_translate("Opening_Window", "Validate Damage Scenarios"))
- self.groupBox_11.setTitle(_translate("Opening_Window", "File Type"))
- self.file_type_excel_radio.setText(_translate("Opening_Window", "Excel"))
- self.file_type_pickle_radio.setText(_translate("Opening_Window", "Pickle"))
- self.main_process1.setTabText(self.main_process1.indexOf(self.damage_tab), _translate("Opening_Window", "Damage"))
- self.groupBox.setTitle(_translate("Opening_Window", "On/Off"))
- self.restoration_on_radio.setText(_translate("Opening_Window", "On"))
- self.restoration_off_radio.setText(_translate("Opening_Window", "Off"))
- self.groupBox_2.setTitle(_translate("Opening_Window", "Restoration Definition"))
- self.policy_browse_button.setText(_translate("Opening_Window", "Browse"))
- self.script_rrp_radio.setText(_translate("Opening_Window", "Policy File (*.rrp)"))
- self.script_txt_radio.setText(_translate("Opening_Window", "Script (*.txt)"))
- self.label_5.setText(_translate("Opening_Window", "Policy Definition File"))
- self.policy_designer.setText(_translate("Opening_Window", "Policy Designer"))
- self.label_6.setText(_translate("Opening_Window", "Minimum Job Time"))
- self.minimum_job_time_line.setText(_translate("Opening_Window", "3600"))
- self.label_14.setText(_translate("Opening_Window", "Crew Availability"))
- self.crew_availability_browse_button.setText(_translate("Opening_Window", "Browse"))
- self.label_19.setText(_translate("Opening_Window", "seconds"))
- self.label_21.setText(_translate("Opening_Window", "File Type"))
- self.groupBox_9.setTitle(_translate("Opening_Window", "Damage Discovery"))
- self.pipe_damage_discovery_button.setText(_translate("Opening_Window", "Pipe "))
- self.node_damage_discovery_button.setText(_translate("Opening_Window", "Node"))
- self.pump_damage_discovery_button.setText(_translate("Opening_Window", "Pump"))
- self.tank_damage_discovery_button.setText(_translate("Opening_Window", "Tank"))
- self.groupBox_10.setTitle(_translate("Opening_Window", "Crew out-of-zone Travel"))
- self.out_of_zone_travel_yes.setText(_translate("Opening_Window", "Allowed"))
- self.out_of_zone_travel_no.setText(_translate("Opening_Window", "Not Allowed"))
- self.main_process1.setTabText(self.main_process1.indexOf(self.restoration_tab), _translate("Opening_Window", "Restoration"))
- self.run_button.setText(_translate("Opening_Window", "RUN"))
- self.stop_button.setText(_translate("Opening_Window", "Stop"))
- self.label_42.setText(_translate("Opening_Window", "Log"))
- self.main_process1.setTabText(self.main_process1.indexOf(self.run_tab), _translate("Opening_Window", "Run"))
- self.main_tab.setTabText(self.main_tab.indexOf(self.main_process), _translate("Opening_Window", "Process"))
+ item.setText(_translate('Opening_Window', 'Probability'))
+ self.label_31.setText(_translate('Opening_Window', 'Damage Scenario'))
+ self.add_scenario_button.setText(_translate('Opening_Window', 'Add'))
+ self.status_text.setHtml(
+ _translate(
+ 'Opening_Window',
+ '\n'
+ '\n'
+ '',
+ )
+ )
+ self.remove_scenario_button.setText(_translate('Opening_Window', 'Remove'))
+ self.groupBox_7.setTitle(
+ _translate('Opening_Window', 'File Headers Correction')
+ )
+ self.pushButton_13.setText(_translate('Opening_Window', 'Pipe'))
+ self.pushButton_17.setText(_translate('Opening_Window', 'Node'))
+ self.pushButton_16.setText(_translate('Opening_Window', 'Tanks'))
+ self.pushButton_15.setText(_translate('Opening_Window', 'Pump'))
+ self.groupBox_8.setTitle(_translate('Opening_Window', 'Damage Modeling'))
+ self.pipe_damage_modeling_button.setText(
+ _translate('Opening_Window', 'Pipe')
+ )
+ self.node_damage_modeling_button.setText(
+ _translate('Opening_Window', 'Nodal')
+ )
+ self.label_20.setText(_translate('Opening_Window', 'Damage File Directory'))
+ self.damage_directory_browse_button.setText(
+ _translate('Opening_Window', 'Browse')
+ )
+ self.load_scenario_button.setText(_translate('Opening_Window', 'Load'))
+ self.save_scenario_button.setText(_translate('Opening_Window', 'Save'))
+ self.validate_scenario_button.setText(
+ _translate('Opening_Window', 'Validate Damage Scenarios')
+ )
+ self.groupBox_11.setTitle(_translate('Opening_Window', 'File Type'))
+ self.file_type_excel_radio.setText(_translate('Opening_Window', 'Excel'))
+ self.file_type_pickle_radio.setText(_translate('Opening_Window', 'Pickle'))
+ self.main_process1.setTabText(
+ self.main_process1.indexOf(self.damage_tab),
+ _translate('Opening_Window', 'Damage'),
+ )
+ self.groupBox.setTitle(_translate('Opening_Window', 'On/Off'))
+ self.restoration_on_radio.setText(_translate('Opening_Window', 'On'))
+ self.restoration_off_radio.setText(_translate('Opening_Window', 'Off'))
+ self.groupBox_2.setTitle(
+ _translate('Opening_Window', 'Restoration Definition')
+ )
+ self.policy_browse_button.setText(_translate('Opening_Window', 'Browse'))
+ self.script_rrp_radio.setText(
+ _translate('Opening_Window', 'Policy File (*.rrp)')
+ )
+ self.script_txt_radio.setText(_translate('Opening_Window', 'Script (*.txt)'))
+ self.label_5.setText(_translate('Opening_Window', 'Policy Definition File'))
+ self.policy_designer.setText(_translate('Opening_Window', 'Policy Designer'))
+ self.label_6.setText(_translate('Opening_Window', 'Minimum Job Time'))
+ self.minimum_job_time_line.setText(_translate('Opening_Window', '3600'))
+ self.label_14.setText(_translate('Opening_Window', 'Crew Availability'))
+ self.crew_availability_browse_button.setText(
+ _translate('Opening_Window', 'Browse')
+ )
+ self.label_19.setText(_translate('Opening_Window', 'seconds'))
+ self.label_21.setText(_translate('Opening_Window', 'File Type'))
+ self.groupBox_9.setTitle(_translate('Opening_Window', 'Damage Discovery'))
+ self.pipe_damage_discovery_button.setText(
+ _translate('Opening_Window', 'Pipe ')
+ )
+ self.node_damage_discovery_button.setText(
+ _translate('Opening_Window', 'Node')
+ )
+ self.pump_damage_discovery_button.setText(
+ _translate('Opening_Window', 'Pump')
+ )
+ self.tank_damage_discovery_button.setText(
+ _translate('Opening_Window', 'Tank')
+ )
+ self.groupBox_10.setTitle(
+ _translate('Opening_Window', 'Crew out-of-zone Travel')
+ )
+ self.out_of_zone_travel_yes.setText(_translate('Opening_Window', 'Allowed'))
+ self.out_of_zone_travel_no.setText(
+ _translate('Opening_Window', 'Not Allowed')
+ )
+ self.main_process1.setTabText(
+ self.main_process1.indexOf(self.restoration_tab),
+ _translate('Opening_Window', 'Restoration'),
+ )
+ self.run_button.setText(_translate('Opening_Window', 'RUN'))
+ self.stop_button.setText(_translate('Opening_Window', 'Stop'))
+ self.label_42.setText(_translate('Opening_Window', 'Log'))
+ self.main_process1.setTabText(
+ self.main_process1.indexOf(self.run_tab),
+ _translate('Opening_Window', 'Run'),
+ )
+ self.main_tab.setTabText(
+ self.main_tab.indexOf(self.main_process),
+ _translate('Opening_Window', 'Process'),
+ )
item = self.result_file_status_table.horizontalHeaderItem(0)
- item.setText(_translate("Opening_Window", "Scenario Name"))
+ item.setText(_translate('Opening_Window', 'Scenario Name'))
item = self.result_file_status_table.horizontalHeaderItem(1)
- item.setText(_translate("Opening_Window", "Status"))
- self.load_results_button.setText(_translate("Opening_Window", "Load"))
- self.label_27.setText(_translate("Opening_Window", "Load Result Files"))
- self.read_files_check_box.setText(_translate("Opening_Window", "Read Files"))
- self.population_browser_button.setText(_translate("Opening_Window", "Browse"))
- self.label_28.setText(_translate("Opening_Window", "Population Data"))
- self.population_load_button.setText(_translate("Opening_Window", "Load"))
- self.label_33.setText(_translate("Opening_Window", "Node ID Header"))
- self.label_34.setText(_translate("Opening_Window", "Population Header"))
- self.results_tabs_widget.setTabText(self.results_tabs_widget.indexOf(self.data_tab), _translate("Opening_Window", "Data"))
- self.curve_type_combo.setItemText(0, _translate("Opening_Window", "Quantity Exceedance"))
- self.curve_type_combo.setItemText(1, _translate("Opening_Window", "Delivery Exceedance"))
- self.all_scenarios_checkbox.setText(_translate("Opening_Window", "All Scenarios"))
- self.label_23.setText(_translate("Opening_Window", "Settings"))
- self.save_curve_button.setText(_translate("Opening_Window", "Browse"))
+ item.setText(_translate('Opening_Window', 'Status'))
+ self.load_results_button.setText(_translate('Opening_Window', 'Load'))
+ self.label_27.setText(_translate('Opening_Window', 'Load Result Files'))
+ self.read_files_check_box.setText(_translate('Opening_Window', 'Read Files'))
+ self.population_browser_button.setText(
+ _translate('Opening_Window', 'Browse')
+ )
+ self.label_28.setText(_translate('Opening_Window', 'Population Data'))
+ self.population_load_button.setText(_translate('Opening_Window', 'Load'))
+ self.label_33.setText(_translate('Opening_Window', 'Node ID Header'))
+ self.label_34.setText(_translate('Opening_Window', 'Population Header'))
+ self.results_tabs_widget.setTabText(
+ self.results_tabs_widget.indexOf(self.data_tab),
+ _translate('Opening_Window', 'Data'),
+ )
+ self.curve_type_combo.setItemText(
+ 0, _translate('Opening_Window', 'Quantity Exceedance')
+ )
+ self.curve_type_combo.setItemText(
+ 1, _translate('Opening_Window', 'Delivery Exceedance')
+ )
+ self.all_scenarios_checkbox.setText(
+ _translate('Opening_Window', 'All Scenarios')
+ )
+ self.label_23.setText(_translate('Opening_Window', 'Settings'))
+ self.save_curve_button.setText(_translate('Opening_Window', 'Browse'))
item = self.curve_settings_table.verticalHeaderItem(0)
- item.setText(_translate("Opening_Window", "Percentage"))
+ item.setText(_translate('Opening_Window', 'Percentage'))
item = self.curve_settings_table.verticalHeaderItem(1)
- item.setText(_translate("Opening_Window", "Leak Ratio"))
+ item.setText(_translate('Opening_Window', 'Leak Ratio'))
item = self.curve_settings_table.verticalHeaderItem(2)
- item.setText(_translate("Opening_Window", "Time Unit"))
+ item.setText(_translate('Opening_Window', 'Time Unit'))
item = self.curve_settings_table.verticalHeaderItem(3)
- item.setText(_translate("Opening_Window", "Time Shift"))
+ item.setText(_translate('Opening_Window', 'Time Shift'))
item = self.curve_settings_table.horizontalHeaderItem(0)
- item.setText(_translate("Opening_Window", "Values"))
- __sortingEnabled = self.curve_settings_table.isSortingEnabled()
+ item.setText(_translate('Opening_Window', 'Values'))
+ __sortingEnabled = self.curve_settings_table.isSortingEnabled() # noqa: N806
self.curve_settings_table.setSortingEnabled(False)
self.curve_settings_table.setSortingEnabled(__sortingEnabled)
- self.label_25.setText(_translate("Opening_Window", "Scenario"))
- self.label_26.setText(_translate("Opening_Window", "Curve Type"))
- self.label_24.setText(_translate("Opening_Window", "Save Curve Data"))
- self.results_tabs_widget.setTabText(self.results_tabs_widget.indexOf(self.curve_tab), _translate("Opening_Window", "Curve"))
- self.label_35.setText(_translate("Opening_Window", "Curve Type"))
+ self.label_25.setText(_translate('Opening_Window', 'Scenario'))
+ self.label_26.setText(_translate('Opening_Window', 'Curve Type'))
+ self.label_24.setText(_translate('Opening_Window', 'Save Curve Data'))
+ self.results_tabs_widget.setTabText(
+ self.results_tabs_widget.indexOf(self.curve_tab),
+ _translate('Opening_Window', 'Curve'),
+ )
+ self.label_35.setText(_translate('Opening_Window', 'Curve Type'))
item = self.map_settings_table.verticalHeaderItem(0)
- item.setText(_translate("Opening_Window", "Percentage"))
+ item.setText(_translate('Opening_Window', 'Percentage'))
item = self.map_settings_table.verticalHeaderItem(1)
- item.setText(_translate("Opening_Window", "Leak Ratio"))
+ item.setText(_translate('Opening_Window', 'Leak Ratio'))
item = self.map_settings_table.verticalHeaderItem(2)
- item.setText(_translate("Opening_Window", "Time Unit"))
+ item.setText(_translate('Opening_Window', 'Time Unit'))
item = self.map_settings_table.verticalHeaderItem(3)
- item.setText(_translate("Opening_Window", "Time Shift"))
+ item.setText(_translate('Opening_Window', 'Time Shift'))
item = self.map_settings_table.horizontalHeaderItem(0)
- item.setText(_translate("Opening_Window", "Values"))
- __sortingEnabled = self.map_settings_table.isSortingEnabled()
+ item.setText(_translate('Opening_Window', 'Values'))
+ __sortingEnabled = self.map_settings_table.isSortingEnabled() # noqa: N806
self.map_settings_table.setSortingEnabled(False)
self.map_settings_table.setSortingEnabled(__sortingEnabled)
- self.label_36.setText(_translate("Opening_Window", "Settings"))
- self.label_37.setText(_translate("Opening_Window", "Map"))
- self.save_map_button.setText(_translate("Opening_Window", "Browse"))
- self.label_38.setText(_translate("Opening_Window", "Save Map Shape File"))
- self.label_39.setText(_translate("Opening_Window", "Scenario"))
- self.map_all_scenarios_checkbox.setText(_translate("Opening_Window", "All Scenarios"))
- self.groupBox_12.setTitle(_translate("Opening_Window", "Annotation"))
- self.annotation_checkbox.setText(_translate("Opening_Window", "On"))
- self.annotation_event_combo.setItemText(0, _translate("Opening_Window", "Mouse hover"))
- self.annotation_event_combo.setItemText(1, _translate("Opening_Window", "Mouse click"))
- self.label_40.setText(_translate("Opening_Window", "Radius"))
- self.annotation_radius_line.setText(_translate("Opening_Window", "0"))
- self.groupBox_13.setTitle(_translate("Opening_Window", "Substitute Layer"))
- self.spatial_join_button.setText(_translate("Opening_Window", "Setup"))
- self.groupBox_14.setTitle(_translate("Opening_Window", "Map Appearance"))
- self.symbology_button.setText(_translate("Opening_Window", "Symbology"))
- self.major_tick_size_line.setText(_translate("Opening_Window", "10"))
- self.label_41.setText(_translate("Opening_Window", "Grid Font Size"))
- self.results_tabs_widget.setTabText(self.results_tabs_widget.indexOf(self.tab), _translate("Opening_Window", "Map"))
- self.main_tab.setTabText(self.main_tab.indexOf(self.post_process), _translate("Opening_Window", "Post-Process"))
- self.menuFile.setTitle(_translate("Opening_Window", "File"))
- self.menuHelp.setTitle(_translate("Opening_Window", "Help"))
- self.action_Open_Project.setText(_translate("Opening_Window", "Open Project"))
- self.action_Save.setText(_translate("Opening_Window", "Save"))
- self.action_Save_Project_As.setText(_translate("Opening_Window", "Save Project As"))
- self.action_Exit.setText(_translate("Opening_Window", "Exit"))
- self.action_About.setText(_translate("Opening_Window", "About"))
- self.action_REWET_GITHUB.setText(_translate("Opening_Window", "REWET GITHUB"))
-from .MplWidget import MplWidget
+ self.label_36.setText(_translate('Opening_Window', 'Settings'))
+ self.label_37.setText(_translate('Opening_Window', 'Map'))
+ self.save_map_button.setText(_translate('Opening_Window', 'Browse'))
+ self.label_38.setText(_translate('Opening_Window', 'Save Map Shape File'))
+ self.label_39.setText(_translate('Opening_Window', 'Scenario'))
+ self.map_all_scenarios_checkbox.setText(
+ _translate('Opening_Window', 'All Scenarios')
+ )
+ self.groupBox_12.setTitle(_translate('Opening_Window', 'Annotation'))
+ self.annotation_checkbox.setText(_translate('Opening_Window', 'On'))
+ self.annotation_event_combo.setItemText(
+ 0, _translate('Opening_Window', 'Mouse hover')
+ )
+ self.annotation_event_combo.setItemText(
+ 1, _translate('Opening_Window', 'Mouse click')
+ )
+ self.label_40.setText(_translate('Opening_Window', 'Radius'))
+ self.annotation_radius_line.setText(_translate('Opening_Window', '0'))
+ self.groupBox_13.setTitle(_translate('Opening_Window', 'Substitute Layer'))
+ self.spatial_join_button.setText(_translate('Opening_Window', 'Setup'))
+ self.groupBox_14.setTitle(_translate('Opening_Window', 'Map Appearance'))
+ self.symbology_button.setText(_translate('Opening_Window', 'Symbology'))
+ self.major_tick_size_line.setText(_translate('Opening_Window', '10'))
+ self.label_41.setText(_translate('Opening_Window', 'Grid Font Size'))
+ self.results_tabs_widget.setTabText(
+ self.results_tabs_widget.indexOf(self.tab),
+ _translate('Opening_Window', 'Map'),
+ )
+ self.main_tab.setTabText(
+ self.main_tab.indexOf(self.post_process),
+ _translate('Opening_Window', 'Post-Process'),
+ )
+ self.menuFile.setTitle(_translate('Opening_Window', 'File'))
+ self.menuHelp.setTitle(_translate('Opening_Window', 'Help'))
+ self.action_Open_Project.setText(
+ _translate('Opening_Window', 'Open Project')
+ )
+ self.action_Save.setText(_translate('Opening_Window', 'Save'))
+ self.action_Save_Project_As.setText(
+ _translate('Opening_Window', 'Save Project As')
+ )
+ self.action_Exit.setText(_translate('Opening_Window', 'Exit'))
+ self.action_About.setText(_translate('Opening_Window', 'About'))
+ self.action_REWET_GITHUB.setText(
+ _translate('Opening_Window', 'REWET GITHUB')
+ )
+
+from .MplWidget import MplWidget # noqa: E402
-if __name__ == "__main__":
+if __name__ == '__main__':
import sys
+
app = QtWidgets.QApplication(sys.argv)
Opening_Window = QtWidgets.QMainWindow()
ui = Ui_Opening_Window()
diff --git a/modules/systemPerformance/REWET/REWET/GUI/PP_Data_Tab_Designer.py b/modules/systemPerformance/REWET/REWET/GUI/PP_Data_Tab_Designer.py
index 957894044..1aaf3ef9c 100644
--- a/modules/systemPerformance/REWET/REWET/GUI/PP_Data_Tab_Designer.py
+++ b/modules/systemPerformance/REWET/REWET/GUI/PP_Data_Tab_Designer.py
@@ -1,133 +1,158 @@
-# -*- coding: utf-8 -*-
-"""
-Created on Thu Dec 29 15:41:03 2022
+"""Created on Thu Dec 29 15:41:03 2022
@author: snaeimi
-"""
+""" # noqa: CPY001, D400, N999
import os
-from PyQt5 import QtWidgets
+
import pandas as pd
+from PyQt5 import QtWidgets
from Result_Project import Project_Result
-class PP_Data_Tab():
-
+
+class PP_Data_Tab: # noqa: D101
def __init__(self, project):
- self.pp_project = project
- #self.__settings = settings
- self.result_scenarios = []
- self.population_data = None
- #self.pp_result_folder_addr = result_folder_addr
- self.load_results_button.clicked.connect(self.resultLoadButtonPressed )
+ self.pp_project = project
+ # self.__settings = settings
+ self.result_scenarios = []
+ self.population_data = None
+ # self.pp_result_folder_addr = result_folder_addr
+ self.load_results_button.clicked.connect(self.resultLoadButtonPressed)
self.population_browser_button.clicked.connect(self.browsePopulationData)
self.population_load_button.clicked.connect(self.loadPopulationData)
- #self.results_tabs_widget.setTabEnabled(1, False)
+ # self.results_tabs_widget.setTabEnabled(1, False)
self.project_result = None
self.current_population_directory = ''
-
- def initalizeResultData(self):
- if self.project == None:
- self.errorMSG("Error", "No project is found. open or save a new project.")
+
+ def initalizeResultData(self): # noqa: N802, D102
+ if self.project == None: # noqa: E711
+ self.errorMSG(
+ 'Error', 'No project is found. Open or save a new project.'
+ )
return
result_directory = self.result_folder_addr_line.text()
- self.project_result = Project_Result(self.project, iObject=True, result_file_dir=result_directory, ignore_not_found=True)
+ self.project_result = Project_Result(
+ self.project,
+ iObject=True,
+ result_file_dir=result_directory,
+ ignore_not_found=True,
+ )
self.clearResultData()
-
- print(self.project_result.scn_name_list_that_result_file_not_found)
- for index, row in self.scenario_list.iterrows():
+
+ print(self.project_result.scn_name_list_that_result_file_not_found) # noqa: T201
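+ # Fill the result-file status table: every scenario in the project list is marked
+ # 'Available' or 'NOT Available' depending on whether its result file was found.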
+ for index, row in self.scenario_list.iterrows(): # noqa: B007
number_of_rows = self.result_file_status_table.rowCount()
- scenario_name = row["Scenario Name"]
- scenario_item = QtWidgets.QTableWidgetItem(scenario_name )
- if scenario_name in self.project_result.scn_name_list_that_result_file_not_found:
- status_item = QtWidgets.QTableWidgetItem("NOT Available" )
+ scenario_name = row['Scenario Name']
+ scenario_item = QtWidgets.QTableWidgetItem(scenario_name)
+ if (
+ scenario_name
+ in self.project_result.scn_name_list_that_result_file_not_found
+ ):
+ status_item = QtWidgets.QTableWidgetItem('NOT Available')
else:
self.result_scenarios.append(scenario_name)
- status_item = QtWidgets.QTableWidgetItem("Available" )
-
+ status_item = QtWidgets.QTableWidgetItem('Available')
+
self.result_file_status_table.insertRow(number_of_rows)
self.result_file_status_table.setItem(number_of_rows, 0, scenario_item)
- self.result_file_status_table.setItem(number_of_rows, 1, status_item )
-
+ self.result_file_status_table.setItem(number_of_rows, 1, status_item)
+
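+ # If 'Read Files' is checked, load each available scenario's result data now;
+ # a read failure reports an error, clears the table, and re-raises.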
if self.read_files_check_box.isChecked():
for scenario_name in self.result_scenarios:
try:
self.project_result.loadScneariodata(scenario_name)
- except Exception:
- self.errorMSG("Error","Error occured in reading data")
+ except Exception: # noqa: BLE001, PERF203
+ self.errorMSG('Error', 'Error occurred in reading data')
self.clearResultData()
- raise Exception
+ raise Exception # noqa: B904, TRY002
return
-
- self.results_tabs_widget.setTabEnabled(1, True)
-
- def clearResultData(self):
- for i in range(self.result_file_status_table.rowCount()):
+
+ self.results_tabs_widget.setTabEnabled(1, True) # noqa: FBT003
+
+ def clearResultData(self): # noqa: N802, D102
+ for i in range(self.result_file_status_table.rowCount()): # noqa: B007
self.result_file_status_table.removeRow(0)
-
- def resultLoadButtonPressed(self):
- #data_retrived = False
- #if self.getSimulationSettings():
- #if self.getHydraulicSettings():
- #if self.getDamageSettings():
- #if self.getRestorationSettings():
- #data_retrived = True
-
- #if not data_retrived:
- #return
-
+
+ def resultLoadButtonPressed(self): # noqa: N802, D102
+ # data_retrived = False
+ # if self.getSimulationSettings():
+ # if self.getHydraulicSettings():
+ # if self.getDamageSettings():
+ # if self.getRestorationSettings():
+ # data_retrived = True
+
+ # if not data_retrived:
+ # return
+
self.initalizeResultData()
-
- def browsePopulationData(self):
- file = QtWidgets.QFileDialog.getOpenFileName(self.asli_MainWindow, 'Open file',
- self.current_population_directory,"Excel file (*.xlsx);;CSV File (*.csv)")
- if file[0] == '':
+
+ def browsePopulationData(self): # noqa: N802, D102
+ file = QtWidgets.QFileDialog.getOpenFileName(
+ self.asli_MainWindow,
+ 'Open file',
+ self.current_population_directory,
+ 'Excel file (*.xlsx);;CSV File (*.csv)',
+ )
+ if file[0] == '': # noqa: PLC1901
return
- split_addr = os.path.split(file[0])
+ split_addr = os.path.split(file[0])
self.current_population_directory = split_addr[0]
-
+
self.population_addr_line.setText(file[0])
-
- print(file)
- if file[1]=="Excel file (*.xlsx)":
+
+ print(file) # noqa: T201
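+ # Read the selected population file into a DataFrame based on the chosen filter
+ # (Excel or CSV); any other filter string is rejected.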
+ if file[1] == 'Excel file (*.xlsx)':
self.population_data = pd.read_excel(file[0])
- elif file[1]=="CSV File (*.csv)":
+ elif file[1] == 'CSV File (*.csv)':
self.population_data = pd.read_csv(file[0])
else:
- raise ValueError("Unknown population file type: "+repr(file[1]) )
-
+ raise ValueError('Unknown population file type: ' + repr(file[1]))
+
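+ # Offer the file's column names in both header selectors; if there are at least
+ # two columns, default the population header to the second one.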
self.population_node_ID_header_combo.clear()
- self.population_node_ID_header_combo.addItems(self.population_data.columns.to_list() )
+ self.population_node_ID_header_combo.addItems(
+ self.population_data.columns.to_list()
+ )
self.population_population_header_combo.clear()
- self.population_population_header_combo.addItems(self.population_data.columns.to_list() )
-
- if len(self.population_data.columns.to_list() ) >= 2:
+ self.population_population_header_combo.addItems(
+ self.population_data.columns.to_list()
+ )
+
+ if len(self.population_data.columns.to_list()) >= 2: # noqa: PLR2004
self.population_population_header_combo.setCurrentIndex(1)
-
- def loadPopulationData(self):
- node_id_header = self.population_node_ID_header_combo.currentText()
+
+ def loadPopulationData(self): # noqa: N802, D102
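+ # Validate the selected headers (they must differ and be non-empty) and require
+ # loaded results before passing the population table to the result object.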
+ node_id_header = self.population_node_ID_header_combo.currentText()
population_header = self.population_population_header_combo.currentText()
-
+
if node_id_header == population_header:
- self.errorMSG("Error", "Node ID Header and Population Header cannot be the same")
+ self.errorMSG(
+ 'Error', 'Node ID Header and Population Header cannot be the same'
+ )
return
-
- if node_id_header == '' or population_header=='':
- self.errorMSG("Error", "Node ID Header or/and Population Header is not selected. Maybe an empty population file?")
+
+ if node_id_header == '' or population_header == '': # noqa: PLC1901
+ self.errorMSG(
+ 'Error',
+ 'Node ID Header and/or Population Header is not selected. Maybe an empty population file?',
+ )
return
-
- if self.project_result == None:
- self.errorMSG("Error", "No project and data is loaded. Please load the data first.")
+
+ if self.project_result == None: # noqa: E711
+ self.errorMSG(
+ 'Error', 'No project and data is loaded. Please load the data first.'
+ )
return
-
- self.project_result.loadPopulation(self.population_data, node_id_header, population_header)
-
-
- def errorMSG(self, error_title, error_msg, error_more_msg=None):
+
+ self.project_result.loadPopulation(
+ self.population_data, node_id_header, population_header
+ )
+
+ def errorMSG(self, error_title, error_msg, error_more_msg=None): # noqa: D102, N802, PLR6301
error_widget = QtWidgets.QMessageBox()
error_widget.setIcon(QtWidgets.QMessageBox.Critical)
error_widget.setText(error_msg)
error_widget.setWindowTitle(error_title)
error_widget.setStandardButtons(QtWidgets.QMessageBox.Ok)
- if error_more_msg!=None:
+ if error_more_msg != None: # noqa: E711
error_widget.setInformativeText(error_more_msg)
- error_widget.exec_()
\ No newline at end of file
+ error_widget.exec_()
diff --git a/modules/systemPerformance/REWET/REWET/GUI/Pipe_Damage_Discovery_Designer.py b/modules/systemPerformance/REWET/REWET/GUI/Pipe_Damage_Discovery_Designer.py
index 650bb111f..01358e4fd 100644
--- a/modules/systemPerformance/REWET/REWET/GUI/Pipe_Damage_Discovery_Designer.py
+++ b/modules/systemPerformance/REWET/REWET/GUI/Pipe_Damage_Discovery_Designer.py
@@ -1,13 +1,12 @@
-# -*- coding: utf-8 -*-
-"""
-Created on Tue Nov 1 23:25:30 2022
+"""Created on Tue Nov 1 23:25:30 2022
@author: snaeimi
-"""
+""" # noqa: CPY001, D400, N999
from .Damage_Discovery_Designer import Damage_Discovery_Designer
-class Pipe_Damage_Discovery_Designer(Damage_Discovery_Designer):
+
+class Pipe_Damage_Discovery_Designer(Damage_Discovery_Designer): # noqa: D101
def __init__(self, pipe_damage_discovery_model):
super().__init__(pipe_damage_discovery_model)
- self._window.setWindowTitle("Pipe Damage Discovery")
\ No newline at end of file
+ self._window.setWindowTitle('Pipe Damage Discovery')
diff --git a/modules/systemPerformance/REWET/REWET/GUI/Pipe_Damage_Discovery_Window.py b/modules/systemPerformance/REWET/REWET/GUI/Pipe_Damage_Discovery_Window.py
index 73d1a8fa7..af76147a7 100644
--- a/modules/systemPerformance/REWET/REWET/GUI/Pipe_Damage_Discovery_Window.py
+++ b/modules/systemPerformance/REWET/REWET/GUI/Pipe_Damage_Discovery_Window.py
@@ -1,96 +1,113 @@
-# -*- coding: utf-8 -*-
-
-# Form implementation generated from reading ui file 'Pipe_Damage_Discovery_Window.ui'
+# Form implementation generated from reading ui file 'Pipe_Damage_Discovery_Window.ui' # noqa: CPY001, D100, N999
#
# Created by: PyQt5 UI code generator 5.12.3
#
# WARNING! All changes made in this file will be lost!
-from PyQt5 import QtCore, QtGui, QtWidgets
+from PyQt5 import QtCore, QtWidgets
-class Ui_pipe_damage_discovery(object):
- def setupUi(self, pipe_damage_discovery):
- pipe_damage_discovery.setObjectName("pipe_damage_discovery")
+class Ui_pipe_damage_discovery: # noqa: D101
+ def setupUi(self, pipe_damage_discovery): # noqa: N802, D102
+ pipe_damage_discovery.setObjectName('pipe_damage_discovery')
pipe_damage_discovery.resize(450, 400)
pipe_damage_discovery.setMinimumSize(QtCore.QSize(450, 400))
pipe_damage_discovery.setMaximumSize(QtCore.QSize(450, 400))
self.buttonBox = QtWidgets.QDialogButtonBox(pipe_damage_discovery)
self.buttonBox.setGeometry(QtCore.QRect(350, 20, 81, 61))
self.buttonBox.setOrientation(QtCore.Qt.Vertical)
- self.buttonBox.setStandardButtons(QtWidgets.QDialogButtonBox.Cancel|QtWidgets.QDialogButtonBox.Ok)
- self.buttonBox.setObjectName("buttonBox")
+ self.buttonBox.setStandardButtons(
+ QtWidgets.QDialogButtonBox.Cancel | QtWidgets.QDialogButtonBox.Ok
+ )
+ self.buttonBox.setObjectName('buttonBox')
self.groupBox = QtWidgets.QGroupBox(pipe_damage_discovery)
self.groupBox.setGeometry(QtCore.QRect(19, 19, 311, 351))
- self.groupBox.setObjectName("groupBox")
+ self.groupBox.setObjectName('groupBox')
self.leak_amount_line = QtWidgets.QLineEdit(self.groupBox)
self.leak_amount_line.setGeometry(QtCore.QRect(80, 50, 51, 20))
- self.leak_amount_line.setObjectName("leak_amount_line")
+ self.leak_amount_line.setObjectName('leak_amount_line')
self.leak_anount_label = QtWidgets.QLabel(self.groupBox)
self.leak_anount_label.setGeometry(QtCore.QRect(10, 50, 71, 16))
- self.leak_anount_label.setObjectName("leak_anount_label")
+ self.leak_anount_label.setObjectName('leak_anount_label')
self.leak_time_line = QtWidgets.QLineEdit(self.groupBox)
self.leak_time_line.setGeometry(QtCore.QRect(210, 50, 81, 20))
- self.leak_time_line.setObjectName("leak_time_line")
+ self.leak_time_line.setObjectName('leak_time_line')
self.time_discovery_ratio_table = QtWidgets.QTableWidget(self.groupBox)
self.time_discovery_ratio_table.setGeometry(QtCore.QRect(10, 141, 211, 191))
- self.time_discovery_ratio_table.setSelectionMode(QtWidgets.QAbstractItemView.ExtendedSelection)
- self.time_discovery_ratio_table.setSelectionBehavior(QtWidgets.QAbstractItemView.SelectRows)
+ self.time_discovery_ratio_table.setSelectionMode(
+ QtWidgets.QAbstractItemView.ExtendedSelection
+ )
+ self.time_discovery_ratio_table.setSelectionBehavior(
+ QtWidgets.QAbstractItemView.SelectRows
+ )
self.time_discovery_ratio_table.setRowCount(0)
- self.time_discovery_ratio_table.setObjectName("time_discovery_ratio_table")
+ self.time_discovery_ratio_table.setObjectName('time_discovery_ratio_table')
self.time_discovery_ratio_table.setColumnCount(2)
item = QtWidgets.QTableWidgetItem()
self.time_discovery_ratio_table.setHorizontalHeaderItem(0, item)
item = QtWidgets.QTableWidgetItem()
self.time_discovery_ratio_table.setHorizontalHeaderItem(1, item)
- self.time_discovery_ratio_table.horizontalHeader().setStretchLastSection(True)
+ self.time_discovery_ratio_table.horizontalHeader().setStretchLastSection(
+ True
+ )
self.time_discovery_ratio_table.verticalHeader().setVisible(False)
self.time_discovery_ratio_table.verticalHeader().setStretchLastSection(False)
self.leak_time_label = QtWidgets.QLabel(self.groupBox)
self.leak_time_label.setGeometry(QtCore.QRect(150, 50, 61, 16))
- self.leak_time_label.setObjectName("leak_time_label")
+ self.leak_time_label.setObjectName('leak_time_label')
self.leak_based_radio = QtWidgets.QRadioButton(self.groupBox)
self.leak_based_radio.setGeometry(QtCore.QRect(10, 20, 111, 17))
- self.leak_based_radio.setObjectName("leak_based_radio")
+ self.leak_based_radio.setObjectName('leak_based_radio')
self.time_based_radio = QtWidgets.QRadioButton(self.groupBox)
self.time_based_radio.setGeometry(QtCore.QRect(10, 90, 111, 17))
- self.time_based_radio.setObjectName("time_based_radio")
+ self.time_based_radio.setObjectName('time_based_radio')
self.time_line = QtWidgets.QLineEdit(self.groupBox)
self.time_line.setGeometry(QtCore.QRect(10, 120, 101, 20))
- self.time_line.setObjectName("time_line")
+ self.time_line.setObjectName('time_line')
self.discovery_ratio_line = QtWidgets.QLineEdit(self.groupBox)
self.discovery_ratio_line.setGeometry(QtCore.QRect(110, 120, 111, 20))
- self.discovery_ratio_line.setObjectName("discovery_ratio_line")
+ self.discovery_ratio_line.setObjectName('discovery_ratio_line')
self.add_button = QtWidgets.QPushButton(self.groupBox)
self.add_button.setGeometry(QtCore.QRect(230, 120, 51, 23))
- self.add_button.setObjectName("add_button")
+ self.add_button.setObjectName('add_button')
self.remove_button = QtWidgets.QPushButton(self.groupBox)
self.remove_button.setGeometry(QtCore.QRect(230, 150, 51, 23))
- self.remove_button.setObjectName("remove_button")
+ self.remove_button.setObjectName('remove_button')
self.retranslateUi(pipe_damage_discovery)
self.buttonBox.rejected.connect(pipe_damage_discovery.reject)
QtCore.QMetaObject.connectSlotsByName(pipe_damage_discovery)
- def retranslateUi(self, pipe_damage_discovery):
+ def retranslateUi(self, pipe_damage_discovery): # noqa: N802, D102
_translate = QtCore.QCoreApplication.translate
- pipe_damage_discovery.setWindowTitle(_translate("pipe_damage_discovery", "Pipe Damaeg Discovery"))
- self.groupBox.setTitle(_translate("pipe_damage_discovery", "Leak Model"))
- self.leak_anount_label.setText(_translate("pipe_damage_discovery", "Leak Amount"))
+ pipe_damage_discovery.setWindowTitle(
+ _translate('pipe_damage_discovery', 'Pipe Damage Discovery')
+ )
+ self.groupBox.setTitle(_translate('pipe_damage_discovery', 'Leak Model'))
+ self.leak_anount_label.setText(
+ _translate('pipe_damage_discovery', 'Leak Amount')
+ )
item = self.time_discovery_ratio_table.horizontalHeaderItem(0)
- item.setText(_translate("pipe_damage_discovery", "Time"))
+ item.setText(_translate('pipe_damage_discovery', 'Time'))
item = self.time_discovery_ratio_table.horizontalHeaderItem(1)
- item.setText(_translate("pipe_damage_discovery", "Discovery Ratio"))
- self.leak_time_label.setText(_translate("pipe_damage_discovery", "leak time"))
- self.leak_based_radio.setText(_translate("pipe_damage_discovery", "Leak Based"))
- self.time_based_radio.setText(_translate("pipe_damage_discovery", "Time Based"))
- self.add_button.setText(_translate("pipe_damage_discovery", "add"))
- self.remove_button.setText(_translate("pipe_damage_discovery", "Remove"))
+ item.setText(_translate('pipe_damage_discovery', 'Discovery Ratio'))
+ self.leak_time_label.setText(
+ _translate('pipe_damage_discovery', 'leak time')
+ )
+ self.leak_based_radio.setText(
+ _translate('pipe_damage_discovery', 'Leak Based')
+ )
+ self.time_based_radio.setText(
+ _translate('pipe_damage_discovery', 'Time Based')
+ )
+ self.add_button.setText(_translate('pipe_damage_discovery', 'add'))
+ self.remove_button.setText(_translate('pipe_damage_discovery', 'Remove'))
-if __name__ == "__main__":
+if __name__ == '__main__':
import sys
+
app = QtWidgets.QApplication(sys.argv)
pipe_damage_discovery = QtWidgets.QDialog()
ui = Ui_pipe_damage_discovery()
diff --git a/modules/systemPerformance/REWET/REWET/GUI/Pipe_Damage_Model_Designer.py b/modules/systemPerformance/REWET/REWET/GUI/Pipe_Damage_Model_Designer.py
index d7b9d748c..78f1e27ed 100644
--- a/modules/systemPerformance/REWET/REWET/GUI/Pipe_Damage_Model_Designer.py
+++ b/modules/systemPerformance/REWET/REWET/GUI/Pipe_Damage_Model_Designer.py
@@ -1,72 +1,104 @@
-# -*- coding: utf-8 -*-
-"""
-Created on Tue Nov 1 18:32:32 2022
+"""Created on Tue Nov 1 18:32:32 2022
@author: snaeimi
-"""
+""" # noqa: CPY001, D400, N999
from PyQt5 import QtGui, QtWidgets
+
from .Pipe_Damage_Model_Window import Ui_Pipe_Damage_Model
-class Pipe_Damage_Model_Designer(Ui_Pipe_Damage_Model):
+class Pipe_Damage_Model_Designer(Ui_Pipe_Damage_Model): # noqa: D101
def __init__(self, pipe_damage_model):
self._window = QtWidgets.QDialog()
self.setupUi(self._window)
self.pipe_damage_model = pipe_damage_model
self.material_list.addItems(pipe_damage_model.keys())
- self.alpha_line.setValidator(QtGui.QDoubleValidator(-1000000, 1000000, 20, notation=QtGui.QDoubleValidator.StandardNotation) )
- self.beta_line.setValidator(QtGui.QDoubleValidator(-1000000, 1000000, 20, notation=QtGui.QDoubleValidator.StandardNotation) )
- self.gamma_line.setValidator(QtGui.QDoubleValidator(-1000000, 1000000, 20, notation=QtGui.QDoubleValidator.StandardNotation) )
- self.a_line.setValidator(QtGui.QDoubleValidator(-1000000, 1000000, 20, notation=QtGui.QDoubleValidator.StandardNotation) )
- self.b_line.setValidator(QtGui.QDoubleValidator(-1000000, 1000000, 20, notation=QtGui.QDoubleValidator.StandardNotation) )
-
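+ # Restrict each coefficient field to floating-point input
+ # (standard notation, up to 20 decimals).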
+ self.alpha_line.setValidator(
+ QtGui.QDoubleValidator(
+ -1000000,
+ 1000000,
+ 20,
+ notation=QtGui.QDoubleValidator.StandardNotation,
+ )
+ )
+ self.beta_line.setValidator(
+ QtGui.QDoubleValidator(
+ -1000000,
+ 1000000,
+ 20,
+ notation=QtGui.QDoubleValidator.StandardNotation,
+ )
+ )
+ self.gamma_line.setValidator(
+ QtGui.QDoubleValidator(
+ -1000000,
+ 1000000,
+ 20,
+ notation=QtGui.QDoubleValidator.StandardNotation,
+ )
+ )
+ self.a_line.setValidator(
+ QtGui.QDoubleValidator(
+ -1000000,
+ 1000000,
+ 20,
+ notation=QtGui.QDoubleValidator.StandardNotation,
+ )
+ )
+ self.b_line.setValidator(
+ QtGui.QDoubleValidator(
+ -1000000,
+ 1000000,
+ 20,
+ notation=QtGui.QDoubleValidator.StandardNotation,
+ )
+ )
+
self.buttonBox.accepted.connect(self.okButtonPressed)
-
+
self.material_list.currentItemChanged.connect(self.materialChanged)
-
- def materialChanged(self, current_item, previous_item):
- if previous_item != None:
+
+ def materialChanged(self, current_item, previous_item): # noqa: N802, D102
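+ # Store the coefficients edited for the previously selected material, then load
+ # the stored coefficients of the newly selected material into the line edits.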
+ if previous_item != None: # noqa: E711
previous_material = previous_item.text()
-
+
alpha = self.alpha_line.text()
- beta = self.beta_line.text()
+ beta = self.beta_line.text()
gamma = self.gamma_line.text()
- a = self.a_line.text()
- b = self.b_line.text()
+ a = self.a_line.text()
+ b = self.b_line.text()
self.pipe_damage_model[previous_material]['alpha'] = float(alpha)
self.pipe_damage_model[previous_material]['beta'] = float(beta)
self.pipe_damage_model[previous_material]['gamma'] = float(gamma)
self.pipe_damage_model[previous_material]['a'] = float(a)
self.pipe_damage_model[previous_material]['b'] = float(b)
-
- current_material = current_item.text()
+
+ current_material = current_item.text()
alpha = self.pipe_damage_model[current_material]['alpha']
- beta = self.pipe_damage_model[current_material]['beta']
+ beta = self.pipe_damage_model[current_material]['beta']
gamma = self.pipe_damage_model[current_material]['gamma']
- a = self.pipe_damage_model[current_material]['a']
- b = self.pipe_damage_model[current_material]['b']
-
- self.alpha_line.setText(str(alpha) )
- self.beta_line.setText(str(beta) )
- self.gamma_line.setText(str(gamma) )
- self.a_line.setText(str(a) )
- self.b_line.setText(str(b) )
-
- def okButtonPressed(self):
- current_material = self.material_list.selectedItems()[0].text()
-
+ a = self.pipe_damage_model[current_material]['a']
+ b = self.pipe_damage_model[current_material]['b']
+
+ self.alpha_line.setText(str(alpha))
+ self.beta_line.setText(str(beta))
+ self.gamma_line.setText(str(gamma))
+ self.a_line.setText(str(a))
+ self.b_line.setText(str(b))
+
+ def okButtonPressed(self): # noqa: N802, D102
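+ # Commit the values currently shown in the line edits to the selected material.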
+ current_material = self.material_list.selectedItems()[0].text()
+
alpha = self.alpha_line.text()
- beta = self.beta_line.text()
+ beta = self.beta_line.text()
gamma = self.gamma_line.text()
- a = self.a_line.text()
- b = self.b_line.text()
-
+ a = self.a_line.text()
+ b = self.b_line.text()
+
self.pipe_damage_model[current_material]['alpha'] = float(alpha)
self.pipe_damage_model[current_material]['beta'] = float(beta)
self.pipe_damage_model[current_material]['gamma'] = float(gamma)
self.pipe_damage_model[current_material]['a'] = float(a)
self.pipe_damage_model[current_material]['b'] = float(b)
-
-
\ No newline at end of file
diff --git a/modules/systemPerformance/REWET/REWET/GUI/Pipe_Damage_Model_Window.py b/modules/systemPerformance/REWET/REWET/GUI/Pipe_Damage_Model_Window.py
index 7286553cf..22c7d3772 100644
--- a/modules/systemPerformance/REWET/REWET/GUI/Pipe_Damage_Model_Window.py
+++ b/modules/systemPerformance/REWET/REWET/GUI/Pipe_Damage_Model_Window.py
@@ -1,6 +1,4 @@
-# -*- coding: utf-8 -*-
-
-# Form implementation generated from reading ui file 'Pipe_Damage_Model_Window.ui'
+# Form implementation generated from reading ui file 'Pipe_Damage_Model_Window.ui' # noqa: CPY001, D100, N999
#
# Created by: PyQt5 UI code generator 5.12.3
#
@@ -10,103 +8,114 @@
from PyQt5 import QtCore, QtGui, QtWidgets
-class Ui_Pipe_Damage_Model(object):
- def setupUi(self, Pipe_Damage_Model):
- Pipe_Damage_Model.setObjectName("Pipe_Damage_Model")
+class Ui_Pipe_Damage_Model: # noqa: D101
+ def setupUi(self, Pipe_Damage_Model): # noqa: N802, N803, D102
+ Pipe_Damage_Model.setObjectName('Pipe_Damage_Model')
Pipe_Damage_Model.resize(377, 372)
self.buttonBox = QtWidgets.QDialogButtonBox(Pipe_Damage_Model)
self.buttonBox.setGeometry(QtCore.QRect(260, 50, 81, 91))
self.buttonBox.setOrientation(QtCore.Qt.Vertical)
- self.buttonBox.setStandardButtons(QtWidgets.QDialogButtonBox.Cancel|QtWidgets.QDialogButtonBox.Ok)
- self.buttonBox.setObjectName("buttonBox")
+ self.buttonBox.setStandardButtons(
+ QtWidgets.QDialogButtonBox.Cancel | QtWidgets.QDialogButtonBox.Ok
+ )
+ self.buttonBox.setObjectName('buttonBox')
self.material_list = QtWidgets.QListWidget(Pipe_Damage_Model)
self.material_list.setGeometry(QtCore.QRect(10, 50, 231, 192))
- self.material_list.setObjectName("material_list")
+ self.material_list.setObjectName('material_list')
self.label = QtWidgets.QLabel(Pipe_Damage_Model)
self.label.setGeometry(QtCore.QRect(10, 30, 101, 16))
font = QtGui.QFont()
font.setBold(True)
font.setWeight(75)
self.label.setFont(font)
- self.label.setObjectName("label")
+ self.label.setObjectName('label')
self.alpha_line = QtWidgets.QLineEdit(Pipe_Damage_Model)
self.alpha_line.setGeometry(QtCore.QRect(60, 300, 50, 20))
self.alpha_line.setLayoutDirection(QtCore.Qt.RightToLeft)
- self.alpha_line.setText("")
- self.alpha_line.setAlignment(QtCore.Qt.AlignRight|QtCore.Qt.AlignTrailing|QtCore.Qt.AlignVCenter)
- self.alpha_line.setObjectName("alpha_line")
+ self.alpha_line.setText('')
+ self.alpha_line.setAlignment(
+ QtCore.Qt.AlignRight | QtCore.Qt.AlignTrailing | QtCore.Qt.AlignVCenter
+ )
+ self.alpha_line.setObjectName('alpha_line')
self.label_2 = QtWidgets.QLabel(Pipe_Damage_Model)
self.label_2.setGeometry(QtCore.QRect(115, 302, 16, 16))
font = QtGui.QFont()
font.setBold(False)
font.setWeight(50)
self.label_2.setFont(font)
- self.label_2.setObjectName("label_2")
+ self.label_2.setObjectName('label_2')
self.a_line = QtWidgets.QLineEdit(Pipe_Damage_Model)
self.a_line.setGeometry(QtCore.QRect(125, 280, 41, 20))
- self.a_line.setText("")
- self.a_line.setObjectName("a_line")
+ self.a_line.setText('')
+ self.a_line.setObjectName('a_line')
self.label_3 = QtWidgets.QLabel(Pipe_Damage_Model)
self.label_3.setGeometry(QtCore.QRect(165, 300, 16, 16))
font = QtGui.QFont()
font.setBold(True)
font.setWeight(75)
self.label_3.setFont(font)
- self.label_3.setObjectName("label_3")
+ self.label_3.setObjectName('label_3')
self.label_4 = QtWidgets.QLabel(Pipe_Damage_Model)
self.label_4.setGeometry(QtCore.QRect(285, 300, 16, 16))
font = QtGui.QFont()
font.setBold(True)
font.setWeight(75)
self.label_4.setFont(font)
- self.label_4.setObjectName("label_4")
+ self.label_4.setObjectName('label_4')
self.beta_line = QtWidgets.QLineEdit(Pipe_Damage_Model)
self.beta_line.setGeometry(QtCore.QRect(180, 300, 50, 20))
- self.beta_line.setText("")
- self.beta_line.setAlignment(QtCore.Qt.AlignRight|QtCore.Qt.AlignTrailing|QtCore.Qt.AlignVCenter)
- self.beta_line.setObjectName("beta_line")
+ self.beta_line.setText('')
+ self.beta_line.setAlignment(
+ QtCore.Qt.AlignRight | QtCore.Qt.AlignTrailing | QtCore.Qt.AlignVCenter
+ )
+ self.beta_line.setObjectName('beta_line')
self.label_5 = QtWidgets.QLabel(Pipe_Damage_Model)
self.label_5.setGeometry(QtCore.QRect(235, 300, 16, 16))
font = QtGui.QFont()
font.setBold(False)
font.setWeight(50)
self.label_5.setFont(font)
- self.label_5.setObjectName("label_5")
+ self.label_5.setObjectName('label_5')
self.b_line = QtWidgets.QLineEdit(Pipe_Damage_Model)
self.b_line.setGeometry(QtCore.QRect(245, 280, 41, 20))
- self.b_line.setText("")
- self.b_line.setObjectName("b_line")
+ self.b_line.setText('')
+ self.b_line.setObjectName('b_line')
self.gamma_line = QtWidgets.QLineEdit(Pipe_Damage_Model)
self.gamma_line.setGeometry(QtCore.QRect(300, 300, 50, 20))
- self.gamma_line.setText("")
- self.gamma_line.setAlignment(QtCore.Qt.AlignRight|QtCore.Qt.AlignTrailing|QtCore.Qt.AlignVCenter)
- self.gamma_line.setObjectName("gamma_line")
+ self.gamma_line.setText('')
+ self.gamma_line.setAlignment(
+ QtCore.Qt.AlignRight | QtCore.Qt.AlignTrailing | QtCore.Qt.AlignVCenter
+ )
+ self.gamma_line.setObjectName('gamma_line')
self.label_6 = QtWidgets.QLabel(Pipe_Damage_Model)
self.label_6.setGeometry(QtCore.QRect(10, 300, 51, 20))
font = QtGui.QFont()
font.setBold(False)
font.setWeight(50)
self.label_6.setFont(font)
- self.label_6.setObjectName("label_6")
+ self.label_6.setObjectName('label_6')
self.retranslateUi(Pipe_Damage_Model)
self.buttonBox.accepted.connect(Pipe_Damage_Model.accept)
self.buttonBox.rejected.connect(Pipe_Damage_Model.reject)
QtCore.QMetaObject.connectSlotsByName(Pipe_Damage_Model)
- def retranslateUi(self, Pipe_Damage_Model):
+ def retranslateUi(self, Pipe_Damage_Model): # noqa: N802, N803, D102
_translate = QtCore.QCoreApplication.translate
- Pipe_Damage_Model.setWindowTitle(_translate("Pipe_Damage_Model", "Pipe Damage Model"))
- self.label.setText(_translate("Pipe_Damage_Model", "Pipe Material"))
- self.label_2.setText(_translate("Pipe_Damage_Model", "D"))
- self.label_3.setText(_translate("Pipe_Damage_Model", "+"))
- self.label_4.setText(_translate("Pipe_Damage_Model", "+"))
- self.label_5.setText(_translate("Pipe_Damage_Model", "D"))
- self.label_6.setText(_translate("Pipe_Damage_Model", "opening="))
+ Pipe_Damage_Model.setWindowTitle(
+ _translate('Pipe_Damage_Model', 'Pipe Damage Model')
+ )
+ self.label.setText(_translate('Pipe_Damage_Model', 'Pipe Material'))
+ self.label_2.setText(_translate('Pipe_Damage_Model', 'D'))
+ self.label_3.setText(_translate('Pipe_Damage_Model', '+'))
+ self.label_4.setText(_translate('Pipe_Damage_Model', '+'))
+ self.label_5.setText(_translate('Pipe_Damage_Model', 'D'))
+ self.label_6.setText(_translate('Pipe_Damage_Model', 'opening='))
-if __name__ == "__main__":
+if __name__ == '__main__':
import sys
+
app = QtWidgets.QApplication(sys.argv)
Pipe_Damage_Model = QtWidgets.QDialog()
ui = Ui_Pipe_Damage_Model()
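The file above is pyuic5-generated boilerplate, so the diff only reformats it and quiets the naming and docstring rules (N999, N802, D101) instead of renaming anything. For orientation, here is a minimal sketch of how such a generated Ui_ class is normally driven from application code; it mirrors the __main__ guard in the module, and the import path is an assumption for a standalone script run from the GUI folder:

import sys

from PyQt5 import QtWidgets

from Pipe_Damage_Model_Window import Ui_Pipe_Damage_Model  # assumed import path


def show_pipe_damage_dialog():
    # Build the dialog from the generated class and run it modally.
    dialog = QtWidgets.QDialog()
    ui = Ui_Pipe_Damage_Model()
    ui.setupUi(dialog)
    return dialog.exec_()


if __name__ == '__main__':
    app = QtWidgets.QApplication(sys.argv)
    raise SystemExit(show_pipe_damage_dialog())
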
diff --git a/modules/systemPerformance/REWET/REWET/GUI/Pump_Damage_Discovery_Designer.py b/modules/systemPerformance/REWET/REWET/GUI/Pump_Damage_Discovery_Designer.py
index 3f2d4051a..465c06bf0 100644
--- a/modules/systemPerformance/REWET/REWET/GUI/Pump_Damage_Discovery_Designer.py
+++ b/modules/systemPerformance/REWET/REWET/GUI/Pump_Damage_Discovery_Designer.py
@@ -1,14 +1,13 @@
-# -*- coding: utf-8 -*-
-"""
-Created on Tue Nov 1 23:25:30 2022
+"""Created on Tue Nov 1 23:25:30 2022
@author: snaeimi
-"""
+""" # noqa: CPY001, D400, N999
from .Damage_Discovery_Designer import Damage_Discovery_Designer
-class Pump_Damage_Discovery_Designer(Damage_Discovery_Designer):
+
+class Pump_Damage_Discovery_Designer(Damage_Discovery_Designer): # noqa: D101
def __init__(self, pump_damage_discovery_model):
super().__init__(pump_damage_discovery_model)
- self._window.setWindowTitle("Pump Damage Discovery")
- self.leak_based_radio.setEnabled(False)
\ No newline at end of file
+ self._window.setWindowTitle('Pump Damage Discovery')
+ self.leak_based_radio.setEnabled(False)
diff --git a/modules/systemPerformance/REWET/REWET/GUI/REWET_Resource_rc.py b/modules/systemPerformance/REWET/REWET/GUI/REWET_Resource_rc.py
index 3c1152357..9beb68f24 100644
--- a/modules/systemPerformance/REWET/REWET/GUI/REWET_Resource_rc.py
+++ b/modules/systemPerformance/REWET/REWET/GUI/REWET_Resource_rc.py
@@ -1,6 +1,4 @@
-# -*- coding: utf-8 -*-
-
-# Resource object code
+# Resource object code # noqa: CPY001, D100, N999
#
# Created by: The Resource Compiler for PyQt5 (Qt v5.12.9)
#
@@ -8,7 +6,7 @@
from PyQt5 import QtCore
-qt_resource_data = b"\
+qt_resource_data = b'\
\x00\x00\x02\xd7\
\x89\
\x50\x4e\x47\x0d\x0a\x1a\x0a\x00\x00\x00\x0d\x49\x48\x44\x52\x00\
@@ -6838,9 +6836,9 @@
\x75\x63\x93\xd8\xba\xb1\x49\x6c\xbd\x70\x9c\x77\x69\xcd\x78\xc7\
\x7f\xf6\xa6\x43\x00\x00\x00\x00\x49\x45\x4e\x44\xae\x42\x60\x82\
\
-"
+'
-qt_resource_name = b"\
+qt_resource_name = b'\
\x00\x09\
\x0a\x6c\x78\x43\
\x00\x72\
@@ -6891,9 +6889,9 @@
\x0b\x3b\xfc\x07\
\x00\x6e\
\x00\x6f\x00\x64\x00\x65\x00\x5f\x00\x42\x00\x2e\x00\x70\x00\x6e\x00\x67\
-"
+'
-qt_resource_struct_v1 = b"\
+qt_resource_struct_v1 = b'\
\x00\x00\x00\x00\x00\x02\x00\x00\x00\x01\x00\x00\x00\x01\
\x00\x00\x00\x00\x00\x02\x00\x00\x00\x01\x00\x00\x00\x02\
\x00\x00\x00\x00\x00\x02\x00\x00\x00\x0b\x00\x00\x00\x03\
@@ -6908,9 +6906,9 @@
\x00\x00\x01\x0c\x00\x00\x00\x00\x00\x01\x00\x01\x41\x84\
\x00\x00\x00\x18\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\
\x00\x00\x00\x4c\x00\x00\x00\x00\x00\x01\x00\x00\x05\x96\
-"
+'
-qt_resource_struct_v2 = b"\
+qt_resource_struct_v2 = b'\
\x00\x00\x00\x00\x00\x02\x00\x00\x00\x01\x00\x00\x00\x01\
\x00\x00\x00\x00\x00\x00\x00\x00\
\x00\x00\x00\x00\x00\x02\x00\x00\x00\x01\x00\x00\x00\x02\
@@ -6939,7 +6937,7 @@
\x00\x00\x01\x84\x35\xa0\xb2\x82\
\x00\x00\x00\x4c\x00\x00\x00\x00\x00\x01\x00\x00\x05\x96\
\x00\x00\x01\x84\x35\xae\x2b\x19\
-"
+'
qt_version = [int(v) for v in QtCore.qVersion().split('.')]
if qt_version < [5, 8, 0]:
@@ -6949,10 +6947,17 @@
rcc_version = 2
qt_resource_struct = qt_resource_struct_v2
-def qInitResources():
- QtCore.qRegisterResourceData(rcc_version, qt_resource_struct, qt_resource_name, qt_resource_data)
-def qCleanupResources():
- QtCore.qUnregisterResourceData(rcc_version, qt_resource_struct, qt_resource_name, qt_resource_data)
+def qInitResources(): # noqa: N802, D103
+ QtCore.qRegisterResourceData(
+ rcc_version, qt_resource_struct, qt_resource_name, qt_resource_data
+ )
+
+
+def qCleanupResources(): # noqa: N802, D103
+ QtCore.qUnregisterResourceData(
+ rcc_version, qt_resource_struct, qt_resource_name, qt_resource_data
+ )
+
qInitResources()
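REWET_Resource_rc.py is output from the PyQt resource compiler (pyrcc5), so here too the changes are formatting plus noqa markers; the qInitResources() call at the bottom registers the embedded data as soon as the module is imported. A short sketch of the usual consumption pattern follows; the ':/...' path is a hypothetical example, since the real prefix and file names are defined in the .qrc file that produced this module:

from PyQt5 import QtGui, QtWidgets

# Importing the compiled resource module runs qInitResources() as a side effect.
import REWET_Resource_rc  # noqa: F401

app = QtWidgets.QApplication([])

# Registered resources are addressed through the ':/' scheme; an unknown
# path simply yields a null icon rather than raising.
icon = QtGui.QIcon(':/resources/node_B.png')  # hypothetical resource path
button = QtWidgets.QPushButton('Node B')
button.setIcon(icon)
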
diff --git a/modules/systemPerformance/REWET/REWET/GUI/Restoration_Tab_Designer.py b/modules/systemPerformance/REWET/REWET/GUI/Restoration_Tab_Designer.py
index ad2bee1db..b46fce1dd 100644
--- a/modules/systemPerformance/REWET/REWET/GUI/Restoration_Tab_Designer.py
+++ b/modules/systemPerformance/REWET/REWET/GUI/Restoration_Tab_Designer.py
@@ -1,164 +1,217 @@
-# -*- coding: utf-8 -*-
-"""
-Created on Wed Nov 2 00:24:43 2022
+"""Created on Wed Nov 2 00:24:43 2022
@author: snaeimi
-"""
+""" # noqa: CPY001, D400, N999
import os
+
from PyQt5 import QtGui, QtWidgets
-from .Pipe_Damage_Discovery_Designer import Pipe_Damage_Discovery_Designer
+
from .Node_Damage_Discovery_Designer import Node_Damage_Discovery_Designer
+from .Pipe_Damage_Discovery_Designer import Pipe_Damage_Discovery_Designer
from .Pump_Damage_Discovery_Designer import Pump_Damage_Discovery_Designer
from .Tank_Damage_Discovery_Designer import Tank_Damage_Discovery_Designer
-class Restoration_Tab_Designer():
+class Restoration_Tab_Designer: # noqa: D101
def __init__(self):
-
- """
- These are variables that are shared between ui and settings.
- """
+ """These are variables that are shared between ui and settings.""" # noqa: D401
self.setRestorationSettings(self.settings)
-
+
"""
Reassignment of shared variables.
"""
- self.current_policy_directory = os.getcwd()
-
+ self.current_policy_directory = os.getcwd() # noqa: PTH109
+
"""
- ui value assigments.
+ ui value assignments.
"""
self.setRestorationUI()
-
-
+
"""
Field Validators.
"""
- self.minimum_job_time_line.setValidator(QtGui.QIntValidator(0.0, 2147483647) )
-
+ self.minimum_job_time_line.setValidator(QtGui.QIntValidator(0.0, 2147483647))
+
"""
Signals connection.
"""
self.policy_browse_button.clicked.connect(self.browsePolicyDefinitionFile)
- self.pipe_damage_discovery_button.clicked.connect(self.pipeDamageDiscoveryByButton)
- self.node_damage_discovery_button.clicked.connect(self.nodeDamageDiscoveryByButton)
- self.pump_damage_discovery_button.clicked.connect(self.pumpDamageDiscoveryByButton)
- self.tank_damage_discovery_button.clicked.connect(self.tankDamageDiscoveryByButton)
-
- def getRestorationSettings(self):
-
+ self.pipe_damage_discovery_button.clicked.connect(
+ self.pipeDamageDiscoveryByButton
+ )
+ self.node_damage_discovery_button.clicked.connect(
+ self.nodeDamageDiscoveryByButton
+ )
+ self.pump_damage_discovery_button.clicked.connect(
+ self.pumpDamageDiscoveryByButton
+ )
+ self.tank_damage_discovery_button.clicked.connect(
+ self.tankDamageDiscoveryByButton
+ )
+
+ def getRestorationSettings(self): # noqa: N802, D102
if self.restoration_on_radio.isChecked():
self.restoration_on = True
elif self.restoration_off_radio.isChecked():
self.restoration_on = False
else:
- raise ValueError("None of Restoration-on/off buttons are checked which is an error.")
-
+ raise ValueError( # noqa: TRY003
+                'None of the Restoration-on/off buttons is checked, which is an error.' # noqa: EM101
+ )
+
if self.script_txt_radio.isChecked():
self.restoraion_policy_type = 'script'
elif self.script_rrp_radio.isChecked():
self.restoraion_policy_type = 'binary'
else:
- raise ValueError("None of File-Type buttons are checked which is an error.")
-
- self.minimum_job_time = int(float(self.minimum_job_time_line.text() ))
-
- if self.restoraion_policy_addr == '':
- self.errorMSG("REWET", "Policy Definition file is needed.")
+ raise ValueError( # noqa: TRY003
+                'None of the File-Type buttons is checked, which is an error.' # noqa: EM101
+ )
+
+ self.minimum_job_time = int(float(self.minimum_job_time_line.text()))
+
+ if self.restoraion_policy_addr == '': # noqa: PLC1901
+ self.errorMSG('REWET', 'Policy Definition file is needed.')
return False
-
- self.settings.process['Restoration_on' ] = self.restoration_on
- self.settings.scenario['Restoraion_policy_type' ] = self.restoraion_policy_type
- self.settings.scenario['Restortion_config_file' ] = self.restoraion_policy_addr
- self.settings.process['minimum_job_time' ] = self.minimum_job_time
- self.settings.scenario['pipe_damage_discovery_model'] = self.pipe_damage_discovery_model
- self.settings.scenario['node_damage_discovery_model'] = self.node_damage_discovery_model
- self.settings.scenario['pump_damage_discovery_model'] = self.pump_damage_discovery_model
- self.settings.scenario['tank_damage_discovery_model'] = self.tank_damage_discovery_model
- self.settings.scenario['crew_out_of_zone_travel' ] = self.out_of_zone_allowed
-
+
+ self.settings.process['Restoration_on'] = self.restoration_on
+ self.settings.scenario['Restoraion_policy_type'] = (
+ self.restoraion_policy_type
+ )
+ self.settings.scenario['Restortion_config_file'] = (
+ self.restoraion_policy_addr
+ )
+ self.settings.process['minimum_job_time'] = self.minimum_job_time
+ self.settings.scenario['pipe_damage_discovery_model'] = (
+ self.pipe_damage_discovery_model
+ )
+ self.settings.scenario['node_damage_discovery_model'] = (
+ self.node_damage_discovery_model
+ )
+ self.settings.scenario['pump_damage_discovery_model'] = (
+ self.pump_damage_discovery_model
+ )
+ self.settings.scenario['tank_damage_discovery_model'] = (
+ self.tank_damage_discovery_model
+ )
+ self.settings.scenario['crew_out_of_zone_travel'] = self.out_of_zone_allowed
+
return True
- def setRestorationUI(self):
- if self.restoration_on == True:
+
+ def setRestorationUI(self): # noqa: N802, D102
+ if self.restoration_on == True: # noqa: E712
self.restoration_on_radio.setChecked(True)
- elif self.restoration_on == False:
+ elif self.restoration_on == False: # noqa: E712
self.restoration_off_radio.setChecked(True)
else:
- raise ValueError("Unknown restoration-on status: " + repr(self.restoration_on) )
-
+ raise ValueError(
+ 'Unknown restoration-on status: ' + repr(self.restoration_on)
+ )
+
self.script_rrp_radio.setEnabled(False)
self.policy_designer.setEnabled(False)
self.policy_definition_addr_line.setText(self.restoraion_policy_addr)
-
+
if self.restoraion_policy_type == 'script':
self.script_txt_radio.setChecked(True)
elif self.restoraion_policy_type == 'binary':
self.script_rrp_radio.setChecked(True)
else:
- raise ValueError("Uknown policy type: "+ repr(self.restoraion_policy_type) )
-
- self.minimum_job_time_line.setText(str(self.minimum_job_time) )
-
- if self.out_of_zone_allowed == True:
+ raise ValueError(
+                'Unknown policy type: ' + repr(self.restoraion_policy_type)
+ )
+
+ self.minimum_job_time_line.setText(str(self.minimum_job_time))
+
+ if self.out_of_zone_allowed == True: # noqa: E712
self.out_of_zone_travel_yes.setChecked(True)
- elif self.out_of_zone_allowed== False:
+ elif self.out_of_zone_allowed == False: # noqa: E712
self.out_of_zone_travel_no.setChecked(True)
else:
- raise ValueError("Unknown out-of-zone travel value: "+repr(self.out_of_zone_travel_no) )
-
- def setRestorationSettings(self, settings):
- self.restoration_on = settings.process['Restoration_on' ]
- self.restoraion_policy_type = settings.scenario['Restoraion_policy_type' ]
- self.restoraion_policy_addr = settings.scenario['Restortion_config_file' ]
- self.minimum_job_time = settings.process['minimum_job_time' ]
- self.pipe_damage_discovery_model = settings.scenario['pipe_damage_discovery_model']
- self.node_damage_discovery_model = settings.scenario['node_damage_discovery_model']
- self.pump_damage_discovery_model = settings.scenario['pump_damage_discovery_model']
- self.tank_damage_discovery_model = settings.scenario['tank_damage_discovery_model']
- self.out_of_zone_allowed = settings.scenario['crew_out_of_zone_travel' ]
-
- def browsePolicyDefinitionFile(self):
- if self.script_txt_radio.isChecked():
- file_type = "scenrario text file (*.txt)"
+ raise ValueError(
+ 'Unknown out-of-zone travel value: '
+ + repr(self.out_of_zone_travel_no)
+ )
+
+ def setRestorationSettings(self, settings): # noqa: N802, D102
+ self.restoration_on = settings.process['Restoration_on']
+ self.restoraion_policy_type = settings.scenario['Restoraion_policy_type']
+ self.restoraion_policy_addr = settings.scenario['Restortion_config_file']
+ self.minimum_job_time = settings.process['minimum_job_time']
+ self.pipe_damage_discovery_model = settings.scenario[
+ 'pipe_damage_discovery_model'
+ ]
+ self.node_damage_discovery_model = settings.scenario[
+ 'node_damage_discovery_model'
+ ]
+ self.pump_damage_discovery_model = settings.scenario[
+ 'pump_damage_discovery_model'
+ ]
+ self.tank_damage_discovery_model = settings.scenario[
+ 'tank_damage_discovery_model'
+ ]
+ self.out_of_zone_allowed = settings.scenario['crew_out_of_zone_travel']
+
+ def browsePolicyDefinitionFile(self): # noqa: N802, D102
+ if self.script_txt_radio.isChecked():
+            file_type = 'scenario text file (*.txt)'
elif self.script_rrp_radio.isChecked():
- file_type = "scenrario binary (*.rrp)"
-
- file = QtWidgets.QFileDialog.getOpenFileName(self.asli_MainWindow, 'Open file',
- self.current_policy_directory, file_type)
- if file[0] == '':
+            file_type = 'scenario binary (*.rrp)'
+
+ file = QtWidgets.QFileDialog.getOpenFileName(
+ self.asli_MainWindow,
+ 'Open file',
+ self.current_policy_directory,
+ file_type,
+ )
+ if file[0] == '': # noqa: PLC1901
return
split_addr = os.path.split(file[0])
self.current_policy_directory = split_addr[0]
- self.restoraion_policy_addr = file[0]
+ self.restoraion_policy_addr = file[0]
self.policy_definition_addr_line.setText(file[0])
-
- def pipeDamageDiscoveryByButton(self):
- pipe_damage_discovery_designer = Pipe_Damage_Discovery_Designer(self.pipe_damage_discovery_model)
- return_value = pipe_damage_discovery_designer._window.exec_()
-
+
+ def pipeDamageDiscoveryByButton(self): # noqa: N802, D102
+ pipe_damage_discovery_designer = Pipe_Damage_Discovery_Designer(
+ self.pipe_damage_discovery_model
+ )
+ return_value = pipe_damage_discovery_designer._window.exec_() # noqa: SLF001
+
if return_value == 1:
- self.pipe_damage_discovery_model = pipe_damage_discovery_designer.damage_discovery_model
-
- def nodeDamageDiscoveryByButton(self):
- node_damage_discovery_designer = Node_Damage_Discovery_Designer(self.node_damage_discovery_model)
- return_value = node_damage_discovery_designer._window.exec_()
-
+ self.pipe_damage_discovery_model = (
+ pipe_damage_discovery_designer.damage_discovery_model
+ )
+
+ def nodeDamageDiscoveryByButton(self): # noqa: N802, D102
+ node_damage_discovery_designer = Node_Damage_Discovery_Designer(
+ self.node_damage_discovery_model
+ )
+ return_value = node_damage_discovery_designer._window.exec_() # noqa: SLF001
+
if return_value == 1:
- self.node_damage_discovery_model = node_damage_discovery_designer.damage_discovery_model
-
- def pumpDamageDiscoveryByButton(self):
- pump_damage_discovery_designer = Pump_Damage_Discovery_Designer(self.pump_damage_discovery_model)
- return_value = pump_damage_discovery_designer._window.exec_()
-
+ self.node_damage_discovery_model = (
+ node_damage_discovery_designer.damage_discovery_model
+ )
+
+ def pumpDamageDiscoveryByButton(self): # noqa: N802, D102
+ pump_damage_discovery_designer = Pump_Damage_Discovery_Designer(
+ self.pump_damage_discovery_model
+ )
+ return_value = pump_damage_discovery_designer._window.exec_() # noqa: SLF001
+
if return_value == 1:
- self.pump_damage_discovery_model = pump_damage_discovery_designer.damage_discovery_model
-
- def tankDamageDiscoveryByButton(self):
- tank_damage_discovery_designer = Tank_Damage_Discovery_Designer(self.tank_damage_discovery_model)
- return_value = tank_damage_discovery_designer._window.exec_()
-
+ self.pump_damage_discovery_model = (
+ pump_damage_discovery_designer.damage_discovery_model
+ )
+
+ def tankDamageDiscoveryByButton(self): # noqa: N802, D102
+ tank_damage_discovery_designer = Tank_Damage_Discovery_Designer(
+ self.tank_damage_discovery_model
+ )
+ return_value = tank_damage_discovery_designer._window.exec_() # noqa: SLF001
+
if return_value == 1:
- self.tank_damage_discovery_model = tank_damage_discovery_designer.damage_discovery_model
-
-
\ No newline at end of file
+ self.tank_damage_discovery_model = (
+ tank_damage_discovery_designer.damage_discovery_model
+ )
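Several branches above compare flags with '== True' / '== False' and add '# noqa: E712' instead of rewriting them, presumably because the three-way if/elif/else treats anything that is not exactly True or False as an error. A standalone sketch of the distinction that rule is about (illustrative only, not code from the module):

restoration_on = True

# Form flagged by E712: equality comparison against a bool literal.
if restoration_on == True:  # noqa: E712
    print('restoration enabled')

# Plain truthiness is the idiomatic spelling when any truthy value is fine.
if restoration_on:
    print('restoration enabled')

# Identity checks keep the strict three-way behaviour used above: anything
# that is not exactly a bool falls through to the error branch.
if restoration_on is True:
    print('restoration enabled')
elif restoration_on is False:
    print('restoration disabled')
else:
    raise ValueError('Unknown restoration-on status: ' + repr(restoration_on))
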
diff --git a/modules/systemPerformance/REWET/REWET/GUI/Result_Designer.py b/modules/systemPerformance/REWET/REWET/GUI/Result_Designer.py
index 3161756f7..f3dc7b10f 100644
--- a/modules/systemPerformance/REWET/REWET/GUI/Result_Designer.py
+++ b/modules/systemPerformance/REWET/REWET/GUI/Result_Designer.py
@@ -1,32 +1,87 @@
-# -*- coding: utf-8 -*-
-"""
-Created on Thu Nov 10 18:29:50 2022
+"""Created on Thu Nov 10 18:29:50 2022
@author: snaeimi
-"""
+""" # noqa: CPY001, D400, N999
-from PyQt5 import QtWidgets, QtGui
import pandas as pd
+from PyQt5 import QtGui, QtWidgets
single_scenario_curve_options = ['', 'Quantity', 'Delivery', 'SSI']
-multi_scenario_curve_options = ['','Quantity Exceedance','Delivery Exceedance']
-curve_settings = {'Quantity Exceedance':[{"Label":"Time", "Type":"Time", "Default":"seconds"}, {"Label":"Population", "Type":"Yes-No_Combo", "Default":"No"}, {"Label":"Percentage", "Type":"Custom_Combo", "Default":"Yes", "Content":["Yes", "No"]}, {"Label":"LDN leak", "Type":"Yes-No_Combo", "Default":"No"}, {"Label":"leak Criteria", "Type":"Float Line", "Default":"0.75"}, {"Label":"Group method", "Type":"Custom_Combo", "Default":"Mean", "Content":["Mean", "Min", "Max"]}, {"Label":"Daily bin", "Type":"Yes-No_Combo", "Default":"No"},{"Label":"Min time", "Type":"Int Line", "Default":"0"}, {"Label":"Max time", "Type":"Int Line", "Default":"9999999999"}],
- 'Delivery Exceedance':[{"Label":"Time", "Type":"Time", "Default":"seconds"}, {"Label":"Population", "Type":"Yes-No_Combo", "Default":"No"}, {"Label":"Percentage", "Type":"Custom_Combo", "Default":"Yes", "Content":["Yes", "No"]}, {"Label":"LDN leak", "Type":"Yes-No_Combo", "Default":"No"}, {"Label":"leak Criteria", "Type":"Float Line", "Default":"0.75"}, {"Label":"Group method", "Type":"Custom_Combo", "Default":"Mean", "Content":["Mean", "Min", "Max"]}, {"Label":"Daily bin", "Type":"Yes-No_Combo", "Default":"No"},{"Label":"Min time", "Type":"Int Line", "Default":"0"}, {"Label":"Max time", "Type":"Int Line", "Default":"9999999999"}],
- 'Quantity':[{"Label":"Time", "Type":"Time", "Default":"seconds"}, {"Label":"Population", "Type":"Yes-No_Combo", "Default":"No"}, {"Label":"Percentage", "Type":"Yes-No_Combo", "Default":"No"}, {"Label":"LDN leak", "Type":"Yes-No_Combo", "Default":"No"}, {"Label":"leak Criteria", "Type":"Float Line", "Default":"0.75"}],
- 'Delivery':[{"Label":"Time", "Type":"Time", "Default":"seconds"}, {"Label":"Population", "Type":"Yes-No_Combo", "Default":"No"}, {"Label":"Percentage", "Type":"Yes-No_Combo", "Default":"No"}, {"Label":"LDN leak", "Type":"Yes-No_Combo", "Default":"No"}, {"Label":"leak Criteria", "Type":"Float Line", "Default":"1.25"}],
- 'SSI':[{"Label":"Time", "Type":"Time","Default":"seconds"}, {"Label":"Population", "Type":"Yes-No_Combo", "Default":"No"}]}
-
-class Time_Unit_Combo(QtWidgets.QComboBox):
+multi_scenario_curve_options = ['', 'Quantity Exceedance', 'Delivery Exceedance']
+curve_settings = {
+ 'Quantity Exceedance': [
+ {'Label': 'Time', 'Type': 'Time', 'Default': 'seconds'},
+ {'Label': 'Population', 'Type': 'Yes-No_Combo', 'Default': 'No'},
+ {
+ 'Label': 'Percentage',
+ 'Type': 'Custom_Combo',
+ 'Default': 'Yes',
+ 'Content': ['Yes', 'No'],
+ },
+ {'Label': 'LDN leak', 'Type': 'Yes-No_Combo', 'Default': 'No'},
+ {'Label': 'leak Criteria', 'Type': 'Float Line', 'Default': '0.75'},
+ {
+ 'Label': 'Group method',
+ 'Type': 'Custom_Combo',
+ 'Default': 'Mean',
+ 'Content': ['Mean', 'Min', 'Max'],
+ },
+ {'Label': 'Daily bin', 'Type': 'Yes-No_Combo', 'Default': 'No'},
+ {'Label': 'Min time', 'Type': 'Int Line', 'Default': '0'},
+ {'Label': 'Max time', 'Type': 'Int Line', 'Default': '9999999999'},
+ ],
+ 'Delivery Exceedance': [
+ {'Label': 'Time', 'Type': 'Time', 'Default': 'seconds'},
+ {'Label': 'Population', 'Type': 'Yes-No_Combo', 'Default': 'No'},
+ {
+ 'Label': 'Percentage',
+ 'Type': 'Custom_Combo',
+ 'Default': 'Yes',
+ 'Content': ['Yes', 'No'],
+ },
+ {'Label': 'LDN leak', 'Type': 'Yes-No_Combo', 'Default': 'No'},
+ {'Label': 'leak Criteria', 'Type': 'Float Line', 'Default': '0.75'},
+ {
+ 'Label': 'Group method',
+ 'Type': 'Custom_Combo',
+ 'Default': 'Mean',
+ 'Content': ['Mean', 'Min', 'Max'],
+ },
+ {'Label': 'Daily bin', 'Type': 'Yes-No_Combo', 'Default': 'No'},
+ {'Label': 'Min time', 'Type': 'Int Line', 'Default': '0'},
+ {'Label': 'Max time', 'Type': 'Int Line', 'Default': '9999999999'},
+ ],
+ 'Quantity': [
+ {'Label': 'Time', 'Type': 'Time', 'Default': 'seconds'},
+ {'Label': 'Population', 'Type': 'Yes-No_Combo', 'Default': 'No'},
+ {'Label': 'Percentage', 'Type': 'Yes-No_Combo', 'Default': 'No'},
+ {'Label': 'LDN leak', 'Type': 'Yes-No_Combo', 'Default': 'No'},
+ {'Label': 'leak Criteria', 'Type': 'Float Line', 'Default': '0.75'},
+ ],
+ 'Delivery': [
+ {'Label': 'Time', 'Type': 'Time', 'Default': 'seconds'},
+ {'Label': 'Population', 'Type': 'Yes-No_Combo', 'Default': 'No'},
+ {'Label': 'Percentage', 'Type': 'Yes-No_Combo', 'Default': 'No'},
+ {'Label': 'LDN leak', 'Type': 'Yes-No_Combo', 'Default': 'No'},
+ {'Label': 'leak Criteria', 'Type': 'Float Line', 'Default': '1.25'},
+ ],
+ 'SSI': [
+ {'Label': 'Time', 'Type': 'Time', 'Default': 'seconds'},
+ {'Label': 'Population', 'Type': 'Yes-No_Combo', 'Default': 'No'},
+ ],
+}
+
+
+class Time_Unit_Combo(QtWidgets.QComboBox): # noqa: D101
def __init__(self):
super().__init__()
- time_units = ["second", "hour", "day"]
+ time_units = ['second', 'hour', 'day']
self.addItems(time_units)
-
- def changeCurveTimeUnit(self, raw_time_curve):
-
+
+ def changeCurveTimeUnit(self, raw_time_curve): # noqa: N802, D102
res = {}
- if type(raw_time_curve) == pd.core.series.Series:
+ if type(raw_time_curve) == pd.core.series.Series: # noqa: E721
time_justified_curve = raw_time_curve.copy()
res = self.applyUnitToSeries(time_justified_curve)
else:
@@ -34,348 +89,414 @@ def changeCurveTimeUnit(self, raw_time_curve):
time_justified_curve = raw_time_curve[k].copy()
res[k] = self.applyUnitToSeries(time_justified_curve)
return res
-
- def applyUnitToSeries(self, data):
+
+ def applyUnitToSeries(self, data): # noqa: N802, D102
time_unit = self.currentText()
- if time_unit == "second":
- pass
- elif time_unit == "hour":
- data.index = data.index/3600
- elif time_unit == "day":
- data.index = data.index/3600/24
+ if time_unit == 'second':
+ pass
+ elif time_unit == 'hour':
+ data.index = data.index / 3600 # noqa: PLR6104
+ elif time_unit == 'day':
+ data.index = data.index / 3600 / 24
else:
- raise ValueError("Unknown unit time: "+repr(time_unit) )
+ raise ValueError('Unknown unit time: ' + repr(time_unit))
return data
-class Yes_No_Combo(QtWidgets.QComboBox):
+
+class Yes_No_Combo(QtWidgets.QComboBox): # noqa: D101
def __init__(self):
super().__init__()
- self.addItems(["No", "Yes"])
+ self.addItems(['No', 'Yes'])
-class Result_Designer():
- def __init__(self):
+class Result_Designer: # noqa: D101
+ def __init__(self):
self.current_raw_curve = None
- self.current_curve = None
+ self.current_curve = None
self.curve_settings_widgets = {}
self.main_tab.currentChanged.connect(self.tabChanged)
- self.all_scenarios_checkbox.stateChanged.connect(self.curveAllScenarioCheckboxChanged)
+ self.all_scenarios_checkbox.stateChanged.connect(
+ self.curveAllScenarioCheckboxChanged
+ )
self.save_curve_button.clicked.connect(self.saveCurrentCurveByButton)
self.scenario_combo.currentTextChanged.connect(self.resultScenarioChanged)
self.curve_type_combo.currentTextChanged.connect(self.curveTypeChanegd)
-
+
self.initalize_result()
-
- def initalize_result(self):
+
+ def initalize_result(self): # noqa: D102
self.setCurveAllScenarios(True)
self.all_scenarios_checkbox.setChecked(True)
self.scenario_combo.clear()
self.scenario_combo.addItems(self.result_scenarios)
- #self.current_curve_data = None
-
- def curveAllScenarioCheckboxChanged(self, state):
+ # self.current_curve_data = None
+
+ def curveAllScenarioCheckboxChanged(self, state): # noqa: N802, D102
if state == 0:
self.setCurveAllScenarios(False)
- elif state == 2:
+ elif state == 2: # noqa: PLR2004
self.setCurveAllScenarios(True)
-
- def clearCurvePlot(self):
+
+ def clearCurvePlot(self): # noqa: N802, D102
self.mpl_curve.canvas.ax.cla()
-
- def plot_data(self):
- x=range(0, 10)
- y=range(0, 20, 2)
+
+ def plot_data(self): # noqa: D102
+ x = range(10)
+ y = range(0, 20, 2)
self.mpl_curve.canvas.ax.plot(x, y)
self.mpl_curve.canvas.draw()
- #self.mpl_curve.canvas.ax.set_ylabel("y_label")
- #self.mpl_curve.canvas.ax.set_xlabel("x_label")
- #self.mpl_curve.canvas.fig.tight_layout()
-
- def plotCurve(self, y_label=None, x_label=None):
- if y_label == None:
+ # self.mpl_curve.canvas.ax.set_ylabel("y_label")
+ # self.mpl_curve.canvas.ax.set_xlabel("x_label")
+ # self.mpl_curve.canvas.fig.tight_layout()
+
+ def plotCurve(self, y_label=None, x_label=None): # noqa: N802, D102
+ if y_label == None: # noqa: E711
y_label = self.mpl_curve.canvas.ax.get_ylabel()
- if x_label == None:
+ if x_label == None: # noqa: E711
x_label = self.mpl_curve.canvas.ax.get_xlabel()
-
+
self.mpl_curve.canvas.ax.clear()
data = self.current_curve
-
- if type(data) == pd.core.series.Series:
- self.mpl_curve.canvas.ax.plot(self.current_curve.index, self.current_curve.to_list())
+
+ if type(data) == pd.core.series.Series: # noqa: E721
+ self.mpl_curve.canvas.ax.plot(
+ self.current_curve.index, self.current_curve.to_list()
+ )
else:
for k in data:
self.mpl_curve.canvas.ax.plot(data[k].index, data[k].to_list())
-
+
self.mpl_curve.canvas.ax.set_ylabel(y_label)
self.mpl_curve.canvas.ax.set_xlabel(x_label)
- self.mpl_curve.canvas.draw()
+ self.mpl_curve.canvas.draw()
self.mpl_curve.canvas.fig.tight_layout()
-
- def setCurveAllScenarios(self, flag):
- if flag == True:
+
+ def setCurveAllScenarios(self, flag): # noqa: N802, D102
+ if flag == True: # noqa: E712
self.all_scenarios_checkbox.setChecked(True)
self.scenario_combo.setEnabled(False)
self.curve_type_combo.clear()
self.curve_type_combo.addItems(multi_scenario_curve_options)
self.clearCurvePlot()
- elif flag == False:
+ elif flag == False: # noqa: E712
self.all_scenarios_checkbox.setChecked(False)
self.scenario_combo.setEnabled(True)
self.curve_type_combo.clear()
self.curve_type_combo.addItems(single_scenario_curve_options)
self.clearCurvePlot()
else:
- raise ValueError("Unknown flag: " + repr(flag))
-
- def resultScenarioChanged(self, text):
- self.result_current_scenario = text #self.scenario_combo.getText()
- #self.current_curve_data = None
-
- def curveTypeChanegd(self, text):
- if self.project_result == None:
+ raise ValueError('Unknown flag: ' + repr(flag))
+
+ def resultScenarioChanged(self, text): # noqa: N802, D102
+ self.result_current_scenario = text # self.scenario_combo.getText()
+ # self.current_curve_data = None
+
+ def curveTypeChanegd(self, text): # noqa: N802, D102
+ if self.project_result == None: # noqa: E711
return
- self.current_curve_type = text
+ self.current_curve_type = text
self.setCurveSettingBox(text)
self.calculateCurrentCurve()
-
- def calculateCurrentCurve(self):
-
+
+ def calculateCurrentCurve(self): # noqa: C901, N802, D102
curve_type = self.current_curve_type
if curve_type == 'Quantity Exceedance':
- iPopulation = self.curve_settings_widgets["Population"].currentText()
- iRatio = self.curve_settings_widgets["Percentage"].currentText()
- iConsider_leak = self.curve_settings_widgets["LDN leak"].currentText()
- leak_ratio = self.curve_settings_widgets["leak Criteria"].text()
- group_method = self.curve_settings_widgets["Group method"].currentText()
- daily_bin = self.curve_settings_widgets["Daily bin"].currentText()
- min_time = self.curve_settings_widgets["Min time"].text()
- max_time = self.curve_settings_widgets["Max time"].text()
-
- if iConsider_leak == "Yes":
- iConsider_leak = True
- else:
- iConsider_leak = False
-
- if iRatio == "Yes":
- iRatio = True
- else:
- iRatio = False
-
- if daily_bin == "Yes":
- daily_bin = True
- else:
- daily_bin = False
-
- group_method = group_method.lower()
- min_time = int(float(min_time) )
- max_time = int(float(max_time) )
-
- self.current_raw_curve = self.project_result.getQuantityExceedanceCurve(iPopulation=iPopulation, ratio=iRatio, consider_leak=iConsider_leak, leak_ratio=leak_ratio, result_type=group_method, daily=daily_bin, min_time=min_time, max_time=max_time)
- self.current_curve = self.time_combo.changeCurveTimeUnit(self.current_raw_curve)
- self.plotCurve("Exceedance Probability", "Time")
-
+ iPopulation = self.curve_settings_widgets['Population'].currentText() # noqa: N806
+ iRatio = self.curve_settings_widgets['Percentage'].currentText() # noqa: N806
+ iConsider_leak = self.curve_settings_widgets['LDN leak'].currentText() # noqa: N806
+ leak_ratio = self.curve_settings_widgets['leak Criteria'].text()
+ group_method = self.curve_settings_widgets['Group method'].currentText()
+ daily_bin = self.curve_settings_widgets['Daily bin'].currentText()
+ min_time = self.curve_settings_widgets['Min time'].text()
+ max_time = self.curve_settings_widgets['Max time'].text()
+
+ if iConsider_leak == 'Yes':
+ iConsider_leak = True # noqa: N806
+ else:
+ iConsider_leak = False # noqa: N806
+
+ if iRatio == 'Yes':
+ iRatio = True # noqa: N806
+ else:
+ iRatio = False # noqa: N806
+
+ if daily_bin == 'Yes':
+ daily_bin = True
+ else:
+ daily_bin = False
+
+ group_method = group_method.lower()
+ min_time = int(float(min_time))
+ max_time = int(float(max_time))
+
+ self.current_raw_curve = self.project_result.getQuantityExceedanceCurve(
+ iPopulation=iPopulation,
+ ratio=iRatio,
+ consider_leak=iConsider_leak,
+ leak_ratio=leak_ratio,
+ result_type=group_method,
+ daily=daily_bin,
+ min_time=min_time,
+ max_time=max_time,
+ )
+ self.current_curve = self.time_combo.changeCurveTimeUnit(
+ self.current_raw_curve
+ )
+ self.plotCurve('Exceedance Probability', 'Time')
+
elif curve_type == 'Delivery Exceedance':
- iPopulation = self.curve_settings_widgets["Population"].currentText()
- iRatio = self.curve_settings_widgets["Percentage"].currentText()
- iConsider_leak = self.curve_settings_widgets["LDN leak"].currentText()
- leak_ratio = self.curve_settings_widgets["leak Criteria"].text()
- group_method = self.curve_settings_widgets["Group method"].currentText()
- daily_bin = self.curve_settings_widgets["Daily bin"].currentText()
- min_time = self.curve_settings_widgets["Min time"].text()
- max_time = self.curve_settings_widgets["Max time"].text()
-
- if iConsider_leak == "Yes":
- iConsider_leak = True
+ iPopulation = self.curve_settings_widgets['Population'].currentText() # noqa: N806
+ iRatio = self.curve_settings_widgets['Percentage'].currentText() # noqa: N806
+ iConsider_leak = self.curve_settings_widgets['LDN leak'].currentText() # noqa: N806
+ leak_ratio = self.curve_settings_widgets['leak Criteria'].text()
+ group_method = self.curve_settings_widgets['Group method'].currentText()
+ daily_bin = self.curve_settings_widgets['Daily bin'].currentText()
+ min_time = self.curve_settings_widgets['Min time'].text()
+ max_time = self.curve_settings_widgets['Max time'].text()
+
+ if iConsider_leak == 'Yes':
+ iConsider_leak = True # noqa: N806
else:
- iConsider_leak = False
-
- if iRatio == "Yes":
- iRatio = True
+ iConsider_leak = False # noqa: N806
+
+ if iRatio == 'Yes':
+ iRatio = True # noqa: N806
else:
- iRatio = False
-
- if daily_bin == "Yes":
+ iRatio = False # noqa: N806
+
+ if daily_bin == 'Yes':
daily_bin = True
else:
daily_bin = False
group_method = group_method.lower()
- min_time = int(float(min_time) )
- max_time = int(float(max_time) )
-
- self.current_raw_curve = self.project_result.getDeliveryExceedanceCurve(iPopulation=iPopulation, ratio=iRatio, consider_leak=iConsider_leak, leak_ratio=leak_ratio, result_type=group_method, daily=daily_bin, min_time=min_time, max_time=max_time)
- self.current_curve = self.time_combo.changeCurveTimeUnit(self.current_raw_curve)
- self.plotCurve("Exceedance Probability", "Time")
+ min_time = int(float(min_time))
+ max_time = int(float(max_time))
+
+ self.current_raw_curve = self.project_result.getDeliveryExceedanceCurve(
+ iPopulation=iPopulation,
+ ratio=iRatio,
+ consider_leak=iConsider_leak,
+ leak_ratio=leak_ratio,
+ result_type=group_method,
+ daily=daily_bin,
+ min_time=min_time,
+ max_time=max_time,
+ )
+ self.current_curve = self.time_combo.changeCurveTimeUnit(
+ self.current_raw_curve
+ )
+ self.plotCurve('Exceedance Probability', 'Time')
elif curve_type == 'Quantity':
- iPopulation = self.curve_settings_widgets["Population"].currentText()
- #iPopulation = self.curve_population_settings_combo.currentText()
- iRatio = self.curve_settings_widgets["Percentage"].currentText()
- iConsider_leak = self.curve_settings_widgets["LDN leak"].currentText()
- leak_ratio = self.curve_settings_widgets["leak Criteria"].text()
-
- if iConsider_leak == "Yes":
- iConsider_leak = True
+ iPopulation = self.curve_settings_widgets['Population'].currentText() # noqa: N806
+ # iPopulation = self.curve_population_settings_combo.currentText()
+ iRatio = self.curve_settings_widgets['Percentage'].currentText() # noqa: N806
+ iConsider_leak = self.curve_settings_widgets['LDN leak'].currentText() # noqa: N806
+ leak_ratio = self.curve_settings_widgets['leak Criteria'].text()
+
+ if iConsider_leak == 'Yes':
+ iConsider_leak = True # noqa: N806
else:
- iConsider_leak = False
-
- if iRatio == "Yes":
- iRatio = True
+ iConsider_leak = False # noqa: N806
+
+ if iRatio == 'Yes':
+ iRatio = True # noqa: N806
else:
- iRatio = False
+ iRatio = False # noqa: N806
+
+ scn_name = self.scenario_combo.currentText()
+ self.current_raw_curve = self.project_result.getQNIndexPopulation_4(
+ scn_name,
+ iPopulation=iPopulation,
+ ratio=iRatio,
+ consider_leak=iConsider_leak,
+ leak_ratio=leak_ratio,
+ )
+ self.current_curve = self.time_combo.changeCurveTimeUnit(
+ self.current_raw_curve
+ )
+ self.plotCurve('Quantity', 'Time')
- scn_name = self.scenario_combo.currentText()
- self.current_raw_curve = self.project_result.getQNIndexPopulation_4(scn_name, iPopulation=iPopulation, ratio=iRatio, consider_leak=iConsider_leak, leak_ratio=leak_ratio)
- self.current_curve = self.time_combo.changeCurveTimeUnit(self.current_raw_curve)
- self.plotCurve("Quantity", "Time")
-
elif curve_type == 'Delivery':
- #self.current_curve_data = (curve_type, pd.DataFrame())
- iPopulation = self.curve_settings_widgets["Population"].currentText()
- #iPopulation = self.curve_population_settings_combo.currentText()
- iRatio = self.curve_settings_widgets["Percentage"].currentText()
- iConsider_leak = self.curve_settings_widgets["LDN leak"].currentText()
- leak_ratio = self.curve_settings_widgets["leak Criteria"].text()
-
- if iConsider_leak == "Yes":
- iConsider_leak = True
+ # self.current_curve_data = (curve_type, pd.DataFrame())
+ iPopulation = self.curve_settings_widgets['Population'].currentText() # noqa: N806
+ # iPopulation = self.curve_population_settings_combo.currentText()
+ iRatio = self.curve_settings_widgets['Percentage'].currentText() # noqa: N806
+ iConsider_leak = self.curve_settings_widgets['LDN leak'].currentText() # noqa: N806
+ leak_ratio = self.curve_settings_widgets['leak Criteria'].text()
+
+ if iConsider_leak == 'Yes':
+ iConsider_leak = True # noqa: N806
else:
- iConsider_leak = False
-
- if iRatio == "Yes":
- iRatio = True
+ iConsider_leak = False # noqa: N806
+
+ if iRatio == 'Yes':
+ iRatio = True # noqa: N806
else:
- iRatio = False
-
- scn_name = self.scenario_combo.currentText()
- self.current_raw_curve = self.project_result.getDLIndexPopulation_4(scn_name, iPopulation=iPopulation, ratio=iRatio, consider_leak=iConsider_leak, leak_ratio=leak_ratio)
- self.current_curve = self.time_combo.changeCurveTimeUnit(self.current_raw_curve)
- self.plotCurve("Delivery", "Time")
-
+ iRatio = False # noqa: N806
+
+ scn_name = self.scenario_combo.currentText()
+ self.current_raw_curve = self.project_result.getDLIndexPopulation_4(
+ scn_name,
+ iPopulation=iPopulation,
+ ratio=iRatio,
+ consider_leak=iConsider_leak,
+ leak_ratio=leak_ratio,
+ )
+ self.current_curve = self.time_combo.changeCurveTimeUnit(
+ self.current_raw_curve
+ )
+ self.plotCurve('Delivery', 'Time')
+
elif curve_type == 'SSI':
- #self.current_curve_data = (curve_type, pd.DataFrame())
- iPopulation = self.curve_settings_widgets["Population"].currentText()
- scn_name = self.scenario_combo.currentText()
- self.current_raw_curve = self.project_result.getSystemServiceabilityIndexCurve(scn_name, iPopulation=iPopulation)
- self.current_curve = self.time_combo.changeCurveTimeUnit(self.current_raw_curve)
- self.plotCurve("SSI", "Time")
-
-
- def setCurveSettingBox(self, curve_type):
- for i in range(self.curve_settings_table.rowCount()):
+ # self.current_curve_data = (curve_type, pd.DataFrame())
+ iPopulation = self.curve_settings_widgets['Population'].currentText() # noqa: N806
+ scn_name = self.scenario_combo.currentText()
+ self.current_raw_curve = (
+ self.project_result.getSystemServiceabilityIndexCurve(
+ scn_name, iPopulation=iPopulation
+ )
+ )
+ self.current_curve = self.time_combo.changeCurveTimeUnit(
+ self.current_raw_curve
+ )
+ self.plotCurve('SSI', 'Time')
+
+ def setCurveSettingBox(self, curve_type): # noqa: N802, D102
+ for i in range(self.curve_settings_table.rowCount()): # noqa: B007
self.curve_settings_table.removeRow(0)
-
+
if curve_type in curve_settings:
- self.populateCurveSettingsTable(curve_settings[curve_type] )
+ self.populateCurveSettingsTable(curve_settings[curve_type])
else:
pass
- #raise ValueError("Unknown Curve type: "+repr(curve_type))
-
- def populateCurveSettingsTable(self, settings_content):
+ # raise ValueError("Unknown Curve type: "+repr(curve_type))
+
+ def populateCurveSettingsTable(self, settings_content): # noqa: C901, N802, D102
self.curve_settings_widgets.clear()
vertical_header = []
- cell_type_list = []
- default_list = []
- content_list = []
+ cell_type_list = []
+ default_list = []
+ content_list = []
for row in settings_content:
for k in row:
- if k == "Label":
+ if k == 'Label':
vertical_header.append(row[k])
- elif k == "Type":
+ elif k == 'Type':
cell_type_list.append(row[k])
- elif k == "Default":
+ elif k == 'Default':
default_list.append(row[k])
-
- if "Content" in row:
- content_list.append(row["Content" ])
+
+ if 'Content' in row:
+ content_list.append(row['Content'])
else:
content_list.append(None)
-
- self.curve_settings_table.setColumnCount(1 )
+
+ self.curve_settings_table.setColumnCount(1)
self.curve_settings_table.setRowCount(len(settings_content))
self.curve_settings_table.setVerticalHeaderLabels(vertical_header)
-
+
i = 0
for cell_type in cell_type_list:
- if cell_type=="Time":
+ if cell_type == 'Time':
self.time_combo = Time_Unit_Combo()
- self.curve_settings_table.setCellWidget(i,0, self.time_combo)
- self.time_combo.currentTextChanged.connect(self.curveTimeSettingsChanged )
-
- elif cell_type=="Yes-No_Combo":
+ self.curve_settings_table.setCellWidget(i, 0, self.time_combo)
+ self.time_combo.currentTextChanged.connect(
+ self.curveTimeSettingsChanged
+ )
+
+ elif cell_type == 'Yes-No_Combo':
current_widget = Yes_No_Combo()
- self.curve_settings_table.setCellWidget(i,0, current_widget)
- current_widget.currentTextChanged.connect(self.curveSettingChanged )
-
+ self.curve_settings_table.setCellWidget(i, 0, current_widget)
+ current_widget.currentTextChanged.connect(self.curveSettingChanged)
+
default_value = default_list[i]
current_widget.setCurrentText(default_value)
-
+
self.curve_settings_widgets[vertical_header[i]] = current_widget
-
- elif cell_type=="Custom_Combo":
+
+ elif cell_type == 'Custom_Combo':
current_widget = QtWidgets.QComboBox()
- contents = content_list[i]
+ contents = content_list[i]
current_widget.addItems(contents)
- self.curve_settings_table.setCellWidget(i,0, current_widget)
- current_widget.currentTextChanged.connect(self.curveSettingChanged )
-
+ self.curve_settings_table.setCellWidget(i, 0, current_widget)
+ current_widget.currentTextChanged.connect(self.curveSettingChanged)
+
default_value = default_list[i]
current_widget.setCurrentText(default_value)
-
+
self.curve_settings_widgets[vertical_header[i]] = current_widget
-
- elif cell_type=="Float Line":
+
+ elif cell_type == 'Float Line':
current_widget = QtWidgets.QLineEdit()
- self.curve_settings_table.setCellWidget(i,0, current_widget)
- current_widget.editingFinished.connect(self.curveSettingChanged )
- current_widget.setValidator(QtGui.QDoubleValidator(0, 1000000, 20, notation=QtGui.QDoubleValidator.StandardNotation) )
-
+ self.curve_settings_table.setCellWidget(i, 0, current_widget)
+ current_widget.editingFinished.connect(self.curveSettingChanged)
+ current_widget.setValidator(
+ QtGui.QDoubleValidator(
+ 0,
+ 1000000,
+ 20,
+ notation=QtGui.QDoubleValidator.StandardNotation,
+ )
+ )
+
default_value = default_list[i]
current_widget.setText(default_value)
self.curve_settings_widgets[vertical_header[i]] = current_widget
-
- elif cell_type=="Int Line":
+
+ elif cell_type == 'Int Line':
current_widget = QtWidgets.QLineEdit()
- self.curve_settings_table.setCellWidget(i,0, current_widget)
- current_widget.editingFinished.connect(self.curveSettingChanged )
- current_widget.setValidator(QtGui.QIntValidator(0, 3600*24*1000) )
-
+ self.curve_settings_table.setCellWidget(i, 0, current_widget)
+ current_widget.editingFinished.connect(self.curveSettingChanged)
+ current_widget.setValidator(QtGui.QIntValidator(0, 3600 * 24 * 1000))
+
default_value = default_list[i]
current_widget.setText(default_value)
self.curve_settings_widgets[vertical_header[i]] = current_widget
else:
- raise ValueError(repr(cell_type) )
-
- i += 1
- #for label in settings_content:
-
- def curveTimeSettingsChanged(self, x):
- self.current_curve = self.time_combo.changeCurveTimeUnit(self.current_raw_curve)
+ raise ValueError(repr(cell_type))
+
+ i += 1 # noqa: SIM113
+ # for label in settings_content:
+
+ def curveTimeSettingsChanged(self, x): # noqa: ARG002, N802, D102
+ self.current_curve = self.time_combo.changeCurveTimeUnit(
+ self.current_raw_curve
+ )
self.plotCurve()
-
- def curveSettingChanged(self):
- if "Population" in self.curve_settings_widgets:
- new_population_setting = self.curve_settings_widgets["Population"].currentText()
- if new_population_setting == "Yes" and type(self.project_result._population_data) == type(None):
- self.errorMSG("Error", "Population data is not loaded")
- self.curve_settings_widgets["Population"].setCurrentText("No")
+
+ def curveSettingChanged(self): # noqa: N802, D102
+ if 'Population' in self.curve_settings_widgets:
+ new_population_setting = self.curve_settings_widgets[
+ 'Population'
+ ].currentText()
+ if new_population_setting == 'Yes' and type( # noqa: E721
+ self.project_result._population_data # noqa: SLF001
+ ) == type(None):
+ self.errorMSG('Error', 'Population data is not loaded')
+ self.curve_settings_widgets['Population'].setCurrentText('No')
return
self.calculateCurrentCurve()
- def tabChanged(self, index):
+
+ def tabChanged(self, index): # noqa: N802, D102
if index == 1:
self.initalize_result()
-
- def saveCurrentCurveByButton(self):
- #if self.current_curve_data == None:
- if type(self.current_curve) == type(None):
- self.errorMSG("REWET", 'No curve is ploted')
+
+ def saveCurrentCurveByButton(self): # noqa: N802, D102
+ # if self.current_curve_data == None:
+ if type(self.current_curve) == type(None): # noqa: E721
+            self.errorMSG('REWET', 'No curve is plotted')
return
-
- file_addr = QtWidgets.QFileDialog.getSaveFileName(self.asli_MainWindow, 'Save File',
- self.project_file_addr,"Excel Workbook (*.xlsx)")
- if file_addr[0] == '':
+
+ file_addr = QtWidgets.QFileDialog.getSaveFileName(
+ self.asli_MainWindow,
+ 'Save File',
+ self.project_file_addr,
+ 'Excel Workbook (*.xlsx)',
+ )
+ if file_addr[0] == '': # noqa: PLC1901
return
-
- #self.current_curve_data[1].to_excel(file_addr[0])
+
+ # self.current_curve_data[1].to_excel(file_addr[0])
self.current_curve.to_excel(file_addr[0])
-
\ No newline at end of file
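The result tab keeps '== None' and 'type(x) == type(None)' comparisons and marks them with '# noqa: E711' / '# noqa: E721' rather than changing behaviour. For reference, the identity checks those rules point to, which are equivalent here because None is a singleton (sketch only):

current_curve = None

# Flagged forms, as preserved in the module above:
if current_curve == None:  # noqa: E711
    print('no curve selected (== None)')
if type(current_curve) == type(None):  # noqa: E721
    print('no curve selected (type comparison)')

# Idiomatic equivalent:
if current_curve is None:
    print('no curve selected (is None)')
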
diff --git a/modules/systemPerformance/REWET/REWET/GUI/Run_Tab_Designer.py b/modules/systemPerformance/REWET/REWET/GUI/Run_Tab_Designer.py
index 2b5bd986f..5ac333e9e 100644
--- a/modules/systemPerformance/REWET/REWET/GUI/Run_Tab_Designer.py
+++ b/modules/systemPerformance/REWET/REWET/GUI/Run_Tab_Designer.py
@@ -1,100 +1,105 @@
-# -*- coding: utf-8 -*-
-"""
-Created on Wed Nov 2 14:40:45 2022
+"""Created on Wed Nov 2 14:40:45 2022
@author: snaeimi
-"""
+""" # noqa: CPY001, D400, N999
+import subprocess # noqa: S404
import threading
-from PyQt5.QtCore import pyqtSlot, pyqtSignal, QObject
-from initial import Starter
-import subprocess
-import time
+from PyQt5.QtCore import QObject, pyqtSignal
-class Custom_Object(QObject):
- outSignal = pyqtSignal(bytes)
-class Run_Tab_Designer():
+class Custom_Object(QObject): # noqa: D101
+ outSignal = pyqtSignal(bytes) # noqa: N815
+
+
+class Run_Tab_Designer: # noqa: D101
def __init__(self):
self.run_button.clicked.connect(self.runREWET)
- self.stop_button.clicked.connect(self.stopRun )
+ self.stop_button.clicked.connect(self.stopRun)
self.cobject = Custom_Object()
self.cobject.outSignal.connect(self.updateRunOuput)
- self.rewet_sub_process = None
+ self.rewet_sub_process = None
self.if_run_in_progress = False
-
- def runREWET(self):
- if self.if_run_in_progress == True:
+
+ def runREWET(self): # noqa: N802, D102
+ if self.if_run_in_progress == True: # noqa: E712
return False
if_saved = self.saveProject()
-
- if if_saved == False:
+
+ if if_saved == False: # noqa: E712
return False
self.ouput_textedit.clear()
- #start = Starter()
- if self.project_file_addr == None:
- self.errorMSG("REWET","File address is empty. Please report it as a bug to the developer.")
+ # start = Starter()
+ if self.project_file_addr == None: # noqa: E711
+ self.errorMSG(
+ 'REWET',
+ 'File address is empty. Please report it as a bug to the developer.',
+ )
self.if_run_in_progress = True
self.setAllTabsEnabled(False)
- threading.Thread(
- target=self._RunREWETHelper, args=(), daemon=True
- ).start()
-
-
-
- def _RunREWETHelper(self):
- self.rewet_sub_process = subprocess.Popen(["python","initial.py", self.project_file_addr], stdout=subprocess.PIPE, bufsize=0)
+ threading.Thread(target=self._RunREWETHelper, args=(), daemon=True).start() # noqa: RET503
+
+ def _RunREWETHelper(self): # noqa: N802
+ self.rewet_sub_process = subprocess.Popen( # noqa: S603
+ ['python', 'initial.py', self.project_file_addr], # noqa: S607
+ stdout=subprocess.PIPE,
+ bufsize=0,
+ )
for line in iter(self.rewet_sub_process.stdout.readline, b''):
- #sys.stdout.flush()
- self.cobject.outSignal.emit(line )
-#
+ # sys.stdout.flush()
+ self.cobject.outSignal.emit(line)
self.rewet_sub_process.stdout.close()
-
- def setAllTabsEnabled(self, enabled):
- #self.ouput_textedit.setEnabled(enabled)
+
+ def setAllTabsEnabled(self, enabled): # noqa: N802, D102
+ # self.ouput_textedit.setEnabled(enabled)
self.main_tab.setTabEnabled(1, enabled)
self.main_process1.setTabEnabled(0, enabled)
self.main_process1.setTabEnabled(1, enabled)
self.main_process1.setTabEnabled(2, enabled)
self.main_process1.setTabEnabled(3, enabled)
self.run_button.setEnabled(enabled)
- #self.results_tabs_widget.setEnabled(enabled)
- #self.stop_button.setEnabled(True)
-
- #@pyqtSlot(bytes)
- def updateRunOuput(self, string):
+ # self.results_tabs_widget.setEnabled(enabled)
+ # self.stop_button.setEnabled(True)
+
+ # @pyqtSlot(bytes)
+ def updateRunOuput(self, string): # noqa: N802, D102
string = string.decode()
-
- if "Time of Single run is" in string:
+
+ if 'Time of Single run is' in string:
self.endSimulation()
- elif "Error" in string:
+ elif 'Error' in string:
self.errorInSimulation()
-
- self.ouput_textedit.appendPlainText(string )
- #running code for teh project
-
- def endSimulation(self):
- end_message = "\n-------------------\nSIMULATION FINISHED\n-------------------\n"
+ self.ouput_textedit.appendPlainText(string)
+
+ # running code for the project
+
+ def endSimulation(self): # noqa: N802, D102
+ end_message = (
+ '\n-------------------\nSIMULATION FINISHED\n-------------------\n'
+ )
self.setAllTabsEnabled(True)
self.if_run_in_progress = False
self.ouput_textedit.appendPlainText(end_message)
-
- def errorInSimulation(self):
- end_message = "\n-------------\nERROR OCCURED\n-------------\n"
+
+ def errorInSimulation(self): # noqa: N802, D102
+ end_message = '\n-------------\nERROR OCCURRED\n-------------\n'
self.setAllTabsEnabled(True)
self.if_run_in_progress = False
- self.errorMSG("REWET","An error happened during the simulation. Please look at the log for further information.")
+ self.errorMSG(
+ 'REWET',
+ 'An error happened during the simulation. Please look at the log for further information.',
+ )
self.ouput_textedit.appendPlainText(end_message)
- def stopRun(self):
- if self.if_run_in_progress == False:
+ def stopRun(self): # noqa: N802, D102
+ if self.if_run_in_progress == False: # noqa: E712
return
- if type(self.rewet_sub_process) != type(None):
+ if type(self.rewet_sub_process) != type(None): # noqa: E721
self.rewet_sub_process.terminate()
- termination_message = "\n-------------\nRUN CANCELLED\n-------------\n"
+ termination_message = '\n-------------\nRUN CANCELLED\n-------------\n'
self.setAllTabsEnabled(True)
self.if_run_in_progress = False
- self.ouput_textedit.appendPlainText(termination_message)
\ No newline at end of file
+ self.ouput_textedit.appendPlainText(termination_message)
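_RunREWETHelper above launches REWET in a child process and forwards every stdout line to the GUI through the Custom_Object signal so the log view updates while the run is in progress. Stripped of the Qt plumbing, the streaming loop looks like the sketch below; the child command is a stand-in, not the project's actual 'python initial.py <project file>' invocation:

import subprocess
import sys

# Stand-in child process that prints the kind of line the GUI watches for.
child_cmd = [sys.executable, '-c', "print('Time of Single run is 0.1')"]

proc = subprocess.Popen(child_cmd, stdout=subprocess.PIPE, bufsize=0)

# Read until EOF; in the GUI each line is emitted via outSignal instead of printed.
for line in iter(proc.stdout.readline, b''):
    print('REWET:', line.decode().rstrip())

proc.stdout.close()
proc.wait()
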
diff --git a/modules/systemPerformance/REWET/REWET/GUI/Scenario_Dialog_Designer.py b/modules/systemPerformance/REWET/REWET/GUI/Scenario_Dialog_Designer.py
index ad702101f..a743fd79f 100644
--- a/modules/systemPerformance/REWET/REWET/GUI/Scenario_Dialog_Designer.py
+++ b/modules/systemPerformance/REWET/REWET/GUI/Scenario_Dialog_Designer.py
@@ -1,25 +1,27 @@
-# -*- coding: utf-8 -*-
-"""
-Created on Fri Oct 28 14:09:49 2022
+"""Created on Fri Oct 28 14:09:49 2022
@author: snaeimi
-"""
+""" # noqa: CPY001, D400, N999
+
+from PyQt5 import QtGui, QtWidgets
-import os
-from PyQt5 import QtCore, QtGui, QtWidgets
from .Scenario_Dialog_Window import Ui_Scenario_Dialog
-class Scenario_Dialog_Designer(Ui_Scenario_Dialog):
+
+class Scenario_Dialog_Designer(Ui_Scenario_Dialog): # noqa: D101
def __init__(self):
self._window = QtWidgets.QDialog()
self.setupUi(self._window)
self.last_probability = 1
- self.probability_line.setValidator(QtGui.QDoubleValidator(0.0, 1, 3, notation=QtGui.QDoubleValidator.StandardNotation) )
+ self.probability_line.setValidator(
+ QtGui.QDoubleValidator(
+ 0.0, 1, 3, notation=QtGui.QDoubleValidator.StandardNotation
+ )
+ )
self.probability_line.textChanged.connect(self.probabilityValidatorHelper)
-
- def probabilityValidatorHelper(self, text):
+
+ def probabilityValidatorHelper(self, text): # noqa: N802, D102
if float(text) > 1:
self.probability_line.setText(self.last_probability)
else:
self.last_probability = text
-
\ No newline at end of file
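The scenario dialog restricts the probability field to [0, 1] with a QDoubleValidator plus a textChanged fallback that restores the last accepted value. Below is a self-contained sketch of that guard; the try/except for empty or partial input such as '.' is an added safeguard in the sketch, not something the module above does:

from PyQt5 import QtGui, QtWidgets

app = QtWidgets.QApplication([])

probability_line = QtWidgets.QLineEdit('1')
probability_line.setValidator(
    QtGui.QDoubleValidator(
        0.0, 1.0, 3, notation=QtGui.QDoubleValidator.StandardNotation
    )
)

last_probability = '1'


def keep_probability_in_range(text):
    global last_probability
    try:
        value = float(text)
    except ValueError:  # empty field or partial input such as '.'
        return
    if value > 1:
        probability_line.setText(last_probability)
    else:
        last_probability = text


probability_line.textChanged.connect(keep_probability_in_range)
probability_line.show()
# app.exec_() would start the event loop in a real application.
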
diff --git a/modules/systemPerformance/REWET/REWET/GUI/Scenario_Dialog_Window.py b/modules/systemPerformance/REWET/REWET/GUI/Scenario_Dialog_Window.py
index 0afe03292..4aeda05b0 100644
--- a/modules/systemPerformance/REWET/REWET/GUI/Scenario_Dialog_Window.py
+++ b/modules/systemPerformance/REWET/REWET/GUI/Scenario_Dialog_Window.py
@@ -1,80 +1,81 @@
-# -*- coding: utf-8 -*-
-
-# Form implementation generated from reading ui file 'Scenario_Dialog_Window.ui'
+# Form implementation generated from reading ui file 'Scenario_Dialog_Window.ui' # noqa: CPY001, D100, N999
#
# Created by: PyQt5 UI code generator 5.12.3
#
# WARNING! All changes made in this file will be lost!
-from PyQt5 import QtCore, QtGui, QtWidgets
+from PyQt5 import QtCore, QtWidgets
-class Ui_Scenario_Dialog(object):
- def setupUi(self, Scenario_Dialog):
- Scenario_Dialog.setObjectName("Scenario_Dialog")
+class Ui_Scenario_Dialog: # noqa: D101
+ def setupUi(self, Scenario_Dialog): # noqa: N802, N803, D102
+ Scenario_Dialog.setObjectName('Scenario_Dialog')
Scenario_Dialog.resize(351, 241)
self.buttonBox = QtWidgets.QDialogButtonBox(Scenario_Dialog)
self.buttonBox.setGeometry(QtCore.QRect(260, 40, 81, 241))
self.buttonBox.setOrientation(QtCore.Qt.Vertical)
- self.buttonBox.setStandardButtons(QtWidgets.QDialogButtonBox.Cancel|QtWidgets.QDialogButtonBox.Ok)
- self.buttonBox.setObjectName("buttonBox")
+ self.buttonBox.setStandardButtons(
+ QtWidgets.QDialogButtonBox.Cancel | QtWidgets.QDialogButtonBox.Ok
+ )
+ self.buttonBox.setObjectName('buttonBox')
self.scenario_name_line = QtWidgets.QLineEdit(Scenario_Dialog)
self.scenario_name_line.setGeometry(QtCore.QRect(110, 40, 113, 20))
- self.scenario_name_line.setObjectName("scenario_name_line")
+ self.scenario_name_line.setObjectName('scenario_name_line')
self.pipe_damage_line = QtWidgets.QLineEdit(Scenario_Dialog)
self.pipe_damage_line.setGeometry(QtCore.QRect(110, 70, 113, 20))
- self.pipe_damage_line.setObjectName("pipe_damage_line")
+ self.pipe_damage_line.setObjectName('pipe_damage_line')
self.node_damage_line = QtWidgets.QLineEdit(Scenario_Dialog)
self.node_damage_line.setGeometry(QtCore.QRect(110, 100, 113, 20))
- self.node_damage_line.setObjectName("node_damage_line")
+ self.node_damage_line.setObjectName('node_damage_line')
self.pump_damage_line = QtWidgets.QLineEdit(Scenario_Dialog)
self.pump_damage_line.setGeometry(QtCore.QRect(110, 130, 113, 20))
- self.pump_damage_line.setObjectName("pump_damage_line")
+ self.pump_damage_line.setObjectName('pump_damage_line')
self.label = QtWidgets.QLabel(Scenario_Dialog)
self.label.setGeometry(QtCore.QRect(20, 40, 91, 16))
- self.label.setObjectName("label")
+ self.label.setObjectName('label')
self.label_2 = QtWidgets.QLabel(Scenario_Dialog)
self.label_2.setGeometry(QtCore.QRect(20, 70, 71, 16))
- self.label_2.setObjectName("label_2")
+ self.label_2.setObjectName('label_2')
self.label_3 = QtWidgets.QLabel(Scenario_Dialog)
self.label_3.setGeometry(QtCore.QRect(20, 100, 71, 16))
- self.label_3.setObjectName("label_3")
+ self.label_3.setObjectName('label_3')
self.label_4 = QtWidgets.QLabel(Scenario_Dialog)
self.label_4.setGeometry(QtCore.QRect(20, 130, 71, 16))
- self.label_4.setObjectName("label_4")
+ self.label_4.setObjectName('label_4')
self.label_5 = QtWidgets.QLabel(Scenario_Dialog)
self.label_5.setGeometry(QtCore.QRect(20, 160, 81, 16))
- self.label_5.setObjectName("label_5")
+ self.label_5.setObjectName('label_5')
self.label_6 = QtWidgets.QLabel(Scenario_Dialog)
self.label_6.setGeometry(QtCore.QRect(20, 190, 61, 16))
- self.label_6.setObjectName("label_6")
+ self.label_6.setObjectName('label_6')
self.tank_damage_line = QtWidgets.QLineEdit(Scenario_Dialog)
self.tank_damage_line.setGeometry(QtCore.QRect(110, 160, 113, 20))
- self.tank_damage_line.setObjectName("tank_damage_line")
+ self.tank_damage_line.setObjectName('tank_damage_line')
self.probability_line = QtWidgets.QLineEdit(Scenario_Dialog)
self.probability_line.setGeometry(QtCore.QRect(110, 190, 113, 20))
- self.probability_line.setObjectName("probability_line")
+ self.probability_line.setObjectName('probability_line')
self.retranslateUi(Scenario_Dialog)
self.buttonBox.accepted.connect(Scenario_Dialog.accept)
self.buttonBox.rejected.connect(Scenario_Dialog.reject)
QtCore.QMetaObject.connectSlotsByName(Scenario_Dialog)
- def retranslateUi(self, Scenario_Dialog):
+ def retranslateUi(self, Scenario_Dialog): # noqa: N802, N803, D102
_translate = QtCore.QCoreApplication.translate
- Scenario_Dialog.setWindowTitle(_translate("Scenario_Dialog", "New Scenario"))
- self.label.setText(_translate("Scenario_Dialog", "Scenario Name"))
- self.label_2.setText(_translate("Scenario_Dialog", "Pipe Damage"))
- self.label_3.setText(_translate("Scenario_Dialog", "Nodal Damage"))
- self.label_4.setText(_translate("Scenario_Dialog", "Pump Damage"))
- self.label_5.setText(_translate("Scenario_Dialog", "Tank Damage"))
- self.label_6.setText(_translate("Scenario_Dialog", "Probability"))
- self.probability_line.setText(_translate("Scenario_Dialog", "1"))
+ Scenario_Dialog.setWindowTitle(_translate('Scenario_Dialog', 'New Scenario'))
+ self.label.setText(_translate('Scenario_Dialog', 'Scenario Name'))
+ self.label_2.setText(_translate('Scenario_Dialog', 'Pipe Damage'))
+ self.label_3.setText(_translate('Scenario_Dialog', 'Nodal Damage'))
+ self.label_4.setText(_translate('Scenario_Dialog', 'Pump Damage'))
+ self.label_5.setText(_translate('Scenario_Dialog', 'Tank Damage'))
+ self.label_6.setText(_translate('Scenario_Dialog', 'Probability'))
+ self.probability_line.setText(_translate('Scenario_Dialog', '1'))
-if __name__ == "__main__":
+if __name__ == '__main__':
import sys
+
app = QtWidgets.QApplication(sys.argv)
Scenario_Dialog = QtWidgets.QDialog()
ui = Ui_Scenario_Dialog()
diff --git a/modules/systemPerformance/REWET/REWET/GUI/Simulation_Tab_Designer.py b/modules/systemPerformance/REWET/REWET/GUI/Simulation_Tab_Designer.py
index ea0fef657..74dc6cf27 100644
--- a/modules/systemPerformance/REWET/REWET/GUI/Simulation_Tab_Designer.py
+++ b/modules/systemPerformance/REWET/REWET/GUI/Simulation_Tab_Designer.py
@@ -1,144 +1,159 @@
-# -*- coding: utf-8 -*-
-"""
-Created on Thu Oct 27 19:00:30 2022
+"""Created on Thu Oct 27 19:00:30 2022
@author: snaeimi
-"""
+""" # noqa: CPY001, D400, N999
+
import os
import tempfile
+
from PyQt5 import QtGui, QtWidgets
-class Simulation_Tab_Designer():
+
+class Simulation_Tab_Designer: # noqa: D101
def __init__(self):
-
- """
- These are variables that are shared between ui and settings.
- """
+ """These are variables that are shared between ui and settings.""" # noqa: D401
self.setSimulationSettings(self.settings)
-
+
"""
Reassignment of shared variables.
"""
- self.result_folder_addr = os.getcwd()
- self.temp_folder_addr = tempfile.mkdtemp()
-
+ self.result_folder_addr = os.getcwd() # noqa: PTH109
+ self.temp_folder_addr = tempfile.mkdtemp()
+
"""
- ui value assigments.
+ ui value assignments.
"""
self.setSimulationUI()
-
+
"""
Field Validators.
"""
- self.simulation_time_line.setValidator(QtGui.QIntValidator(0, 10000*24*3600) )
- self.simulation_time_step_line.setValidator(QtGui.QIntValidator(0, 10000*24*3600) )
-
+ self.simulation_time_line.setValidator(
+ QtGui.QIntValidator(0, 10000 * 24 * 3600)
+ )
+ self.simulation_time_step_line.setValidator(
+ QtGui.QIntValidator(0, 10000 * 24 * 3600)
+ )
+
"""
Signals connection.
"""
- self.result_directory_browser_button.clicked.connect(self.ResultFileBrowserClicked)
+ self.result_directory_browser_button.clicked.connect(
+ self.ResultFileBrowserClicked
+ )
self.temp_browser_button.clicked.connect(self.tempFileBrowserClicked)
- self.simulation_time_line.textChanged.connect(self.SimulationTimeValidatorHelper)
- self.simulation_time_step_line.textChanged.connect(self.SimulationTimeValidatorHelper)
-
- def getSimulationSettings(self):
- if self.result_folder_addr == '':
- self.errorMSG("REWET", "Result folder must be provided")
+ self.simulation_time_line.textChanged.connect(
+ self.SimulationTimeValidatorHelper
+ )
+ self.simulation_time_step_line.textChanged.connect(
+ self.SimulationTimeValidatorHelper
+ )
+
+ def getSimulationSettings(self): # noqa: N802, D102
+ if self.result_folder_addr == '': # noqa: PLC1901
+ self.errorMSG('REWET', 'Result folder must be provided')
return False
- if self.temp_folder_addr == '':
- self.errorMSG("REWET", "Temp folder must be provided")
+ if self.temp_folder_addr == '': # noqa: PLC1901
+ self.errorMSG('REWET', 'Temp folder must be provided')
return False
-
- self.simulation_time = int(self.simulation_time_line.text() )
- self.simulation_time_step = int(self.simulation_time_step_line.text() )
+
+ self.simulation_time = int(self.simulation_time_line.text())
+ self.simulation_time_step = int(self.simulation_time_step_line.text())
if self.single_radio.isChecked():
- self.number_of_damages = 'single'
+ self.number_of_damages = 'single'
elif self.multiple_radio.isChecked():
- self.number_of_damages = 'multiple'
+ self.number_of_damages = 'multiple'
else:
- raise ValueError("Borh of Run-Type Buttons are not selected which is an error.")
- #self.result_folder_addr -- already set
- #self.temp_folder_addr -- already set
-
+ raise ValueError( # noqa: TRY003
+ 'Neither of the Run-Type buttons is selected, which is an error.' # noqa: EM101
+ )
+ # self.result_folder_addr -- already set
+ # self.temp_folder_addr -- already set
+
if self.save_time_step_yes_radio.isChecked():
- self.save_time_step = True
+ self.save_time_step = True
elif self.save_time_step_no_radio.isChecked():
- self.save_time_step = False
+ self.save_time_step = False
else:
- raise ValueError("Both of Time-Save Buttons are not selected which is an error.")
-
- self.settings.process['RUN_TIME' ] = self.simulation_time
+ raise ValueError( # noqa: TRY003
+ 'Neither of the Time-Save buttons is selected, which is an error.' # noqa: EM101
+ )
+
+ self.settings.process['RUN_TIME'] = self.simulation_time
self.settings.process['simulation_time_step'] = self.simulation_time_step
- self.settings.process['number_of_damages' ] = self.number_of_damages
- self.settings.process['result_directory' ] = self.result_folder_addr
- self.settings.process['temp_directory' ] = self.temp_folder_addr
- self.settings.process['save_time_step' ] = self.save_time_step
-
+ self.settings.process['number_of_damages'] = self.number_of_damages
+ self.settings.process['result_directory'] = self.result_folder_addr
+ self.settings.process['temp_directory'] = self.temp_folder_addr
+ self.settings.process['save_time_step'] = self.save_time_step
+
return True
-
- def setSimulationUI(self):
- self.simulation_time_line.setText(str(int(self.simulation_time) ) )
- self.simulation_time_step_line.setText(str(int(self.simulation_time_step) ) )
+
+ def setSimulationUI(self): # noqa: N802, D102
+ self.simulation_time_line.setText(str(int(self.simulation_time)))
+ self.simulation_time_step_line.setText(str(int(self.simulation_time_step)))
self.result_folder_addr_line.setText(self.result_folder_addr)
self.temp_folder_addr_line.setText(self.temp_folder_addr)
-
+
if self.number_of_damages == 'single':
self.single_radio.setChecked(True)
elif self.number_of_damages == 'multiple':
self.multiple_radio.setChecked(True)
else:
- raise ValueError("Unknown runtype: "+repr(self.number_of_damages) )
-
- if self.save_time_step == True:
+ raise ValueError('Unknown runtype: ' + repr(self.number_of_damages))
+
+ if self.save_time_step == True: # noqa: E712
self.save_time_step_yes_radio.setChecked(True)
- elif self.save_time_step == False:
+ elif self.save_time_step == False: # noqa: E712
self.save_time_step_no_radio.setChecked(True)
else:
- raise ValueError("Unknown time save value: " + repr(self.save_time_step_no_radio) )
-
- def setSimulationSettings(self, settings):
- self.simulation_time = settings.process['RUN_TIME']
+ raise ValueError(
+ 'Unknown time save value: ' + repr(self.save_time_step)
+ )
+
+ def setSimulationSettings(self, settings): # noqa: N802, D102
+ self.simulation_time = settings.process['RUN_TIME']
self.simulation_time_step = settings.process['simulation_time_step']
- self.number_of_damages = settings.process['number_of_damages']
- self.result_folder_addr = settings.process['result_directory']
- self.temp_folder_addr = settings.process['temp_directory']
- self.save_time_step = settings.process['save_time_step']
-
- def ResultFileBrowserClicked(self):
- directory = QtWidgets.QFileDialog.getExistingDirectory(self.asli_MainWindow, "Select Directory")
- if directory == '':
+ self.number_of_damages = settings.process['number_of_damages']
+ self.result_folder_addr = settings.process['result_directory']
+ self.temp_folder_addr = settings.process['temp_directory']
+ self.save_time_step = settings.process['save_time_step']
+
+ def ResultFileBrowserClicked(self): # noqa: N802, D102
+ directory = QtWidgets.QFileDialog.getExistingDirectory(
+ self.asli_MainWindow, 'Select Directory'
+ )
+ if directory == '': # noqa: PLC1901
return
self.result_folder_addr = directory
self.result_folder_addr_line.setText(self.result_folder_addr)
-
- def tempFileBrowserClicked(self):
- directory = QtWidgets.QFileDialog.getExistingDirectory(self.asli_MainWindow, "Select Directory")
- if directory == '':
- return
+
+ def tempFileBrowserClicked(self): # noqa: N802, D102
+ directory = QtWidgets.QFileDialog.getExistingDirectory(
+ self.asli_MainWindow, 'Select Directory'
+ )
+ if directory == '': # noqa: PLC1901
+ return
self.temp_folder_addr = directory
self.temp_folder_addr_line.setText(self.temp_folder_addr)
-
- def SimulationTimeValidatorHelper(self, text):
+
+ def SimulationTimeValidatorHelper(self, text): # noqa: N802, D102
try:
- simulation_time = int(float(self.simulation_time_line.text() ) )
- except:
+ simulation_time = int(float(self.simulation_time_line.text()))
+ except: # noqa: E722
simulation_time = 0
try:
- simulation_time_step = int(float(self.simulation_time_step_line.text() ) )
- except:
+ simulation_time_step = int(float(self.simulation_time_step_line.text()))
+ except: # noqa: E722
simulation_time_step = 0
-
+
if text == self.simulation_time_line.text():
sim_time_changed = True
else:
sim_time_changed = False
- #print(simulation_time_step)
- #print(simulation_time)
+ # print(simulation_time_step)
+ # print(simulation_time)
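+ # Keep the time step from exceeding the total run time: adjust whichever field was just edited.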
if simulation_time_step > simulation_time:
if sim_time_changed:
- self.simulation_time_line.setText(str(simulation_time_step ))
+ self.simulation_time_line.setText(str(simulation_time_step))
else:
- self.simulation_time_step_line.setText(str(simulation_time ))
-
-
-
\ No newline at end of file
+ self.simulation_time_step_line.setText(str(simulation_time))
diff --git a/modules/systemPerformance/REWET/REWET/GUI/Subsitute_Layer_Designer.py b/modules/systemPerformance/REWET/REWET/GUI/Subsitute_Layer_Designer.py
index db204a707..6a54c681a 100644
--- a/modules/systemPerformance/REWET/REWET/GUI/Subsitute_Layer_Designer.py
+++ b/modules/systemPerformance/REWET/REWET/GUI/Subsitute_Layer_Designer.py
@@ -1,111 +1,128 @@
-# -*- coding: utf-8 -*-
-"""
-Created on Thu Jan 5 16:31:32 2023
+"""Created on Thu Jan 5 16:31:32 2023
@author: snaeimi
-"""
+""" # noqa: CPY001, D400, N999
-import shapely
import os
-import pandas as pd
+
import geopandas as gpd
-from PyQt5 import QtWidgets
+import pandas as pd
from GUI.Subsitute_Layer_Window import Ui_subsitite_layer_dialoge
+from PyQt5 import QtWidgets
-class Subsitute_Layer_Designer(Ui_subsitite_layer_dialoge):
- def __init__(self, subsitute_layer_addr, subsitute_layer,iUse_substitute_layer, demand_node_layers):
+class Subsitute_Layer_Designer(Ui_subsitite_layer_dialoge): # noqa: D101
+ def __init__(
+ self,
+ subsitute_layer_addr,
+ subsitute_layer,
+ iUse_substitute_layer, # noqa: N803
+ demand_node_layers,
+ ):
super().__init__()
self._window = QtWidgets.QDialog()
self.setupUi(self._window)
-
+
self.current_substitute_directory = ''
- self.subsitute_layer = subsitute_layer
- self.old_subsitute_layer = subsitute_layer
- self.subsitute_layer_addr = subsitute_layer_addr
- self.demand_node_layers = demand_node_layers
+ self.subsitute_layer = subsitute_layer
+ self.old_subsitute_layer = subsitute_layer
+ self.subsitute_layer_addr = subsitute_layer_addr
+ self.demand_node_layers = demand_node_layers
self.iUse_substitute_layer = iUse_substitute_layer
- self.demand_node_layers.to_file("Northridge\demand_node_layer.shp")
- self.subsitute_layer_addr_line.setText(self.subsitute_layer_addr )
- if type(self.subsitute_layer) != type(None):
- self.subsitute_layer_projection_name_line.setText(self.subsitute_layer.crs.name )
-
- apply_button = self.Subsitute_buttonBox.button(QtWidgets.QDialogButtonBox.Apply)
+ self.demand_node_layers.to_file(r'Northridge\demand_node_layer.shp')
+ self.subsitute_layer_addr_line.setText(self.subsitute_layer_addr)
+ if type(self.subsitute_layer) != type(None): # noqa: E721
+ self.subsitute_layer_projection_name_line.setText(
+ self.subsitute_layer.crs.name
+ )
+
+ apply_button = self.Subsitute_buttonBox.button(
+ QtWidgets.QDialogButtonBox.Apply
+ )
apply_button.clicked.connect(self.applyNewSubsituteLayer)
ok_button = self.Subsitute_buttonBox.button(QtWidgets.QDialogButtonBox.Ok)
ok_button.clicked.connect(self.applyNewSubsituteLayer)
-
- self.population_browser_button.clicked.connect(self.substituteLayerBrowseButton)
-
+
+ self.population_browser_button.clicked.connect(
+ self.substituteLayerBrowseButton
+ )
+
self.iUse_sub_checkbox.setChecked(self.iUse_substitute_layer)
- self.iUse_sub_checkbox.stateChanged.connect(self.iUseSubstituteCheckBoxStateChanged)
-
- def iUseSubstituteCheckBoxStateChanged(self, state):
+ self.iUse_sub_checkbox.stateChanged.connect(
+ self.iUseSubstituteCheckBoxStateChanged
+ )
+
+ def iUseSubstituteCheckBoxStateChanged(self, state): # noqa: N802, D102
if state == 0:
self.iUse_substitute_layer = False
- elif state == 2:
+ elif state == 2: # noqa: PLR2004
self.iUse_substitute_layer = True
-
- def applyNewSubsituteLayer(self):
- #demand_node_layers = self.createGeopandasPointDataFrameForNodes(self, self.wn, self.demand_node_name)
- if type(self.subsitute_layer) == type(None):
+
+ def applyNewSubsituteLayer(self): # noqa: N802, D102
+ # demand_node_layers = self.createGeopandasPointDataFrameForNodes(self, self.wn, self.demand_node_name)
+ if type(self.subsitute_layer) == type(None): # noqa: E721
return
-
- def substituteLayerBrowseButton(self):
- file = QtWidgets.QFileDialog.getOpenFileName(self._window, 'Open file',
- self.current_substitute_directory,"Shapefile file (*.shp)")
-
- if file[0] == '':
+
+ def substituteLayerBrowseButton(self): # noqa: N802, D102
+ file = QtWidgets.QFileDialog.getOpenFileName(
+ self._window,
+ 'Open file',
+ self.current_substitute_directory,
+ 'Shapefile file (*.shp)',
+ )
+
+ if file[0] == '': # noqa: PLC1901
return
- split_addr = os.path.split(file[0])
+ split_addr = os.path.split(file[0])
self.current_substitute_directory = split_addr[0]
-
+
self.subsitute_layer_addr_line.setText(file[0])
- self.subsitute_layer = gpd.read_file(file[0])
+ self.subsitute_layer = gpd.read_file(file[0])
self.subsitute_layer_addr = file[0]
self.subsitute_layer_addr_line.setText(file[0])
- self.subsitute_layer_projection_name_line.setText(self.subsitute_layer.crs.name)
-
+ self.subsitute_layer_projection_name_line.setText(
+ self.subsitute_layer.crs.name
+ )
+
self.sub_error_text_edit.clear()
- self.demand_node_layers = self.demand_node_layers.set_crs(crs=self.subsitute_layer.crs)
+ self.demand_node_layers = self.demand_node_layers.set_crs(
+ crs=self.subsitute_layer.crs
+ )
joined_map = gpd.sjoin(self.subsitute_layer, self.demand_node_layers)
-
+
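+ # Count how many substitute-layer features each demand node is joined to, so duplicates can be reported below.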
number_list = pd.Series(index=self.demand_node_layers.index, data=0)
- for ind, val in joined_map["index_right"].iteritems():
- number_list.loc[val] = number_list.loc[val] + 1
-
+ for ind, val in joined_map['index_right'].iteritems(): # noqa: B007
+ number_list.loc[val] = number_list.loc[val] + 1 # noqa: PLR6104
+
number_list = number_list[number_list > 1]
number_list = number_list.sort_values(ascending=False)
-
- text = ""
+
+ text = ''
if len(number_list) > 0:
- text += "The following nodes is joined with more than 1 substitute layer feature\n"
-
+ text += 'The following nodes are joined with more than one substitute layer feature\n'
+
for ind, num in number_list.iteritems():
- text+=repr(ind) + " : " + repr(num) + "\n"
-
- text += "\n\n"
-
- index_number_list = pd.Series(index=self.subsitute_layer.index.unique(), data=0)
+ text += repr(ind) + ' : ' + repr(num) + '\n'
+
+ text += '\n\n'
+
+ index_number_list = pd.Series(
+ index=self.subsitute_layer.index.unique(), data=0
+ )
for ind in joined_map.index.to_list():
- index_number_list.loc[ind] = index_number_list.loc[ind] + 1
-
+ index_number_list.loc[ind] = index_number_list.loc[ind] + 1 # noqa: PLR6104
+
index_number_list = index_number_list[index_number_list > 1]
index_number_list = index_number_list.sort_values(ascending=False)
-
+
if len(index_number_list) > 0:
- text += "The following substitute layer feature have multiple nodes\n"
- i=1
+ text += 'The following substitute layer features have multiple nodes\n'
+ i = 1
for ind, num in index_number_list.iteritems():
st = self.subsitute_layer.loc[ind]
- st = st.drop("geometry")
- text += repr(st) + " : "+repr(num) + "\n"
- text += "---------- "+ repr(i)+" ----------"
- i+=1
+ st = st.drop('geometry')
+ text += repr(st) + ' : ' + repr(num) + '\n'
+ text += '---------- ' + repr(i) + ' ----------'
+ i += 1
self.sub_error_text_edit.setText(text)
-
-
-
-
-
\ No newline at end of file
diff --git a/modules/systemPerformance/REWET/REWET/GUI/Subsitute_Layer_Window.py b/modules/systemPerformance/REWET/REWET/GUI/Subsitute_Layer_Window.py
index ad0a234bd..82b19b2e2 100644
--- a/modules/systemPerformance/REWET/REWET/GUI/Subsitute_Layer_Window.py
+++ b/modules/systemPerformance/REWET/REWET/GUI/Subsitute_Layer_Window.py
@@ -1,6 +1,4 @@
-# -*- coding: utf-8 -*-
-
-# Form implementation generated from reading ui file 'Subsitute_Layer_Window.ui'
+# Form implementation generated from reading ui file 'Subsitute_Layer_Window.ui' # noqa: CPY001, D100, N999
#
# Created by: PyQt5 UI code generator 5.12.3
#
@@ -10,22 +8,30 @@
from PyQt5 import QtCore, QtGui, QtWidgets
-class Ui_subsitite_layer_dialoge(object):
- def setupUi(self, subsitite_layer_dialoge):
- subsitite_layer_dialoge.setObjectName("subsitite_layer_dialoge")
+class Ui_subsitite_layer_dialoge: # noqa: D101
+ def setupUi(self, subsitite_layer_dialoge): # noqa: N802, D102
+ subsitite_layer_dialoge.setObjectName('subsitite_layer_dialoge')
subsitite_layer_dialoge.resize(403, 407)
- self.Subsitute_buttonBox = QtWidgets.QDialogButtonBox(subsitite_layer_dialoge)
+ self.Subsitute_buttonBox = QtWidgets.QDialogButtonBox(
+ subsitite_layer_dialoge
+ )
self.Subsitute_buttonBox.setGeometry(QtCore.QRect(110, 360, 261, 32))
self.Subsitute_buttonBox.setOrientation(QtCore.Qt.Horizontal)
- self.Subsitute_buttonBox.setStandardButtons(QtWidgets.QDialogButtonBox.Apply|QtWidgets.QDialogButtonBox.Cancel|QtWidgets.QDialogButtonBox.Ok)
- self.Subsitute_buttonBox.setObjectName("Subsitute_buttonBox")
+ self.Subsitute_buttonBox.setStandardButtons(
+ QtWidgets.QDialogButtonBox.Apply
+ | QtWidgets.QDialogButtonBox.Cancel
+ | QtWidgets.QDialogButtonBox.Ok
+ )
+ self.Subsitute_buttonBox.setObjectName('Subsitute_buttonBox')
self.subsitute_layer_addr_line = QtWidgets.QLineEdit(subsitite_layer_dialoge)
self.subsitute_layer_addr_line.setGeometry(QtCore.QRect(20, 40, 261, 20))
self.subsitute_layer_addr_line.setReadOnly(True)
- self.subsitute_layer_addr_line.setObjectName("subsitute_layer_addr_line")
- self.population_browser_button = QtWidgets.QPushButton(subsitite_layer_dialoge)
+ self.subsitute_layer_addr_line.setObjectName('subsitute_layer_addr_line')
+ self.population_browser_button = QtWidgets.QPushButton(
+ subsitite_layer_dialoge
+ )
self.population_browser_button.setGeometry(QtCore.QRect(290, 40, 81, 23))
- self.population_browser_button.setObjectName("population_browser_button")
+ self.population_browser_button.setObjectName('population_browser_button')
self.label_27 = QtWidgets.QLabel(subsitite_layer_dialoge)
self.label_27.setGeometry(QtCore.QRect(20, 20, 121, 16))
font = QtGui.QFont()
@@ -33,14 +39,20 @@ def setupUi(self, subsitite_layer_dialoge):
font.setBold(False)
font.setWeight(50)
self.label_27.setFont(font)
- self.label_27.setObjectName("label_27")
+ self.label_27.setObjectName('label_27')
self.groupBox = QtWidgets.QGroupBox(subsitite_layer_dialoge)
self.groupBox.setGeometry(QtCore.QRect(20, 70, 351, 61))
- self.groupBox.setObjectName("groupBox")
- self.subsitute_layer_projection_name_line = QtWidgets.QLineEdit(self.groupBox)
- self.subsitute_layer_projection_name_line.setGeometry(QtCore.QRect(110, 30, 231, 20))
+ self.groupBox.setObjectName('groupBox')
+ self.subsitute_layer_projection_name_line = QtWidgets.QLineEdit(
+ self.groupBox
+ )
+ self.subsitute_layer_projection_name_line.setGeometry(
+ QtCore.QRect(110, 30, 231, 20)
+ )
self.subsitute_layer_projection_name_line.setReadOnly(True)
- self.subsitute_layer_projection_name_line.setObjectName("subsitute_layer_projection_name_line")
+ self.subsitute_layer_projection_name_line.setObjectName(
+ 'subsitute_layer_projection_name_line'
+ )
self.label_28 = QtWidgets.QLabel(self.groupBox)
self.label_28.setGeometry(QtCore.QRect(10, 30, 121, 16))
font = QtGui.QFont()
@@ -48,13 +60,13 @@ def setupUi(self, subsitite_layer_dialoge):
font.setBold(False)
font.setWeight(50)
self.label_28.setFont(font)
- self.label_28.setObjectName("label_28")
+ self.label_28.setObjectName('label_28')
self.iUse_sub_checkbox = QtWidgets.QCheckBox(subsitite_layer_dialoge)
self.iUse_sub_checkbox.setGeometry(QtCore.QRect(20, 150, 141, 17))
- self.iUse_sub_checkbox.setObjectName("iUse_sub_checkbox")
+ self.iUse_sub_checkbox.setObjectName('iUse_sub_checkbox')
self.sub_error_text_edit = QtWidgets.QTextEdit(subsitite_layer_dialoge)
self.sub_error_text_edit.setGeometry(QtCore.QRect(25, 191, 341, 151))
- self.sub_error_text_edit.setObjectName("sub_error_text_edit")
+ self.sub_error_text_edit.setObjectName('sub_error_text_edit')
self.label_29 = QtWidgets.QLabel(subsitite_layer_dialoge)
self.label_29.setGeometry(QtCore.QRect(30, 170, 121, 16))
font = QtGui.QFont()
@@ -62,26 +74,39 @@ def setupUi(self, subsitite_layer_dialoge):
font.setBold(False)
font.setWeight(50)
self.label_29.setFont(font)
- self.label_29.setObjectName("label_29")
+ self.label_29.setObjectName('label_29')
self.retranslateUi(subsitite_layer_dialoge)
self.Subsitute_buttonBox.accepted.connect(subsitite_layer_dialoge.accept)
self.Subsitute_buttonBox.rejected.connect(subsitite_layer_dialoge.reject)
QtCore.QMetaObject.connectSlotsByName(subsitite_layer_dialoge)
- def retranslateUi(self, subsitite_layer_dialoge):
+ def retranslateUi(self, subsitite_layer_dialoge): # noqa: N802, D102
_translate = QtCore.QCoreApplication.translate
- subsitite_layer_dialoge.setWindowTitle(_translate("subsitite_layer_dialoge", "Dialog"))
- self.population_browser_button.setText(_translate("subsitite_layer_dialoge", "Browse"))
- self.label_27.setText(_translate("subsitite_layer_dialoge", "Subsitute Layer File"))
- self.groupBox.setTitle(_translate("subsitite_layer_dialoge", "Projection System"))
- self.label_28.setText(_translate("subsitite_layer_dialoge", "Subsitute Projection"))
- self.iUse_sub_checkbox.setText(_translate("subsitite_layer_dialoge", "Use the substitute Layer"))
- self.label_29.setText(_translate("subsitite_layer_dialoge", "Warnings"))
+ subsitite_layer_dialoge.setWindowTitle(
+ _translate('subsitite_layer_dialoge', 'Dialog')
+ )
+ self.population_browser_button.setText(
+ _translate('subsitite_layer_dialoge', 'Browse')
+ )
+ self.label_27.setText(
+ _translate('subsitite_layer_dialoge', 'Substitute Layer File')
+ )
+ self.groupBox.setTitle(
+ _translate('subsitite_layer_dialoge', 'Projection System')
+ )
+ self.label_28.setText(
+ _translate('subsitite_layer_dialoge', 'Substitute Projection')
+ )
+ self.iUse_sub_checkbox.setText(
+ _translate('subsitite_layer_dialoge', 'Use the substitute Layer')
+ )
+ self.label_29.setText(_translate('subsitite_layer_dialoge', 'Warnings'))
-if __name__ == "__main__":
+if __name__ == '__main__':
import sys
+
app = QtWidgets.QApplication(sys.argv)
subsitite_layer_dialoge = QtWidgets.QDialog()
ui = Ui_subsitite_layer_dialoge()
diff --git a/modules/systemPerformance/REWET/REWET/GUI/Subsitute_Layer_Window.ui b/modules/systemPerformance/REWET/REWET/GUI/Subsitute_Layer_Window.ui
index c56addb6d..aae14c563 100644
--- a/modules/systemPerformance/REWET/REWET/GUI/Subsitute_Layer_Window.ui
+++ b/modules/systemPerformance/REWET/REWET/GUI/Subsitute_Layer_Window.ui
@@ -72,7 +72,7 @@
- Subsitute Layer File
+ Substitute Layer File
@@ -117,7 +117,7 @@
- Subsitute Projection
+ Substitute Projection
diff --git a/modules/systemPerformance/REWET/REWET/GUI/Symbology_Designer.py b/modules/systemPerformance/REWET/REWET/GUI/Symbology_Designer.py
index f377542e6..7b7fd3805 100644
--- a/modules/systemPerformance/REWET/REWET/GUI/Symbology_Designer.py
+++ b/modules/systemPerformance/REWET/REWET/GUI/Symbology_Designer.py
@@ -1,22 +1,20 @@
-# -*- coding: utf-8 -*-
-"""
-Created on Fri Jan 6 00:08:01 2023
+"""Created on Fri Jan 6 00:08:01 2023
@author: snaeimi
-"""
+""" # noqa: CPY001, D400, N999
+
import sys
-from PyQt5 import QtCore, QtWidgets
-from GUI.Symbology_Window import Ui_Symbology_Dialog
-import pandas as pd
-import geopandas as gpd
+
+import geopandas as gpd
import mapclassify
import matplotlib.pylab as plt
-from matplotlib.backends.backend_qt5agg import FigureCanvas
-from matplotlib.backends.backend_qt5agg import NavigationToolbar2QT as NavigationToolbar
-
+import pandas as pd
+from GUI.Symbology_Window import Ui_Symbology_Dialog
+from matplotlib.backends.backend_qt5agg import FigureCanvas
+from PyQt5 import QtCore, QtWidgets
-class Symbology_Designer(Ui_Symbology_Dialog):
+class Symbology_Designer(Ui_Symbology_Dialog): # noqa: D101
def __init__(self, sym, data, val_column):
super().__init__()
self._window = QtWidgets.QDialog()
@@ -28,256 +26,278 @@ def __init__(self, sym, data, val_column):
self.current_item_value = None
self.fig, self.ax1 = plt.subplots()
self.legend_widget = FigureCanvas(self.fig)
- lay = QtWidgets.QVBoxLayout(self.sample_legend_widget)
- lay.setContentsMargins(0, 0, 0, 0)
+ lay = QtWidgets.QVBoxLayout(self.sample_legend_widget)
+ lay.setContentsMargins(0, 0, 0, 0)
lay.addWidget(self.legend_widget)
self.initializeForm()
-
+
self.method_combo.currentTextChanged.connect(self.methodChanged)
self.range_table.currentItemChanged.connect(self.currentItemChanged)
self.range_table.itemChanged.connect(self.tableDataChanged)
- #self.range_table.currentItemChanged.connect(self.currentItemChanged)
+ # self.range_table.currentItemChanged.connect(self.currentItemChanged)
self.remove_button.clicked.connect(self.removeButtonClicked)
self.color_combo.currentTextChanged.connect(self.colorChanged)
- self.no_clases_line.editingFinished.connect(self.numberOfClassEditingFinished)
- self.add_up_button.clicked.connect(lambda : self.addByButton("UP"))
- self.add_below_button.clicked.connect(lambda : self.addByButton("DOWN"))
-
- self.sample_legend_widget
-
- def initializeForm(self):
- self.method_combo.setCurrentText(self.sym["Method"])
- if self.sym["Method"] == "FisherJenks" or self.sym["Method"] == "EqualInterval":
- self.no_clases_line.setText(str(self.sym["kw"]["k"]))
+ self.no_clases_line.editingFinished.connect(
+ self.numberOfClassEditingFinished
+ )
+ self.add_up_button.clicked.connect(lambda: self.addByButton('UP'))
+ self.add_below_button.clicked.connect(lambda: self.addByButton('DOWN'))
+
+ self.sample_legend_widget # noqa: B018
+
+ def initializeForm(self): # noqa: N802, D102
+ self.method_combo.setCurrentText(self.sym['Method'])
+ if (
+ self.sym['Method'] == 'FisherJenks'
+ or self.sym['Method'] == 'EqualInterval'
+ ):
+ self.no_clases_line.setText(str(self.sym['kw']['k']))
else:
self.no_clases_line.clear()
self.no_clases_line.setEnabled(False)
self.updateTable()
- #self.updateLegendSample()
-
- def addByButton(self, add_location):
- to_be_added_row = None
+ # self.updateLegendSample()
+
+ def addByButton(self, add_location): # noqa: N802, D102
+ to_be_added_row = None # noqa: F841
selected_item_list = self.range_table.selectedItems()
if len(selected_item_list) == 0:
return
- else:
+ else: # noqa: RET505
selected_row = selected_item_list[0].row()
-
- if add_location == "UP":
- old_value = float(self.range_table.item(selected_row, 0).text() )
+
+ if add_location == 'UP':
+ old_value = float(self.range_table.item(selected_row, 0).text())
if selected_row == 0:
- other_side_value = float(self.range_table.item(selected_row, 1).text() )
+ other_side_value = float(
+ self.range_table.item(selected_row, 1).text()
+ )
else:
- other_side_value = float(self.range_table.item(selected_row-1, 0).text() )
- elif add_location == "DOWN":
- old_value = float(self.range_table.item(selected_row, 1).text() )
- if selected_row == (self.range_table.rowCount()-1):
- other_side_value = float(self.range_table.item(selected_row, 1).text() )
+ other_side_value = float(
+ self.range_table.item(selected_row - 1, 0).text()
+ )
+ elif add_location == 'DOWN':
+ old_value = float(self.range_table.item(selected_row, 1).text())
+ if selected_row == (self.range_table.rowCount() - 1):
+ other_side_value = float(
+ self.range_table.item(selected_row, 1).text()
+ )
else:
- other_side_value = float(self.range_table.item(selected_row+1, 1).text() )
-
+ other_side_value = float(
+ self.range_table.item(selected_row + 1, 1).text()
+ )
+
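+ # Insert a new class break at the midpoint between the selected boundary and its neighbour, then switch to user-defined bins.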
bins = self.class_data.bins.tolist()
- #bins[bins==old_value] = new_item_value
- bins.append((old_value+other_side_value)/2)
+ # bins[bins==old_value] = new_item_value
+ bins.append((old_value + other_side_value) / 2)
bins.sort()
- kw = {"bins":bins}
+ kw = {'bins': bins}
self.bins = bins
- self.sym["kw"] = kw
-
- if self.sym["Method"] != "UserDefined":
- self.method_combo.blockSignals(True)
- self.sym["Method"] = "UserDefined"
+ self.sym['kw'] = kw
+
+ if self.sym['Method'] != 'UserDefined':
+ self.method_combo.blockSignals(True) # noqa: FBT003
+ self.sym['Method'] = 'UserDefined'
self.no_clases_line.setEnabled(False)
- self.method_combo.setCurrentText("User Defined")
- self.method_combo.blockSignals(False)
+ self.method_combo.setCurrentText('User Defined')
+ self.method_combo.blockSignals(False) # noqa: FBT003
self.updateTable()
-
- def numberOfClassEditingFinished(self):
+
+ def numberOfClassEditingFinished(self): # noqa: N802, D102
k = float(self.no_clases_line.text())
k = int(k)
- kw = {'k':k}
- self.sym["kw"] = kw
+ kw = {'k': k}
+ self.sym['kw'] = kw
self.updateTable()
-
- def colorChanged(self, text):
- self.sym["Color"] = text
+
+ def colorChanged(self, text): # noqa: N802, D102
+ self.sym['Color'] = text
self.updateLegendSample()
-
- def updateLegendSample(self):
- fig, ax = plt.subplots()
- self.plotted_map.plot(ax=ax, cax=self.ax1, column=self.val_column, cmap=self.sym["Color"], legend=True)
- self.legend_widget.draw()
- #self.mpl_map.canvas.fig.tight_layout()
-
- def updateTable(self):
- self.range_table.blockSignals(True)
+
+ def updateLegendSample(self): # noqa: N802, D102
+ fig, ax = plt.subplots() # noqa: F841
+ self.plotted_map.plot(
+ ax=ax,
+ cax=self.ax1,
+ column=self.val_column,
+ cmap=self.sym['Color'],
+ legend=True,
+ )
+ self.legend_widget.draw()
+ # self.mpl_map.canvas.fig.tight_layout()
+
+ def updateTable(self): # noqa: N802, D102
+ self.range_table.blockSignals(True) # noqa: FBT003
self.clearRangeTable()
- if self.sym["Method"] == "FisherJenks":
- self.class_data = mapclassify.FisherJenks(self.data, self.sym["kw"]["k"])
- elif self.sym["Method"] == "EqualInterval":
- self.class_data = mapclassify.EqualInterval(self.data, self.sym["kw"]["k"])
- elif self.sym["Method"] == "UserDefined":
- self.class_data = mapclassify.UserDefined(self.data, self.sym["kw"]["bins"])
+ if self.sym['Method'] == 'FisherJenks':
+ self.class_data = mapclassify.FisherJenks(self.data, self.sym['kw']['k'])
+ elif self.sym['Method'] == 'EqualInterval':
+ self.class_data = mapclassify.EqualInterval(
+ self.data, self.sym['kw']['k']
+ )
+ elif self.sym['Method'] == 'UserDefined':
+ self.class_data = mapclassify.UserDefined(
+ self.data, self.sym['kw']['bins']
+ )
else:
- raise ValueError("Unknown symbology method: "+repr(self.sym["Method"]) )
+ raise ValueError('Unknown symbology method: ' + repr(self.sym['Method']))
min_val = self.data.min()
max_val = self.data.max()
-
+
bins = [min_val]
bins.extend(self.class_data.bins.tolist())
bins.append(max_val)
-
+
bins = pd.Series(bins)
bins = bins.unique()
bins = bins.tolist()
-
- for i in range(len(bins)-1 ):
+
+ for i in range(len(bins) - 1):
number_of_rows = self.range_table.rowCount()
self.range_table.insertRow(number_of_rows)
- beg_item = QtWidgets.QTableWidgetItem(str(bins[i]) )
- end_item = QtWidgets.QTableWidgetItem(str(bins[i+1]) )
- count_item = QtWidgets.QTableWidgetItem(str(self.class_data.counts[i]) )
-
-
+ beg_item = QtWidgets.QTableWidgetItem(str(bins[i]))
+ end_item = QtWidgets.QTableWidgetItem(str(bins[i + 1]))
+ count_item = QtWidgets.QTableWidgetItem(str(self.class_data.counts[i]))
+
if i == 0:
beg_item.setFlags(QtCore.Qt.NoItemFlags)
-
- if i == len(bins)-2:
+
+ if i == len(bins) - 2:
end_item.setFlags(QtCore.Qt.NoItemFlags)
-
+
count_item.setFlags(QtCore.Qt.NoItemFlags)
-
- self.range_table.setItem(number_of_rows, 0, beg_item )
- self.range_table.setItem(number_of_rows, 1, end_item )
- self.range_table.setItem(number_of_rows, 2, count_item )
-
- self.range_table.blockSignals(False)
+
+ self.range_table.setItem(number_of_rows, 0, beg_item)
+ self.range_table.setItem(number_of_rows, 1, end_item)
+ self.range_table.setItem(number_of_rows, 2, count_item)
+
+ self.range_table.blockSignals(False) # noqa: FBT003
self.updateLegendSample()
-
- def clearRangeTable(self):
- for i in range(self.range_table.rowCount()):
+
+ def clearRangeTable(self): # noqa: N802, D102
+ for i in range(self.range_table.rowCount()): # noqa: B007
self.range_table.removeRow(0)
-
- def methodChanged(self, text):
- print(text)
- if text == "FisherJenks":
- self.sym["Method"] = "FisherJenks"
- elif text == "Equal Interval":
- self.sym["Method"] = "EqualInterval"
- elif text == "User Defined":
- self.sym["Method"] = "UserDefined"
-
- if text == "FisherJenks" or text == "Equal Interval":
+
+ def methodChanged(self, text): # noqa: N802, D102
+ print(text) # noqa: T201
+ if text == 'FisherJenks':
+ self.sym['Method'] = 'FisherJenks'
+ elif text == 'Equal Interval':
+ self.sym['Method'] = 'EqualInterval'
+ elif text == 'User Defined':
+ self.sym['Method'] = 'UserDefined'
+
+ if text == 'FisherJenks' or text == 'Equal Interval': # noqa: PLR1714
k = float(self.no_clases_line.text())
k = int(k)
- kw = {'k':k}
- elif text == "User Defined":
+ kw = {'k': k}
+ elif text == 'User Defined':
self.no_clases_line.setEnabled(False)
- #bins = self.getUserDefinedBins()
+ # bins = self.getUserDefinedBins()
try:
- kw = {'bins':self.bins}
- except:
- kw = {'bins':self.class_data}
+ kw = {'bins': self.bins}
+ except: # noqa: E722
+ kw = {'bins': self.class_data}
else:
- raise
-
- self.sym["kw"] = kw
+ raise # noqa: PLE0704
+
+ self.sym['kw'] = kw
self.updateTable()
-
- def currentItemChanged(self, current, previous):
- if current != None:
+
+ def currentItemChanged(self, current, previous): # noqa: ARG002, N802, D102
+ if current != None: # noqa: E711
self.current_item_value = float(current.text())
- print("cur "+repr(self.current_item_value) )
-
- def tableDataChanged(self, item):
- #row = item.row()
- #col = item.column()
-
- #item_text = self.range_table.item(row, col).text()
+ print('cur ' + repr(self.current_item_value)) # noqa: T201
+
+ def tableDataChanged(self, item): # noqa: N802, D102
+ # row = item.row()
+ # col = item.column()
+
+ # item_text = self.range_table.item(row, col).text()
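+ # Validate the edited boundary: if it is not numeric or falls outside the data range, restore the previous value.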
previous_item_value = float(self.current_item_value)
try:
- new_item_value = float(item.text() )
+ new_item_value = float(item.text())
if new_item_value < self.data.min() or new_item_value > self.data.max():
- raise
- except:
- self.range_table.item(item.row(),item.column() ).setText(str(previous_item_value) )
+ raise # noqa: PLE0704
+ except: # noqa: E722
+ self.range_table.item(item.row(), item.column()).setText(
+ str(previous_item_value)
+ )
return
-
-
-
+
bins = self.class_data.bins
- bins[bins==previous_item_value] = new_item_value
+ bins[bins == previous_item_value] = new_item_value
bins.sort()
- kw = {"bins":bins}
+ kw = {'bins': bins}
self.bins = bins
- self.sym["kw"] = kw
-
- if self.sym["Method"] != "UserDefined":
- self.method_combo.blockSignals(True)
- self.sym["Method"] = "UserDefined"
+ self.sym['kw'] = kw
+
+ if self.sym['Method'] != 'UserDefined':
+ self.method_combo.blockSignals(True) # noqa: FBT003
+ self.sym['Method'] = 'UserDefined'
self.no_clases_line.setEnabled(False)
- self.method_combo.setCurrentText("User Defined")
- self.method_combo.blockSignals(False)
+ self.method_combo.setCurrentText('User Defined')
+ self.method_combo.blockSignals(False) # noqa: FBT003
self.updateTable()
-
+
return
-
-
- def findBeginingRowFor(self, value):
+
+ def findBeginingRowFor(self, value): # noqa: N802, D102
if self.range_table.rowCount() == 0:
- raise
-
- for i in range(self.range_table.rowCount() -1):
- current_item_value = float(self.range_table.item(i, 0).text() )
- next_item_value = float(self.range_table.item(i+1, 0).text() )
+ raise # noqa: PLE0704
+
+ for i in range(self.range_table.rowCount() - 1):
+ current_item_value = float(self.range_table.item(i, 0).text())
+ next_item_value = float(self.range_table.item(i + 1, 0).text())
if value >= current_item_value and next_item_value < current_item_value:
return i
- return self.range_table.rowCount()-1
-
- def findEndingRowFor(self, value):
+ return self.range_table.rowCount() - 1
+
+ def findEndingRowFor(self, value): # noqa: N802, D102
if self.range_table.rowCount() == 0:
- raise
-
- for i in range(self.range_table.rowCount() -1):
- current_item_value = float(self.range_table.item(i, 1).text() )
- next_item_value = float(self.range_table.item(i+1, 1).text() )
+ raise # noqa: PLE0704
+
+ for i in range(self.range_table.rowCount() - 1):
+ current_item_value = float(self.range_table.item(i, 1).text())
+ next_item_value = float(self.range_table.item(i + 1, 1).text())
if value > current_item_value and next_item_value >= current_item_value:
- return i+1
- return self.range_table.rowCount()-1
-
- def removeButtonClicked(self):
+ return i + 1
+ return self.range_table.rowCount() - 1
+
+ def removeButtonClicked(self): # noqa: N802, D102
selected_item_list = self.range_table.selectedItems()
if len(selected_item_list) == 0:
return
selected_row = selected_item_list[0].row()
self.removeRow(selected_row)
-
- def removeRow(self, row):
- if row == 0 and self.range_table.rowCount() >= 2:
+ def removeRow(self, row): # noqa: N802, D102
+ if row == 0 and self.range_table.rowCount() >= 2: # noqa: PLR2004
item_text = self.range_table.item(row, 0).text()
self.range_table.removeRow(0)
- self.range_table.item(0,0).setText(item_text)
- elif row == self.range_table.rowCount()-1 and self.range_table.rowCount() >= 2:
+ self.range_table.item(0, 0).setText(item_text)
+ elif (
+ row == self.range_table.rowCount() - 1
+ and self.range_table.rowCount() >= 2 # noqa: PLR2004
+ ):
item_text = self.range_table.item(row, 1).text()
self.range_table.removeRow(row)
- self.range_table.item(row-1,1).setText(item_text)
+ self.range_table.item(row - 1, 1).setText(item_text)
elif self.range_table.rowCount() == 1:
self.range_table.removeRow(0)
else:
beg_text = self.range_table.item(row, 0).text()
end_text = self.range_table.item(row, 1).text()
self.range_table.removeRow(row)
- self.range_table.item(row-1,1).setText(beg_text)
- self.range_table.item(row,0).setText(end_text)
-
-
-
-if __name__ == "__main__":
- symbology = {"Method":"FisherJenks", "kw":{"k":5}}
- s = gpd.read_file("ss2.shp" )
- print(s.columns)
+ self.range_table.item(row - 1, 1).setText(beg_text)
+ self.range_table.item(row, 0).setText(end_text)
+
+
+if __name__ == '__main__':
+ symbology = {'Method': 'FisherJenks', 'kw': {'k': 5}}
+ s = gpd.read_file('ss2.shp')
+ print(s.columns) # noqa: T201
app = QtWidgets.QApplication(sys.argv)
- ss = Symbology_Designer(symbology, s["restoratio"])
- ss._window.show()
+ ss = Symbology_Designer(symbology, s['restoratio'])
+ ss._window.show() # noqa: SLF001
sys.exit(app.exec_())
diff --git a/modules/systemPerformance/REWET/REWET/GUI/Symbology_Window.py b/modules/systemPerformance/REWET/REWET/GUI/Symbology_Window.py
index 9467f3f23..3cffc2c21 100644
--- a/modules/systemPerformance/REWET/REWET/GUI/Symbology_Window.py
+++ b/modules/systemPerformance/REWET/REWET/GUI/Symbology_Window.py
@@ -1,29 +1,31 @@
-# -*- coding: utf-8 -*-
-
-# Form implementation generated from reading ui file 'Symbology_Window.ui'
+# Form implementation generated from reading ui file 'Symbology_Window.ui' # noqa: CPY001, D100, N999
#
# Created by: PyQt5 UI code generator 5.12.3
#
# WARNING! All changes made in this file will be lost!
-from PyQt5 import QtCore, QtGui, QtWidgets
+from PyQt5 import QtCore, QtWidgets
-class Ui_Symbology_Dialog(object):
- def setupUi(self, Symbology_Dialog):
- Symbology_Dialog.setObjectName("Symbology_Dialog")
+class Ui_Symbology_Dialog: # noqa: D101
+ def setupUi(self, Symbology_Dialog): # noqa: N802, N803, D102
+ Symbology_Dialog.setObjectName('Symbology_Dialog')
Symbology_Dialog.resize(491, 410)
self.buttonBox = QtWidgets.QDialogButtonBox(Symbology_Dialog)
self.buttonBox.setGeometry(QtCore.QRect(180, 360, 161, 31))
self.buttonBox.setOrientation(QtCore.Qt.Horizontal)
- self.buttonBox.setStandardButtons(QtWidgets.QDialogButtonBox.Cancel|QtWidgets.QDialogButtonBox.Ok)
- self.buttonBox.setObjectName("buttonBox")
+ self.buttonBox.setStandardButtons(
+ QtWidgets.QDialogButtonBox.Cancel | QtWidgets.QDialogButtonBox.Ok
+ )
+ self.buttonBox.setObjectName('buttonBox')
self.range_table = QtWidgets.QTableWidget(Symbology_Dialog)
self.range_table.setGeometry(QtCore.QRect(30, 80, 311, 261))
- self.range_table.setSelectionMode(QtWidgets.QAbstractItemView.SingleSelection)
+ self.range_table.setSelectionMode(
+ QtWidgets.QAbstractItemView.SingleSelection
+ )
self.range_table.setSelectionBehavior(QtWidgets.QAbstractItemView.SelectRows)
- self.range_table.setObjectName("range_table")
+ self.range_table.setObjectName('range_table')
self.range_table.setColumnCount(3)
self.range_table.setRowCount(0)
item = QtWidgets.QTableWidgetItem()
@@ -35,101 +37,108 @@ def setupUi(self, Symbology_Dialog):
self.range_table.horizontalHeader().setStretchLastSection(True)
self.method_combo = QtWidgets.QComboBox(Symbology_Dialog)
self.method_combo.setGeometry(QtCore.QRect(30, 40, 111, 22))
- self.method_combo.setObjectName("method_combo")
- self.method_combo.addItem("")
- self.method_combo.addItem("")
- self.method_combo.addItem("")
+ self.method_combo.setObjectName('method_combo')
+ self.method_combo.addItem('')
+ self.method_combo.addItem('')
+ self.method_combo.addItem('')
self.label = QtWidgets.QLabel(Symbology_Dialog)
self.label.setGeometry(QtCore.QRect(30, 20, 47, 13))
- self.label.setObjectName("label")
+ self.label.setObjectName('label')
self.add_below_button = QtWidgets.QPushButton(Symbology_Dialog)
self.add_below_button.setGeometry(QtCore.QRect(350, 110, 61, 23))
- self.add_below_button.setObjectName("add_below_button")
+ self.add_below_button.setObjectName('add_below_button')
self.remove_button = QtWidgets.QPushButton(Symbology_Dialog)
self.remove_button.setGeometry(QtCore.QRect(350, 140, 61, 23))
- self.remove_button.setObjectName("remove_button")
+ self.remove_button.setObjectName('remove_button')
self.no_clases_line = QtWidgets.QLineEdit(Symbology_Dialog)
self.no_clases_line.setGeometry(QtCore.QRect(220, 40, 61, 20))
- self.no_clases_line.setObjectName("no_clases_line")
+ self.no_clases_line.setObjectName('no_clases_line')
self.label_2 = QtWidgets.QLabel(Symbology_Dialog)
self.label_2.setGeometry(QtCore.QRect(220, 20, 61, 16))
- self.label_2.setObjectName("label_2")
+ self.label_2.setObjectName('label_2')
self.sample_legend_widget = QtWidgets.QWidget(Symbology_Dialog)
self.sample_legend_widget.setGeometry(QtCore.QRect(350, 220, 131, 111))
- self.sample_legend_widget.setObjectName("sample_legend_widget")
+ self.sample_legend_widget.setObjectName('sample_legend_widget')
self.color_combo = QtWidgets.QComboBox(Symbology_Dialog)
self.color_combo.setGeometry(QtCore.QRect(350, 190, 91, 22))
- self.color_combo.setObjectName("color_combo")
- self.color_combo.addItem("")
- self.color_combo.addItem("")
- self.color_combo.addItem("")
- self.color_combo.addItem("")
- self.color_combo.addItem("")
- self.color_combo.addItem("")
- self.color_combo.addItem("")
- self.color_combo.addItem("")
- self.color_combo.addItem("")
- self.color_combo.addItem("")
- self.color_combo.addItem("")
- self.color_combo.addItem("")
- self.color_combo.addItem("")
- self.color_combo.addItem("")
- self.color_combo.addItem("")
- self.color_combo.addItem("")
- self.color_combo.addItem("")
- self.color_combo.addItem("")
+ self.color_combo.setObjectName('color_combo')
+ self.color_combo.addItem('')
+ self.color_combo.addItem('')
+ self.color_combo.addItem('')
+ self.color_combo.addItem('')
+ self.color_combo.addItem('')
+ self.color_combo.addItem('')
+ self.color_combo.addItem('')
+ self.color_combo.addItem('')
+ self.color_combo.addItem('')
+ self.color_combo.addItem('')
+ self.color_combo.addItem('')
+ self.color_combo.addItem('')
+ self.color_combo.addItem('')
+ self.color_combo.addItem('')
+ self.color_combo.addItem('')
+ self.color_combo.addItem('')
+ self.color_combo.addItem('')
+ self.color_combo.addItem('')
self.label_3 = QtWidgets.QLabel(Symbology_Dialog)
self.label_3.setGeometry(QtCore.QRect(350, 170, 81, 20))
- self.label_3.setObjectName("label_3")
+ self.label_3.setObjectName('label_3')
self.add_up_button = QtWidgets.QPushButton(Symbology_Dialog)
self.add_up_button.setGeometry(QtCore.QRect(350, 80, 61, 23))
- self.add_up_button.setObjectName("add_up_button")
+ self.add_up_button.setObjectName('add_up_button')
self.retranslateUi(Symbology_Dialog)
self.buttonBox.accepted.connect(Symbology_Dialog.accept)
self.buttonBox.rejected.connect(Symbology_Dialog.reject)
QtCore.QMetaObject.connectSlotsByName(Symbology_Dialog)
- def retranslateUi(self, Symbology_Dialog):
+ def retranslateUi(self, Symbology_Dialog): # noqa: N802, N803, D102
_translate = QtCore.QCoreApplication.translate
- Symbology_Dialog.setWindowTitle(_translate("Symbology_Dialog", "Dialog"))
+ Symbology_Dialog.setWindowTitle(_translate('Symbology_Dialog', 'Dialog'))
item = self.range_table.horizontalHeaderItem(0)
- item.setText(_translate("Symbology_Dialog", "Begining"))
+ item.setText(_translate('Symbology_Dialog', 'Beginning'))
item = self.range_table.horizontalHeaderItem(1)
- item.setText(_translate("Symbology_Dialog", "End"))
+ item.setText(_translate('Symbology_Dialog', 'End'))
item = self.range_table.horizontalHeaderItem(2)
- item.setText(_translate("Symbology_Dialog", "Counts"))
- self.method_combo.setItemText(0, _translate("Symbology_Dialog", "FisherJenks"))
- self.method_combo.setItemText(1, _translate("Symbology_Dialog", "Equal Interval"))
- self.method_combo.setItemText(2, _translate("Symbology_Dialog", "User Defined"))
- self.label.setText(_translate("Symbology_Dialog", "Method"))
- self.add_below_button.setText(_translate("Symbology_Dialog", "Add-Below"))
- self.remove_button.setText(_translate("Symbology_Dialog", "Remove"))
- self.label_2.setText(_translate("Symbology_Dialog", "No. of clasess"))
- self.color_combo.setItemText(0, _translate("Symbology_Dialog", "Blues"))
- self.color_combo.setItemText(1, _translate("Symbology_Dialog", "Greens"))
- self.color_combo.setItemText(2, _translate("Symbology_Dialog", "Greys"))
- self.color_combo.setItemText(3, _translate("Symbology_Dialog", "Reds"))
- self.color_combo.setItemText(4, _translate("Symbology_Dialog", "Oranges"))
- self.color_combo.setItemText(5, _translate("Symbology_Dialog", "binary"))
- self.color_combo.setItemText(6, _translate("Symbology_Dialog", "gist_grey"))
- self.color_combo.setItemText(7, _translate("Symbology_Dialog", "Seismic"))
- self.color_combo.setItemText(8, _translate("Symbology_Dialog", "hsv"))
- self.color_combo.setItemText(9, _translate("Symbology_Dialog", "Pastel1"))
- self.color_combo.setItemText(10, _translate("Symbology_Dialog", "Pastel2"))
- self.color_combo.setItemText(11, _translate("Symbology_Dialog", "Set1"))
- self.color_combo.setItemText(12, _translate("Symbology_Dialog", "Set2"))
- self.color_combo.setItemText(13, _translate("Symbology_Dialog", "Set3"))
- self.color_combo.setItemText(14, _translate("Symbology_Dialog", "tab10"))
- self.color_combo.setItemText(15, _translate("Symbology_Dialog", "tab20"))
- self.color_combo.setItemText(16, _translate("Symbology_Dialog", "tab20b"))
- self.color_combo.setItemText(17, _translate("Symbology_Dialog", "tab20c"))
- self.label_3.setText(_translate("Symbology_Dialog", "Color Scheme"))
- self.add_up_button.setText(_translate("Symbology_Dialog", "Add-Up"))
+ item.setText(_translate('Symbology_Dialog', 'Counts'))
+ self.method_combo.setItemText(
+ 0, _translate('Symbology_Dialog', 'FisherJenks')
+ )
+ self.method_combo.setItemText(
+ 1, _translate('Symbology_Dialog', 'Equal Interval')
+ )
+ self.method_combo.setItemText(
+ 2, _translate('Symbology_Dialog', 'User Defined')
+ )
+ self.label.setText(_translate('Symbology_Dialog', 'Method'))
+ self.add_below_button.setText(_translate('Symbology_Dialog', 'Add-Below'))
+ self.remove_button.setText(_translate('Symbology_Dialog', 'Remove'))
+ self.label_2.setText(_translate('Symbology_Dialog', 'No. of classes'))
+ self.color_combo.setItemText(0, _translate('Symbology_Dialog', 'Blues'))
+ self.color_combo.setItemText(1, _translate('Symbology_Dialog', 'Greens'))
+ self.color_combo.setItemText(2, _translate('Symbology_Dialog', 'Greys'))
+ self.color_combo.setItemText(3, _translate('Symbology_Dialog', 'Reds'))
+ self.color_combo.setItemText(4, _translate('Symbology_Dialog', 'Oranges'))
+ self.color_combo.setItemText(5, _translate('Symbology_Dialog', 'binary'))
+ self.color_combo.setItemText(6, _translate('Symbology_Dialog', 'gist_grey'))
+ self.color_combo.setItemText(7, _translate('Symbology_Dialog', 'Seismic'))
+ self.color_combo.setItemText(8, _translate('Symbology_Dialog', 'hsv'))
+ self.color_combo.setItemText(9, _translate('Symbology_Dialog', 'Pastel1'))
+ self.color_combo.setItemText(10, _translate('Symbology_Dialog', 'Pastel2'))
+ self.color_combo.setItemText(11, _translate('Symbology_Dialog', 'Set1'))
+ self.color_combo.setItemText(12, _translate('Symbology_Dialog', 'Set2'))
+ self.color_combo.setItemText(13, _translate('Symbology_Dialog', 'Set3'))
+ self.color_combo.setItemText(14, _translate('Symbology_Dialog', 'tab10'))
+ self.color_combo.setItemText(15, _translate('Symbology_Dialog', 'tab20'))
+ self.color_combo.setItemText(16, _translate('Symbology_Dialog', 'tab20b'))
+ self.color_combo.setItemText(17, _translate('Symbology_Dialog', 'tab20c'))
+ self.label_3.setText(_translate('Symbology_Dialog', 'Color Scheme'))
+ self.add_up_button.setText(_translate('Symbology_Dialog', 'Add-Up'))
-if __name__ == "__main__":
+if __name__ == '__main__':
import sys
+
app = QtWidgets.QApplication(sys.argv)
Symbology_Dialog = QtWidgets.QDialog()
ui = Ui_Symbology_Dialog()
diff --git a/modules/systemPerformance/REWET/REWET/GUI/Symbology_Window.ui b/modules/systemPerformance/REWET/REWET/GUI/Symbology_Window.ui
index 89699d911..d6cbf10b3 100644
--- a/modules/systemPerformance/REWET/REWET/GUI/Symbology_Window.ui
+++ b/modules/systemPerformance/REWET/REWET/GUI/Symbology_Window.ui
@@ -49,7 +49,7 @@
- Begining
+ Beginning
diff --git a/modules/systemPerformance/REWET/REWET/GUI/Tank_Damage_Discovery_Designer.py b/modules/systemPerformance/REWET/REWET/GUI/Tank_Damage_Discovery_Designer.py
index 8e68d9480..9857e1baa 100644
--- a/modules/systemPerformance/REWET/REWET/GUI/Tank_Damage_Discovery_Designer.py
+++ b/modules/systemPerformance/REWET/REWET/GUI/Tank_Damage_Discovery_Designer.py
@@ -1,14 +1,13 @@
-# -*- coding: utf-8 -*-
-"""
-Created on Tue Nov 1 23:25:30 2022
+"""Created on Tue Nov 1 23:25:30 2022
@author: snaeimi
-"""
+""" # noqa: CPY001, D400, N999
from .Damage_Discovery_Designer import Damage_Discovery_Designer
-class Tank_Damage_Discovery_Designer(Damage_Discovery_Designer):
+
+class Tank_Damage_Discovery_Designer(Damage_Discovery_Designer): # noqa: D101
def __init__(self, tank_damage_discovery_model):
super().__init__(tank_damage_discovery_model)
- self._window.setWindowTitle("Tank Damage Discovery")
- self.leak_based_radio.setEnabled(False)
\ No newline at end of file
+ self._window.setWindowTitle('Tank Damage Discovery')
+ self.leak_based_radio.setEnabled(False)
diff --git a/modules/systemPerformance/REWET/REWET/GUI/__init__.py b/modules/systemPerformance/REWET/REWET/GUI/__init__.py
index e69de29bb..b5142d879 100644
--- a/modules/systemPerformance/REWET/REWET/GUI/__init__.py
+++ b/modules/systemPerformance/REWET/REWET/GUI/__init__.py
@@ -0,0 +1 @@
+# noqa: CPY001, D104, N999
diff --git a/modules/systemPerformance/REWET/REWET/GUI/resources/REWET_Resource_rc.py b/modules/systemPerformance/REWET/REWET/GUI/resources/REWET_Resource_rc.py
index 681ceff55..cfc969ea2 100644
--- a/modules/systemPerformance/REWET/REWET/GUI/resources/REWET_Resource_rc.py
+++ b/modules/systemPerformance/REWET/REWET/GUI/resources/REWET_Resource_rc.py
@@ -1,6 +1,4 @@
-# -*- coding: utf-8 -*-
-
-# Resource object code
+# Resource object code # noqa: CPY001, D100, INP001
#
# Created by: The Resource Compiler for PyQt5 (Qt v5.12.9)
#
@@ -8,7 +6,7 @@
from PyQt5 import QtCore
-qt_resource_data = b"\
+qt_resource_data = b'\
\x00\x00\x07\x42\
\x89\
\x50\x4e\x47\x0d\x0a\x1a\x0a\x00\x00\x00\x0d\x49\x48\x44\x52\x00\
@@ -631,9 +629,9 @@
\x11\x16\x69\x3e\xc2\x22\xcd\x47\x48\x64\x32\xbf\x00\x43\x56\xb8\
\x32\x46\xab\x27\xb2\x00\x00\x00\x00\x49\x45\x4e\x44\xae\x42\x60\
\x82\
-"
+'
-qt_resource_name = b"\
+qt_resource_name = b'\
\x00\x11\
\x08\x7a\x74\xd5\
\x00\x6e\
@@ -672,9 +670,9 @@
\x0b\x3a\xfc\x07\
\x00\x6e\
\x00\x6f\x00\x64\x00\x65\x00\x5f\x00\x45\x00\x2e\x00\x70\x00\x6e\x00\x67\
-"
+'
-qt_resource_struct_v1 = b"\
+qt_resource_struct_v1 = b'\
\x00\x00\x00\x00\x00\x02\x00\x00\x00\x01\x00\x00\x00\x01\
\x00\x00\x00\x00\x00\x02\x00\x00\x00\x08\x00\x00\x00\x02\
\x00\x00\x00\x84\x00\x00\x00\x00\x00\x01\x00\x00\x0c\x83\
@@ -685,9 +683,9 @@
\x00\x00\x00\x50\x00\x00\x00\x00\x00\x01\x00\x00\x07\x46\
\x00\x00\x00\xaa\x00\x00\x00\x00\x00\x01\x00\x00\x16\x86\
\x00\x00\x00\x28\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\
-"
+'
-qt_resource_struct_v2 = b"\
+qt_resource_struct_v2 = b'\
\x00\x00\x00\x00\x00\x02\x00\x00\x00\x01\x00\x00\x00\x01\
\x00\x00\x00\x00\x00\x00\x00\x00\
\x00\x00\x00\x00\x00\x02\x00\x00\x00\x08\x00\x00\x00\x02\
@@ -708,7 +706,7 @@
\x00\x00\x01\x84\x35\xa0\xb2\x82\
\x00\x00\x00\x28\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\
\x00\x00\x01\x84\x35\xae\x2b\x19\
-"
+'
qt_version = [int(v) for v in QtCore.qVersion().split('.')]
if qt_version < [5, 8, 0]:
@@ -718,10 +716,17 @@
rcc_version = 2
qt_resource_struct = qt_resource_struct_v2
-def qInitResources():
- QtCore.qRegisterResourceData(rcc_version, qt_resource_struct, qt_resource_name, qt_resource_data)
-def qCleanupResources():
- QtCore.qUnregisterResourceData(rcc_version, qt_resource_struct, qt_resource_name, qt_resource_data)
+def qInitResources(): # noqa: N802, D103
+ QtCore.qRegisterResourceData(
+ rcc_version, qt_resource_struct, qt_resource_name, qt_resource_data
+ )
+
+
+def qCleanupResources(): # noqa: N802, D103
+ QtCore.qUnregisterResourceData(
+ rcc_version, qt_resource_struct, qt_resource_name, qt_resource_data
+ )
+
qInitResources()
diff --git a/modules/systemPerformance/REWET/REWET/Input/GUI_Input_Interface.py b/modules/systemPerformance/REWET/REWET/Input/GUI_Input_Interface.py
index bf1b4cd29..19028d8ae 100644
--- a/modules/systemPerformance/REWET/REWET/Input/GUI_Input_Interface.py
+++ b/modules/systemPerformance/REWET/REWET/Input/GUI_Input_Interface.py
@@ -1,18 +1,17 @@
-# -*- coding: utf-8 -*-
-"""
-Created on Mon Oct 10 15:04:40 2022
+"""Created on Mon Oct 10 15:04:40 2022
This is the input module, an interface between all inputs from GUI, TEXT-based
inputs and the mail code.
@author: snaeimi
-"""
+""" # noqa: CPY001, D400, INP001
-class input():
+
+class input: # noqa: A001, D101
def __init__(self, settings, registry):
pass
-
- def convertShiftFromDictToPandasTable(self, dict_data):
- shift_name_list = list(shift_data )
- shift_begining_list = [shift_data[i][0] for i in shift_name_list]
- shift_end_list = [shift_data[i][1] for i in shift_name_list]
+
+ def convertShiftFromDictToPandasTable(self, dict_data): # noqa: ARG002, D102, N802, PLR6301
+ shift_name_list = list(shift_data) # noqa: F821
+ shift_begining_list = [shift_data[i][0] for i in shift_name_list] # noqa: F821, F841
+ shift_end_list = [shift_data[i][1] for i in shift_name_list] # noqa: F821, F841
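As the F821 suppressions indicate, convertShiftFromDictToPandasTable still references an undefined shift_data instead of its dict_data argument, so it is effectively a stub. A minimal sketch of the intended conversion, assuming dict_data maps shift names to (begin, end) tuples as RestorationIO._read_shifts produces (the function name and column labels here are illustrative, not part of the module):

    import pandas as pd

    def convert_shift_dict_to_table(dict_data):
        # dict_data: {shift_name: (begin_seconds, end_seconds)}
        names = list(dict_data)
        return pd.DataFrame(
            {
                'Begin': [dict_data[n][0] for n in names],
                'End': [dict_data[n][1] for n in names],
            },
            index=names,
        )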
diff --git a/modules/systemPerformance/REWET/REWET/Input/Input_IO.py b/modules/systemPerformance/REWET/REWET/Input/Input_IO.py
index 6855b1b4c..fd24c597d 100644
--- a/modules/systemPerformance/REWET/REWET/Input/Input_IO.py
+++ b/modules/systemPerformance/REWET/REWET/Input/Input_IO.py
@@ -1,231 +1,272 @@
+import json # noqa: CPY001, D100, INP001
import os
+import pickle # noqa: S403
+
import pandas as pd
-import pickle
-import json
-##################### Read files From json #####################
+# Read files From json #####################
def read_pipe_damage_seperate_json_file(directory, pipe_file_name):
"""Read pipe damage of a single scenario.
Args:
- directory (path): damage scnearios path
+ ----
+ directory (path): damage scenarios path
pipe_file_name (str): pipe damage file name
- Raises:
+ Raises
+ ------
ValueError: _description_
RuntimeError: _description_
- Returns:
- Pandas.Series: Pipe Damage
+ Returns
+ -------
+ Pandas.Series: Pipe Damage
+
"""
pipe_damaage = []
pipe_time = []
- file_dest = os.path.join(directory, pipe_file_name)
-
- with open(file_dest, "rt") as f:
+ file_dest = os.path.join(directory, pipe_file_name) # noqa: PTH118
+
+ with open(file_dest) as f: # noqa: PLW1514, PTH123
read_file = json.load(f)
if not isinstance(read_file, list):
- raise ValueError("Wrong inpout in PIPE damage file")
-
+ raise ValueError('Wrong input in PIPE damage file') # noqa: DOC501, EM101, TRY003, TRY004
+
for each_damage in read_file:
- pipe_time.append(each_damage.get("time") )
-
- cur_damage = {"pipe_ID": each_damage.get("Pipe_ID"),
- "damage_loc": each_damage.get("Loc"),
- "type": each_damage.get("Type"),
- "Material": each_damage.get("Material")
+ pipe_time.append(each_damage.get('time'))
+
+ cur_damage = {
+ 'pipe_ID': each_damage.get('Pipe_ID'),
+ 'damage_loc': each_damage.get('Loc'),
+ 'type': each_damage.get('Type'),
+ 'Material': each_damage.get('Material'),
}
pipe_damaage.append(cur_damage)
- return pd.Series(index = pipe_time, data = pipe_damaage)
-
+ return pd.Series(index=pipe_time, data=pipe_damaage)
+
+
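For reference, a hypothetical single-scenario pipe damage file consistent with the keys read above (read_pipe_damage_seperate_json_file expects a JSON list of records; the values shown are purely illustrative):

    [
      {"time": 7200, "Pipe_ID": "P-12", "Loc": 0.5, "Type": "leak", "Material": "CI"},
      {"time": 7200, "Pipe_ID": "P-31", "Loc": 0.2, "Type": "break", "Material": "DI"}
    ]

The node, tank, and pump readers below follow the same pattern with their own key sets (Node_ID/Number_of_Damages/Pipe Length, Tank_ID/Restore_time, and Pump_ID/Restore_time, respectively).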
def read_node_damage_seperate_json_file(directory, node_file_name):
"""Read node damage of a single scenario.
Args:
- directory (path): damage scnearios path
+ ----
+ directory (path): damage scenarios path
pipe_file_name (str): node damage file name
- Raises:
+ Raises
+ ------
ValueError: _description_
RuntimeError: _description_
- Returns:
- Pandas.Series: node Damage
+ Returns
+ -------
+ Pandas.Series: node Damage
+
"""
node_damage = []
node_time = []
- file_dest = os.path.join(directory, node_file_name)
-
- with open(file_dest, "rt") as f:
+ file_dest = os.path.join(directory, node_file_name) # noqa: PTH118
+
+ with open(file_dest) as f: # noqa: PLW1514, PTH123
read_file = json.load(f)
if not isinstance(read_file, list):
- raise ValueError("Wrong inpout in NODE damage file")
-
+ raise ValueError('Wrong input in NODE damage file') # noqa: DOC501, EM101, TRY003, TRY004
+
for each_damage in read_file:
- node_time.append(each_damage.get("time") )
-
- cur_damage = {"node_name": each_damage.get("Node_ID"),
- "Number_of_damages": each_damage.get("Number_of_Damages"),
- "node_Pipe_Length": each_damage.get("Pipe Length")
+ node_time.append(each_damage.get('time'))
+
+ cur_damage = {
+ 'node_name': each_damage.get('Node_ID'),
+ 'Number_of_damages': each_damage.get('Number_of_Damages'),
+ 'node_Pipe_Length': each_damage.get('Pipe Length'),
}
node_damage.append(cur_damage)
- return pd.Series(index = node_time, data = node_damage)
+ return pd.Series(index=node_time, data=node_damage)
+
def read_tank_damage_seperate_json_file(directory, tank_file_name):
"""Read tank damage of a single scenario.
Args:
- directory (path): tank scnearios path
+ ----
+ directory (path): tank scenarios path
pipe_file_name (str): tank damage file name
- Raises:
+ Raises
+ ------
ValueError: _description_
RuntimeError: _description_
- Returns:
- Pandas.Series: tank Damage
+ Returns
+ -------
+ Pandas.Series: tank Damage
+
"""
tank_damage = []
tank_time = []
- file_dest = os.path.join(directory, tank_file_name)
-
- with open(file_dest, "rt") as f:
+ file_dest = os.path.join(directory, tank_file_name) # noqa: PTH118
+
+ with open(file_dest) as f: # noqa: PLW1514, PTH123
read_file = json.load(f)
if not isinstance(read_file, list):
- raise ValueError("Wrong inpout in TANK damage file")
-
+ raise ValueError('Wrong input in TANK damage file') # noqa: DOC501, EM101, TRY003, TRY004
+
for each_damage in read_file:
- tank_time.append(each_damage.get("time") )
-
- cur_damage = {"Tank_ID": each_damage.get("Tank_ID"),
- "Restore_time": each_damage.get("Restore_time"),
+ tank_time.append(each_damage.get('time'))
+
+ cur_damage = {
+ 'Tank_ID': each_damage.get('Tank_ID'),
+ 'Restore_time': each_damage.get('Restore_time'),
}
tank_time.append(cur_damage)
- return pd.Series(index = tank_time, data = tank_damage)
+ return pd.Series(index=tank_time, data=tank_damage)
+
def read_pump_damage_seperate_json_file(directory, pump_file_name):
"""Read pump damage of a single scenario.
Args:
- directory (path): pump scnearios path
+ ----
+ directory (path): pump scenarios path
pipe_file_name (str): pump damage file name
- Raises:
+ Raises
+ ------
ValueError: _description_
RuntimeError: _description_
- Returns:
- Pandas.Series: pump Damage
+ Returns
+ -------
+ Pandas.Series: pump Damage
+
"""
pump_damage = []
pump_time = []
- file_dest = os.path.join(directory, pump_file_name)
-
- with open(file_dest, "rt") as f:
+ file_dest = os.path.join(directory, pump_file_name) # noqa: PTH118
+
+ with open(file_dest) as f: # noqa: PLW1514, PTH123
read_file = json.load(f)
if not isinstance(read_file, list):
- raise ValueError("Wrong inpout in PUMP damage file")
-
+ raise ValueError('Wrong input in PUMP damage file') # noqa: DOC501, EM101, TRY003, TRY004
+
for each_damage in read_file:
- pump_time.append(each_damage.get("time") )
-
- cur_damage = {"PUMP_ID": each_damage.get("Pump_ID"),
- "Restore_time": each_damage.get("Restore_time"),
+ pump_time.append(each_damage.get('time'))
+
+ cur_damage = {
+ 'PUMP_ID': each_damage.get('Pump_ID'),
+ 'Restore_time': each_damage.get('Restore_time'),
}
pump_time.append(cur_damage)
- return pd.Series(index = pump_time, data = pump_damage)
+ return pd.Series(index=pump_time, data=pump_damage)
+
+
+# Read files From Pickle #####################
+def read_pipe_damage_seperate_pickle_file(directory, all_damages_file_name): # noqa: D103
+ file_dest = os.path.join(directory, all_damages_file_name) # noqa: PTH118
+ with open(file_dest, 'rb') as f: # noqa: PTH123
+ _all_damages = pickle.load(f) # noqa: S301
+
+ return _all_damages # noqa: RET504
+
+
+def read_node_damage_seperate_pickle_file(directory, all_damages_file_name): # noqa: D103
+ file_dest = os.path.join(directory, all_damages_file_name) # noqa: PTH118
+ with open(file_dest, 'rb') as f: # noqa: PTH123
+ _node_damages = pickle.load(f) # noqa: S301
+
+ return _node_damages # noqa: RET504
-##################### Read files From Pickle #####################
-def read_pipe_damage_seperate_pickle_file(directory, all_damages_file_name):
- file_dest=os.path.join(directory, all_damages_file_name)
- with open(file_dest, 'rb') as f:
- _all_damages = pickle.load(f)
+def read_tank_damage_seperate_pickle_file(directory, tank_damages_file_name): # noqa: D103
+ file_dest = os.path.join(directory, tank_damages_file_name) # noqa: PTH118
+ with open(file_dest, 'rb') as f: # noqa: PTH123
+ _tank_damages = pickle.load(f) # noqa: S301
- return _all_damages
+ return _tank_damages # noqa: RET504
-def read_node_damage_seperate_pickle_file(directory, all_damages_file_name):
- file_dest=os.path.join(directory, all_damages_file_name)
- with open(file_dest, 'rb') as f:
- _node_damages = pickle.load(f)
-
- return _node_damages
-def read_tank_damage_seperate_pickle_file(directory, tank_damages_file_name):
- file_dest=os.path.join(directory, tank_damages_file_name)
- with open(file_dest, 'rb') as f:
- _tank_damages = pickle.load(f)
+def read_pump_damage_seperate_pickle_file(directory, pump_damages_file_name): # noqa: D103
+ file_dest = os.path.join(directory, pump_damages_file_name) # noqa: PTH118
+ with open(file_dest, 'rb') as f: # noqa: PTH123
+ _pump_damages = pickle.load(f) # noqa: S301
- return _tank_damages
+ return _pump_damages # noqa: RET504
-def read_pump_damage_seperate_pickle_file(directory, pump_damages_file_name):
- file_dest=os.path.join(directory, pump_damages_file_name)
- with open(file_dest, 'rb') as f:
- _pump_damages = pickle.load(f)
- return _pump_damages
+# Read files From Excel #####################
-##################### Read files From Excel #####################
-def read_pipe_damage_seperate_EXCEL_file(directory, pipe_damages_file_name):
- ss=None
- file_dest=os.path.join(directory, pipe_damages_file_name)
- ss=pd.read_excel(file_dest)
- ss.sort_values(['pipe_id','time','damage_loc'],ascending=[True,True,False], ignore_index=True, inplace=True)
+def read_pipe_damage_seperate_EXCEL_file(directory, pipe_damages_file_name): # noqa: N802, D103
+ ss = None
+ file_dest = os.path.join(directory, pipe_damages_file_name) # noqa: PTH118
+ ss = pd.read_excel(file_dest)
+ ss.sort_values(
+ ['pipe_id', 'time', 'damage_loc'],
+ ascending=[True, True, False],
+ ignore_index=True,
+ inplace=True, # noqa: PD002
+ )
unique_time = ss.groupby(['pipe_id']).time.unique()
- if 1 in [0 if len(i)<=1 else 1 for i in unique_time]: # checks if there are any pipe id with more than two unqiue time values
- raise ValueError("All damage location for one pipe should happen at the same time")
- ss.set_index('time', inplace=True)
+ if 1 in [
+ 0 if len(i) <= 1 else 1 for i in unique_time
+ ]: # checks if there are any pipe id with more than two unique time values
+ raise ValueError( # noqa: TRY003
+ 'All damage locations for one pipe should happen at the same time' # noqa: EM101
+ )
+ ss.set_index('time', inplace=True) # noqa: PD002
ss.pipe_id = ss.pipe_id.astype(str)
return pd.Series(ss.to_dict('records'), index=ss.index)
-def read_node_damage_seperate_EXCEL_file(directory, node_damages_file_name):
- ss = None
- file_dest = os.path.join(directory, node_damages_file_name)
- ss = pd.read_excel(file_dest)
- ss.set_index('time', inplace=True)
+
+def read_node_damage_seperate_EXCEL_file(directory, node_damages_file_name): # noqa: N802, D103
+ ss = None
+ file_dest = os.path.join(directory, node_damages_file_name) # noqa: PTH118
+ ss = pd.read_excel(file_dest)
+ ss.set_index('time', inplace=True) # noqa: PD002
ss.node_name = ss.node_name.astype(str)
return pd.Series(ss.to_dict('records'), index=ss.index)
-def read_tank_damage_seperate_EXCEL_file(directory, tank_damages_file_name):
- ss = None
- file_dest = os.path.join(directory, tank_damages_file_name)
- ss = pd.read_excel(file_dest)
-# ss.set_index('Tank_ID', inplace=True)
- ss.set_index('time', inplace=True)
+
+def read_tank_damage_seperate_EXCEL_file(directory, tank_damages_file_name): # noqa: N802, D103
+ ss = None
+ file_dest = os.path.join(directory, tank_damages_file_name) # noqa: PTH118
+ ss = pd.read_excel(file_dest)
+ # ss.set_index('Tank_ID', inplace=True)
+ ss.set_index('time', inplace=True) # noqa: PD002
ss.Tank_ID = ss.Tank_ID.astype(str)
- #ss = ss['Tank_ID']
+ # ss = ss['Tank_ID']
return ss
-def read_pump_damage_seperate_EXCEL_file(directory, pump_damages_file_name):
- ss = None
- file_dest = os.path.join(directory, pump_damages_file_name)
- ss = pd.read_excel(file_dest)
- ss.set_index('time', inplace=True)
+
+def read_pump_damage_seperate_EXCEL_file(directory, pump_damages_file_name): # noqa: N802, D103
+ ss = None
+ file_dest = os.path.join(directory, pump_damages_file_name) # noqa: PTH118
+ ss = pd.read_excel(file_dest)
+ ss.set_index('time', inplace=True) # noqa: PD002
ss.Pump_ID = ss.Pump_ID.astype(str)
return ss
-def read_damage_list(list_file_addr, file_directory, iCheck=False):
- """
- Reads damage sceanrio list.
+
+def read_damage_list(list_file_addr, file_directory, iCheck=False): # noqa: FBT002, ARG001, N803
+ """Reads damage scenario list.
Parameters
----------
@@ -246,53 +287,59 @@ def read_damage_list(list_file_addr, file_directory, iCheck=False):
damage_list : Pandas Dataframe
DESCRIPTION.
- """
- damage_list=None
- error_file_name=[]
+ """ # noqa: D401
+ damage_list = None
+ error_file_name = []
- with open(list_file_addr, 'rb') as f:
+ with open(list_file_addr, 'rb') as f: # noqa: PTH123
damage_list = pd.read_excel(f)
- iError=False
+ iError = False # noqa: N806
temp = damage_list['Pipe Damage'].tolist()
-
- if iCheck==False:
+
+ if iCheck == False: # noqa: E712
return damage_list
-
+
for file_name in temp:
- if not os.path.exists(file_name):
- iError=True
+ if not os.path.exists(file_name): # noqa: PTH110
+ iError = True # noqa: N806
error_file_name.append(file_name)
-
+
if iError:
- raise RuntimeError('The Follwoing files could not be found: '+repr(error_file_name))
+ raise RuntimeError(
+ 'The Following files could not be found: ' + repr(error_file_name)
+ )
return damage_list
-##################### Save Results #####################
-def save_single(settings, result, name, restoration_data):
+# Save Results #####################
+
+
+def save_single(settings, result, name, restoration_data): # noqa: D103
result_file_directory = settings.process['result_directory']
- #print(result_file_directory)
- result_name = name + '.res'
- settings_name = name + '.xlsx'
-
- file_dest = os.path.join(result_file_directory, result_name)
- print("Saving: "+str(file_dest))
- with open(file_dest, 'wb') as f:
+ # print(result_file_directory)
+ result_name = name + '.res'
+ settings_name = name + '.xlsx'
+
+ file_dest = os.path.join(result_file_directory, result_name) # noqa: PTH118
+ print('Saving: ' + str(file_dest)) # noqa: T201
+ with open(file_dest, 'wb') as f: # noqa: PTH123
pickle.dump(result, f)
-
-
- process_set = pd.Series(settings.process.settings)
+
+ process_set = pd.Series(settings.process.settings)
scenario_set = pd.Series(settings.scenario.settings)
- _set = pd.Series(process_set.to_list()+scenario_set.to_list(), index=process_set.index.to_list()+scenario_set.index.to_list())
- file_dest = os.path.join(result_file_directory, settings_name)
+ _set = pd.Series(
+ process_set.to_list() + scenario_set.to_list(),
+ index=process_set.index.to_list() + scenario_set.index.to_list(),
+ )
+ file_dest = os.path.join(result_file_directory, settings_name) # noqa: PTH118
_set.to_excel(file_dest)
-
+
if settings.process['dmg_rst_data_save']:
- #file_dest = os.path.join(result_file_directory, 'restoration_file.pkl')
- #rest_data_out = pd.DataFrame.from_dict(restoration_data)
- #rest_data_out.to_pickle(file_dest)
- file_dest = os.path.join(result_file_directory, name+'_registry.pkl')
- print("Saving: "+str(file_dest))
- with open(file_dest, 'wb') as f:
- pickle.dump(restoration_data, f)
\ No newline at end of file
+ # file_dest = os.path.join(result_file_directory, 'restoration_file.pkl')
+ # rest_data_out = pd.DataFrame.from_dict(restoration_data)
+ # rest_data_out.to_pickle(file_dest)
+ file_dest = os.path.join(result_file_directory, name + '_registry.pkl') # noqa: PTH118
+ print('Saving: ' + str(file_dest)) # noqa: T201
+ with open(file_dest, 'wb') as f: # noqa: PTH123
+ pickle.dump(restoration_data, f)
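A minimal sketch of how the Excel-based readers above might be driven from a damage scenario list (paths and file names are illustrative, following the defaults that appear in Settings.py later in this diff; the 'Pipe Damage' column name matches read_damage_list):

    damage_list = read_damage_list('Example/example_list.xlsx', 'Example/Damages')
    for _, row in damage_list.iterrows():
        pipe_damage = read_pipe_damage_seperate_EXCEL_file('Example/Damages', row['Pipe Damage'])
        print(pipe_damage.head())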
diff --git a/modules/systemPerformance/REWET/REWET/Input/Policy_IO.py b/modules/systemPerformance/REWET/REWET/Input/Policy_IO.py
index 055431b75..b1f309404 100644
--- a/modules/systemPerformance/REWET/REWET/Input/Policy_IO.py
+++ b/modules/systemPerformance/REWET/REWET/Input/Policy_IO.py
@@ -1,23 +1,21 @@
-# -*- coding: utf-8 -*-
-"""
-Created on Wed Dec 19 19:10:35 2020
+"""Created on Wed Dec 19 19:10:35 2020
This is the Restoration Policy Reader/Writtter Module.
@author: snaeimi
-"""
+""" # noqa: CPY001, D400, INP001
-import io
import logging
-import pandas as pd
from collections import OrderedDict
+import pandas as pd
-ELEMENTS = ['PIPE', 'DISTNODE', 'GNODE', 'TANK','PUMP', 'RESERVOIR']
+ELEMENTS = ['PIPE', 'DISTNODE', 'GNODE', 'TANK', 'PUMP', 'RESERVOIR']
logger = logging.getLogger(__name__)
-#the follwing function is borrowed from WNTR
+
+# the following function is borrowed from WNTR
def _split_line(line):
_vc = line.split(';', 1)
_cmnt = None
@@ -26,7 +24,7 @@ def _split_line(line):
pass
elif len(_vc) == 1:
_vals = _vc[0].split()
- elif _vc[0] == '':
+ elif _vc[0] == '': # noqa: PLC1901
_cmnt = _vc[1]
else:
_vals = _vc[0].split()
@@ -34,29 +32,29 @@ def _split_line(line):
return _vals, _cmnt
-class restoration_data():
+class restoration_data: # noqa: D101
def __init__(self):
- self.files = {}
- self.shift = {}
- self.entity = {}
- self.entity_rule = {}
- self.sequence = {}
- self.agents = []
- self.group = {}
- self.priority = []
- self.jobs = []
- self.jobs_default = []
+ self.files = {}
+ self.shift = {}
+ self.entity = {}
+ self.entity_rule = {}
+ self.sequence = {}
+ self.agents = []
+ self.group = {}
+ self.priority = []
+ self.jobs = []
+ self.jobs_default = []
self.time_overwrite = {}
- self.final_method = {}
- self.once = {}
-
+ self.final_method = {}
+ self.once = {}
+
for el in ELEMENTS:
self.group[el] = OrderedDict()
-class RestorationIO():
+
+class RestorationIO: # noqa: D101
def __init__(self, definition_file_name):
- """
- Needs a file that contains:
+ """Needs a file that contains:
Parameters
----------
@@ -69,55 +67,68 @@ def __init__(self, definition_file_name):
-------
None.
- """
-
- #some of the following lines have been addopted from WNTR
+ """ # noqa: D400
+ # some of the following lines have been adopted from WNTR
self.rm = restoration_data()
-
- self.crew_data={}
-
- expected_sections=['[FILES]','[ENTITIES]', '[JOBS]','[AGENTS]',
- '[GROUPS]','[PRIORITIES]', '[SHIFTS]',
- '[SEQUENCES]', '[DEFINE]', '[DAMAGE GROUPS]',
- '[EFFECTS]', '[CREWS]']
-
+
+ self.crew_data = {}
+
+ expected_sections = [
+ '[FILES]',
+ '[ENTITIES]',
+ '[JOBS]',
+ '[AGENTS]',
+ '[GROUPS]',
+ '[PRIORITIES]',
+ '[SHIFTS]',
+ '[SEQUENCES]',
+ '[DEFINE]',
+ '[DAMAGE GROUPS]',
+ '[EFFECTS]',
+ '[CREWS]',
+ ]
+
self.config_file_comment = []
- self.edata = []
-
+ self.edata = []
+
self.sections = OrderedDict()
for sec in expected_sections:
self.sections[sec] = []
-
+
section = None
lnum = 0
edata = {'fname': definition_file_name}
- with io.open(definition_file_name, 'r', encoding='utf-8') as f:
+ with open(definition_file_name, encoding='utf-8') as f: # noqa: PTH123
for line in f:
lnum += 1
edata['lnum'] = lnum
- line = line.strip()
+ line = line.strip() # noqa: PLW2901
nwords = len(line.split())
if len(line) == 0 or nwords == 0:
# Blank line
continue
- elif line.startswith('['):
+ elif line.startswith('['): # noqa: RET507
vals = line.split()
sec = vals[0].upper()
edata['sec'] = sec
if sec in expected_sections:
section = sec
continue
- else:
- raise RuntimeError('%(fname)s:%(lnum)d: Invalid section "%(sec)s"' % edata)
+ else: # noqa: RET507
+ raise RuntimeError( # noqa: DOC501
+ '%(fname)s:%(lnum)d: Invalid section "%(sec)s"' % edata
+ )
elif section is None and line.startswith(';'):
self.config_file_comment.append(line[1:])
continue
elif section is None:
- raise RuntimeError('%(fname)s:%(lnum)d: Non-comment outside of valid section!' % edata)
+ raise RuntimeError( # noqa: DOC501
+ '%(fname)s:%(lnum)d: Non-comment outside of valid section!'
+ % edata
+ )
# We have text, and we are in a section
self.sections[section].append((lnum, line))
-
# Parse each of the sections
self._read_files()
self._read_shifts()
@@ -128,79 +139,82 @@ def __init__(self, definition_file_name):
self._read_priorities()
self._read_jobs()
self._read_define()
- #self._read_config()
-
+ # self._read_config()
+
def _read_files(self):
edata = OrderedDict()
- self.file_name=[]
- self._file_data={}
+ self.file_name = []
+ self._file_data = {}
self._file_handle_address = {}
for lnum, line in self.sections['[FILES]']:
edata['lnum'] = lnum
- words, comments = _split_line(line)
+ words, comments = _split_line(line) # noqa: F841
if words is not None and len(words) > 0:
- if len(words) != 2:
+ if len(words) != 2: # noqa: PLR2004
edata['key'] = words[0]
- raise RuntimeError('%(fname)s:%(lnum)-6d %(sec)13s no value provided for %(key)s' % edata)
- file_handle = words[0]
- file_address = words[1]
-
+ raise RuntimeError(
+ '%(fname)s:%(lnum)-6d %(sec)13s no value provided for %(key)s'
+ % edata
+ )
+ file_handle = words[0]
+ file_address = words[1]
+
self._file_handle_address[file_handle] = file_address
-
+
for file_handle, file_address in self._file_handle_address.items():
self._file_data[file_handle] = self._read_each_file(file_address)
self.rm.files = self._file_data
-
+
def _read_each_file(self, file_address, method=0):
lnum = 0
- iTitle = True
+ iTitle = True # noqa: N806
data_temp = None
- if method==0:
+ if method == 0: # noqa: PLR1702
try:
- raise
- with io.open(file_address, 'r', encoding='utf-8') as f:
+ raise # noqa: PLE0704
+ with open(file_address, encoding='utf-8') as f: # noqa: PTH123
for line in f:
- line = line.strip()
+ line = line.strip() # noqa: PLW2901
nwords = len(line.split())
if len(line) == 0 or nwords == 0:
# Blank line
continue
- elif line.startswith(';'):
+ elif line.startswith(';'): # noqa: RET507
# comment
continue
else:
lnum += 1
vals = line.split()
- if iTitle == True:
- iTitle = False
+ if iTitle == True: # noqa: E712
+ iTitle = False # noqa: N806
data_temp = pd.DataFrame(columns=vals)
else:
- data_temp.loc[lnum-2] = vals
- except:
+ data_temp.loc[lnum - 2] = vals
+ except: # noqa: E722
data_temp = self._read_each_file(file_address, method=1)
- elif method==1:
+ elif method == 1:
data_temp = pd.read_csv(file_address)
else:
- raise ValueError('Uknown method: '+str(method))
+ raise ValueError('Unknown method: ' + str(method))
return data_temp
-
+
def _read_shifts(self):
-
- for lnum, line in self.sections['[SHIFTS]']:
- #edata['lnum'] = lnum
- words, comments = _split_line(line)
+ for lnum, line in self.sections['[SHIFTS]']: # noqa: B007
+ # edata['lnum'] = lnum
+ words, comments = _split_line(line) # noqa: F841
if words is not None and len(words) > 0:
- if len(words) != 3:
- raise RuntimeError('%(fname)s:%(lnum)-6d %(sec)13s no value provided for %(key)s')
- shift_name = words[0]
- shift_begining = int(words[1])*3600
- shift_ending = int(words[2])*3600
-
+ if len(words) != 3: # noqa: PLR2004
+ raise RuntimeError( # noqa: TRY003
+ '%(fname)s:%(lnum)-6d %(sec)13s no value provided for %(key)s' # noqa: EM101
+ )
+ shift_name = words[0]
+ shift_begining = int(words[1]) * 3600
+ shift_ending = int(words[2]) * 3600
+
self.rm.shift[shift_name] = (shift_begining, shift_ending)
-
- def _read_entities(self):
- """
- Reads damage group definitions and updates the Restoration Model
+
+ def _read_entities(self): # noqa: C901
+ """Reads damage group definitions and updates the Restoration Model
object data.
Raises
@@ -209,563 +223,753 @@ def _read_entities(self):
If the number of damages are not right.
ValueError
If the input data is not correctly provided.
-
+
If the input data is not correctly provided.
-
+
Returns
-------
None.
- """
-
+ """ # noqa: D205, D401
# Entities is kept for legacy compatibility with the first version
- damage_group_data = self.sections.get('[ENTITIES]', self.sections.get('[Damage Group]'))
-
+ damage_group_data = self.sections.get(
+ '[ENTITIES]', self.sections.get('[Damage Group]')
+ )
+
for lnum, line in damage_group_data:
arg1 = None
arg2 = None
- words, comments = _split_line(line)
+ words, comments = _split_line(line) # noqa: F841
if words is not None and len(words) > 0:
- if len(words) != 2 and len(words)!=4:
- raise RuntimeError('%(fname)s:%(lnum)-6d %(sec)13s no value provided for %(key)s')
- entity_name = words[0]
- element = words[1].upper()
-
+ if len(words) != 2 and len(words) != 4: # noqa: PLR2004
+ raise RuntimeError( # noqa: TRY003
+ '%(fname)s:%(lnum)-6d %(sec)13s no value provided for %(key)s' # noqa: EM101
+ )
+ entity_name = words[0]
+ element = words[1].upper()
+
if element not in ELEMENTS:
raise ValueError('Unknown element line number ' + str(lnum))
-
- #if entity_name in self.rm.entity:
- #raise ValueError('Entity already defined')
-
- if len(words) == 4:
+
+ # if entity_name in self.rm.entity:
+ # raise ValueError('Entity already defined')
+
+ if len(words) == 4: # noqa: PLR2004
arg1 = words[2]
arg2 = words[3]
-
- #if (element=='PIPE' and arg1 not in self.rm._registry._pipe_damage_table.columns and arg1!='FILE' and arg1!='NOT_IN_FILE') and (element=='DISTNODE' and arg1 not in self.rm._registry._node_damage_table.columns):
- #raise ValueError('Argument 1('+arg1+') is not recognized in line number: ' + str(lnum))
-
- if arg1 == None:
+
+ # if (element=='PIPE' and arg1 not in self.rm._registry._pipe_damage_table.columns and arg1!='FILE' and arg1!='NOT_IN_FILE') and (element=='DISTNODE' and arg1 not in self.rm._registry._node_damage_table.columns):
+ # raise ValueError('Argument 1('+arg1+') is not recognized in line number: ' + str(lnum))
+
+ if arg1 == None: # noqa: E711
self.rm.entity[entity_name] = element
- ent_rule = [('ALL',None, None)]
-
+ ent_rule = [('ALL', None, None)]
+
if entity_name not in self.rm.entity_rule:
self.rm.entity_rule[entity_name] = ent_rule
else:
self.rm.entity_rule[entity_name].append(ent_rule[0])
- #sina: take care of this in regisry opening
- #self.rm._registry.addAttrToElementDamageTable(element ,entity_name , True)
-
- elif arg1=='FILE' or arg1=='NOT_IN_FILE':
- name_list=self.rm.files[arg2]['ElementID'].unique().tolist()
+ # sina: take care of this in registry opening
+ # self.rm._registry.addAttrToElementDamageTable(element ,entity_name , True)
+
+ elif arg1 == 'FILE' or arg1 == 'NOT_IN_FILE': # noqa: PLR1714
+ name_list = self.rm.files[arg2]['ElementID'].unique().tolist()
ent_rule = [(arg1, None, name_list)]
self.rm.entity[entity_name] = element
-
+
if entity_name not in self.rm.entity_rule:
self.rm.entity_rule[entity_name] = ent_rule
- #self.rm._registry.addAttrToElementDamageTable(element ,entity_name , True)
+ # self.rm._registry.addAttrToElementDamageTable(element ,entity_name , True)
else:
self.rm.entity_rule[entity_name].append(ent_rule[0])
-
+
else:
-
if ':' in arg2:
split_arg = arg2.split(':')
-
- if len(split_arg)!=2:
- raise ValueError('There must be two parts: PART1:PART2. Now there are '+repr(len(split_arg)+' parts. Line number is '+repr(lnum)))
- if split_arg[0]=='':
- raise ValueError('The first part is Empty in line '+repr(lnum))
- if split_arg[1]=='':
- raise ValueError('The second part is Empty in line '+repr(lnum))
+
+ if len(split_arg) != 2: # noqa: PLR2004
+ raise ValueError(
+ 'There must be two parts: PART1:PART2. Now there are '
+ + repr(len(split_arg))
+ + ' parts. Line number is '
+ + repr(lnum)
+ )
+ if split_arg[0] == '': # noqa: PLC1901
+ raise ValueError(
+ 'The first part is Empty in line ' + repr(lnum)
+ )
+ if split_arg[1] == '': # noqa: PLC1901
+ raise ValueError(
+ 'The second part is Empty in line ' + repr(lnum)
+ )
else:
- raise ValueError('There must be two parts as a conditio, separted with ":". Example: PART1:PART2 \nPart1 can be one of teh following: EQ, BG, LT, BG-EQ, and LT-EQ. Line number: '+repr(lnum))
-
+ raise ValueError(
+ 'There must be two parts as a condition, separated with ":". Example: PART1:PART2 \nPart1 can be one of the following: EQ, BG, LT, BG-EQ, and LT-EQ. Line number: '
+ + repr(lnum)
+ )
+
rest_of_args = arg2.split(':')
- arg2=rest_of_args[0]
- arg3=rest_of_args[1]
-
+ arg2 = rest_of_args[0]
+ arg3 = rest_of_args[1]
+
try:
temp_arg3 = float(arg3)
- except:
+ except: # noqa: E722
temp_arg3 = str(arg3)
-
- arg3=temp_arg3
+
+ arg3 = temp_arg3
ent_rule = [(arg1, arg2, arg3)]
if entity_name not in self.rm.entity:
-
self.rm.entity[entity_name] = element
self.rm.entity_rule[entity_name] = ent_rule
- #self.rm._registry.addAttrToElementDamageTable(element ,entity_name , True)
+ # self.rm._registry.addAttrToElementDamageTable(element ,entity_name , True)
else:
if self.rm.entity[entity_name] != element:
- raise ValueError('Element must not chanage in an added condition. Line '+str(lnum))
+ raise ValueError(
+ 'Element must not change in an added condition. Line '
+ + str(lnum)
+ )
self.rm.entity_rule[entity_name].append(ent_rule[0])
-
def _read_sequences(self):
- #sina: there is a part that you need to add in restroation init
- for lnum, line in self.sections['[SEQUENCES]']:
- words, comments = _split_line(line)
+ # sina: there is a part that you need to add in restoration init
+ for lnum, line in self.sections['[SEQUENCES]']: # noqa: B007
+ words, comments = _split_line(line) # noqa: F841
if words is not None and len(words) > 0:
- #if len(words) != 2 or len(words)!=4:
- #raise RuntimeError('%(fname)s:%(lnum)-6d %(sec)13s no value provided for %(key)s' % edata)
- element = words[0].upper()
+ # if len(words) != 2 or len(words)!=4:
+ # raise RuntimeError('%(fname)s:%(lnum)-6d %(sec)13s no value provided for %(key)s' % edata)
+ element = words[0].upper()
seq = []
for arg in words[1:]:
- seq.append(arg)
+ seq.append(arg) # noqa: PERF402
if element in self.rm.sequence:
- raise ValueError('Element already in sequences')
+ raise ValueError('Element already in sequences') # noqa: EM101, TRY003
if element not in ELEMENTS:
- raise ValueError("The Element " + repr(element) + " is not a recognized element")
+ raise ValueError(
+ 'The Element '
+ + repr(element)
+ + ' is not a recognized element'
+ )
self.rm.sequence[element] = seq
-
-
+
def _read_agents(self):
- agent_file_handle={}
- group_names = {}
+ agent_file_handle = {}
+ group_names = {}
group_column = {}
-
- crews_data = self.sections.get('[AGENTS]', self.sections.get('CREWS') )
- for lnum, line in crews_data:
- #edata['lnum'] = lnum
- words, comments = _split_line(line)
+
+ crews_data = self.sections.get('[AGENTS]', self.sections.get('CREWS'))
+ for lnum, line in crews_data: # noqa: B007
+ # edata['lnum'] = lnum
+ words, comments = _split_line(line) # noqa: F841
if words is not None and len(words) > 0:
- _group_name = None
+ _group_name = None
_group_column = None
-
- if len(words) < 3:
- raise RuntimeError('less than three argument is not valid for crew definition')
+
+ if len(words) < 3: # noqa: PLR2004
+ raise RuntimeError( # noqa: TRY003
+ 'less than three arguments is not valid for crew definition' # noqa: EM101
+ )
agent_type = words[0]
- if words[1].upper() == "FILE":
+ if words[1].upper() == 'FILE':
agent_file_handle[words[0]] = words[2]
else:
- raise ValueError("Unknown key")
- if len(words)>=4:
- group_data = words[3]
- _group_name = group_data.split(':')[0]
+ raise ValueError('Unknown key') # noqa: EM101, TRY003
+ if len(words) >= 4: # noqa: PLR2004
+ group_data = words[3]
+ _group_name = group_data.split(':')[0]
_group_column = group_data.split(':')[1]
-
-
- group_names[agent_type] = _group_name
+
+ group_names[agent_type] = _group_name
group_column[agent_type] = _group_column
-
+
for agent_type, file_handle in agent_file_handle.items():
data = self._file_data[file_handle]
-
- #print(file_handle)
- #print(self._file_data[file_handle])
-
+
+ # print(file_handle)
+ # print(self._file_data[file_handle])
+
agent_number = data['Number']
- j=0
- for lnum, line in data.iterrows():
- #try:
+ j = 0
+ for lnum, line in data.iterrows(): # noqa: B007
+ # try:
num = int(agent_number[j])
- #except :
- #print('exception')
- #pass
+ # except :
+ # print('exception')
+ # pass
_r = range(num)
-
+
for i in _r:
agent_name = agent_type + str(j) + str(i)
predefinitions = line.to_dict()
definitions = {}
- definitions['cur_x'] = predefinitions['Curr.-X-Coord']
- definitions['cur_y'] = predefinitions['Curr.-Y-Coord']
- definitions['base_x'] = predefinitions['Home-X-Coord']
- definitions['base_y'] = predefinitions['Home-Y-Coord']
+ definitions['cur_x'] = predefinitions['Curr.-X-Coord']
+ definitions['cur_y'] = predefinitions['Curr.-Y-Coord']
+ definitions['base_x'] = predefinitions['Home-X-Coord']
+ definitions['base_y'] = predefinitions['Home-Y-Coord']
definitions['shift_name'] = predefinitions['Shift']
-
- group_name_temp=None
- if group_names[agent_type] !=None:
- definitions['group'] = predefinitions[group_column[agent_type]]
+
+ group_name_temp = None
+ if group_names[agent_type] != None: # noqa: E711
+ definitions['group'] = predefinitions[
+ group_column[agent_type]
+ ]
group_name_temp = group_names[agent_type]
else:
group_name_temp = 'default'
- definitions['group'] = 'Default'
-
+ definitions['group'] = 'Default'
+
definitions['group_name'] = group_name_temp
- self.rm.agents.append((agent_name ,agent_type, definitions) )
- j += 1
-
+ self.rm.agents.append((agent_name, agent_type, definitions))
+ j += 1 # noqa: SIM113
+
def _read_groups(self):
-
for lnum, line in self.sections['[GROUPS]']:
- words, comments = _split_line(line)
-
+ words, comments = _split_line(line) # noqa: F841
+
if words is not None and len(words) > 0:
- if not len(words) >= 6:
+ if not len(words) >= 6: # noqa: PLR2004
raise ValueError('error in line: ' + str(lnum))
- group_name = words[0]
- element_type = words[1]
- arguement = words[2]
- file_handler = words[3]
- element_col_ID = words[4]
- pipe_col_ID = words[5]
-
+ group_name = words[0]
+ element_type = words[1]
+ argument = words[2]
+ file_handler = words[3]
+ element_col_ID = words[4] # noqa: N806
+ pipe_col_ID = words[5] # noqa: N806
+
if element_type not in ELEMENTS:
- raise ValueError('Unknown element type: '+repr(element_type)+', in line: '+repr(lnum))
- if arguement!='FILE':
- raise ValueError('the Only acceptable argument is FILE. Not: ' + repr(arguement) + '. Line: '+repr(lnum))
-
+ raise ValueError(
+ 'Unknown element type: '
+ + repr(element_type)
+ + ', in line: '
+ + repr(lnum)
+ )
+ if argument != 'FILE':
+ raise ValueError(
+ 'The only acceptable argument is FILE. Not: '
+ + repr(argument)
+ + '. Line: '
+ + repr(lnum)
+ )
+
data = self.rm.files[file_handler]
-
+
if pipe_col_ID not in data:
- raise ValueError(repr(pipe_col_ID) + "not in file handle="+repr(file_handler) )
-
- group_list = data[pipe_col_ID]
+ raise ValueError(
+ repr(pipe_col_ID)
+ + 'not in file handle='
+ + repr(file_handler)
+ )
+
+ group_list = data[pipe_col_ID]
group_list.index = data[element_col_ID]
-
+
if element_type not in self.rm.group:
- raise ValueError('This error must never happen: '+repr(element_type))
-
+ raise ValueError(
+ 'This error must never happen: ' + repr(element_type)
+ )
+
if group_name in self.rm.group[element_type]:
- raise ValueError('The Group is already identified: '+repr(group_name)+' in line: '+repr(lnum))
-
+ raise ValueError(
+ 'The Group is already identified: '
+ + repr(group_name)
+ + ' in line: '
+ + repr(lnum)
+ )
+
self.rm.group[element_type][group_name] = group_list
-
-
- def _read_priorities(self):
+
+ def _read_priorities(self): # noqa: C901
for lnum, line in self.sections['[PRIORITIES]']:
- words, comments = _split_line(line)
-
+ words, comments = _split_line(line) # noqa: F841
+
if words is not None and len(words) > 0:
- if not len(words) >= 3:
+ if not len(words) >= 3: # noqa: PLR2004
raise ValueError('error in line: ' + str(lnum))
- agent_type = words[0]
-
- priority=None
+ agent_type = words[0]
+
+ priority = None
try:
- priority = int(words[1])
- except:
- print('exeption handled in _read_priorities')
- if type(priority) != int:
- raise ValueError('Priority casting failed:'+str(priority)+'in line: '+repr(lnum))
- arg=[]
+ priority = int(words[1])
+ except: # noqa: E722
+ print('exception handled in _read_priorities') # noqa: T201
+ if type(priority) != int: # noqa: E721
+ raise ValueError(
+ 'Priority casting failed:'
+ + str(priority)
+ + 'in line: '
+ + repr(lnum)
+ )
+ arg = []
for word in words[2:]:
- temp = None
- if word.find(':')!=-1:
+ temp = None # noqa: F841
+ if word.find(':') != -1:
split_temp = word.split(':')
- arg.append((split_temp[0],split_temp[1]))
+ arg.append((split_temp[0], split_temp[1]))
if split_temp[1] not in self.rm.entity:
- raise ValueError('Entity value is used which is not defined before: '+split_temp[1]+', Line: ' + str(lnum))
- if split_temp[0] not in self.rm.sequence[self.rm.entity[split_temp[1]]]:
- raise ValueError('There is no action: '+repr(split_temp[0]) +' in element: '+repr(self.rm.entity[split_temp[1]]))
+ raise ValueError(
+ 'Entity value is used which is not defined before: '
+ + split_temp[1]
+ + ', Line: '
+ + str(lnum)
+ )
+ if (
+ split_temp[0]
+ not in self.rm.sequence[self.rm.entity[split_temp[1]]]
+ ):
+ raise ValueError(
+ 'There is no action: '
+ + repr(split_temp[0])
+ + ' in element: '
+ + repr(self.rm.entity[split_temp[1]])
+ )
else:
arg.append(word)
- if word not in ['EPICENTERDIST', 'WaterSource']:
+ if word not in ['EPICENTERDIST', 'WaterSource']: # noqa: PLR6201
+ raise ValueError('Unknown value in line: ' + str(lnum))
-
- self.rm.priority.append((agent_type, priority, arg) )
-
+
+ self.rm.priority.append((agent_type, priority, arg))
+
def _read_jobs(self):
for lnum, line in self.sections['[JOBS]']:
- words, comments = _split_line(line)
-
+ words, comments = _split_line(line) # noqa: F841
+
if words is not None and len(words) > 0:
- if not len(words) >= 3:
- raise ValueError('Not enough arguments. error in line: ' + str(lnum))
- agent_type = words[0]
-
+ if not len(words) >= 3: # noqa: PLR2004
+ raise ValueError(
+ 'Not enough arguments. error in line: ' + str(lnum)
+ )
+ agent_type = words[0]
+
action_entity = words[1]
- if not action_entity.find(':')!=-1:
- raise ValueError('There must be an action and entity seprated by : in line '+str(lnum))
+ if not action_entity.find(':') != -1:
+ raise ValueError(
+ 'There must be an action and entity separated by : in line '
+ + str(lnum)
+ )
split_temp = action_entity.split(':')
action = split_temp[0]
entity = split_temp[1]
-
+
definer_arg = words[2]
- if not definer_arg.find(':')!=-1:
- raise ValueError('There must be an Time Definer and Argument seprated by : in line '+str(lnum))
+ if not definer_arg.find(':') != -1:
+ raise ValueError(
+ 'There must be a Time Definer and Argument separated by : in line '
+ + str(lnum)
+ )
split_temp = definer_arg.split(':')
- definer = split_temp[0]
+ definer = split_temp[0]
argument = split_temp[1]
-
+
if definer.upper() == 'FIXED':
try:
- argument = int(argument)
- except:
- print('exeption handled in _read_jobs')
+ argument = int(argument)
+ except: # noqa: E722
+ print('exception handled in _read_jobs') # noqa: T201
else:
- raise ValueError('Definer is not recognized: '+definer)
-
+ raise ValueError('Definer is not recognized: ' + definer)
+
effect = None
- if len(words)>=4:
+ if len(words) >= 4: # noqa: PLR2004
effect = words[3]
-
- self.rm.jobs.append((agent_type, entity, action, argument, effect) )
-
- def _read_define(self):
- job={}
-
+
+ self.rm.jobs.append((agent_type, entity, action, argument, effect))
+
+ def _read_define(self): # noqa: C901, PLR0912
+ job = {} # noqa: F841
+
effect_data = self.sections.get('[DEFINE]', self.sections.get('[EFFECTS]'))
- for lnum, line in effect_data:
- words, comments = _split_line(line)
+ for lnum, line in effect_data: # noqa: PLR1702
+ words, comments = _split_line(line) # noqa: F841
if words is not None and len(words) > 0:
- #if not len(words) >= 3:
- #raise ValueError('Not enough arguments. error in line: ' + str(lnum))
+ # if not len(words) >= 3:
+ # raise ValueError('Not enough arguments. error in line: ' + str(lnum))
job_name = words[0]
- try:
+ try:
method_name = float(words[1])
- except:
+ except: # noqa: E722
method_name = words[1]
-
- res_list=[]
- flag=False
-
+
+ res_list = []
+ flag = False
+
if method_name == 'FILE':
- file_data = self._read_file_effect(words[2:], job_name)
- self.rm.jobs.append((job_name, 'DATA', file_data) )
- continue
-
+ file_data = self._read_file_effect(words[2:], job_name)
+ self.rm.jobs.append((job_name, 'DATA', file_data))
+ continue
+
method_data_list = words[2:]
for method_data in method_data_list:
- res={}
+ res = {}
definition = method_data.split(':')
-
- i=0
- if len(definition)%2!=1:
- raise ValueError('Error in line '+str(lnum))
-
+
+ i = 0
+ if len(definition) % 2 != 1:
+ raise ValueError('Error in line ' + str(lnum))
+
main_arg = None
-
+
while i < len(definition):
arg = definition[i].upper()
- if i==0:
+ if i == 0:
main_arg = arg
i += 1
- res['EFFECT']=main_arg
+ res['EFFECT'] = main_arg
continue
- val = definition[i+1].upper()
-
+ val = definition[i + 1].upper()
+
if main_arg == 'RECONNECT':
if arg == 'PIPESIZE':
if 'PIPESIZEFACTOR' in res:
- raise ValueError('Either pipe size or pipe size factor can be defined')
+ raise ValueError( # noqa: TRY003
+ 'Either pipe size or pipe size factor can be defined' # noqa: EM101
+ )
res['PIPESIZE'] = float(val)
-
+
elif arg == 'PIPESIZEFACTOR':
if 'PIPESIZE' in res:
- raise ValueError('Either pipe size or pipe size factor can be defined')
+ raise ValueError( # noqa: TRY003
+ 'Either pipe size or pipe size factor can be defined' # noqa: EM101
+ )
val = float(val)
- if val>1 or val<0:
- raise ValueError('Pipe Size Factor must be bigger than 0 and less than or eqal to 1: '+str(val))
+ if val > 1 or val < 0:
+ raise ValueError(
+ 'Pipe Size Factor must be bigger than 0 and less than or equal to 1: '
+ + str(val)
+ )
res['PIPESIZEFACTOR'] = float(val)
elif arg == 'CV':
- if val=='TRUE' or val=='1':
- val=True
- elif val=='FALSE' or val=='0':
- val=False
+ if val == 'TRUE' or val == '1': # noqa: PLR1714
+ val = True
+ elif val == 'FALSE' or val == '0': # noqa: PLR1714
+ val = False
else:
- raise ValueError('Unrecognized value for CV in line '+str(lnum)+': '+val+('Value for CV must be either True or False'))
- res['CV']=val
+ raise ValueError(
+ 'Unrecognized value for CV in line '
+ + str(lnum)
+ + ': '
+ + val
+ + (
+ '. Value for CV must be either True or False'
+ )
+ )
+ res['CV'] = val
elif arg == 'PIPELENGTH':
try:
- val == float(val)
+ val == float(val) # noqa: B015
except Exception as e:
- print("The value for PIPELENGTH must be a number")
- raise e
- res['PIPELENGTH']=val
+ print( # noqa: T201
+ 'The value for PIPELENGTH must be a number'
+ )
+ raise e # noqa: TRY201
+ res['PIPELENGTH'] = val
elif arg == 'PIPEFRICTION':
try:
- val == float(val)
+ val == float(val) # noqa: B015
except Exception as e:
- print("The value for PIPEFRICTION must be a number")
- raise e
- res['PIPEFRICTION']=val
+ print( # noqa: T201
+ 'The value for PIPEFRICTION must be a number'
+ )
+ raise e # noqa: TRY201
+ res['PIPEFRICTION'] = val
else:
- raise ValueError('Unrecognized argument: '+arg+ ', in effect: '+main_arg)
+ raise ValueError(
+ 'Unrecognized argument: '
+ + arg
+ + ', in effect: '
+ + main_arg
+ )
elif main_arg == 'ADD_RESERVOIR':
if arg == 'PUMP':
res['PUMP'] = float(val)
-
+
elif arg == 'CV':
- if val=='TRUE' or val=='1':
- val=True
- elif val=='FALSE' or val=='0':
- val=False
+ if val == 'TRUE' or val == '1': # noqa: PLR1714
+ val = True
+ elif val == 'FALSE' or val == '0': # noqa: PLR1714
+ val = False
else:
- raise ValueError('Unrecognized value for CV in line '+str(lnum)+': '+val+('Value for CV must be either True or False'))
- res['CV']=val
+ raise ValueError(
+ 'Unrecognized value for CV in line '
+ + str(lnum)
+ + ': '
+ + val
+ + (
+ '. Value for CV must be either True or False'
+ )
+ )
+ res['CV'] = val
elif arg == 'ADDEDELEVATION':
val = float(val)
res['ADDEDELEVATION'] = float(val)
else:
- raise ValueError('Unrecognized argument: '+arg+ ', in effect: '+main_arg)
+ raise ValueError(
+ 'Unrecognized argument: '
+ + arg
+ + ', in effect: '
+ + main_arg
+ )
elif main_arg == 'REMOVE_LEAK':
if arg == 'LEAKFACTOR':
val = float(val)
- if val>1 or val<=0:
- raise ValueError('Leak factor must be bigger than 0 and less than or eqal to 1: '+str(val))
+ if val > 1 or val <= 0:
+ raise ValueError(
+ 'Leak factor must be bigger than 0 and less than or equal to 1: '
+ + str(val)
+ )
res['LEAKFACTOR'] = val
else:
- raise ValueError('Unrecognized argument: '+arg+ ', in effect: '+main_arg)
-
+ raise ValueError(
+ 'Unrecognized argument: '
+ + arg
+ + ', in effect: '
+ + main_arg
+ )
+
elif main_arg == 'COL_CLOSE_PIPE':
- raise ValueError('REPAIR at this stage does not accept any argument')
-
+ raise ValueError( # noqa: TRY003
+ 'REPAIR at this stage does not accept any argument' # noqa: EM101
+ )
+
elif main_arg == 'ISOLATE_DN':
- if arg == 'PIDR': #Post Incident Demand Ratio
-
- if val[0]!='(' or val[-1]!=')' or val.find(',')==-1:
- ValueError("After PIDR the format must be like (CONDIION,VALUE)")
-
+ if arg == 'PIDR': # Post Incident Demand Ratio
+ if (
+ val[0] != '('
+ or val[-1] != ')'
+ or val.find(',') == -1
+ ):
+ ValueError( # noqa: PLW0133
+ 'After PIDR the format must be like (CONDITION,VALUE)'
+ )
+
val = val.strip('(').strip(')')
- val_split=val.split(',')
- _con=val_split[0].upper()
- _con_val=float(val_split[1])
-
- if not (_con=='BG' or _con=='EQ' or _con=='LT' or _con=='BG-EQ' or _con=='LT-EQ'):
- raise ValueError('Condition is not recognized:' + str(_con))
-
+ val_split = val.split(',')
+ _con = val_split[0].upper()
+ _con_val = float(val_split[1])
+
+ if not (
+ _con == 'BG' # noqa: PLR1714
+ or _con == 'EQ'
+ or _con == 'LT'
+ or _con == 'BG-EQ'
+ or _con == 'LT-EQ'
+ ):
+ raise ValueError(
+ 'Condition is not recognized:' + str(_con)
+ )
+
if _con_val < 0:
- raise ValueError('PIDR condition value cannot be less than zero-->'+repr(_con_val))
-
- res['PIDR']=(_con,_con_val)
-
+ raise ValueError(
+ 'PIDR condition value cannot be less than zero-->'
+ + repr(_con_val)
+ )
+
+ res['PIDR'] = (_con, _con_val)
+
elif main_arg == 'REPAIR':
- raise ValueError('REPAIR at this stage does not accept any argument')
-
+ raise ValueError( # noqa: TRY003
+ 'REPAIR at this stage does not accept any argument' # noqa: EM101
+ )
+
elif method_name.upper() == 'DEFAULT':
-
- try:
- arg=int(arg)
- except:
+ try: # noqa: SIM105
+ arg = int(arg)
+ except: # noqa: S110, E722
pass
-
- if main_arg=='METHOD_PROBABILITY':
- val=float(val)
-
- if val<0:
- raise ValueError('Probability cannot be less than zero. '+' In line '+lnum+' probability: '+val)
- elif val>1:
- raise ValueError('Probability cannot be bigger than 1. ' +' In line '+lnum+' probability: '+val)
- temp={'effect_definition_name':job_name, 'method_name':arg,'argument':main_arg,'value':val}
+
+ if main_arg == 'METHOD_PROBABILITY':
+ val = float(val)
+
+ if val < 0:
+ raise ValueError(
+ 'Probability cannot be less than zero. ' # noqa: ISC003
+ + ' In line '
+ + str(lnum)
+ + ' probability: '
+ + str(val)
+ )
+ elif val > 1: # noqa: RET506
+ raise ValueError(
+ 'Probability cannot be bigger than 1. ' # noqa: ISC003
+ + ' In line '
+ + str(lnum)
+ + ' probability: '
+ + str(val)
+ )
+ temp = {
+ 'effect_definition_name': job_name,
+ 'method_name': arg,
+ 'argument': main_arg,
+ 'value': val,
+ }
self.rm.jobs_default.append(temp)
- #temp={'effect_definition_name':effect_name, 'method_name':arg,'argument':'METHOD_PROBABILITY','value':val}
- elif main_arg=='FINALLY':
+ # temp={'effect_definition_name':effect_name, 'method_name':arg,'argument':'METHOD_PROBABILITY','value':val}
+ elif main_arg == 'FINALLY':
if val.upper() == 'NULL':
val = None
else:
val = None
- print('WARNING: At default line in FINALL section, the third argument is not NULL: ' + str(val) + 'The value is ignored antywhere')
+ print( # noqa: T201
+ 'WARNING: At default line in FINAL section, the third argument is not NULL: '
+ + str(val)
+ + '. The value is ignored anyway.'
+ )
self.rm.final_method[job_name] = arg
- elif main_arg=='ONLYONCE':
- try:
+ elif main_arg == 'ONLYONCE':
+ try: # noqa: SIM105
val = float(val)
- except:
+ except: # noqa: S110, E722
pass
-
+
if job_name in self.rm.once:
self.rm.once[job_name].append(val)
else:
- self.rm.once[job_name]=[val]
+ self.rm.once[job_name] = [val]
else:
- raise ValueError('Unrecognized argument in line ' + str(lnum) + ': ' + arg)
-
- flag=True
+ raise ValueError(
+ 'Unrecognized argument in line '
+ + str(lnum)
+ + ': '
+ + arg
+ )
+
+ flag = True
else:
- raise ValueError('Unrecognized argument in line ' + str(lnum) + ': ' + arg)
-
+ raise ValueError(
+ 'Unrecognized argument in line '
+ + str(lnum)
+ + ': '
+ + arg
+ )
+
i += 2
res_list.append(res)
- if flag==False:
- self.rm.jobs.append((job_name, method_name, res_list) )
-
-
- #for self.rm.effects.pruneData()
-
+ if flag == False: # noqa: E712
+ self.rm.jobs.append((job_name, method_name, res_list))
+
+ # for self.rm.effects.pruneData()
+
def _read_file_effect(self, file_info, effect_name):
res = {}
-
+
file_handle = file_info[0]
file_data = file_info[1:]
-
- data = self.rm.files[file_handle]
-
- #columns_to_remove = data.columns.tolist()
+
+ data = self.rm.files[file_handle]
+
+ # columns_to_remove = data.columns.tolist()
aliases = {}
for pair in file_data:
if not pair.find(':'):
- raise ValueError('Error in file info. Not Pair: '+pair)
+ raise ValueError('Error in file info. Not Pair: ' + pair)
_arg, val = pair.split(':')
arg = _arg.upper()
-
+
if arg in res:
- raise ValueError('Argument already added: '+_arg)
-
+ raise ValueError('Argument already added: ' + _arg)
+
if val not in data.columns:
- raise ValueError('Value not in file: '+ val)
- if arg == 'ELEMENT_NAME' or arg == 'METHOD_NAME' or arg == 'METHOD_PROBABILITY':
+ raise ValueError('Value not in file: ' + val)
+ if (
+ arg == 'ELEMENT_NAME' # noqa: PLR1714
+ or arg == 'METHOD_NAME'
+ or arg == 'METHOD_PROBABILITY'
+ ):
aliases[arg] = val
- res[arg]= data[val].to_dict()
-
+ res[arg] = data[val].to_dict()
+
elif arg == 'FIXED_TIME_OVERWRITE':
time_overwrite_data = data[val].to_list()
- #self.rm.jobs._job_list[self.rm.jobs._job_list['effect']==effect_name]
- temp_list_for_effect_name = [effect_name]*data[val].size
- _key = list(zip(temp_list_for_effect_name, data[aliases['METHOD_NAME'] ], data[aliases['ELEMENT_NAME'] ]) )
-
- time_overwrite_data = [{'FIXED_TIME_OVERWRITE':int(time_overwrite_data[i]*3600)} for i in range(len(time_overwrite_data))]
- self.rm.time_overwrite.update(pd.Series(index=_key, data = time_overwrite_data).to_dict())
-
+ # self.rm.jobs._job_list[self.rm.jobs._job_list['effect']==effect_name]
+ temp_list_for_effect_name = [effect_name] * data[val].size
+ _key = list(
+ zip(
+ temp_list_for_effect_name,
+ data[aliases['METHOD_NAME']],
+ data[aliases['ELEMENT_NAME']],
+ )
+ )
+
+ time_overwrite_data = [
+ {'FIXED_TIME_OVERWRITE': int(time_overwrite_data[i] * 3600)}
+ for i in range(len(time_overwrite_data))
+ ]
+ self.rm.time_overwrite.update(
+ pd.Series(index=_key, data=time_overwrite_data).to_dict()
+ )
+
else:
- raise ValueError('Unrecognized argument in pair: '+_arg)
+ raise ValueError('Unrecognized argument in pair: ' + _arg)
res = pd.DataFrame(res)
- #print(res)
- return res
-
-
+ # print(res)
+ return res # noqa: RET504
+
def _read_demand_nodes(self):
- titles = []
+ titles = [] # noqa: F841
ntitle = 0
lnum = 0
- dtemp=[]
- with io.open(self._demand_Node_file_name, 'r', encoding='utf-8') as f:
+ dtemp = []
+ with open(self._demand_Node_file_name, encoding='utf-8') as f: # noqa: PTH123
for line in f:
lnum += 1
- line = line.strip()
+ line = line.strip() # noqa: PLW2901
nwords = len(line.split())
words = line.split()
if len(line) == 0 or nwords == 0:
# Blank line
continue
- elif line.upper().startswith('NODEID'):
+ elif line.upper().startswith('NODEID'): # noqa: RET507
title = words.copy()
- ntitle = len(words) #we need this to confirm that every line has data for every title(column)
+ ntitle = len(
+ words
+ ) # we need this to confirm that every line has data for every title(column)
continue
elif nwords != ntitle:
- raise ValueError('%{fname}s:%(lnum)d: Number of data does not match number of titles')
+ raise ValueError( # noqa: TRY003
+ '%(fname)s:%(lnum)d: Number of data does not match number of titles' # noqa: EM101
+ )
elif nwords == ntitle:
dtemp.append(words)
else:
- raise ValueError('%{fname}s:%(lnum)d:This error must nnever happen')
+ raise ValueError( # noqa: TRY003
+ '%(fname)s:%(lnum)d: This error must never happen' # noqa: EM101
+ )
self.demand_node = pd.DataFrame(dtemp, columns=title)
-
+
def _read_crew(self):
- titles = []
+ titles = [] # noqa: F841
ntitle = 0
lnum = 0
- dtemp=[]
- with io.open(self._crew_file_name[-1], 'r', encoding='utf-8') as f:
+ dtemp = []
+ with open(self._crew_file_name[-1], encoding='utf-8') as f: # noqa: PTH123
for line in f:
lnum += 1
- line = line.strip()
+ line = line.strip() # noqa: PLW2901
nwords = len(line.split())
words = line.split()
if len(line) == 0 or nwords == 0:
# Blank line
continue
- elif line.upper().startswith('DISTYARDID'):
+ elif line.upper().startswith('DISTYARDID'): # noqa: RET507
title = words.copy()
- ntitle = len(words) #we need this to confirm that every line has data for every title(column)
+ ntitle = len(
+ words
+ ) # we need this to confirm that every line has data for every title(column)
continue
elif nwords != ntitle:
- raise ValueError('%{fname}s:%(lnum)d: Number of data does not match number of titles')
+ raise ValueError( # noqa: TRY003
+ '%(fname)s:%(lnum)d: Number of data does not match number of titles' # noqa: EM101
+ )
elif nwords == ntitle:
dtemp.append(words)
else:
- raise ValueError('%{fname}s:%(lnum)d:This error must nnever happen')
- self.crew_data[self._crew_file_type[-1]]=pd.DataFrame(dtemp, columns=title)
\ No newline at end of file
+ raise ValueError( # noqa: TRY003
+ '%(fname)s:%(lnum)d: This error must never happen' # noqa: EM101
+ )
+ self.crew_data[self._crew_file_type[-1]] = pd.DataFrame(
+ dtemp, columns=title
+ )
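For context, RestorationIO consumes a sectioned text file parsed with the WNTR-style _split_line helper above. A hypothetical minimal fragment consistent with the parsers in this file (section contents are illustrative; lines starting with ';' are comments):

    [SHIFTS]
    ; name  begin_hour  end_hour
    D1      8           20

    [ENTITIES]
    DamagedPipes  PIPE

    [SEQUENCES]
    PIPE  inspect  repair

    [PRIORITIES]
    CREW  1  inspect:DamagedPipes

With this input, _read_shifts would store self.rm.shift['D1'] = (28800, 72000) (hours converted to seconds), _read_entities would register DamagedPipes as a PIPE damage group with the default ('ALL', None, None) rule, and _read_priorities would verify that 'inspect' appears in the PIPE sequence before accepting the priority line.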
diff --git a/modules/systemPerformance/REWET/REWET/Input/Settings.py b/modules/systemPerformance/REWET/REWET/Input/Settings.py
index 5c95467e6..6f99723f2 100644
--- a/modules/systemPerformance/REWET/REWET/Input/Settings.py
+++ b/modules/systemPerformance/REWET/REWET/Input/Settings.py
@@ -1,339 +1,582 @@
-import json
+import json # noqa: CPY001, D100, INP001
+import pickle # noqa: S403
import warnings
-import pickle
+
+import numpy as np
import pandas as pd
-import numpy as np
-list_default_headers = ['Scenario Name', 'Pipe Damage', 'Nodal Damage',
- 'Pump Damage', 'Tank Damage', 'Probability']
+list_default_headers = [
+ 'Scenario Name',
+ 'Pipe Damage',
+ 'Nodal Damage',
+ 'Pump Damage',
+ 'Tank Damage',
+ 'Probability',
+]
+
+acceptable_override_list = ['POINTS']
-acceptable_override_list = ["POINTS"]
-class base():
+class base: # noqa: D101
def __init__(self):
- self.settings={}
-
- def __getitem__(self, key):
+ self.settings = {}
+
+ def __getitem__(self, key): # noqa: D105
return self.settings[key]
-
- def __setitem__(self, key, data):
+
+ def __setitem__(self, key, data): # noqa: D105
self.settings[key] = data
-
-class Process_Settings(base):
+class Process_Settings(base): # noqa: D101
def __init__(self):
super().__init__()
"""
simulation settings
"""
- self.settings['RUN_TIME' ] = (5 + 24 * 1) * 3600 #seconds
- self.settings['minimum_simulation_time' ] = (10 + 24 * 2) * 3600 #seconds
- self.settings['simulation_time_step' ] = 3600 #seconds
- self.settings['number_of_damages' ] = 'multiple' #single or multiple. If single, indicate single damage files. If multiple, indicate "pipe_damage_file_list"
- self.settings['result_directory' ] = "Result" #"Net3//Result"
- self.settings['temp_directory' ] = "RunFiles"
- self.settings['save_time_step' ] = True
- self.settings['last_sequence_termination' ] = True #sina needs to be applied in GUI
- self.settings['node_demand_temination' ] = False #sina needs to be applied in GUI
- self.settings['node_demand_termination_time' ] = 3 * 3600 #sina needs to be applied in GUI
- self.settings['node_demand_termination_ratio'] = 0.95 #sina needs to be applied in GUI
- self.settings['record_restoration_agent_logs'] = True #sina needs to be applied in GUI
- self.settings['record_damage_table_logs' ] = True #sina needs to be applied in GUI
-
-
+ self.settings['RUN_TIME'] = (5 + 24 * 1) * 3600 # seconds
+ self.settings['minimum_simulation_time'] = (10 + 24 * 2) * 3600 # seconds
+ self.settings['simulation_time_step'] = 3600 # seconds
+ self.settings['number_of_damages'] = (
+ 'multiple' # single or multiple. If single, indicate single damage files. If multiple, indicate "pipe_damage_file_list"
+ )
+ self.settings['result_directory'] = 'Result' # "Net3//Result"
+ self.settings['temp_directory'] = 'RunFiles'
+ self.settings['save_time_step'] = True
+ self.settings['last_sequence_termination'] = (
+ True # sina needs to be applied in GUI
+ )
+ self.settings['node_demand_temination'] = (
+ False # sina needs to be applied in GUI
+ )
+ self.settings['node_demand_termination_time'] = (
+ 3 * 3600
+ ) # sina needs to be applied in GUI
+ self.settings['node_demand_termination_ratio'] = (
+ 0.95 # sina needs to be applied in GUI
+ )
+ self.settings['record_restoration_agent_logs'] = (
+ True # sina needs to be applied in GUI
+ )
+ self.settings['record_damage_table_logs'] = (
+ True # sina needs to be applied in GUI
+ )
+
"""
Hydraulic settings
"""
- self.settings['WN_INP' ] = "Example/net3.inp" #'giraffe386-4-1.inp' #"Anytown.inp"#'giraffe386-4-1.inp' #"Net3/net3.inp"
- self.settings['demand_ratio' ] = 1
- self.settings['solver' ] = 'ModifiedEPANETV2.2' # sina needs to be implemented
- #self.settings['hydraulic_time_step'] = 3600
- self.settings['solver_type' ] = 'ModifiedEPANETV2.2'
-
+ self.settings['WN_INP'] = (
+ 'Example/net3.inp' # 'giraffe386-4-1.inp' #"Anytown.inp"#'giraffe386-4-1.inp' #"Net3/net3.inp"
+ )
+ self.settings['demand_ratio'] = 1
+ self.settings['solver'] = (
+ 'ModifiedEPANETV2.2' # sina needs to be implemented
+ )
+ # self.settings['hydraulic_time_step'] = 3600
+ self.settings['solver_type'] = 'ModifiedEPANETV2.2'
+
"""
Damage settings
"""
- self.settings['pipe_damage_file_list' ] = "Example/example_list.xlsx"#"Nafiseh Damage Data/9_final_akhar/list_1_final.xlsx" #"preprocess/list2-3.xlsx"#"preprocess/list2-3.xlsx" #"list_akhar_with_prob_pgv_epicenter_1.xlsx"#"preprocess/list2-3.xlsx" #"Net3/list.xlsx" #"preprocess/list2-3.xlsx" #"list_W147_6.xlsx" #'Nafiseh Damage Data/list.xlsx'
- self.settings['pipe_damage_file_directory'] = "Example\Damages" #'Nafiseh Damage Data/9_final_akhar'#"" #'Net3' #'Nafiseh Damage Data/out'"X:\\Sina Naeimi\\anytown_damage\\"
- self.settings['pump_damage_relative_time' ] = True #needs to be implemented in the code
- self.settings['tank_damage_relative_time' ] = True #needs to be implemented in teh code
-
+ self.settings['pipe_damage_file_list'] = (
+ 'Example/example_list.xlsx' # "Nafiseh Damage Data/9_final_akhar/list_1_final.xlsx" #"preprocess/list2-3.xlsx"#"preprocess/list2-3.xlsx" #"list_akhar_with_prob_pgv_epicenter_1.xlsx"#"preprocess/list2-3.xlsx" #"Net3/list.xlsx" #"preprocess/list2-3.xlsx" #"list_W147_6.xlsx" #'Nafiseh Damage Data/list.xlsx'
+ )
+ self.settings['pipe_damage_file_directory'] = (
+ r'Example\Damages' # 'Nafiseh Damage Data/9_final_akhar'#"" #'Net3' #'Nafiseh Damage Data/out'"X:\\Sina Naeimi\\anytown_damage\\"
+ )
+ self.settings['pump_damage_relative_time'] = (
+ True # needs to be implemented in the code
+ )
+ self.settings['tank_damage_relative_time'] = (
+ True # needs to be implemented in the code
+ )
+
"""
Restoration settings
"""
- self.settings['Restoration_on' ] = True
- self.settings['minimum_job_time'] = 3600 # sina needs to be implemented
-
-
+ self.settings['Restoration_on'] = True
+ self.settings['minimum_job_time'] = 3600 # sina needs to be implemented
+
"""
None GUI settings
"""
- #self.settings['job_assign_time_limit' ]=None # time in seconds or None
- self.settings['maximun_worker_idle_time' ] = 60
- self.settings['number_of_proccessor' ] = 1
-
- self.settings['dmg_rst_data_save' ] = True
- self.settings['Parameter_override' ] = True #'starter/settings.xlsx' #this is for settings sensitivity analysis
- self.settings['mpi_resume' ] = True #ignores the scenarios that are done
- self.settings['ignore_empty_damage' ] = False
- self.settings['result_details' ] = 'extended'
- self.settings['negative_node_elmination' ] = True
- self.settings['nne_flow_limit' ] = 0.5
- self.settings['nne_pressure_limit' ] = -5
- self.settings['Virtual_node' ] = True
- self.settings['damage_node_model' ] = 'equal_diameter_emitter' #"equal_diameter_reservoir"
-
- self.settings['limit_result_file_size' ] = -1 #in Mb. 0 means no limit
-
-
-class Scenario_Settings(base):
+ # self.settings['job_assign_time_limit' ]=None # time in seconds or None
+ self.settings['maximun_worker_idle_time'] = 60
+ self.settings['number_of_proccessor'] = 1
+
+ self.settings['dmg_rst_data_save'] = True
+ self.settings['Parameter_override'] = (
+ True # 'starter/settings.xlsx' #this is for settings sensitivity analysis
+ )
+ self.settings['mpi_resume'] = True # ignores the scenarios that are done
+ self.settings['ignore_empty_damage'] = False
+ self.settings['result_details'] = 'extended'
+ self.settings['negative_node_elmination'] = True
+ self.settings['nne_flow_limit'] = 0.5
+ self.settings['nne_pressure_limit'] = -5
+ self.settings['Virtual_node'] = True
+ self.settings['damage_node_model'] = (
+ 'equal_diameter_emitter' # "equal_diameter_reservoir"
+ )
+
+ self.settings['limit_result_file_size'] = -1 # in Mb. 0 means no limit
+
+
+class Scenario_Settings(base): # noqa: D101
def __init__(self):
super().__init__()
"""
Hydraulic settings
"""
- self.settings['minimum_pressure' ] = 8
- self.settings['required_pressure' ] = 25
- self.settings['pressure_exponent' ] = 0.75 #sina add it to teh code and GUI
- #Sina also take care of the nodal damage formula in terms of exponents [Urgent]
- self.settings['hydraulic_time_step' ] = 900
-
+ self.settings['minimum_pressure'] = 8
+ self.settings['required_pressure'] = 25
+ self.settings['pressure_exponent'] = 0.75 # sina add it to the code and GUI
+ # Sina also take care of the nodal damage formula in terms of exponents [Urgent]
+ self.settings['hydraulic_time_step'] = 900
+
"""
Damage settings
"""
- self.settings['Pipe_damage_input_method' ] = 'excel' #excel or pickle
- self.settings['pipe_damage_model' ] = {"CI":{"alpha":-0.0038, "beta":0.1096, "gamma":0.0196, "a":2, "b":1 }, "DI":{"alpha":-0.0079, "beta":0.0805, "gamma":0.0411, "a":2, "b":1 }, "STL":{"alpha":-0.009, "beta":0.0808, "gamma":0.0472, "a":2, "b":1 }, "CON":{"alpha":-0.0083, "beta":0.0738, "gamma":0.0431, "a":2, "b":1 }, "RS":{"alpha":-0.0088, "beta":0.0886, "gamma":0.0459, "a":2, "b":1 } } # sina needs to be implemented
- self.settings['default_pipe_damage_model' ] = {"alpha":-0.0038, "beta":0.1096, "gamma":0.0196, "a":2, "b":1 }
- self.settings['node_damage_model' ] = {'x':0.9012,'a':0.0036, 'aa':1, 'b':0, 'bb':0, 'c':-0.877, 'cc':1, 'd':0, 'dd':0, 'e':0.0248, 'ee1':1, 'ee2':1, 'f':0, 'ff1':0, 'ff2':0, "damage_node_model": "equal_diameter_emitter"} # sina needs to be implemented
- #Sina, there is no x in the GUI. Impelment it
+ self.settings['Pipe_damage_input_method'] = 'excel' # excel or pickle
+ self.settings['pipe_damage_model'] = {
+ 'CI': {
+ 'alpha': -0.0038,
+ 'beta': 0.1096,
+ 'gamma': 0.0196,
+ 'a': 2,
+ 'b': 1,
+ },
+ 'DI': {
+ 'alpha': -0.0079,
+ 'beta': 0.0805,
+ 'gamma': 0.0411,
+ 'a': 2,
+ 'b': 1,
+ },
+ 'STL': {
+ 'alpha': -0.009,
+ 'beta': 0.0808,
+ 'gamma': 0.0472,
+ 'a': 2,
+ 'b': 1,
+ },
+ 'CON': {
+ 'alpha': -0.0083,
+ 'beta': 0.0738,
+ 'gamma': 0.0431,
+ 'a': 2,
+ 'b': 1,
+ },
+ 'RS': {
+ 'alpha': -0.0088,
+ 'beta': 0.0886,
+ 'gamma': 0.0459,
+ 'a': 2,
+ 'b': 1,
+ },
+ } # sina needs to be implemented
+ self.settings['default_pipe_damage_model'] = {
+ 'alpha': -0.0038,
+ 'beta': 0.1096,
+ 'gamma': 0.0196,
+ 'a': 2,
+ 'b': 1,
+ }
+ self.settings['node_damage_model'] = {
+ 'x': 0.9012,
+ 'a': 0.0036,
+ 'aa': 1,
+ 'b': 0,
+ 'bb': 0,
+ 'c': -0.877,
+ 'cc': 1,
+ 'd': 0,
+ 'dd': 0,
+ 'e': 0.0248,
+ 'ee1': 1,
+ 'ee2': 1,
+ 'f': 0,
+ 'ff1': 0,
+ 'ff2': 0,
+ 'damage_node_model': 'equal_diameter_emitter',
+ } # sina needs to be implemented
+ # Sina, there is no x in the GUI. Implement it
"""
Restoration settings
- """
- self.settings['Restoraion_policy_type' ] = 'script' # sina needs to be implemented in the code
- self.settings['Restortion_config_file' ] = "Example/exampe_config.txt"#"config-ghab-az-tayid.txt" #'X:\\Sina Naeimi\\anytown_damage\\config-base_base.txt'#'config-base_hydsig.txt' #'Net3/config.txt' #
- self.settings['pipe_damage_discovery_model' ] = {'method': 'leak_based', 'leak_amount': 0.025, 'leak_time': 3600*12} # sina needs to be implemented
- self.settings['node_damage_discovery_model' ] = {'method': 'leak_based', 'leak_amount': 0.001, 'leak_time': 3600*12} # sina needs to be implemented
- self.settings['pump_damage_discovery_model' ] = {'method': 'time_based', 'time_discovery_ratio': pd.Series([1], index = [3600*n for n in [0]])} # sina needs to be implemented
- self.settings['tank_damage_discovery_model' ] = {'method': 'time_based', 'time_discovery_ratio': pd.Series([1], index = [3600*n for n in [0]])} # sina needs to be implemented
- self.settings['Gnode_damage_discovery_model' ] = {'method': 'time_based', 'time_discovery_ratio': pd.Series([1], index = [3600*n for n in [0]])}# Sina GNode Discovery is not here! Must be appleid in teh GUI
- self.settings['reservoir_damage_discovery_model'] = {'method': 'time_based', 'time_discovery_ratio': pd.Series([1], index = [3600*n for n in [0]])}# Sina GNode Discovery is not here! Must be appleid in teh GUI
- self.settings['crew_out_of_zone_travel' ] = False # sina needs to be implemented in the code
- self.settings['crew_travel_speed' ] = 16.66666 # unit: ft/s approximately 18 km/h. The unit is [coordinate unit] per seconds. # sina needs to be implemented in the code
-
- self.settings['equavalant_damage_diameter' ] = 1
- self.settings['pipe_damage_diameter_factor' ] = 1
-
-
-class Settings():
+ """ # noqa: W291
+ self.settings['Restoraion_policy_type'] = (
+ 'script' # sina needs to be implemented in the code
+ )
+ self.settings['Restortion_config_file'] = (
+ 'Example/exampe_config.txt' # "config-ghab-az-tayid.txt" #'X:\\Sina Naeimi\\anytown_damage\\config-base_base.txt'#'config-base_hydsig.txt' #'Net3/config.txt' #
+ )
+ self.settings['pipe_damage_discovery_model'] = {
+ 'method': 'leak_based',
+ 'leak_amount': 0.025,
+ 'leak_time': 3600 * 12,
+ } # sina needs to be implemented
+ self.settings['node_damage_discovery_model'] = {
+ 'method': 'leak_based',
+ 'leak_amount': 0.001,
+ 'leak_time': 3600 * 12,
+ } # sina needs to be implemented
+ self.settings['pump_damage_discovery_model'] = {
+ 'method': 'time_based',
+ 'time_discovery_ratio': pd.Series([1], index=[3600 * n for n in [0]]),
+ } # sina needs to be implemented
+ self.settings['tank_damage_discovery_model'] = {
+ 'method': 'time_based',
+ 'time_discovery_ratio': pd.Series([1], index=[3600 * n for n in [0]]),
+ } # sina needs to be implemented
+ self.settings['Gnode_damage_discovery_model'] = {
+ 'method': 'time_based',
+ 'time_discovery_ratio': pd.Series([1], index=[3600 * n for n in [0]]),
+        }  # Sina GNode Discovery is not here! Must be applied in the GUI
+ self.settings['reservoir_damage_discovery_model'] = {
+ 'method': 'time_based',
+ 'time_discovery_ratio': pd.Series([1], index=[3600 * n for n in [0]]),
+        }  # Sina GNode Discovery is not here! Must be applied in the GUI
+ self.settings['crew_out_of_zone_travel'] = (
+ False # sina needs to be implemented in the code
+ )
+ self.settings['crew_travel_speed'] = (
+            16.66666  # unit: ft/s, approximately 18 km/h. The unit is [coordinate unit] per second. # sina needs to be implemented in the code
+ )
+
+ self.settings['equavalant_damage_diameter'] = 1
+ self.settings['pipe_damage_diameter_factor'] = 1
+
+
+class Settings: # noqa: D101
def __init__(self):
- self.process = Process_Settings()
- self.scenario = Scenario_Settings()
+ self.process = Process_Settings()
+ self.scenario = Scenario_Settings()
self.overrides = {}
-
- def __setitem__(self, key, data):
+
+ def __setitem__(self, key, data): # noqa: D105
if key in self.process.settings:
self.process.settings[key] = data
elif key in self.scenario.settings:
self.scenario.settings[key] = data
else:
- raise AttributeError(repr(key) + " is not in the Settings.")
-
- def __getitem__(self, key):
-
+ raise AttributeError(repr(key) + ' is not in the Settings.')
+
+ def __getitem__(self, key): # noqa: D105
if key in self.process.settings:
- if self.scenario != None:
+ if self.scenario != None: # noqa: E711
if key in self.scenario.settings:
- raise ValueError(str(key) + " in the both process and scneario settings.")
-
+ raise ValueError(
+ str(key) + ' in both the process and scenario settings.'
+ )
+
return self.process.settings[key]
- elif self.scenario != None:
+ elif self.scenario != None: # noqa: RET505, E711
if key in self.scenario.settings:
return self.scenario.settings[key]
-
- raise ValueError(str(key) + " NOT in either process and scneario settings.")
-
- def __contains__(self, key):
+
+        raise ValueError(str(key) + ' NOT in either the process or scenario settings.')
+
+ def __contains__(self, key): # noqa: D105
if key in self.process.settings:
return True
- elif self.scenario != None:
+ elif self.scenario != None: # noqa: RET505, E711
if key in self.scenario.settings:
return True
-
+
return False
-
- def importJsonSettings(self, json_file_path):
- """read a settinsg json file and import the data
+
+ def importJsonSettings(self, json_file_path): # noqa: N802
+        """Read a settings JSON file and import the data
Args:
+ ----
json_file_path (path): JSON file path
- """
- with open(json_file_path, "rt") as f:
+
+ """ # noqa: D400
+ with open(json_file_path) as f: # noqa: PLW1514, PTH123
settings_data = json.load(f)
-
+
if not isinstance(settings_data, dict):
- raise ValueError("Wrong JSON file type for teh settings. The settings JSOn file must be an OBJECT file type.")
-
+ raise ValueError( # noqa: DOC501, TRY003, TRY004
+                'Wrong JSON file type for the settings. The settings JSON file must be an OBJECT file type.'  # noqa: EM101
+ )
+
for key, val in settings_data.items():
if key not in self:
- raise ValueError(f"REWET settinsg does not have \"{key}\" as a settings key")
-
- print(key, val)
- if key in ["pipe_damage_discovery_model", "node_damage_discovery_model",\
- "pump_damage_discovery_model", "tank_damage_discovery_model"]\
- and val["method"] == 'time_based':
- val["time_discovery_ratio"] = pd.Series([line[0] for line in val["time_discovery_ratio"]], index = [line[1] for line in val["time_discovery_ratio"]])
-
+ raise ValueError( # noqa: DOC501, TRY003
+                    f'REWET settings does not have "{key}" as a settings key'  # noqa: EM102
+ )
+
+ print(key, val) # noqa: T201
+ if (
+ key
+ in [ # noqa: PLR6201
+ 'pipe_damage_discovery_model',
+ 'node_damage_discovery_model',
+ 'pump_damage_discovery_model',
+ 'tank_damage_discovery_model',
+ ]
+ and val['method'] == 'time_based'
+ ):
+ val['time_discovery_ratio'] = pd.Series(
+ [line[0] for line in val['time_discovery_ratio']],
+ index=[line[1] for line in val['time_discovery_ratio']],
+ )
+
self[key] = val
-
- def importProject(self, project_addr):
- with open(project_addr, 'rb') as f:
- project = pickle.load(f)
- #for k in project.project_settings.scenario.settings:
- #new_value = project.project_settings.scenario[k]
- #old_value = self.scenario[k]
- #print(k + ": " + repr(new_value) + " --> " + repr(old_value) + "\n"+"-----" + repr(type(new_value)) )
- self.process = project.project_settings.process
+
+ def importProject(self, project_addr): # noqa: N802, D102
+ with open(project_addr, 'rb') as f: # noqa: PTH123
+ project = pickle.load(f) # noqa: S301
+ # for k in project.project_settings.scenario.settings:
+ # new_value = project.project_settings.scenario[k]
+ # old_value = self.scenario[k]
+ # print(k + ": " + repr(new_value) + " --> " + repr(old_value) + "\n"+"-----" + repr(type(new_value)) )
+ self.process = project.project_settings.process
self.scenario = project.project_settings.scenario
-
- def initializeScenarioSettings(self, scenario_index):
- if self.process['Parameter_override'] == False:
+
+ def initializeScenarioSettings(self, scenario_index): # noqa: C901, N802, D102
+ if self.process['Parameter_override'] == False: # noqa: E712
return
-
- list_file = pd.read_excel(self['pipe_damage_file_list'])
- columns = list_file.columns
+
+ list_file = pd.read_excel(self['pipe_damage_file_list'])
+ columns = list_file.columns
parametrs_list = columns.drop(list_default_headers)
-
+
for parameter_name in parametrs_list:
- #to prevent unnamed collumns apear in the warnings
- if "Unnamed" in parameter_name:
+ # to prevent unnamed columns appearing in the warnings
+ if 'Unnamed' in parameter_name:
continue
override_value = list_file.loc[scenario_index, parameter_name]
- scenario_name = list_file.loc[scenario_index, 'Scenario Name']
-
-
+ scenario_name = list_file.loc[scenario_index, 'Scenario Name']
+
if parameter_name in self:
-
try:
- if type(override_value) !=str and np.isnan(override_value):
- warnings.warn("REWET Input ERROR in scenario: " + repr(scenario_name) + "\n" + "The value for " + repr(parameter_name) + " is empty. The override is IGNORED!")
+ if type(override_value) != str and np.isnan(override_value): # noqa: E721
+ warnings.warn( # noqa: B028
+ 'REWET Input ERROR in scenario: '
+ + repr(scenario_name)
+ + '\n'
+ + 'The value for '
+ + repr(parameter_name)
+ + ' is empty. The override is IGNORED!'
+ )
continue
- except:
+ except: # noqa: S110, E722
pass
-
- if override_value == "":
- warnings.warn("REWET Input ERROR in scenario: " + repr(scenario_name) + "\n" + "The value for " + repr(parameter_name) + " is empty. The override is IGNORED!")
+
+ if override_value == '': # noqa: PLC1901
+ warnings.warn( # noqa: B028
+ 'REWET Input ERROR in scenario: '
+ + repr(scenario_name)
+ + '\n'
+ + 'The value for '
+ + repr(parameter_name)
+ + ' is empty. The override is IGNORED!'
+ )
continue
-
+
self[parameter_name] = override_value
else:
- splited_parameter_name = parameter_name.split(":")
+ splited_parameter_name = parameter_name.split(':')
number_of_words = len(splited_parameter_name)
-
+
override_key1 = splited_parameter_name[0]
override_key2 = splited_parameter_name[1]
-
- if number_of_words != 2:
- raise ValueError("REWET Input ERROR in scenario: " + repr(scenario_name) + "\n" + "The parameter " + repr(parameter_name) + " is not an acceptable parameter")
-
- if override_key1 == None:
- raise ValueError("REWET Input ERROR in scenario: " + repr(scenario_name) + "\n" + repr(parameter_name) + " is not an acceptable parameter")
-
+
+ if number_of_words != 2: # noqa: PLR2004
+ raise ValueError(
+ 'REWET Input ERROR in scenario: '
+ + repr(scenario_name)
+ + '\n'
+ + 'The parameter '
+ + repr(parameter_name)
+ + ' is not an acceptable parameter'
+ )
+
+ if override_key1 == None: # noqa: E711
+ raise ValueError(
+ 'REWET Input ERROR in scenario: '
+ + repr(scenario_name)
+ + '\n'
+ + repr(parameter_name)
+ + ' is not an acceptable parameter'
+ )
+
if override_key1.upper() not in acceptable_override_list:
- warnings.warn("REWET Input ERROR in scenario: " + repr(scenario_name) + "\n" + repr(override_key1) + " is not an acceptable parameter. The override is IGNORED!" + "\n" + "Acceptable override parameters are "+ repr(acceptable_override_list))
+ warnings.warn( # noqa: B028
+ 'REWET Input ERROR in scenario: '
+ + repr(scenario_name)
+ + '\n'
+ + repr(override_key1)
+ + ' is not an acceptable parameter. The override is IGNORED!'
+ + '\n'
+ + 'Acceptable override parameters are '
+ + repr(acceptable_override_list)
+ )
continue
try:
- if type(override_value) !=str and np.isnan(override_value):
- warnings.warn("REWET Input ERROR in scenario: " + repr(scenario_name) + "\n" + "The value for " + repr(parameter_name) + " is empty. The override is IGNORED!")
+ if type(override_value) != str and np.isnan(override_value): # noqa: E721
+ warnings.warn( # noqa: B028
+ 'REWET Input ERROR in scenario: '
+ + repr(scenario_name)
+ + '\n'
+ + 'The value for '
+ + repr(parameter_name)
+ + ' is empty. The override is IGNORED!'
+ )
continue
- except:
+ except: # noqa: S110, E722
pass
-
- if override_value == "":
- warnings.warn("REWET Input ERROR in scenario: " + repr(scenario_name) + "\n" + "The value for " + repr(parameter_name) + " is empty. The override is IGNORED!")
+
+ if override_value == '': # noqa: PLC1901
+ warnings.warn( # noqa: B028
+ 'REWET Input ERROR in scenario: '
+ + repr(scenario_name)
+ + '\n'
+ + 'The value for '
+ + repr(parameter_name)
+ + ' is empty. The override is IGNORED!'
+ )
continue
-
- if override_key1.upper() == "POINTS":
- if override_key2 == None:
- raise ValueError("REWET Input ERROR in scenario: " + repr(scenario_name) + "\n" + "You should provide a Points Group Name for POINTS override key. WARNING: If POINTS Group Name missmatch, it may not take any efffect" + "\n")
-
- point_list = self.getOverridePointsList(override_value, scenario_name)
+
+ if override_key1.upper() == 'POINTS':
+ if override_key2 == None: # noqa: E711
+ raise ValueError(
+ 'REWET Input ERROR in scenario: '
+ + repr(scenario_name)
+ + '\n'
+                            + 'You should provide a Points Group Name for the POINTS override key. WARNING: If the POINTS Group Name does not match, the override may not take any effect'
+ + '\n'
+ )
+
+ point_list = self.getOverridePointsList(
+ override_value, scenario_name
+ )
if len(point_list) > 0:
-
- if "POINTS" in self.overrides:
- self.overrides["POINTS"][override_key2] = point_list
+ if 'POINTS' in self.overrides:
+ self.overrides['POINTS'][override_key2] = point_list
else:
- self.overrides["POINTS"] = {override_key2:point_list}
+ self.overrides['POINTS'] = {override_key2: point_list}
else:
- warnings.warn("REWET Input ERROR in scenario: " + repr(scenario_name) + "\n" + "The Override Point Group has no valid input; thus, the override is ignored!")
-
-# =============================================================================
-# elif override_key1.upper() == "CREWSPEED":
-# if override_key2 == None:
-# raise ValueError("REWET Input ERROR in scenario: " + repr(scenario_name) + "\n" + "You should provide a Crew Speed for CREWSPEED override key." + "\n")
-#
-# crew_speed = self.getOverrideCrewSpeed(override_value, scenario_name)
-# if crew_speed != None:
-# self.overrides["CREWSPEED"] = crew_speed
-# else:
-# warnings.warn("REWET Input ERROR in scenario: " + repr(scenario_name) + "\n" + "SPEEDCREW is not valid; thus, the override is ignored!")
-# =============================================================================
+ warnings.warn( # noqa: B028
+ 'REWET Input ERROR in scenario: '
+ + repr(scenario_name)
+ + '\n'
+ + 'The Override Point Group has no valid input; thus, the override is ignored!'
+ )
+
+ # =============================================================================
+ # elif override_key1.upper() == "CREWSPEED":
+ # if override_key2 == None:
+ # raise ValueError("REWET Input ERROR in scenario: " + repr(scenario_name) + "\n" + "You should provide a Crew Speed for CREWSPEED override key." + "\n")
+ #
+ # crew_speed = self.getOverrideCrewSpeed(override_value, scenario_name)
+ # if crew_speed != None:
+ # self.overrides["CREWSPEED"] = crew_speed
+ # else:
+ # warnings.warn("REWET Input ERROR in scenario: " + repr(scenario_name) + "\n" + "SPEEDCREW is not valid; thus, the override is ignored!")
+ # =============================================================================
else:
- raise ValueError("Unknown overrise key")
+                    raise ValueError('Unknown override key')  # noqa: EM101, TRY003
-
- def getOverridePointsList(self, points_list_str, scenario_name):
+ def getOverridePointsList(self, points_list_str, scenario_name): # noqa: D102, N802, PLR6301
point_list = []
-
+
points_list_str = points_list_str.strip()
points_list_str = points_list_str.split()
-
+
for word in points_list_str:
- if ":" not in word:
- warnings.warn("REWET Input ERROR in scenario: " + repr(scenario_name) + "\n" + word + " must be two numbeers speerated by one ':' showing X:Y coordinate. "+ word + " is ignored!")
+ if ':' not in word:
+ warnings.warn( # noqa: B028
+ 'REWET Input ERROR in scenario: '
+ + repr(scenario_name)
+ + '\n'
+ + word
+                    + " must be two numbers separated by one ':' showing X:Y coordinate. "
+ + word
+ + ' is ignored!'
+ )
continue
-
- splited_word = word.split(":")
-
- if len(splited_word) > 2:
- warnings.warn("REWET Input ERROR in scenario: " + repr(scenario_name) + "\n" + word + " must be two numbeers speerated by ONE ':' showing X:Y coordinate. "+ word + " is ignored!")
+
+ splited_word = word.split(':')
+
+ if len(splited_word) > 2: # noqa: PLR2004
+ warnings.warn( # noqa: B028
+ 'REWET Input ERROR in scenario: '
+ + repr(scenario_name)
+ + '\n'
+ + word
+                    + " must be two numbers separated by ONE ':' showing X:Y coordinate. "
+ + word
+ + ' is ignored!'
+ )
continue
-
+
x_coord = splited_word[0]
y_coord = splited_word[1]
-
+
try:
x_coord = float(x_coord)
- except:
- warnings.warn("REWET Input ERROR in scenario: " + repr(scenario_name) + "\n" + x_coord + " in " + word +" must be a number speerated by ONE ':' showing X:Y coordinate. "+ word + " is ignored!")
+ except: # noqa: E722
+ warnings.warn( # noqa: B028
+ 'REWET Input ERROR in scenario: '
+ + repr(scenario_name)
+ + '\n'
+ + x_coord
+ + ' in '
+ + word
+                    + " must be a number separated by ONE ':' showing X:Y coordinate. "
+ + word
+ + ' is ignored!'
+ )
continue
-
+
try:
y_coord = float(y_coord)
- except:
- warnings.warn("REWET Input ERROR in scenario: " + repr(scenario_name) + "\n" + y_coord + " in " + word +" must be a number speerated by ONE ':' showing X:Y coordinate. "+ word + " is ignored!")
+ except: # noqa: E722
+ warnings.warn( # noqa: B028
+ 'REWET Input ERROR in scenario: '
+ + repr(scenario_name)
+ + '\n'
+ + y_coord
+ + ' in '
+ + word
+                    + " must be a number separated by ONE ':' showing X:Y coordinate. "
+ + word
+ + ' is ignored!'
+ )
continue
-
- point_list.append((x_coord, y_coord) )
-
+
+ point_list.append((x_coord, y_coord))
+
return point_list
-
- def getOverrideCrewSpeed(self, crew_speed_str, scenario_name):
-
+
+ def getOverrideCrewSpeed(self, crew_speed_str, scenario_name): # noqa: D102, N802, PLR6301
crew_speed_str = crew_speed_str.strip()
-
+
if len(crew_speed_str.split()) > 1:
- warnings.warn("REWET Input ERROR in scenario: " + repr(scenario_name) + "\n" + crew_speed_str + " must be ONE single number. Space detected!")
+ warnings.warn( # noqa: B028
+ 'REWET Input ERROR in scenario: '
+ + repr(scenario_name)
+ + '\n'
+ + crew_speed_str
+ + ' must be ONE single number. Space detected!'
+ )
return None
-
-
+
try:
crew_speed = float(crew_speed_str)
- except:
- warnings.warn("REWET Input ERROR in scenario: " + repr(scenario_name) + "\n" + crew_speed +" must be a number.")
+ except: # noqa: E722
+ warnings.warn( # noqa: B028
+ 'REWET Input ERROR in scenario: '
+ + repr(scenario_name)
+ + '\n'
+                + crew_speed_str
+ + ' must be a number.'
+ )
return None
-
+
return crew_speed
-
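
One behavior in the reformatted importJsonSettings worth spelling out: for the four *_damage_discovery_model keys whose method is 'time_based', the JSON value of time_discovery_ratio is expected to be a list of [ratio, time] pairs, which the importer converts into a pandas Series indexed by time. A hedged sketch of a matching settings snippet and that conversion (the key name and values are illustrative only):

import json

import pandas as pd

# Illustrative settings snippet; only the [ratio, time] pair structure matters.
settings_json = """
{
    "pump_damage_discovery_model": {
        "method": "time_based",
        "time_discovery_ratio": [[0.5, 0], [1.0, 86400]]
    }
}
"""

data = json.loads(settings_json)
model = data['pump_damage_discovery_model']
if model['method'] == 'time_based':
    pairs = model['time_discovery_ratio']
    # Element 0 of each pair is the discovery ratio, element 1 is the time in seconds.
    model['time_discovery_ratio'] = pd.Series(
        [pair[0] for pair in pairs],
        index=[pair[1] for pair in pairs],
    )

print(model['time_discovery_ratio'])  # 0 -> 0.5, 86400 -> 1.0
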
diff --git a/modules/systemPerformance/REWET/REWET/Main_GUI.py b/modules/systemPerformance/REWET/REWET/Main_GUI.py
index b61be572d..202985143 100644
--- a/modules/systemPerformance/REWET/REWET/Main_GUI.py
+++ b/modules/systemPerformance/REWET/REWET/Main_GUI.py
@@ -1,14 +1,14 @@
-# -*- coding: utf-8 -*-
-"""
-Created on Thu Nov 10 21:46:04 2022
+"""Created on Thu Nov 10 21:46:04 2022
@author: snaeimi
-"""
+""" # noqa: CPY001, D400, N999
+
import os
import sys
-if __name__ == "__main__":
- from GUI.Opening_Designer import Project
+
+if __name__ == '__main__':
from GUI.Opening_Designer import Opening_Designer
+
opening_designer = Opening_Designer()
- print(os.getpid())
- sys.exit(opening_designer.run() )
\ No newline at end of file
+ print(os.getpid()) # noqa: T201
+ sys.exit(opening_designer.run())
diff --git a/modules/systemPerformance/REWET/REWET/Output/Crew_Report.py b/modules/systemPerformance/REWET/REWET/Output/Crew_Report.py
index ef65f7035..aaa12b950 100644
--- a/modules/systemPerformance/REWET/REWET/Output/Crew_Report.py
+++ b/modules/systemPerformance/REWET/REWET/Output/Crew_Report.py
@@ -1,147 +1,195 @@
-# -*- coding: utf-8 -*-
-"""
-Created on Thu Oct 27 15:45:10 2022
+"""Created on Thu Oct 27 15:45:10 2022
@author: snaeimi
-"""
+""" # noqa: CPY001, D400, INP001
import pandas as pd
-class Crew_Report():
+
+class Crew_Report: # noqa: D101
def __init__(self):
pass
-
- def getCrewForTime(self, scn_name, time):
+
+ def getCrewForTime(self, scn_name, time): # noqa: N802, D102
self.loadScneariodata(scn_name)
reg = self.registry[scn_name]
-
- crew_table = reg.restoration_log_book._agent_state_log_book
+
+ crew_table = reg.restoration_log_book._agent_state_log_book # noqa: SLF001
crew_table = crew_table.set_index('Time')
crew_table = crew_table.loc[time]
- return crew_table
-
- def getCrewTableAt(self, scn_name, time, crew_type_name, crew_zone=None):
+ return crew_table # noqa: RET504
+
+ def getCrewTableAt(self, scn_name, time, crew_type_name, crew_zone=None): # noqa: N802, D102
self.loadScneariodata(scn_name)
reg = self.registry[scn_name]
- #crew_type = self.getCrewForTime(scn_name, time)
+ # crew_type = self.getCrewForTime(scn_name, time)
crew_table = reg.restoration_log_book.crew_history[time]
- typed_crew_table = crew_table[crew_table['type']==crew_type_name]
-
- if type(crew_zone) != type(None):
- if type(crew_zone) == str:
- typed_crew_table = typed_crew_table[typed_crew_table['group']==crew_zone]
- elif type(crew_zone) == list:
+ typed_crew_table = crew_table[crew_table['type'] == crew_type_name]
+
+ if crew_zone is not None:
+ if type(crew_zone) == str: # noqa: E721
+ typed_crew_table = typed_crew_table[
+ typed_crew_table['group'] == crew_zone
+ ]
+ elif type(crew_zone) == list: # noqa: E721
i = 0
for crew_zone_value in crew_zone:
if i == 0:
- res = typed_crew_table['group']==crew_zone_value
+ res = typed_crew_table['group'] == crew_zone_value
else:
- res = (typed_crew_table['group']==crew_zone_value) | res
- i += 1
+ res = (typed_crew_table['group'] == crew_zone_value) | res
+ i += 1 # noqa: SIM113
typed_crew_table = typed_crew_table[res]
else:
- raise ValueError("Unknown crew_zone type: " + repr(type(crew_zone) ) )
-
+ raise ValueError('Unknown crew_zone type: ' + repr(type(crew_zone)))
+
return typed_crew_table
-
- def getCrewAvailabilityThroughTime(self, scn_name, crew_type_name, crew_zone=None):
+
+ def getCrewAvailabilityThroughTime( # noqa: N802, D102
+ self,
+ scn_name,
+ crew_type_name,
+ crew_zone=None,
+ ):
self.loadScneariodata(scn_name)
reg = self.registry[scn_name]
crew_table = reg.restoration_log_book.crew_history
time_list = list(crew_table.keys())
time_list.sort()
-
+
crew_number = pd.Series()
-
+
for time in time_list:
- crew_table_time = self.getCrewTableAt(scn_name, time, crew_type_name, crew_zone)
- total_number = len(crew_table_time)
- available_number_time = crew_table_time[(crew_table_time['available']==True) | (crew_table_time['active']==True)]
+ crew_table_time = self.getCrewTableAt(
+ scn_name, time, crew_type_name, crew_zone
+ )
+ total_number = len(crew_table_time)
+ available_number_time = crew_table_time[
+ (crew_table_time['available'] == True) # noqa: E712
+ | (crew_table_time['active'] == True) # noqa: E712
+ ]
crew_number.loc[time] = len(available_number_time)
-
+
return total_number, crew_number
-
- def getCrewOnShiftThroughTime(self, scn_name, crew_type_name, crew_zone=None, not_on_shift=False):
+
+ def getCrewOnShiftThroughTime( # noqa: N802, D102
+ self,
+ scn_name,
+ crew_type_name,
+ crew_zone=None,
+ not_on_shift=False, # noqa: FBT002
+ ):
self.loadScneariodata(scn_name)
reg = self.registry[scn_name]
crew_table = reg.restoration_log_book.crew_history
time_list = list(crew_table.keys())
time_list.sort()
-
+
crew_number = pd.Series()
-
+
for time in time_list:
- crew_table_time = self.getCrewTableAt(scn_name, time, crew_type_name, crew_zone)
- total_number = len(crew_table_time)
+ crew_table_time = self.getCrewTableAt(
+ scn_name, time, crew_type_name, crew_zone
+ )
+ total_number = len(crew_table_time)
- if not_on_shift==False:
- available_number_time = crew_table_time[crew_table_time['active']==True]
- elif not_on_shift==True:
- available_number_time = crew_table_time[crew_table_time['active']==False]
+ if not_on_shift == False: # noqa: E712
+ available_number_time = crew_table_time[
+ crew_table_time['active'] == True # noqa: E712
+ ]
+ elif not_on_shift == True: # noqa: E712
+ available_number_time = crew_table_time[
+ crew_table_time['active'] == False # noqa: E712
+ ]
else:
- raise ValueError("Unnown not on shift" + repr(not_on_shift))
+                raise ValueError('Unknown not_on_shift: ' + repr(not_on_shift))
crew_number.loc[time] = len(available_number_time)
-
+
return total_number, crew_number
-
- def getCrewWorkingThroughTime(self, scn_name, crew_type_name, crew_zone=None, not_on_working=False):
+
+ def getCrewWorkingThroughTime( # noqa: N802, D102
+ self,
+ scn_name,
+ crew_type_name,
+ crew_zone=None,
+ not_on_working=False, # noqa: FBT002
+ ):
self.loadScneariodata(scn_name)
reg = self.registry[scn_name]
crew_table = reg.restoration_log_book.crew_history
time_list = list(crew_table.keys())
time_list.sort()
-
+
crew_number = pd.Series()
-
+
for time in time_list:
- crew_table_time = self.getCrewTableAt(scn_name, time, crew_type_name, crew_zone)
- total_number = len(crew_table_time)
- #available_number_time = crew_table_time[crew_table_time['available']==True]
- available_number_time = crew_table_time[crew_table_time['active']==True]
- if not_on_working==False:
- available_number_time = available_number_time[available_number_time['ready']==False]
- elif not_on_working==True:
- available_number_time = available_number_time[available_number_time['ready']==True]
+ crew_table_time = self.getCrewTableAt(
+ scn_name, time, crew_type_name, crew_zone
+ )
+ total_number = len(crew_table_time)
+ # available_number_time = crew_table_time[crew_table_time['available']==True]
+ available_number_time = crew_table_time[
+ crew_table_time['active'] == True # noqa: E712
+ ]
+ if not_on_working == False: # noqa: E712
+ available_number_time = available_number_time[
+ available_number_time['ready'] == False # noqa: E712
+ ]
+ elif not_on_working == True: # noqa: E712
+ available_number_time = available_number_time[
+ available_number_time['ready'] == True # noqa: E712
+ ]
else:
- raise ValueError("Unnown not on shift" + repr(not_on_working))
+                raise ValueError('Unknown not_on_working: ' + repr(not_on_working))
crew_number.loc[time] = len(available_number_time)
-
+
return total_number, crew_number
-
-
- def getCrewCompleteStatusReport(self, scn_name, crew_type_name, crew_zone=None):
+
+ def getCrewCompleteStatusReport(self, scn_name, crew_type_name, crew_zone=None): # noqa: N802, D102
self.loadScneariodata(scn_name)
reg = self.registry[scn_name]
crew_table = reg.restoration_log_book.crew_history
time_list = list(crew_table.keys())
time_list.sort()
-
- crew_report = pd.DataFrame(index=time_list, columns=["Reported", "Not-reported", "Total_not-reported", "on-duty", "off-duty", "idle", "busy"], data=0)
-
+
+ crew_report = pd.DataFrame(
+ index=time_list,
+ columns=[
+ 'Reported',
+ 'Not-reported',
+ 'Total_not-reported',
+ 'on-duty',
+ 'off-duty',
+ 'idle',
+ 'busy',
+ ],
+ data=0,
+ )
+
for time in time_list:
- crew_table_time = self.getCrewTableAt(scn_name, time, crew_type_name, crew_zone)
+ crew_table_time = self.getCrewTableAt(
+ scn_name, time, crew_type_name, crew_zone
+ )
- for agent_index , agent_row in crew_table_time.iterrows():
-
+ for agent_index, agent_row in crew_table_time.iterrows(): # noqa: B007
if agent_row['data'].isOnShift(time):
crew_report.loc[time, 'on-duty'] += 1
else:
crew_report.loc[time, 'off-duty'] += 1
-
- #iAvailable = agent_row['available']
+
+ # iAvailable = agent_row['available']
if agent_row['available'] or agent_row['active']:
crew_report.loc[time, 'Reported'] += 1
- if agent_row["active"] and agent_row["ready"]:
+ if agent_row['active'] and agent_row['ready']:
crew_report.loc[time, 'idle'] += 1
- elif agent_row["active"] and agent_row["ready"]==False:
+ elif agent_row['active'] and agent_row['ready'] == False: # noqa: E712
crew_report.loc[time, 'busy'] += 1
else:
crew_report.loc[time, 'Total_not-reported'] += 1
if agent_row['data'].isOnShift(time):
crew_report.loc[time, 'Not-reported'] += 1
- if agent_row['active'] == True:
- print("time=" + str(time))
- print(agent_row)
+ if agent_row['active'] == True: # noqa: E712
+ print('time=' + str(time)) # noqa: T201
+ print(agent_row) # noqa: T201
-
- return crew_report
\ No newline at end of file
+ return crew_report
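
To make the bucketing in getCrewCompleteStatusReport easier to follow: at each snapshot an agent counts as 'Reported' when it is available or active, and a reported agent is 'idle' when it is active and ready, or 'busy' when it is active but not ready; everyone else falls under 'Total_not-reported'. A small self-contained sketch of that classification on a toy crew table (the column names mirror the diff, the data is made up):

import pandas as pd

# Toy crew snapshot; 'available', 'active' and 'ready' mirror the report columns.
crew = pd.DataFrame(
    {
        'available': [True, False, False],
        'active': [False, True, False],
        'ready': [False, False, False],
    },
    index=['agent_1', 'agent_2', 'agent_3'],
)

report = {'Reported': 0, 'idle': 0, 'busy': 0, 'Total_not-reported': 0}
for _, agent in crew.iterrows():
    if agent['available'] or agent['active']:
        report['Reported'] += 1
        if agent['active'] and agent['ready']:
            report['idle'] += 1
        elif agent['active'] and not agent['ready']:
            report['busy'] += 1
    else:
        report['Total_not-reported'] += 1

print(report)  # {'Reported': 2, 'idle': 0, 'busy': 1, 'Total_not-reported': 1}
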
diff --git a/modules/systemPerformance/REWET/REWET/Output/Curve.py b/modules/systemPerformance/REWET/REWET/Output/Curve.py
index a499d1520..879bbe6e4 100644
--- a/modules/systemPerformance/REWET/REWET/Output/Curve.py
+++ b/modules/systemPerformance/REWET/REWET/Output/Curve.py
@@ -1,297 +1,376 @@
-# -*- coding: utf-8 -*-
-"""
-Created on Tue Oct 25 14:30:01 2022
+"""Created on Tue Oct 25 14:30:01 2022
@author: snaeimi
-"""
+""" # noqa: CPY001, D400, INP001
import pandas as pd
+
from .Helper import hhelper
-class Curve():
+
+class Curve: # noqa: D101
def __init__():
pass
-
- def getPipeStatusByAction(self, scn_name ,action):
+
+ def getPipeStatusByAction(self, scn_name, action): # noqa: N802, D102
self.loadScneariodata(scn_name)
reg = self.registry[scn_name]
- sequence = reg.retoration_data['sequence']["PIPE"]
+ sequence = reg.retoration_data['sequence']['PIPE']
if action not in sequence:
- raise ValueError("the action is not in the sequence: "+str(action))
- pipe_damage_table_time_series = reg._pipe_damage_table_time_series
+            raise ValueError('The action is not in the sequence: ' + str(action))
+ pipe_damage_table_time_series = reg._pipe_damage_table_time_series # noqa: SLF001
time_action_done = {}
- for time in pipe_damage_table_time_series:
+ for time in pipe_damage_table_time_series:
current_pipe_damage_table = pipe_damage_table_time_series[time]
- current_action_damage = current_pipe_damage_table[action]
- number_of_all = len(current_action_damage)
+ current_action_damage = current_pipe_damage_table[action]
+ number_of_all = len(current_action_damage)
if number_of_all < 1:
continue
- current_action_damage = current_action_damage[~current_action_damage.isna()]
- current_action_damage_true = current_action_damage[current_action_damage==True]
- unique_done_orginal_element_list = (current_pipe_damage_table.loc[current_action_damage_true.index]["Orginal_element"]).unique().tolist()
- current_pipe_damage_table = current_pipe_damage_table.set_index("Orginal_element")
- current_action_damage = current_pipe_damage_table.loc[unique_done_orginal_element_list]
-
- number_of_done = len(current_action_damage)
- time_action_done[time] = number_of_done / number_of_all
-
+ current_action_damage = current_action_damage[
+ ~current_action_damage.isna()
+ ]
+ current_action_damage_true = current_action_damage[
+ current_action_damage == True # noqa: E712
+ ]
+ unique_done_orginal_element_list = (
+ (
+ current_pipe_damage_table.loc[current_action_damage_true.index][
+ 'Orginal_element'
+ ]
+ )
+ .unique()
+ .tolist()
+ )
+ current_pipe_damage_table = current_pipe_damage_table.set_index(
+ 'Orginal_element'
+ )
+ current_action_damage = current_pipe_damage_table.loc[
+ unique_done_orginal_element_list
+ ]
+
+ number_of_done = len(current_action_damage)
+ time_action_done[time] = number_of_done / number_of_all
+
return pd.Series(time_action_done)
-
- def getNodeStatusByAction(self, scn_name, action):
+
+ def getNodeStatusByAction(self, scn_name, action): # noqa: N802, D102
self.loadScneariodata(scn_name)
reg = self.registry[scn_name]
- sequence = reg.retoration_data['sequence']["DISTNODE"]
+ sequence = reg.retoration_data['sequence']['DISTNODE']
if action not in sequence:
- raise ValueError("the action is not in the sequence: "+str(action))
- node_damage_table_time_series = reg._node_damage_table_time_series
+            raise ValueError('The action is not in the sequence: ' + str(action))
+ node_damage_table_time_series = reg._node_damage_table_time_series # noqa: SLF001
time_action_done = {}
for time in node_damage_table_time_series:
current_node_damage_table = node_damage_table_time_series[time]
- current_action_damage = current_node_damage_table[action]
- number_of_all = len(current_action_damage)
+ current_action_damage = current_node_damage_table[action]
+ number_of_all = len(current_action_damage)
if number_of_all < 1:
continue
- current_action_damage = current_action_damage[~current_action_damage.isna()]
- current_action_damage_true = current_action_damage[current_action_damage==True]
- unique_done_orginal_element_list = (current_node_damage_table.loc[current_action_damage_true.index]["Orginal_element"]).unique().tolist()
- current_node_damage_table = current_node_damage_table.set_index("Orginal_element")
- current_action_damage = current_node_damage_table.loc[unique_done_orginal_element_list]
-
- number_of_done = len(current_action_damage)
- time_action_done[time] = number_of_done / number_of_all
-
+ current_action_damage = current_action_damage[
+ ~current_action_damage.isna()
+ ]
+ current_action_damage_true = current_action_damage[
+ current_action_damage == True # noqa: E712
+ ]
+ unique_done_orginal_element_list = (
+ (
+ current_node_damage_table.loc[current_action_damage_true.index][
+ 'Orginal_element'
+ ]
+ )
+ .unique()
+ .tolist()
+ )
+ current_node_damage_table = current_node_damage_table.set_index(
+ 'Orginal_element'
+ )
+ current_action_damage = current_node_damage_table.loc[
+ unique_done_orginal_element_list
+ ]
+
+ number_of_done = len(current_action_damage)
+ time_action_done[time] = number_of_done / number_of_all
+
return pd.Series(time_action_done)
-
- def getPumpStatus(self, scn_name):
+
+ def getPumpStatus(self, scn_name): # noqa: N802, D102
self.loadScneariodata(scn_name)
- res = self.data[scn_name]
- reg = self.registry[scn_name]
- time_list = res.node['demand'].index
- pump_damage = reg.damage.damaged_pumps
+ res = self.data[scn_name]
+ reg = self.registry[scn_name]
+ time_list = res.node['demand'].index
+ pump_damage = reg.damage.damaged_pumps
pump_damage_time = pump_damage.index
-
+
time_action_done = {}
for time in time_list:
- done_list = pump_damage_time[pump_damage_time>=time]
+ done_list = pump_damage_time[pump_damage_time >= time]
time_action_done[time] = len(done_list) / len(pump_damage_time)
-
+
return pd.Series(time_action_done)
-
- def getTankStatus(self, scn_name):
+
+ def getTankStatus(self, scn_name): # noqa: N802, D102
self.loadScneariodata(scn_name)
reg = self.registry[scn_name]
time_list = reg.time_list
tank_damage = reg.damage.tamk_damage
tank_damage_time = tank_damage.index
-
+
time_action_done = {}
for time in time_list:
- done_list = tank_damage_time[tank_damage_time>=time]
+ done_list = tank_damage_time[tank_damage_time >= time]
time_action_done[time] = len(done_list) / len(tank_damage_time)
-
+
return pd.Series(time_action_done)
-
- def getInputWaterFlowCurve(self, scn_name, tank_name_list=None, reservoir_name_list = None, mode='all'):
+
+ def getInputWaterFlowCurve( # noqa: C901, N802, D102
+ self,
+ scn_name,
+ tank_name_list=None,
+ reservoir_name_list=None,
+ mode='all',
+ ):
self.loadScneariodata(scn_name)
res = self.data[scn_name]
-
- if tank_name_list==None:
+
+ if tank_name_list == None: # noqa: E711
tank_name_list = self.wn.tank_name_list
-
+
not_known_tank = set(tank_name_list) - set(self.wn.tank_name_list)
if len(not_known_tank) > 0:
- raise ValueError("The folliwng tanks in the input are not known in the water network" + repr(tank_name_list))
-
- if reservoir_name_list==None:
+ raise ValueError(
+ 'The following tanks in the input are not known in the water network'
+ + repr(tank_name_list)
+ )
+
+ if reservoir_name_list == None: # noqa: E711
reservoir_name_list = self.wn.reservoir_name_list
-
- not_known_reservoir = set(reservoir_name_list) - set(self.wn.reservoir_name_list)
+
+ not_known_reservoir = set(reservoir_name_list) - set(
+ self.wn.reservoir_name_list
+ )
if len(not_known_reservoir) > 0:
- raise ValueError("The folliwng reservoirs in the input are not known in the water network" + repr(reservoir_name_list))
-
+ raise ValueError(
+ 'The following reservoirs in the input are not known in the water network'
+ + repr(reservoir_name_list)
+ )
+
outbound_flow = pd.Series(0, index=res.node['demand'].index)
- inbound_flow = pd.Series(0, index=res.node['demand'].index)
- #inbound_flow = 0
- #outbound_flow = 0
-
- waterFlow = None
-
+ inbound_flow = pd.Series(0, index=res.node['demand'].index)
+ # inbound_flow = 0
+ # outbound_flow = 0
+
+ waterFlow = None # noqa: N806
+
for tank_name in tank_name_list:
if tank_name in res.node['demand'].columns:
flow_in_time = res.node['demand'][tank_name]
else:
continue
for time, flow in flow_in_time.iteritems():
- #print(flow)
+ # print(flow)
if flow > 0:
outbound_flow.loc[time] += -1 * flow
elif flow < 0:
- inbound_flow.loc[time] += -1 * flow
-
- if mode == "all":
- waterFlow = outbound_flow + inbound_flow
+ inbound_flow.loc[time] += -1 * flow
+
+ if mode == 'all':
+ waterFlow = outbound_flow + inbound_flow # noqa: N806
elif mode == 'out':
- waterFlow = outbound_flow
+ waterFlow = outbound_flow # noqa: N806
elif mode == 'in':
- waterFlow = inbound_flow
+ waterFlow = inbound_flow # noqa: N806
else:
- raise ValueError("Unnown mode: "+repr(mode))
-
+            raise ValueError('Unknown mode: ' + repr(mode))
+
for reservoir_name in reservoir_name_list:
if reservoir_name in res.node['demand'].columns:
flow_in_time = res.node['demand'][reservoir_name]
else:
continue
for time, flow in flow_in_time.iteritems():
- #print(flow)
+ # print(flow)
if flow > 0:
outbound_flow.loc[time] += -1 * flow
elif flow < 0:
- inbound_flow.loc[time] += -1 * flow
-
- if mode == "all":
- waterFlow = outbound_flow + inbound_flow
+ inbound_flow.loc[time] += -1 * flow
+
+ if mode == 'all':
+ waterFlow = outbound_flow + inbound_flow # noqa: N806
elif mode == 'out':
- waterFlow = outbound_flow
+ waterFlow = outbound_flow # noqa: N806
elif mode == 'in':
- waterFlow = inbound_flow
+ waterFlow = inbound_flow # noqa: N806
else:
- raise ValueError("Unnown mode: "+repr(mode))
-
+            raise ValueError('Unknown mode: ' + repr(mode))
+
return waterFlow
-
- def getOveralDemandSatisfied(self, scn_name, pure=False):
+
+ def getOveralDemandSatisfied(self, scn_name, pure=False): # noqa: FBT002, N802, D102
self.loadScneariodata(scn_name)
- if pure == False:
+ if pure == False: # noqa: E712
demand_node_name_list = self.demand_node_name_list
else:
demand_node_name_list = []
for node_name in self.wn.junction_name_list:
- if self.wn.get_node(node_name).demand_timeseries_list[0].base_value > 0:
+ if (
+ self.wn.get_node(node_name).demand_timeseries_list[0].base_value
+ > 0
+ ):
demand_node_name_list.append(node_name)
-
- sat_node_demands = self.data[scn_name].node['demand'].filter(demand_node_name_list)
- #sat_node_demands = sat_node_demands.applymap(hhelper)
+
+ sat_node_demands = (
+ self.data[scn_name].node['demand'].filter(demand_node_name_list)
+ )
+ # sat_node_demands = sat_node_demands.applymap(hhelper)
s = sat_node_demands.sum(axis=1)
-
- return s
- def getWaterLeakingFromNode(self, scn_name):
+ return s # noqa: RET504
+
+ def getWaterLeakingFromNode(self, scn_name): # noqa: N802, D102
self.loadScneariodata(scn_name)
res = self.data[scn_name]
sum_amount = 0
try:
res = res.node['leak']
sum_amount = res.sum(axis=1)
- except:
+ except: # noqa: E722
sum_amount = 0
- return sum_amount
-
- def getWaterLeakingFromPipe(self, scn_name, mode='all'):
+ return sum_amount
+
+ def getWaterLeakingFromPipe(self, scn_name, mode='all'): # noqa: N802, D102
self.loadScneariodata(scn_name)
reg = self.registry[scn_name]
res = self.data[scn_name]
-
- damage_location_list = reg._pipe_damage_table
-
- if mode == 'leak':
- damage_location_list = damage_location_list[damage_location_list['damage_type'] == mode]
- elif mode == 'break':
- damage_location_list = damage_location_list[damage_location_list['damage_type'] == mode]
+
+ damage_location_list = reg._pipe_damage_table # noqa: SLF001
+
+ if mode == 'leak' or mode == 'break': # noqa: PLR1714
+ damage_location_list = damage_location_list[
+ damage_location_list['damage_type'] == mode
+ ]
elif mode == 'all':
pass
else:
- raise ValueError("The mode is not recognized: " + repr(mode) )
-
-
- break_damage_table = damage_location_list[damage_location_list['damage_type']=='break']
- pipe_B_list = self.registry[scn_name]._pipe_break_history.loc[break_damage_table.index, 'Node_B']
-
+ raise ValueError('The mode is not recognized: ' + repr(mode))
+
+ break_damage_table = damage_location_list[
+ damage_location_list['damage_type'] == 'break'
+ ]
+ pipe_B_list = self.registry[scn_name]._pipe_break_history.loc[ # noqa: SLF001, N806
+ break_damage_table.index, 'Node_B'
+ ]
+
damage_location_list = damage_location_list.index
wanted_nodes = pipe_B_list.to_list()
wanted_nodes.extend(damage_location_list.to_list())
-
- available_nodes = set( res.node['demand'].columns )
- wanted_nodes = set( wanted_nodes )
-
+
+ available_nodes = set(res.node['demand'].columns)
+ wanted_nodes = set(wanted_nodes)
+
not_available_nodes = wanted_nodes - available_nodes
- available_nodes = wanted_nodes - not_available_nodes
-
- leak_from_pipe = res.node['demand'][available_nodes]
-
- leak = leak_from_pipe < -0.1
+ available_nodes = wanted_nodes - not_available_nodes
+
+ leak_from_pipe = res.node['demand'][available_nodes]
+
+ leak = leak_from_pipe < -0.1 # noqa: PLR2004
if leak.any().any():
- raise ValueError("There is negative leak")
-
+ raise ValueError('There is negative leak') # noqa: EM101, TRY003
+
return leak_from_pipe.sum(axis=1)
-
- def getSystemServiceabilityIndexCurve(self, scn_name, iPopulation="No"):
+
+ def getSystemServiceabilityIndexCurve(self, scn_name, iPopulation='No'): # noqa: N802, N803, D102
s4 = self.getRequiredDemandForAllNodesandtime(scn_name)
- sat_node_demands = self.data[scn_name].node['demand'].filter(self.demand_node_name_list)
+ sat_node_demands = (
+ self.data[scn_name].node['demand'].filter(self.demand_node_name_list)
+ )
sat_node_demands = sat_node_demands.applymap(hhelper)
-
- if iPopulation=="Yes":
- s4 = s4 * self._population_data
- sat_node_demands = sat_node_demands * self._population_data
- elif iPopulation=="No":
+
+ if iPopulation == 'Yes':
+ s4 = s4 * self._population_data # noqa: PLR6104
+ sat_node_demands = sat_node_demands * self._population_data # noqa: PLR6104
+ elif iPopulation == 'No':
pass
else:
- raise ValueError("unknown iPopulation value: "+repr(iPopulation))
-
- s=sat_node_demands.sum(axis=1)/s4.sum(axis=1)
-
+ raise ValueError('unknown iPopulation value: ' + repr(iPopulation))
+
+ s = sat_node_demands.sum(axis=1) / s4.sum(axis=1)
+
for time_index, val in s.iteritems():
if val < 0:
- val = 0
+ val = 0 # noqa: PLW2901
elif val > 1:
- val = 1
+ val = 1 # noqa: PLW2901
s.loc[time_index] = val
-
+
return s
-
- def getBSCIndexPopulation_4(self, scn_name, bsc="DL", iPopulation=False, ratio= False, consider_leak=False, leak_ratio=1):
- if bsc == "DL":
- return self.getDLIndexPopulation_4(scn_name,
- iPopulation=iPopulation,
- ratio= ratio,
- consider_leak=consider_leak,
- leak_ratio=leak_ratio)
- elif bsc == "QN":
- return self.getQNIndexPopulation_4(scn_name,
- iPopulation=iPopulation,
- ratio=ratio,
- consider_leak=consider_leak,
- leak_ratio=leak_ratio)
+
+ def getBSCIndexPopulation_4( # noqa: N802, D102
+ self,
+ scn_name,
+ bsc='DL',
+ iPopulation=False, # noqa: FBT002, N803
+ ratio=False, # noqa: FBT002
+ consider_leak=False, # noqa: FBT002
+ leak_ratio=1,
+ ):
+ if bsc == 'DL':
+ return self.getDLIndexPopulation_4(
+ scn_name,
+ iPopulation=iPopulation,
+ ratio=ratio,
+ consider_leak=consider_leak,
+ leak_ratio=leak_ratio,
+ )
+ elif bsc == 'QN': # noqa: RET505
+ return self.getQNIndexPopulation_4(
+ scn_name,
+ iPopulation=iPopulation,
+ ratio=ratio,
+ consider_leak=consider_leak,
+ leak_ratio=leak_ratio,
+ )
else:
- raise ValueError(f"BSC input is not recognizable: {bsc}")
-
- def getDLIndexPopulation_4(self, scn_name , iPopulation="No",ratio= False, consider_leak=False, leak_ratio=1):
- if type(leak_ratio) != float:
+ raise ValueError(f'BSC input is not recognizable: {bsc}') # noqa: EM102, TRY003
+
+ def getDLIndexPopulation_4( # noqa: C901, N802, D102
+ self,
+ scn_name,
+ iPopulation='No', # noqa: N803
+ ratio=False, # noqa: FBT002
+ consider_leak=False, # noqa: FBT002
+ leak_ratio=1,
+ ):
+ if type(leak_ratio) != float: # noqa: E721
leak_ratio = float(leak_ratio)
-
+
self.loadScneariodata(scn_name)
res = self.data[scn_name]
-
- if type(self._population_data) == type(None) or iPopulation==False:
+
+ if type(self._population_data) == type(None) or iPopulation == False: # noqa: E712, E721
pop = pd.Series(index=self.demand_node_name_list, data=1)
- elif type(self._population_data) == type(None) and iPopulation==True:
- raise ValueError("Population data is not available")
+ elif type(self._population_data) == type(None) and iPopulation == True: # noqa: E712, E721
+ raise ValueError('Population data is not available') # noqa: EM101, TRY003
else:
pop = self._population_data
-
+
total_pop = pop.sum()
-
+
result = []
refined_result = res.node['demand'][self.demand_node_name_list]
demands = self.getRequiredDemandForAllNodesandtime(scn_name)
demands = demands[self.demand_node_name_list]
-
- union_ = set(res.node['leak'].columns).union(set(self.demand_node_name_list)) -(set(res.node['leak'].columns) - set(self.demand_node_name_list)) - (set(self.demand_node_name_list) - set(res.node['leak'].columns))
+
+ union_ = (
+ set(res.node['leak'].columns).union(set(self.demand_node_name_list))
+ - (set(res.node['leak'].columns) - set(self.demand_node_name_list))
+ - (set(self.demand_node_name_list) - set(res.node['leak'].columns))
+ )
union_ = list(union_)
- leak_res = res.node['leak'][union_]
-
+ leak_res = res.node['leak'][union_]
+
leak_data = []
-
+
if consider_leak:
for name in leak_res:
demand_name = demands[name]
@@ -299,54 +378,65 @@ def getDLIndexPopulation_4(self, scn_name , iPopulation="No",ratio= False, consi
time_list = set(leak_res[name].dropna().index)
time_list_drop = set(demands.index) - time_list
demand_name = demand_name.drop(time_list_drop)
- leak_more_than_criteria = leak_res_name >= leak_ratio * demand_name
+ leak_more_than_criteria = leak_res_name >= leak_ratio * demand_name
if leak_more_than_criteria.any(0):
leak_data.append(leak_more_than_criteria)
leak_data = pd.DataFrame(leak_data).transpose()
-
+
s = refined_result > demands * 0.1
for name in s:
if name in leak_data.columns:
leak_data_name = leak_data[name]
for time in leak_data_name.index:
- if leak_data_name.loc[time] == True:
+ if leak_data_name.loc[time] == True: # noqa: E712
s.loc[time, name] = False
-
- s = s * pop[s.columns]
-
- if ratio==False:
+
+ s = s * pop[s.columns] # noqa: PLR6104
+
+ if ratio == False: # noqa: E712
total_pop = 1
else:
total_pop = pop.sum()
-
- result = s.sum(axis=1)/total_pop
-
- return result
-
- def getQNIndexPopulation_4(self, scn_name, iPopulation=False, ratio=False, consider_leak=False, leak_ratio=0.75):
- if type(leak_ratio) != float:
+
+ result = s.sum(axis=1) / total_pop
+
+ return result # noqa: RET504
+
+ def getQNIndexPopulation_4( # noqa: C901, N802, D102
+ self,
+ scn_name,
+ iPopulation=False, # noqa: FBT002, N803
+ ratio=False, # noqa: FBT002
+ consider_leak=False, # noqa: FBT002
+ leak_ratio=0.75,
+ ):
+ if type(leak_ratio) != float: # noqa: E721
leak_ratio = float(leak_ratio)
-
+
self.loadScneariodata(scn_name)
res = self.data[scn_name]
- if type(self._population_data) == type(None) or iPopulation==False:
+ if type(self._population_data) == type(None) or iPopulation == False: # noqa: E712, E721
pop = pd.Series(index=self.demand_node_name_list, data=1)
- elif type(self._population_data) == type(None) and iPopulation==True:
- raise ValueError("Population data is not available")
+ elif type(self._population_data) == type(None) and iPopulation == True: # noqa: E712, E721
+ raise ValueError('Population data is not available') # noqa: EM101, TRY003
else:
pop = self._population_data
-
+
result = []
- union_ = set(res.node['leak'].columns).union(set(self.demand_node_name_list)) -(set(res.node['leak'].columns) - set(self.demand_node_name_list)) - (set(self.demand_node_name_list) - set(res.node['leak'].columns))
+ union_ = (
+ set(res.node['leak'].columns).union(set(self.demand_node_name_list))
+ - (set(res.node['leak'].columns) - set(self.demand_node_name_list))
+ - (set(self.demand_node_name_list) - set(res.node['leak'].columns))
+ )
union_ = list(union_)
refined_result = res.node['demand'][self.demand_node_name_list]
demands = self.getRequiredDemandForAllNodesandtime(scn_name)
- demands = demands[self.demand_node_name_list]
-
- leak_res = res.node['leak'][union_]
+ demands = demands[self.demand_node_name_list]
+
+ leak_res = res.node['leak'][union_]
leak_data = []
- if consider_leak:
+ if consider_leak:
for name in leak_res:
demand_name = demands[name]
leak_res_name = leak_res[name].dropna()
@@ -357,66 +447,106 @@ def getQNIndexPopulation_4(self, scn_name, iPopulation=False, ratio=False, consi
if leak_more_than_criteria.any(0):
leak_data.append(leak_more_than_criteria)
leak_data = pd.DataFrame(leak_data).transpose()
-
- s = refined_result + 0.00000001 >= demands #sina bug
-
+
+ s = refined_result + 0.00000001 >= demands # sina bug
+
for name in s:
if name in leak_data.columns:
leak_data_name = leak_data[name]
for time in leak_data_name.index:
- if leak_data_name.loc[time] == True:
+ if leak_data_name.loc[time] == True: # noqa: E712
s.loc[time, name] = False
- s = s * pop[s.columns]
- if ratio==False:
+ s = s * pop[s.columns] # noqa: PLR6104
+ if ratio == False: # noqa: E712
total_pop = 1
else:
total_pop = pop.sum()
-
+
result = s.sum(axis=1) / total_pop
-
- return result
-
- def getQuantityExceedanceCurve(self, iPopulation="No", ratio=False, consider_leak=False, leak_ratio=0.75, result_type='mean', daily=False, min_time=0, max_time=999999999999999):
- all_scenarios_qn_data = self.AS_getQNIndexPopulation(iPopulation="No", ratio=ratio, consider_leak=consider_leak, leak_ratio=leak_ratio)
- exceedance_curve = self.PR_getCurveExcedence(all_scenarios_qn_data, result_type=result_type, daily=daily, min_time=min_time, max_time=max_time)
- columns_list = exceedance_curve.columns.to_list()
-
+
+ return result # noqa: RET504
+
+ def getQuantityExceedanceCurve( # noqa: N802, D102
+ self,
+ iPopulation='No', # noqa: ARG002, N803
+ ratio=False, # noqa: FBT002
+ consider_leak=False, # noqa: FBT002
+ leak_ratio=0.75,
+ result_type='mean',
+ daily=False, # noqa: FBT002
+ min_time=0,
+ max_time=999999999999999,
+ ):
+ all_scenarios_qn_data = self.AS_getQNIndexPopulation(
+ iPopulation='No',
+ ratio=ratio,
+ consider_leak=consider_leak,
+ leak_ratio=leak_ratio,
+ )
+ exceedance_curve = self.PR_getCurveExcedence(
+ all_scenarios_qn_data,
+ result_type=result_type,
+ daily=daily,
+ min_time=min_time,
+ max_time=max_time,
+ )
+ columns_list = exceedance_curve.columns.to_list()
+
dmg_vs_ep_list = {}
-
+
for i in range(0, len(columns_list), 2):
dmg_col = columns_list[i]
- ep_col = columns_list[i+1]
+ ep_col = columns_list[i + 1]
dmg_vs_ep_list[dmg_col] = ep_col
res = {}
-
- for dmg_col in dmg_vs_ep_list:
- ep_col = dmg_vs_ep_list[dmg_col]
+
+ for dmg_col in dmg_vs_ep_list: # noqa: PLC0206
+ ep_col = dmg_vs_ep_list[dmg_col]
exceedance_curve_temp = exceedance_curve.set_index(dmg_col)
exceedance_curve_temp = exceedance_curve_temp[ep_col]
- res[dmg_col] = exceedance_curve_temp
-
+ res[dmg_col] = exceedance_curve_temp
+
return res
-
- def getDeliveryExceedanceCurve(self, iPopulation="No", ratio=False, consider_leak=False, leak_ratio=0.75, result_type='mean', daily=False, min_time=0, max_time=999999999999999):
- all_scenarios_qn_data = self.AS_getDLIndexPopulation(iPopulation=iPopulation, ratio=ratio, consider_leak=consider_leak, leak_ratio=leak_ratio)
- exceedance_curve = self.PR_getCurveExcedence(all_scenarios_qn_data, result_type=result_type, daily=daily, min_time=min_time, max_time=max_time)
- columns_list = exceedance_curve.columns.to_list()
-
+
+ def getDeliveryExceedanceCurve( # noqa: N802, D102
+ self,
+ iPopulation='No', # noqa: N803
+ ratio=False, # noqa: FBT002
+ consider_leak=False, # noqa: FBT002
+ leak_ratio=0.75,
+ result_type='mean',
+ daily=False, # noqa: FBT002
+ min_time=0,
+ max_time=999999999999999,
+ ):
+ all_scenarios_qn_data = self.AS_getDLIndexPopulation(
+ iPopulation=iPopulation,
+ ratio=ratio,
+ consider_leak=consider_leak,
+ leak_ratio=leak_ratio,
+ )
+ exceedance_curve = self.PR_getCurveExcedence(
+ all_scenarios_qn_data,
+ result_type=result_type,
+ daily=daily,
+ min_time=min_time,
+ max_time=max_time,
+ )
+ columns_list = exceedance_curve.columns.to_list()
+
dmg_vs_ep_list = {}
-
+
for i in range(0, len(columns_list), 2):
dmg_col = columns_list[i]
- ep_col = columns_list[i+1]
+ ep_col = columns_list[i + 1]
dmg_vs_ep_list[dmg_col] = ep_col
res = {}
-
- for dmg_col in dmg_vs_ep_list:
- ep_col = dmg_vs_ep_list[dmg_col]
+
+ for dmg_col in dmg_vs_ep_list: # noqa: PLC0206
+ ep_col = dmg_vs_ep_list[dmg_col]
exceedance_curve_temp = exceedance_curve.set_index(dmg_col)
exceedance_curve_temp = exceedance_curve_temp[ep_col]
- res[dmg_col] = exceedance_curve_temp
-
+ res[dmg_col] = exceedance_curve_temp
+
return res
-
-
\ No newline at end of file
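For orientation, a minimal usage sketch of these curve helpers, assuming a `Project_Result` object (which mixes in this `Curve` class) has already been built; the project path and scenario name below are placeholders, not files from this repository:

```python
# Hedged sketch -- 'project.prj' and 'SCN_1' are placeholder names.
from Result_Project import Project_Result

pr = Project_Result('project.prj')  # loads the project and its scenario list
qn_index = pr.getQNIndexPopulation_4(
    'SCN_1', iPopulation=False, ratio=True, consider_leak=True, leak_ratio=0.75
)  # per-time-step fraction of nodes (population-weighted) with adequate quantity
curves = pr.getDeliveryExceedanceCurve(ratio=True)  # {damage column: exceedance series}
for dmg_col, ep_series in curves.items():
    print(dmg_col, ep_series.head())
```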
diff --git a/modules/systemPerformance/REWET/REWET/Output/GUI_Curve_API.py b/modules/systemPerformance/REWET/REWET/Output/GUI_Curve_API.py
index bdae65279..0416960ad 100644
--- a/modules/systemPerformance/REWET/REWET/Output/GUI_Curve_API.py
+++ b/modules/systemPerformance/REWET/REWET/Output/GUI_Curve_API.py
@@ -1,16 +1,15 @@
-# -*- coding: utf-8 -*-
-"""
-Created on Thu Nov 10 19:12:46 2022
+"""Created on Thu Nov 10 19:12:46 2022
@author: snaeimi
-"""
+""" # noqa: CPY001, D400, INP001
+
+import pickle # noqa: S403
-import pickle
-def getDummyDataForQNExeedanceCurve():
- with open('qn_data.pkl', 'rb') as f:
- dummy_data = pickle.load(f)
- return dummy_data
+def getDummyDataForQNExeedanceCurve(): # noqa: N802, D103
+ with open('qn_data.pkl', 'rb') as f: # noqa: PTH123
+ dummy_data = pickle.load(f) # noqa: S301
+ return dummy_data # noqa: RET504
"""
@@ -19,14 +18,14 @@ def getDummyDataForQNExeedanceCurve():
"""
-
"""
This section is for multi-scenarios (probabilistic) results.
"""
-def QNExceedanceCurve(pr, percentage_list, time_type, time_shift=0):
- """
- gets Project Result object, and returns Exceedance probability and Quantity
+
+
+def QNExceedanceCurve(pr, percentage_list, time_type, time_shift=0): # noqa: N802
+ """Gets Project Result object, and returns Exceedance probability and Quantity
outage for the given percentages. Caution: the current version only accept
one percentage in the percentage list.
@@ -43,22 +42,26 @@ def QNExceedanceCurve(pr, percentage_list, time_type, time_shift=0):
-------
None.
- """
+ """ # noqa: D205, D401
data = getDummyDataForQNExeedanceCurve()
if len(percentage_list) > 1:
- raise ValueError("the current version only accept one percentage in the percentage list")
-
- if type(time_shift) != int:
- raise ValueError("Time shift must be integer type: "+repr(type(time_shift)) + ".")
-
+ raise ValueError( # noqa: DOC501, TRY003
+            'the current version only accepts one percentage in the percentage list'  # noqa: EM101
+ )
+
+ if type(time_shift) != int: # noqa: E721
+ raise ValueError( # noqa: DOC501
+ 'Time shift must be integer type: ' + repr(type(time_shift)) + '.'
+ )
+
if time_shift < 0:
- raise ValueError("Time shift ust be bigger than or equal to zero.")
-
+        raise ValueError('Time shift must be greater than or equal to zero.')  # noqa: DOC501, EM101, TRY003
+
res = {}
for percentage in percentage_list:
temp_res = pr.PR_getBSCPercentageExcedanceCurce(data, percentage)
-
+
if time_type.lower() == 'seconds':
pass
elif time_type.lower() == 'hour':
@@ -66,14 +69,14 @@ def QNExceedanceCurve(pr, percentage_list, time_type, time_shift=0):
elif time_type.lower() == 'day':
pr.convertTimeSecondToDay(temp_res, 'restore_time', time_shift)
else:
- raise ValueError("Uknown time_type: "+repr(time_type))
-
+            raise ValueError('Unknown time_type: ' + repr(time_type))  # noqa: DOC501
+
res[percentage] = temp_res
return res
-def DLExceedanceCurve(pr, percentage_list, time_type, time_shift=0):
- """
- gets Project Result object, and returns Exceedance probability and Delivery
+
+def DLExceedanceCurve(pr, percentage_list, time_type, time_shift=0): # noqa: N802
+ """Gets Project Result object, and returns Exceedance probability and Delivery
outage for the given percentages. Caution: the current version only accept
one percentage in the percentage list.
@@ -90,22 +93,26 @@ def DLExceedanceCurve(pr, percentage_list, time_type, time_shift=0):
-------
None.
- """
+ """ # noqa: D205, D401
data = getDummyDataForQNExeedanceCurve()
if len(percentage_list) > 1:
- raise ValueError("the current version only accept one percentage in the percentage list")
-
- if type(time_shift) != int:
- raise ValueError("Time shift must be integer type: "+repr(type(time_shift)) + ".")
-
+ raise ValueError( # noqa: DOC501, TRY003
+            'the current version only accepts one percentage in the percentage list'  # noqa: EM101
+ )
+
+ if type(time_shift) != int: # noqa: E721
+ raise ValueError( # noqa: DOC501
+ 'Time shift must be integer type: ' + repr(type(time_shift)) + '.'
+ )
+
if time_shift < 0:
- raise ValueError("Time shift ust be bigger than or equal to zero.")
-
+        raise ValueError('Time shift must be greater than or equal to zero.')  # noqa: DOC501, EM101, TRY003
+
res = {}
for percentage in percentage_list:
temp_res = pr.PR_getBSCPercentageExcedanceCurce(data, percentage)
-
+
if time_type.lower() == 'seconds':
pass
elif time_type.lower() == 'hour':
@@ -113,10 +120,7 @@ def DLExceedanceCurve(pr, percentage_list, time_type, time_shift=0):
elif time_type.lower() == 'day':
pr.convertTimeSecondToDay(temp_res, 'restore_time', time_shift)
else:
- raise ValueError("Uknown time_type: "+repr(time_type))
-
+            raise ValueError('Unknown time_type: ' + repr(time_type))  # noqa: DOC501
+
res[percentage] = temp_res
return res
-
-
-
\ No newline at end of file
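A hedged sketch of calling the two entry points above; `pr` stands for an already-built `Project_Result`, and the 0.75 percentage and the local `qn_data.pkl` dependency are assumptions carried over from the dummy-data helper:

```python
# Sketch only: requires a Project_Result instance `pr` and a local 'qn_data.pkl'.
from GUI_Curve_API import DLExceedanceCurve, QNExceedanceCurve

qn_curves = QNExceedanceCurve(pr, [0.75], time_type='hour')
dl_curves = DLExceedanceCurve(pr, [0.75], time_type='day', time_shift=0)
print(qn_curves[0.75].head())
print(dl_curves[0.75].head())
```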
diff --git a/modules/systemPerformance/REWET/REWET/Output/Helper.py b/modules/systemPerformance/REWET/REWET/Output/Helper.py
index 6cb73eee3..e4686cc17 100644
--- a/modules/systemPerformance/REWET/REWET/Output/Helper.py
+++ b/modules/systemPerformance/REWET/REWET/Output/Helper.py
@@ -1,60 +1,65 @@
-# -*- coding: utf-8 -*-
-"""
-Created on Mon Oct 24 18:10:31 2022
+"""Created on Mon Oct 24 18:10:31 2022
@author: snaeimi
-"""
+""" # noqa: CPY001, D400, INP001
-#import numba
-import pandas as pd
-import numpy as np
-from functools import reduce # Valid in Python 2.6+, required in Python 3
+# import numba
import operator
+from functools import reduce # Valid in Python 2.6+, required in Python 3
+
+import numpy as np
-def hhelper(x):
- if x<0:
+
+def hhelper(x): # noqa: D103
+ if x < 0:
return 0
- else:
+ else: # noqa: RET505
return x
-
-#@numba.jit()
-def EPHelper(prob_mat, old):
- if old==False:#prob_mat = prob_mat.tolist()
- #one_minus_p_list = 1-prob_mat
- one_minus_p_list = [1-p for p in prob_mat]
- pi_one_minus_p_list = [1- reduce(operator.mul, one_minus_p_list[:i+1], 1) for i in range(0, len(one_minus_p_list))]
- #pi_one_minus_p_list = [rr.apply(lambda x: [x[i] * x[1], raw=True)
- return pi_one_minus_p_list
- #pi_one_minus_p_list.iloc[0] = one_minus_p_list.iloc[0]
-
- #return (pd.Series(1.00, index=pi_one_minus_p_list.index) - pi_one_minus_p_list, prob_mat)
- else:
+
+
+# @numba.jit()
+def EPHelper(prob_mat, old): # noqa: N802, D103
+ if old == False: # prob_mat = prob_mat.tolist() # noqa: E712
+ # one_minus_p_list = 1-prob_mat
+ one_minus_p_list = [1 - p for p in prob_mat]
+ pi_one_minus_p_list = [
+ 1 - reduce(operator.mul, one_minus_p_list[: i + 1], 1)
+ for i in range(len(one_minus_p_list))
+ ]
+ # pi_one_minus_p_list = [rr.apply(lambda x: [x[i] * x[1], raw=True)
+ return pi_one_minus_p_list # noqa: RET504
+ # pi_one_minus_p_list.iloc[0] = one_minus_p_list.iloc[0]
+
+ # return (pd.Series(1.00, index=pi_one_minus_p_list.index) - pi_one_minus_p_list, prob_mat)
+ else: # noqa: RET505
ep_mat = np.ndarray(prob_mat.size)
for i in np.arange(prob_mat.size):
- j=0
+ j = 0
pi_one_minus_p = 1
while j <= i:
p = prob_mat[j]
one_minus_p = 1 - p
pi_one_minus_p *= one_minus_p
j += 1
- ep_mat[i] = 1- pi_one_minus_p
+ ep_mat[i] = 1 - pi_one_minus_p
return ep_mat
-def helper_outageMap(pandas_list):
+
+def helper_outageMap(pandas_list): # noqa: N802, D103
false_found_flag = False
b_list = pandas_list.tolist()
i = 0
for b_value in b_list:
- if b_value == False:
- false_found_flag = True
- break
- i += 1
-
- return false_found_flag, i
-
-def hhelper(x):
- if x<0:
+ if b_value == False: # noqa: E712
+ false_found_flag = True
+ break
+ i += 1
+
+ return false_found_flag, i
+
+
+def hhelper(x): # noqa: D103, F811
+ if x < 0:
return 0
- else:
- return x
\ No newline at end of file
+ else: # noqa: RET505
+ return x
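As a small self-contained check of the exceedance-probability recursion used by `EPHelper` (each entry is one minus the running product of the complements), the following toy example uses made-up probabilities:

```python
# EP_i = 1 - prod_{j<=i} (1 - p_j); the probability values are illustrative only.
import operator
from functools import reduce

prob = [0.2, 0.3, 0.5]
one_minus_p = [1 - p for p in prob]
ep = [
    1 - reduce(operator.mul, one_minus_p[: i + 1], 1)
    for i in range(len(one_minus_p))
]
print(ep)  # ~[0.2, 0.44, 0.72]
```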
diff --git a/modules/systemPerformance/REWET/REWET/Output/Map.py b/modules/systemPerformance/REWET/REWET/Output/Map.py
index b17e93bac..56d923a70 100644
--- a/modules/systemPerformance/REWET/REWET/Output/Map.py
+++ b/modules/systemPerformance/REWET/REWET/Output/Map.py
@@ -1,6 +1,4 @@
-# -*- coding: utf-8 -*-
-"""
-Created on Mon Oct 24 09:43:00 2022
+"""Created on Mon Oct 24 09:43:00 2022
This file includes all Map Related Results.
Class:
@@ -15,145 +13,172 @@
}
@author: snaeimi
-"""
+""" # noqa: CPY001, D205, INP001
import warnings
+
import geopandas as gpd
-import pandas as pd
-import numpy as np
+import numpy as np
+import pandas as pd
import shapely
-import Output.Helper as Helper
-import initial
-#import time
+from Output import Helper
+
+# import time
+
-class Map():
+class Map: # noqa: D101
def __init__(self):
pass
- #def loadShapeFile(shapeFileAddr='Northridge\GIS\Demand\demand_polygons.shp'):
- def loadShapeFile(self, shapeFileAddr='Northridge\GIS\Demand\demand_polygons.shp'):
+
+ # def loadShapeFile(shapeFileAddr='Northridge\GIS\Demand\demand_polygons.shp'):
+ def loadShapeFile( # noqa: D102, N802, PLR6301
+ self,
+ shapeFileAddr=r'Northridge\GIS\Demand\demand_polygons.shp', # noqa: N803
+ ):
shape_file = gpd.read_file(shapeFileAddr)
- return shape_file
-
- def joinTwoShapeFiles(self, first, second):
+ return shape_file # noqa: RET504
+ def joinTwoShapeFiles(self, first, second): # noqa: D102, N802, PLR6301
second = second.set_crs(crs=first.crs)
joined_map = gpd.sjoin(first, second)
-
- return joined_map
-
- def createGeopandasPointDataFrameForNodes(self):
+
+ return joined_map # noqa: RET504
+
+ def createGeopandasPointDataFrameForNodes(self): # noqa: N802, D102
s = gpd.GeoDataFrame(index=self.demand_node_name_list)
- point_list = []
+ point_list = []
point_name_list = []
-
+
for name in self.demand_node_name_list:
coord = self.wn.get_node(name).coordinates
- point_list.append(shapely.geometry.Point(coord[0],coord[1]))
+ point_list.append(shapely.geometry.Point(coord[0], coord[1]))
point_name_list.append(name)
s.geometry = point_list
return s
-
- def getDLQNExceedenceProbabilityMap(self, data_frame, ihour , param):
+
+ def getDLQNExceedenceProbabilityMap(self, data_frame, ihour, param): # noqa: N802, D102
data = data_frame.transpose()
- scn_prob_list = self.scenario_prob
- #DLQN_dmg = pd.DataFrame(data=0, index=data.index, columns=data.columns)
-
+ scn_prob_list = self.scenario_prob
+ # DLQN_dmg = pd.DataFrame(data=0, index=data.index, columns=data.columns)
+
scn_prob = [scn_prob_list[scn_name] for scn_name in data.index]
data['prob'] = scn_prob
-
+
res_dict_list = []
tt = 0
if ihour:
for node_name in data_frame.index:
- loop_dmg = data[[node_name,'prob']]
+ loop_dmg = data[[node_name, 'prob']]
loop_dmg = loop_dmg.sort_values(node_name, ascending=False)
- #t1 = time.time()
- #loop_ep = Helper.EPHelper(loop_dmg['prob'].to_numpy())
- loop_ep = Helper.EPHelper(loop_dmg['prob'].to_numpy(), old=False)
- #loop_ep_2 = Helper.EPHelper(loop_dmg['prob'].to_numpy(), old=True)
- #return (loop_ep, loop_ep_2)
- #t2 = time.time()
- #dt = t2-t1
- #tt += dt
+ # t1 = time.time()
+ # loop_ep = Helper.EPHelper(loop_dmg['prob'].to_numpy())
+ loop_ep = Helper.EPHelper(loop_dmg['prob'].to_numpy(), old=False)
+ # loop_ep_2 = Helper.EPHelper(loop_dmg['prob'].to_numpy(), old=True)
+ # return (loop_ep, loop_ep_2)
+ # t2 = time.time()
+ # dt = t2-t1
+ # tt += dt
loop_dmg['ep'] = loop_ep
inter_ind = param
if inter_ind >= loop_dmg['ep'].max():
- max_ind = loop_dmg[loop_dmg['ep'] == loop_dmg['ep'].max()].index[0]
- #max_ind = loop_dmg.idxmax()
+ max_ind = loop_dmg[loop_dmg['ep'] == loop_dmg['ep'].max()].index[
+ 0
+ ]
+ # max_ind = loop_dmg.idxmax()
inter_value = loop_dmg.loc[max_ind, node_name]
elif inter_ind <= loop_dmg['ep'].min():
- min_ind = loop_dmg[loop_dmg['ep'] == loop_dmg['ep'].min()].index[0]
- #min_ind = loop_dmg.idxmin()
+ min_ind = loop_dmg[loop_dmg['ep'] == loop_dmg['ep'].min()].index[
+ 0
+ ]
+ # min_ind = loop_dmg.idxmin()
inter_value = loop_dmg.loc[min_ind, node_name]
else:
-
loop_dmg.loc['inter', 'ep'] = inter_ind
-
+
loop_dmg = loop_dmg.sort_values('ep')
ep_list = loop_dmg['ep'].to_list()
- inter_series = pd.Series(index=ep_list, data=loop_dmg[node_name].to_list())
+ inter_series = pd.Series(
+ index=ep_list, data=loop_dmg[node_name].to_list()
+ )
inter_series = inter_series.interpolate(method='linear')
- inter_value = inter_series.loc[inter_ind]
- if type(inter_value) != np.float64:
+ inter_value = inter_series.loc[inter_ind]
+ if type(inter_value) != np.float64: # noqa: E721
inter_value = inter_value.mean()
- res_dict_list.append({'node_name':node_name, 'res':inter_value})
+ res_dict_list.append({'node_name': node_name, 'res': inter_value})
else:
for node_name in data_frame.index:
- loop_dmg = data[[node_name,'prob']]
-
+ loop_dmg = data[[node_name, 'prob']]
+
loop_dmg = loop_dmg.sort_values(node_name, ascending=False)
- loop_ep = Helper.EPHelper(loop_dmg['prob'].to_numpy())
+ loop_ep = Helper.EPHelper(loop_dmg['prob'].to_numpy())
loop_dmg['ep'] = loop_ep
inter_ind = param
if inter_ind >= loop_dmg[node_name].max():
- max_ind = loop_dmg[loop_dmg[node_name] == loop_dmg[node_name].max()].index[0]
+ max_ind = loop_dmg[
+ loop_dmg[node_name] == loop_dmg[node_name].max()
+ ].index[0]
inter_value = loop_dmg.loc[max_ind, 'ep']
elif inter_ind <= loop_dmg[node_name].min():
- min_ind = loop_dmg[loop_dmg[node_name] == loop_dmg[node_name].min()].index[0]
+ min_ind = loop_dmg[
+ loop_dmg[node_name] == loop_dmg[node_name].min()
+ ].index[0]
inter_value = loop_dmg.loc[min_ind, 'ep']
else:
loop_dmg.loc['inter', node_name] = inter_ind
-
+
loop_dmg = loop_dmg.sort_values(node_name)
hour_list = loop_dmg[node_name].to_list()
-
- inter_series = pd.Series(index=hour_list, data=loop_dmg['ep'].to_list())
+
+ inter_series = pd.Series(
+ index=hour_list, data=loop_dmg['ep'].to_list()
+ )
inter_series = inter_series.interpolate(method='linear')
- inter_value = inter_series.loc[inter_ind]
- if type(inter_value) != np.float64:
+ inter_value = inter_series.loc[inter_ind]
+ if type(inter_value) != np.float64: # noqa: E721
inter_value = inter_value.mean()
-
- res_dict_list.append({'node_name':node_name, 'res':inter_value})
-
+
+ res_dict_list.append({'node_name': node_name, 'res': inter_value})
+
res = pd.DataFrame.from_dict(res_dict_list)
res = res.set_index('node_name')['res']
-
+
s = self.createGeopandasPointDataFrameForNodes()
- s['res']=res
-
- #polygon = gpd.read_file('Northridge\GIS\Demand\demand_polygons.shp')
- #s = s.set_crs(epsg=polygon.crs.to_epsg())
- #joined_map = gpd.sjoin(polygon, s)
- #joined_map.plot(column='res', legend=True, categorical=True, cmap='Accent', ax=ax)
- #ax.get_legend().set_title('Hours without service')
- #ax.get_legend()._loc=3
- #props = dict(boxstyle='round', facecolor='wheat', alpha=0.5)
- print(tt)
+ s['res'] = res
+
+ # polygon = gpd.read_file('Northridge\GIS\Demand\demand_polygons.shp')
+ # s = s.set_crs(epsg=polygon.crs.to_epsg())
+ # joined_map = gpd.sjoin(polygon, s)
+ # joined_map.plot(column='res', legend=True, categorical=True, cmap='Accent', ax=ax)
+ # ax.get_legend().set_title('Hours without service')
+ # ax.get_legend()._loc=3
+ # props = dict(boxstyle='round', facecolor='wheat', alpha=0.5)
+ print(tt) # noqa: T201
return s
-
- def getOutageTimeGeoPandas_4(self, scn_name, LOS='DL' , iConsider_leak=False, leak_ratio=0, consistency_time_window=7200):
- #print(repr(LOS) + " " + repr(iConsider_leak)+" "+ repr(leak_ratio)+" "+repr(consistency_time_window ) )
+
+ def getOutageTimeGeoPandas_4( # noqa: C901, N802, D102
+ self,
+ scn_name,
+ LOS='DL', # noqa: N803
+ iConsider_leak=False, # noqa: FBT002, N803
+ leak_ratio=0,
+ consistency_time_window=7200,
+ ):
+ # print(repr(LOS) + " " + repr(iConsider_leak)+" "+ repr(leak_ratio)+" "+repr(consistency_time_window ) )
self.loadScneariodata(scn_name)
- res = self.data[scn_name]
- map_res = pd.Series(data=0 , index=self.demand_node_name_list, dtype=np.int64)
-
- demands = self.getRequiredDemandForAllNodesandtime(scn_name)
+ res = self.data[scn_name]
+ map_res = pd.Series(data=0, index=self.demand_node_name_list, dtype=np.int64)
+
+ demands = self.getRequiredDemandForAllNodesandtime(scn_name)
refined_res = res.node['demand'][self.demand_node_name_list]
- union_ = set(res.node['leak'].columns).union(set(self.demand_node_name_list) - (set(res.node['leak'].columns) ) - set(self.demand_node_name_list)) - (set(self.demand_node_name_list) - set(res.node['leak'].columns))
- leak_res = res.node['leak'][union_]
-
+ union_ = set(res.node['leak'].columns).union(
+ set(self.demand_node_name_list)
+ - (set(res.node['leak'].columns))
+ - set(self.demand_node_name_list)
+ ) - (set(self.demand_node_name_list) - set(res.node['leak'].columns))
+ leak_res = res.node['leak'][union_]
+
leak_data = []
if iConsider_leak:
for name in leak_res:
@@ -162,304 +187,420 @@ def getOutageTimeGeoPandas_4(self, scn_name, LOS='DL' , iConsider_leak=False, le
time_list = set(current_leak_res.dropna().index)
time_list_drop = set(demands.index) - time_list
demand_name = demand_name.drop(time_list_drop)
- leak_more_than_criteria = current_leak_res >= leak_ratio * demand_name
+ leak_more_than_criteria = (
+ current_leak_res >= leak_ratio * demand_name
+ )
if leak_more_than_criteria.any(0):
leak_data.append(leak_more_than_criteria)
leak_data = pd.DataFrame(leak_data).transpose()
demands = demands[self.demand_node_name_list]
-
- if LOS=="DL":
- DL_res_not_met_bool = refined_res <= demands * 0.01
- elif LOS =="QN":
- DL_res_not_met_bool = refined_res < demands * 0.98
-
+
+ if LOS == 'DL':
+ DL_res_not_met_bool = refined_res <= demands * 0.01 # noqa: N806
+ elif LOS == 'QN':
+ DL_res_not_met_bool = refined_res < demands * 0.98 # noqa: N806
+
time_window = consistency_time_window + 1
time_list = DL_res_not_met_bool.index.to_list()
time_list.reverse()
-
+
for time in time_list:
past_time_beg = time - time_window
window_data = DL_res_not_met_bool.loc[past_time_beg:time]
window_data = window_data.all()
- window_data_false = window_data[window_data == False]
+ window_data_false = window_data[window_data == False] # noqa: E712
DL_res_not_met_bool.loc[time, window_data_false.index] = False
-
+
for name in DL_res_not_met_bool:
if name in leak_data.columns:
leak_data_name = leak_data[name]
for time in leak_data_name.index:
- if leak_data_name.loc[time] == True:
+ if leak_data_name.loc[time] == True: # noqa: E712
DL_res_not_met_bool.loc[time, name] = True
-
+
all_node_name_list = refined_res.columns
- only_not_met_bool = DL_res_not_met_bool.any(0)
- only_not_met_any = all_node_name_list[only_not_met_bool]
- DL_res_not_met = DL_res_not_met_bool.filter(only_not_met_any)
- DL_res_MET = ~DL_res_not_met
+ only_not_met_bool = DL_res_not_met_bool.any(0)
+ only_not_met_any = all_node_name_list[only_not_met_bool]
+ DL_res_not_met = DL_res_not_met_bool.filter(only_not_met_any) # noqa: N806
+ DL_res_MET = ~DL_res_not_met # noqa: N806
time_window = 2
for name in only_not_met_any:
- rolled_DL_res_MET = DL_res_MET[name].rolling(time_window, center=True).sum()
- rolled_DL_res_MET = rolled_DL_res_MET.sort_index(ascending=False)
- rolled_DL_res_MET.dropna(inplace=True)
-
- false_found, found_index = Helper.helper_outageMap(rolled_DL_res_MET.ge(time_window-1))
- #if name == "SM323":
- #return DL_res_MET[name], rolled_DL_res_MET, false_found, rolled_DL_res_MET.index[found_index], rolled_DL_res_MET.ge(time_window-1), found_index
- if false_found == False:
+ rolled_DL_res_MET = ( # noqa: N806
+ DL_res_MET[name].rolling(time_window, center=True).sum()
+ )
+ rolled_DL_res_MET = rolled_DL_res_MET.sort_index(ascending=False) # noqa: N806
+ rolled_DL_res_MET.dropna(inplace=True) # noqa: PD002
+
+ false_found, found_index = Helper.helper_outageMap(
+ rolled_DL_res_MET.ge(time_window - 1)
+ )
+ # if name == "SM323":
+ # return DL_res_MET[name], rolled_DL_res_MET, false_found, rolled_DL_res_MET.index[found_index], rolled_DL_res_MET.ge(time_window-1), found_index
+ if false_found == False: # noqa: E712
latest_time = 0
else:
- if DL_res_MET[name].iloc[-1] == False:
+ if DL_res_MET[name].iloc[-1] == False: # noqa: E712
latest_time = DL_res_MET.index[-1]
else:
latest_time = rolled_DL_res_MET.index[found_index]
latest_time = rolled_DL_res_MET.index[found_index]
-
+
map_res.loc[name] = latest_time
- #map_res = map_res/(3600*24)
+ # map_res = map_res/(3600*24)
geopandas_df = self.createGeopandasPointDataFrameForNodes()
- geopandas_df.loc[map_res.index.to_list(), 'restoration_time'] = map_res.to_list()
-
+ geopandas_df.loc[map_res.index.to_list(), 'restoration_time'] = (
+ map_res.to_list()
+ )
+
return geopandas_df
-
- def getOutageTimeGeoPandas_5(self, scn_name, bsc='DL' , iConsider_leak=False, leak_ratio=0, consistency_time_window=7200, sum_time=False):
+
+ def getOutageTimeGeoPandas_5( # noqa: C901, N802, D102
+ self,
+ scn_name,
+ bsc='DL',
+ iConsider_leak=False, # noqa: FBT002, N803
+ leak_ratio=0,
+ consistency_time_window=7200,
+ sum_time=False, # noqa: FBT002
+ ):
self.loadScneariodata(scn_name)
- res = self.data[scn_name]
- map_res = pd.Series(data=0 , index=self.demand_node_name_list, dtype=np.int64)
-
- required_demand = self.getRequiredDemandForAllNodesandtime(scn_name)
- delivered_demand = res.node['demand'][self.demand_node_name_list]
- common_nodes_leak = list (set( res.node['leak'].columns ).intersection( set( self.demand_node_name_list ) ))
- leak_res = res.node['leak'][common_nodes_leak]
-
- common_nodes_demand = list( set(delivered_demand.columns).intersection(set(self.demand_node_name_list) ) )
- delivered_demand = delivered_demand[common_nodes_demand]
- required_demand = required_demand[common_nodes_demand]
-
- required_demand.sort_index(inplace=True)
- delivered_demand.sort_index(inplace=True)
- leak_res.sort_index(inplace=True)
-
- #return delivered_demand, required_demand, leak_res
-
- if bsc=="DL":
- bsc_res_not_met_bool = (delivered_demand.fillna(0) <= required_demand * 0.0001)
- elif bsc =="QN":
- bsc_res_not_met_bool = (delivered_demand.fillna(0) < required_demand * 0.9999)
+ res = self.data[scn_name]
+ map_res = pd.Series(data=0, index=self.demand_node_name_list, dtype=np.int64)
+
+ required_demand = self.getRequiredDemandForAllNodesandtime(scn_name)
+ delivered_demand = res.node['demand'][self.demand_node_name_list]
+ common_nodes_leak = list(
+ set(res.node['leak'].columns).intersection(
+ set(self.demand_node_name_list)
+ )
+ )
+ leak_res = res.node['leak'][common_nodes_leak]
+
+ common_nodes_demand = list(
+ set(delivered_demand.columns).intersection(
+ set(self.demand_node_name_list)
+ )
+ )
+ delivered_demand = delivered_demand[common_nodes_demand]
+ required_demand = required_demand[common_nodes_demand]
+
+ required_demand.sort_index(inplace=True) # noqa: PD002
+ delivered_demand.sort_index(inplace=True) # noqa: PD002
+ leak_res.sort_index(inplace=True) # noqa: PD002
+
+ # return delivered_demand, required_demand, leak_res
+
+ if bsc == 'DL':
+ bsc_res_not_met_bool = (
+ delivered_demand.fillna(0) <= required_demand * 0.0001
+ )
+ elif bsc == 'QN':
+ bsc_res_not_met_bool = (
+ delivered_demand.fillna(0) < required_demand * 0.9999
+ )
else:
- raise ValueError("Unknown BSC= "+str(bsc))
-
- if iConsider_leak :
- #return leak_res, required_demand
- leak_res_non_available_time_list = set(required_demand[leak_res.columns].index) - set(leak_res.index)
+ raise ValueError('Unknown BSC= ' + str(bsc))
+
+ if iConsider_leak:
+ # return leak_res, required_demand
+ leak_res_non_available_time_list = set(
+ required_demand[leak_res.columns].index
+ ) - set(leak_res.index)
if len(leak_res_non_available_time_list) > 0:
- leak_res_non_available_time_list = list(leak_res_non_available_time_list)
- temp_data = pd.DataFrame([[0 for i in leak_res.columns] for j in range( len(leak_res_non_available_time_list) ) ], index= leak_res_non_available_time_list, columns=leak_res.columns)
- #leak_res.loc[leak_res_non_available_time_list, : ] = temp_data
+ leak_res_non_available_time_list = list(
+ leak_res_non_available_time_list
+ )
+ temp_data = pd.DataFrame(
+ [
+ [0 for i in leak_res.columns]
+ for j in range(len(leak_res_non_available_time_list))
+ ],
+ index=leak_res_non_available_time_list,
+ columns=leak_res.columns,
+ )
+ # leak_res.loc[leak_res_non_available_time_list, : ] = temp_data
leak_res = leak_res.append(temp_data)
- leak_res.sort_index(inplace=True)
- leak_criteria_exceeded = leak_res.fillna(0) >= leak_ratio * required_demand[leak_res.columns]
- combined_negative_result = (bsc_res_not_met_bool | leak_criteria_exceeded).dropna(axis=1)
- #return combined_negative_result
- bsc_res_not_met_bool.loc[:, combined_negative_result.columns] = combined_negative_result
-
- #end_time = delivered_demand.min()
+ leak_res.sort_index(inplace=True) # noqa: PD002
+ leak_criteria_exceeded = (
+ leak_res.fillna(0) >= leak_ratio * required_demand[leak_res.columns]
+ )
+ combined_negative_result = (
+ bsc_res_not_met_bool | leak_criteria_exceeded
+ ).dropna(axis=1)
+ # return combined_negative_result
+ bsc_res_not_met_bool.loc[:, combined_negative_result.columns] = (
+ combined_negative_result
+ )
+
+ # end_time = delivered_demand.min()
end_time = delivered_demand.index.max()
if consistency_time_window > 1:
time_beg_step_list = np.arange(0, end_time, consistency_time_window)
-
- #time_beg_step_list = np.append(time_beg_step_list, [end_time])
- time_end_step_list = time_beg_step_list # + consistency_time_window
- window_bsc_not_met = pd.DataFrame(index=time_end_step_list, columns= bsc_res_not_met_bool.columns, dtype=bool)
- #return bsc_res_not_met_bool#, delivered_demand, required_demand
+
+ # time_beg_step_list = np.append(time_beg_step_list, [end_time])
+ time_end_step_list = time_beg_step_list # + consistency_time_window
+ window_bsc_not_met = pd.DataFrame(
+ index=time_end_step_list,
+ columns=bsc_res_not_met_bool.columns,
+ dtype=bool,
+ )
+ # return bsc_res_not_met_bool#, delivered_demand, required_demand
for step_time_beg in time_beg_step_list:
step_time_end = step_time_beg + consistency_time_window
- window_data = bsc_res_not_met_bool.loc[step_time_beg:step_time_end]
+ window_data = bsc_res_not_met_bool.loc[step_time_beg:step_time_end]
if len(window_data) > 0:
- window_data = window_data.all()
- window_bsc_not_met.loc[step_time_beg, window_data.index] = window_data
+ window_data = window_data.all()
+ window_bsc_not_met.loc[step_time_beg, window_data.index] = (
+ window_data
+ )
else:
- # print(step_time_beg)
- window_bsc_not_met.drop(step_time_beg, inplace=True)
+ # print(step_time_beg)
+ window_bsc_not_met.drop(step_time_beg, inplace=True) # noqa: PD002
else:
window_bsc_not_met = bsc_res_not_met_bool
-
- pre_incident = (window_bsc_not_met.loc[:3600*3]).any()
- non_incident = pre_incident[pre_incident==False].index
-
+
+ pre_incident = (window_bsc_not_met.loc[: 3600 * 3]).any()
+ non_incident = pre_incident[pre_incident == False].index # noqa: E712
+
not_met_node_name_list = window_bsc_not_met.any()
-
- #("****************")
- #print(not_met_node_name_list[not_met_node_name_list==True])
-
- not_met_node_name_list = not_met_node_name_list[not_met_node_name_list==True]
+
+ # ("****************")
+ # print(not_met_node_name_list[not_met_node_name_list==True])
+
+ not_met_node_name_list = not_met_node_name_list[
+ not_met_node_name_list == True # noqa: E712
+ ]
not_met_node_name_list = not_met_node_name_list.index
-
-
+
if sum_time:
- time_difference = window_bsc_not_met.index[1:] - window_bsc_not_met.index[:-1]
- timed_diference_window_bsc_not_met = \
- (time_difference * window_bsc_not_met.iloc[1:].transpose()).transpose()
+ time_difference = (
+ window_bsc_not_met.index[1:] - window_bsc_not_met.index[:-1]
+ )
+ timed_diference_window_bsc_not_met = (
+ time_difference * window_bsc_not_met.iloc[1:].transpose()
+ ).transpose()
timed_diference_window_bsc_not_met.iloc[0] = 0
sum_window_bsc_not_met = timed_diference_window_bsc_not_met.sum()
- return sum_window_bsc_not_met
-
- window_bsc_not_met = window_bsc_not_met[not_met_node_name_list]
+ return sum_window_bsc_not_met # noqa: RET504
+
+ window_bsc_not_met = window_bsc_not_met[not_met_node_name_list]
cut_time = window_bsc_not_met.index.max()
- non_incident = list( set(non_incident).intersection(set(not_met_node_name_list) ) )
+ non_incident = list(
+ set(non_incident).intersection(set(not_met_node_name_list))
+ )
for step_time, row in window_bsc_not_met[non_incident].iterrows():
- if step_time <= 14400:
+ if step_time <= 14400: # noqa: PLR2004
continue
-
- if row.any() == False:
- print(step_time)
+
+ if row.any() == False: # noqa: E712
+ print(step_time) # noqa: T201
cut_time = step_time
break
-
+
window_bsc_not_met = window_bsc_not_met.loc[:cut_time]
window_bsc_not_met = window_bsc_not_met.loc[:cut_time]
-
- #return window_bsc_not_met
- #print(not_met_node_name_list)
- time_bsc_not_met_time = window_bsc_not_met.sort_index(ascending=False).idxmax()
+
+ # return window_bsc_not_met
+ # print(not_met_node_name_list)
+ time_bsc_not_met_time = window_bsc_not_met.sort_index(
+ ascending=False
+ ).idxmax()
map_res.loc[time_bsc_not_met_time.index] = time_bsc_not_met_time
-
-
- never_reported_nodes = set(self.demand_node_name_list) - set(common_nodes_demand)
+
+ never_reported_nodes = set(self.demand_node_name_list) - set(
+ common_nodes_demand
+ )
number_of_unreported_demand_nodes = len(never_reported_nodes)
if number_of_unreported_demand_nodes > 0:
- warnings.warn("REWET WARNING: there are " + str(number_of_unreported_demand_nodes ) + "unreported nodes")
- map_res.loc[never_reported_nodes ] = end_time
-
- map_res = map_res/(3600*24)
- return map_res
-
+ warnings.warn( # noqa: B028
+ 'REWET WARNING: there are '
+ + str(number_of_unreported_demand_nodes)
+                + ' unreported nodes'
+ )
+ map_res.loc[never_reported_nodes] = end_time
+
+ map_res = map_res / (3600 * 24) # noqa: PLR6104
+ return map_res # noqa: RET504
+
s = gpd.GeoDataFrame(index=self.demand_node_name_list)
- point_list = []
+ point_list = []
point_name_list = []
-
+
for name in self.demand_node_name_list:
coord = self.wn.get_node(name).coordinates
- point_list.append(shapely.geometry.Point(coord[0],coord[1]))
+ point_list.append(shapely.geometry.Point(coord[0], coord[1]))
point_name_list.append(name)
-
+
s.geometry = point_list
-
+
s.loc[map_res.index.to_list(), 'restoration_time'] = map_res.to_list()
- polygon = gpd.read_file('Northridge\GIS\Demand\demand_polygons.shp')
+ polygon = gpd.read_file(r'Northridge\GIS\Demand\demand_polygons.shp')
s = s.set_crs(crs=polygon.crs)
joined_map = gpd.sjoin(polygon, s)
- #return joined_map
- #joined_map.loc[map_res.index.to_list(), 'restoration_time'] = (map_res/3600/24).to_list()
-
- return joined_map
-
- def percentOfEffectNodes(self, scn_name, bsc='QN' , iConsider_leak=True, leak_ratio=0.75, consistency_time_window=7200):
+ # return joined_map
+ # joined_map.loc[map_res.index.to_list(), 'restoration_time'] = (map_res/3600/24).to_list()
+
+ return joined_map # noqa: RET504
+
+ def percentOfEffectNodes( # noqa: C901, N802, D102
+ self,
+ scn_name,
+ bsc='QN',
+ iConsider_leak=True, # noqa: FBT002, N803
+ leak_ratio=0.75,
+ consistency_time_window=7200,
+ ):
self.loadScneariodata(scn_name)
- res = self.data[scn_name]
- map_res = pd.Series(data=0 , index=self.demand_node_name_list, dtype=np.int64)
-
- required_demand = self.getRequiredDemandForAllNodesandtime(scn_name)
- delivered_demand = res.node['demand'][self.demand_node_name_list]
- common_nodes_leak = set(res.node['leak'].columns).intersection(set(self.demand_node_name_list))
- leak_res = res.node['leak'][common_nodes_leak]
-
- common_nodes_demand = list( set(delivered_demand.columns).intersection(set(self.demand_node_name_list) ) )
- delivered_demand = delivered_demand[common_nodes_demand]
- required_demand = required_demand[common_nodes_demand]
-
- required_demand.sort_index(inplace=True)
- delivered_demand.sort_index(inplace=True)
- leak_res.sort_index(inplace=True)
-
- #return delivered_demand, required_demand, leak_res
-
- if bsc=="DL":
- bsc_res_not_met_bool = (delivered_demand.fillna(0) <= required_demand * 0.1)
- elif bsc =="QN":
- bsc_res_not_met_bool = (delivered_demand.fillna(0) < required_demand * 0.98)
+ res = self.data[scn_name]
+ map_res = pd.Series(data=0, index=self.demand_node_name_list, dtype=np.int64)
+
+ required_demand = self.getRequiredDemandForAllNodesandtime(scn_name)
+ delivered_demand = res.node['demand'][self.demand_node_name_list]
+ common_nodes_leak = set(res.node['leak'].columns).intersection(
+ set(self.demand_node_name_list)
+ )
+ leak_res = res.node['leak'][common_nodes_leak]
+
+ common_nodes_demand = list(
+ set(delivered_demand.columns).intersection(
+ set(self.demand_node_name_list)
+ )
+ )
+ delivered_demand = delivered_demand[common_nodes_demand]
+ required_demand = required_demand[common_nodes_demand]
+
+ required_demand.sort_index(inplace=True) # noqa: PD002
+ delivered_demand.sort_index(inplace=True) # noqa: PD002
+ leak_res.sort_index(inplace=True) # noqa: PD002
+
+ # return delivered_demand, required_demand, leak_res
+
+ if bsc == 'DL':
+ bsc_res_not_met_bool = (
+ delivered_demand.fillna(0) <= required_demand * 0.1
+ )
+ elif bsc == 'QN':
+ bsc_res_not_met_bool = (
+ delivered_demand.fillna(0) < required_demand * 0.98
+ )
else:
- raise ValueError("Unknown BSC= "+str(bsc))
-
- if iConsider_leak :
- #return leak_res, required_demand
- leak_res_non_available_time_list = set(required_demand[leak_res.columns].index) - set(leak_res.index)
+ raise ValueError('Unknown BSC= ' + str(bsc))
+
+ if iConsider_leak:
+ # return leak_res, required_demand
+ leak_res_non_available_time_list = set(
+ required_demand[leak_res.columns].index
+ ) - set(leak_res.index)
if len(leak_res_non_available_time_list) > 0:
- leak_res_non_available_time_list = list(leak_res_non_available_time_list)
- temp_data = pd.DataFrame([[0 for i in leak_res.columns] for j in range( len(leak_res_non_available_time_list) ) ], index= leak_res_non_available_time_list, columns=leak_res.columns)
- #leak_res.loc[leak_res_non_available_time_list, : ] = temp_data
+ leak_res_non_available_time_list = list(
+ leak_res_non_available_time_list
+ )
+ temp_data = pd.DataFrame(
+ [
+ [0 for i in leak_res.columns]
+ for j in range(len(leak_res_non_available_time_list))
+ ],
+ index=leak_res_non_available_time_list,
+ columns=leak_res.columns,
+ )
+ # leak_res.loc[leak_res_non_available_time_list, : ] = temp_data
leak_res = leak_res.append(temp_data)
- leak_res.sort_index(inplace=True)
- leak_criteria_exceeded = leak_res.fillna(0) >= leak_ratio * required_demand[leak_res.columns]
- combined_negative_result = (bsc_res_not_met_bool | leak_criteria_exceeded).dropna(axis=1)
- #return combined_negative_result
- bsc_res_not_met_bool.loc[:, combined_negative_result.columns] = combined_negative_result
-
- #end_time = delivered_demand.min()
+ leak_res.sort_index(inplace=True) # noqa: PD002
+ leak_criteria_exceeded = (
+ leak_res.fillna(0) >= leak_ratio * required_demand[leak_res.columns]
+ )
+ combined_negative_result = (
+ bsc_res_not_met_bool | leak_criteria_exceeded
+ ).dropna(axis=1)
+ # return combined_negative_result
+ bsc_res_not_met_bool.loc[:, combined_negative_result.columns] = (
+ combined_negative_result
+ )
+
+ # end_time = delivered_demand.min()
end_time = delivered_demand.index.max()
time_beg_step_list = np.arange(0, end_time, consistency_time_window)
-
- #time_beg_step_list = np.append(time_beg_step_list, [end_time])
- time_end_step_list = time_beg_step_list # + consistency_time_window
- window_bsc_not_met = pd.DataFrame(index=time_end_step_list, columns= bsc_res_not_met_bool.columns, dtype=bool)
- #return bsc_res_not_met_bool#, delivered_demand, required_demand
+
+ # time_beg_step_list = np.append(time_beg_step_list, [end_time])
+ time_end_step_list = time_beg_step_list # + consistency_time_window
+ window_bsc_not_met = pd.DataFrame(
+ index=time_end_step_list,
+ columns=bsc_res_not_met_bool.columns,
+ dtype=bool,
+ )
+ # return bsc_res_not_met_bool#, delivered_demand, required_demand
for step_time_beg in time_beg_step_list:
step_time_end = step_time_beg + consistency_time_window
- window_data = bsc_res_not_met_bool.loc[step_time_beg:step_time_end]
+ window_data = bsc_res_not_met_bool.loc[step_time_beg:step_time_end]
if len(window_data) > 0:
- window_data = window_data.all()
- window_bsc_not_met.loc[step_time_beg, window_data.index] = window_data
+ window_data = window_data.all()
+ window_bsc_not_met.loc[step_time_beg, window_data.index] = (
+ window_data
+ )
else:
- # print(step_time_beg)
- window_bsc_not_met.drop(step_time_beg, inplace=True)
- #return window_bsc_not_met
- pre_incident = (window_bsc_not_met.loc[:3600*3]).any()
- non_incident = pre_incident[pre_incident==False].index
-
+ # print(step_time_beg)
+ window_bsc_not_met.drop(step_time_beg, inplace=True) # noqa: PD002
+ # return window_bsc_not_met
+ pre_incident = (window_bsc_not_met.loc[: 3600 * 3]).any()
+ non_incident = pre_incident[pre_incident == False].index # noqa: E712
+
number_of_good_nodes = len(non_incident)
-
+
not_met_node_name_list = window_bsc_not_met.any()
-
- #("****************")
- #print(not_met_node_name_list[not_met_node_name_list==True])
-
- not_met_node_name_list = not_met_node_name_list[not_met_node_name_list==True]
+
+ # ("****************")
+ # print(not_met_node_name_list[not_met_node_name_list==True])
+
+ not_met_node_name_list = not_met_node_name_list[
+ not_met_node_name_list == True # noqa: E712
+ ]
not_met_node_name_list = not_met_node_name_list.index
- window_bsc_not_met = window_bsc_not_met[not_met_node_name_list]
-
+ window_bsc_not_met = window_bsc_not_met[not_met_node_name_list]
+
cut_time = window_bsc_not_met.index.max()
- non_incident = list( set(non_incident).intersection(set(not_met_node_name_list) ) )
+ non_incident = list(
+ set(non_incident).intersection(set(not_met_node_name_list))
+ )
for step_time, row in window_bsc_not_met[non_incident].iterrows():
- if step_time <= 14400:
+ if step_time <= 14400: # noqa: PLR2004
continue
-
- if row.any() == False:
- print(step_time)
+
+ if row.any() == False: # noqa: E712
+ print(step_time) # noqa: T201
cut_time = step_time
break
-
+
cut_time = 24 * 3600
window_bsc_not_met = window_bsc_not_met.loc[:cut_time]
window_bsc_not_met = window_bsc_not_met.loc[:cut_time]
-
-
- number_of_bad_node_at_damage = window_bsc_not_met[non_incident].loc[14400].sum()
+
+ number_of_bad_node_at_damage = (
+ window_bsc_not_met[non_incident].loc[14400].sum()
+ )
percent_init = number_of_bad_node_at_damage / number_of_good_nodes * 100
- #return window_bsc_not_met
- #print(not_met_node_name_list)
- time_bsc_not_met_time = window_bsc_not_met.sort_index(ascending=False).idxmax()
+ # return window_bsc_not_met
+ # print(not_met_node_name_list)
+ time_bsc_not_met_time = window_bsc_not_met.sort_index(
+ ascending=False
+ ).idxmax()
map_res.loc[time_bsc_not_met_time.index] = time_bsc_not_met_time
-
-
- never_reported_nodes = set(self.demand_node_name_list) - set(common_nodes_demand)
+
+ never_reported_nodes = set(self.demand_node_name_list) - set(
+ common_nodes_demand
+ )
number_of_unreported_demand_nodes = len(never_reported_nodes)
if number_of_unreported_demand_nodes > 0:
- warnings.warn("REWET WARNING: there are " + str(number_of_unreported_demand_nodes ) + "unreported nodes")
- map_res.loc[never_reported_nodes ] = end_time
-
- map_res = map_res/(3600*24)
+ warnings.warn( # noqa: B028
+ 'REWET WARNING: there are '
+ + str(number_of_unreported_demand_nodes)
+                + ' unreported nodes'
+ )
+ map_res.loc[never_reported_nodes] = end_time
+
+ map_res = map_res / (3600 * 24) # noqa: PLR6104
percent = (map_res.loc[non_incident] > 0).sum() / number_of_good_nodes * 100
- return np.round(percent_init, 2) , np.round(percent, 2)
\ No newline at end of file
+ return np.round(percent_init, 2), np.round(percent, 2)
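A hedged usage sketch for the outage-time helper; `pr` is assumed to be a `Project_Result` instance and `'SCN_1'` a scenario present in its scenario list:

```python
# Sketch only: scenario name and thresholds are placeholders.
outage_days = pr.getOutageTimeGeoPandas_5(
    'SCN_1',
    bsc='QN',
    iConsider_leak=True,
    leak_ratio=0.75,
    consistency_time_window=7200,
)
print(outage_days.describe())  # per-demand-node restoration time, in days
```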
diff --git a/modules/systemPerformance/REWET/REWET/Output/Raw_Data.py b/modules/systemPerformance/REWET/REWET/Output/Raw_Data.py
index 5c364c60e..54cf489ce 100644
--- a/modules/systemPerformance/REWET/REWET/Output/Raw_Data.py
+++ b/modules/systemPerformance/REWET/REWET/Output/Raw_Data.py
@@ -1,78 +1,138 @@
-# -*- coding: utf-8 -*-
-"""
-Created on Mon Oct 24 18:27:03 2022
+"""Created on Mon Oct 24 18:27:03 2022
@author: snaeimi
-"""
+""" # noqa: CPY001, D400, INP001
-class Raw_Data():
+
+class Raw_Data: # noqa: D101
def __init__():
pass
-
- def saveDetailedDemandNodeData(self, scn_name, data_type, file_address, file_type):
- if data_type not in ['pressure','head','demand','quality']:
- raise ValueError("data type is not recognized for demand nodes: "+repr(data_type))
+
+ def saveDetailedDemandNodeData( # noqa: N802, D102
+ self,
+ scn_name,
+ data_type,
+ file_address,
+ file_type,
+ ):
+ if data_type not in ['pressure', 'head', 'demand', 'quality']: # noqa: PLR6201
+ raise ValueError(
+ 'data type is not recognized for demand nodes: ' + repr(data_type)
+ )
data = self.getDetailedData(scn_name, data_type)
data = data[self.demand_node_name_list]
self.saveDataFrame(data, file_address, file_type=file_type)
-
- def saveDetailedJunctionData(self, scn_name, data_type, file_address, file_type):
- if data_type not in ['pressure','head','demand','quality']:
- raise ValueError("data type is not recognized for junctiosn: "+repr(data_type))
+
+ def saveDetailedJunctionData(self, scn_name, data_type, file_address, file_type): # noqa: N802, D102
+ if data_type not in ['pressure', 'head', 'demand', 'quality']: # noqa: PLR6201
+ raise ValueError(
+                'data type is not recognized for junctions: ' + repr(data_type)
+ )
data = self.getDetailedData(scn_name, data_type)
data = data[self.wn.junction_name_list]
self.saveDataFrame(data, file_address, file_type=file_type)
-
- def saveDetailedTankData(self, scn_name, data_type, file_address, file_type):
- if data_type not in ['pressure','head','demand','quality']:
- raise ValueError("data type is not recognized for tanks: "+repr(data_type))
+
+ def saveDetailedTankData(self, scn_name, data_type, file_address, file_type): # noqa: N802, D102
+ if data_type not in ['pressure', 'head', 'demand', 'quality']: # noqa: PLR6201
+ raise ValueError(
+ 'data type is not recognized for tanks: ' + repr(data_type)
+ )
data = self.getDetailedData(scn_name, data_type)
data = data[self.wn.tank_name_list]
self.saveDataFrame(data, file_address, file_type=file_type)
-
- def saveDetailedReservoirData(self, scn_name, data_type, file_address, file_type):
- if data_type not in ['pressure','head','demand','quality']:
- raise ValueError("data type is not recognized for demand nodes: "+repr(data_type))
+
+ def saveDetailedReservoirData( # noqa: N802, D102
+ self,
+ scn_name,
+ data_type,
+ file_address,
+ file_type,
+ ):
+ if data_type not in ['pressure', 'head', 'demand', 'quality']: # noqa: PLR6201
+ raise ValueError(
+ 'data type is not recognized for demand nodes: ' + repr(data_type)
+ )
data = self.getDetailedData(scn_name, data_type)
data = data[self.wn.tank_name_list]
self.saveDataFrame(data, file_address, file_type=file_type)
-
- def saveDetailedPipeData(self, scn_name, data_type, file_address, file_type):
- if data_type not in ['linkquality', 'flowrate', 'headloss', 'velocity', 'status', 'setting', 'frictionfact', 'rxnrate']:
- raise ValueError("data type is not recognized for pipes: "+repr(data_type))
+
+ def saveDetailedPipeData(self, scn_name, data_type, file_address, file_type): # noqa: N802, D102
+ if data_type not in [ # noqa: PLR6201
+ 'linkquality',
+ 'flowrate',
+ 'headloss',
+ 'velocity',
+ 'status',
+ 'setting',
+ 'frictionfact',
+ 'rxnrate',
+ ]:
+ raise ValueError(
+ 'data type is not recognized for pipes: ' + repr(data_type)
+ )
data = self.getDetailedData(scn_name, data_type)
data = data[self.wn.pipe_name_list]
self.saveDataFrame(data, file_address, file_type=file_type)
-
- def saveDetailedPumpData(self, scn_name, data_type, file_address, file_type):
- if data_type not in ['linkquality', 'flowrate', 'headloss', 'velocity', 'status', 'setting', 'frictionfact', 'rxnrate']:
- raise ValueError("data type is not recognized for pumps: "+repr(data_type))
+
+ def saveDetailedPumpData(self, scn_name, data_type, file_address, file_type): # noqa: N802, D102
+ if data_type not in [ # noqa: PLR6201
+ 'linkquality',
+ 'flowrate',
+ 'headloss',
+ 'velocity',
+ 'status',
+ 'setting',
+ 'frictionfact',
+ 'rxnrate',
+ ]:
+ raise ValueError(
+ 'data type is not recognized for pumps: ' + repr(data_type)
+ )
data = self.getDetailedData(scn_name, data_type)
data = data[self.wn.pump_name_list]
self.saveDataFrame(data, file_address, file_type=file_type)
-
- def saveDetailedValveData(self, scn_name, data_type, file_address, file_type):
- if data_type not in ['linkquality', 'flowrate', 'headloss', 'velocity', 'status', 'setting', 'frictionfact', 'rxnrate']:
- raise ValueError("data type is not recognized for valves: "+repr(data_type))
+
+ def saveDetailedValveData(self, scn_name, data_type, file_address, file_type): # noqa: N802, D102
+ if data_type not in [ # noqa: PLR6201
+ 'linkquality',
+ 'flowrate',
+ 'headloss',
+ 'velocity',
+ 'status',
+ 'setting',
+ 'frictionfact',
+ 'rxnrate',
+ ]:
+ raise ValueError(
+ 'data type is not recognized for valves: ' + repr(data_type)
+ )
data = self.getDetailedData(scn_name, data_type)
data = data[self.wn.valve_name_list]
self.saveDataFrame(data, file_address, file_type=file_type)
-
- def getDetailedData(self, scn_name, data_type):
+
+ def getDetailedData(self, scn_name, data_type): # noqa: N802, D102
cur_scn_data = None
- if data_type in ['linkquality', 'flowrate', 'headloss', 'velocity', 'status', 'setting', 'frictionfact', 'rxnrate']:
+ if data_type in [ # noqa: PLR6201
+ 'linkquality',
+ 'flowrate',
+ 'headloss',
+ 'velocity',
+ 'status',
+ 'setting',
+ 'frictionfact',
+ 'rxnrate',
+ ]:
cur_scn_data = self.data[scn_name].link[data_type]
- elif data_type in ['pressure','head','demand','quality']:
- cur_scn_data = self.data[scn_name].node[data_type]
+ elif data_type in ['pressure', 'head', 'demand', 'quality']: # noqa: PLR6201
+ cur_scn_data = self.data[scn_name].node[data_type]
else:
- raise ValueError("Unknown Data Type For output")
+ raise ValueError('Unknown Data Type For output') # noqa: EM101, TRY003
return cur_scn_data
- def saveDataFrame(dataframe , file_address, file_type='xlsx'):
+ def saveDataFrame(dataframe, file_address, file_type='xlsx'): # noqa: N802, N805, D102
if file_type == 'xlsx':
dataframe.to_excel(file_address)
elif file_type == 'csv':
dataframe.to_csv(file_address)
else:
- raise ValueError("Unknown file type: "+repr(file_type))
-
\ No newline at end of file
+ raise ValueError('Unknown file type: ' + repr(file_type))
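A brief hedged sketch of exporting detailed results with these helpers; `pr` again stands for a `Project_Result`, and the scenario name and output paths are placeholders:

```python
# Sketch only: 'SCN_1' and the file names are made up for illustration.
pr.saveDetailedDemandNodeData('SCN_1', 'demand', 'scn1_demand.csv', file_type='csv')
pr.saveDetailedPipeData('SCN_1', 'flowrate', 'scn1_flowrate.xlsx', file_type='xlsx')
```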
diff --git a/modules/systemPerformance/REWET/REWET/Output/Result_Time.py b/modules/systemPerformance/REWET/REWET/Output/Result_Time.py
index fc163b85a..6f853ef9d 100644
--- a/modules/systemPerformance/REWET/REWET/Output/Result_Time.py
+++ b/modules/systemPerformance/REWET/REWET/Output/Result_Time.py
@@ -1,30 +1,32 @@
-# -*- coding: utf-8 -*-
-"""
-Created on Thu Nov 10 18:00:55 2022
+"""Created on Thu Nov 10 18:00:55 2022
@author: snaeimi
-"""
+""" # noqa: CPY001, D400, INP001
+import numpy as np
import pandas as pd
-import numpy as np
-class Result_Time():
+
+class Result_Time: # noqa: D101
def __init__():
pass
-
- def convertTimeSecondToDay(self, data, column, time_shift=0):
- data.loc[:, column] = data.loc[:, column] - time_shift
+
+ def convertTimeSecondToDay(self, data, column, time_shift=0): # noqa: D102, N802, PLR6301
+ data.loc[:, column] = data.loc[:, column] - time_shift # noqa: PLR6104
data.loc[:, column] = data.loc[:, column] / 24 / 3600
-
- def convertTimeSecondToHour(self, data, column, time_shift=0):
- data.loc[:, column] = data.loc[:, column] - time_shift
- data.loc[:, column] = data.loc[:, column] / 3600
-
- def averageOverDaysCrewTotalReport(self, crew_report):
- time_max_seconds = crew_report.index.max()
- time_max_days = int(np.ceil(time_max_seconds/24/3600 ) )
- daily_crew_report = pd.DataFrame(index=[i+1 for i in range(0,time_max_days)], columns=crew_report.columns)
- for day in range(0, time_max_days):
- daily_crew = crew_report.loc[day*24*3600: (day+1)*24*3600]
- daily_crew_report.loc[day+1] = daily_crew.mean()
- return daily_crew_report
\ No newline at end of file
+
+ def convertTimeSecondToHour(self, data, column, time_shift=0): # noqa: D102, N802, PLR6301
+ data.loc[:, column] = data.loc[:, column] - time_shift # noqa: PLR6104
+ data.loc[:, column] = data.loc[:, column] / 3600 # noqa: PLR6104
+
+ def averageOverDaysCrewTotalReport(self, crew_report): # noqa: D102, N802, PLR6301
+ time_max_seconds = crew_report.index.max()
+ time_max_days = int(np.ceil(time_max_seconds / 24 / 3600))
+ daily_crew_report = pd.DataFrame(
+ index=[i + 1 for i in range(time_max_days)],
+ columns=crew_report.columns,
+ )
+ for day in range(time_max_days):
+ daily_crew = crew_report.loc[day * 24 * 3600 : (day + 1) * 24 * 3600]
+ daily_crew_report.loc[day + 1] = daily_crew.mean()
+ return daily_crew_report
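A small sketch of the daily-averaging helper applied to a toy crew report indexed in seconds; the column name is made up, and `pr` is assumed to be a `Project_Result`, which mixes in `Result_Time`:

```python
import numpy as np
import pandas as pd

# Toy crew report: hourly samples over two days; 'busy_crews' is a made-up column.
idx = np.arange(0, 2 * 24 * 3600, 3600)
crew_report = pd.DataFrame({'busy_crews': np.arange(len(idx))}, index=idx)

daily = pr.averageOverDaysCrewTotalReport(crew_report)  # mean value per day
print(daily)
```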
diff --git a/modules/systemPerformance/REWET/REWET/Project.py b/modules/systemPerformance/REWET/REWET/Project.py
index f1c4b8efe..679b6d692 100644
--- a/modules/systemPerformance/REWET/REWET/Project.py
+++ b/modules/systemPerformance/REWET/REWET/Project.py
@@ -1,11 +1,10 @@
-# -*- coding: utf-8 -*-
-"""
-Created on Mon Jan 9 09:03:57 2023
+"""Created on Mon Jan 9 09:03:57 2023
@author: snaeimi
-"""
+""" # noqa: CPY001, D400, N999
-class Project():
+
+class Project: # noqa: D101
def __init__(self, project_settings, scenario_list):
- self.scenario_list = scenario_list
- self.project_settings = project_settings
\ No newline at end of file
+ self.scenario_list = scenario_list
+ self.project_settings = project_settings
diff --git a/modules/systemPerformance/REWET/REWET/README.md b/modules/systemPerformance/REWET/REWET/README.md
index 914cc0d32..410b198c9 100644
--- a/modules/systemPerformance/REWET/REWET/README.md
+++ b/modules/systemPerformance/REWET/REWET/README.md
@@ -5,10 +5,10 @@ REstoration of Water after Event Tool (REWET) is a tool for the simulation of fu
## Installation
The current release of REWET (V0.1) only support Windows AMD64 systems.
-To install the package, first intall teh depencies. Installing Anacodna for managing the depencies is strongly advised.To install the latest version of Anaconda, please visit
+To install the package, first install the dependencies. Installing Anacodna for managing the dependencies is strongly advised.To install the latest version of Anaconda, please visit
Use the package manager please [click here](https://www.anaconda.com).
-After installing anaconda, open up the conda bash. To create a virtual enviroment * using Anaconda, type:
+After installing anaconda, open up the conda bash. To create a virtual environment * using Anaconda, type:
```bash
conda create --name myenv
@@ -38,4 +38,4 @@ to discuss what you would like to change.
Please make sure to update tests as appropriate.
## License
-No Liciense yet
\ No newline at end of file
+No Liciense yet
diff --git a/modules/systemPerformance/REWET/REWET/Report_Reading.py b/modules/systemPerformance/REWET/REWET/Report_Reading.py
index 7212facc9..fa5e17bfa 100644
--- a/modules/systemPerformance/REWET/REWET/Report_Reading.py
+++ b/modules/systemPerformance/REWET/REWET/Report_Reading.py
@@ -1,44 +1,55 @@
-# -*- coding: utf-8 -*-
-"""
-Created on Tue Oct 4 16:07:24 2022
+"""Created on Tue Oct 4 16:07:24 2022
@author: snaeimi
-"""
+""" # noqa: CPY001, D400, N999
-import io
import datetime
-def parseTimeStamp(time_stamp):
- striped_time_stamp = time_stamp.split(":")
- hour = striped_time_stamp[0]
+
+def parseTimeStamp(time_stamp): # noqa: N802, D103
+ striped_time_stamp = time_stamp.split(':')
+ hour = striped_time_stamp[0]
minute = striped_time_stamp[1]
second = striped_time_stamp[2]
-
- hour = int(hour)
+
+ hour = int(hour)
minute = int(minute)
second = int(second)
-
+
return (hour, minute, minute)
-class Report_Reading():
+
+class Report_Reading: # noqa: D101
def __init__(self, file_addr):
self.file_data = {}
self.maximum_trial_time = []
- with io.open(file_addr, 'r', encoding='utf-8') as f:
+ with open(file_addr, encoding='utf-8') as f: # noqa: PTH123
lnum = 0
for line in f:
- #self.file_data[lnum] = line
- if "Maximum trials exceeded at" in line:
- time_str = line.split("WARNING: Maximum trials exceeded at ")[1].split(" hrs")[0].split(',')[0]
-
+ # self.file_data[lnum] = line
+ if 'Maximum trials exceeded at' in line:
+ time_str = (
+ line.split('WARNING: Maximum trials exceeded at ')[1]
+ .split(' hrs')[0]
+ .split(',')[0]
+ )
+
x = parseTimeStamp(time_str)
- time_sec = datetime.timedelta(hours=x[0], minutes=x[1], seconds=x[2]).total_seconds()
+ time_sec = datetime.timedelta(
+ hours=x[0], minutes=x[1], seconds=x[2]
+ ).total_seconds()
time_sec = int(time_sec)
self.maximum_trial_time.append(time_sec)
- elif "System unbalanced at" in line:
- time_str = line.split("System unbalanced at ")[1].split(" hrs")[0].split(',')[0]
+ elif 'System unbalanced at' in line:
+ time_str = (
+ line.split('System unbalanced at ')[1]
+ .split(' hrs')[0]
+ .split(',')[0]
+ )
x = parseTimeStamp(time_str)
- time_sec = datetime.timedelta(hours=x[0], minutes=x[1], seconds=x[2]).total_seconds()
+ time_sec = datetime.timedelta(
+ hours=x[0], minutes=x[1], seconds=x[2]
+ ).total_seconds()
time_sec = int(time_sec)
self.maximum_trial_time.append(time_sec)
- lnum += 1
\ No newline at end of file
+ lnum += 1 # noqa: SIM113
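A hedged sketch of reading an EPANET report file with the class above; the file path is a placeholder:

```python
# Sketch only: 'scenario_1.rpt' is a placeholder EPANET report file.
from Report_Reading import Report_Reading

report = Report_Reading('scenario_1.rpt')
print(report.maximum_trial_time)  # times (s) with exceeded trials or an unbalanced system
```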
diff --git a/modules/systemPerformance/REWET/REWET/Result_Project.py b/modules/systemPerformance/REWET/REWET/Result_Project.py
index 3970e9d14..112d400d5 100644
--- a/modules/systemPerformance/REWET/REWET/Result_Project.py
+++ b/modules/systemPerformance/REWET/REWET/Result_Project.py
@@ -1,572 +1,698 @@
-# -*- coding: utf-8 -*-
-"""
-Created on Sun Oct 23 15:00:31 2022
+"""Created on Sun Oct 23 15:00:31 2022
@author: snaeimi
-"""
+""" # noqa: CPY001, D400, N999
-import wntrfr
-import pandas as pd
-import numpy as np
+import copy
import os
-import pickle
+import pickle # noqa: S403
from collections import OrderedDict
-import copy
-#import Report_Reading
-from Output.Map import Map
-from Output.Map import Helper
-from Output.Raw_Data import Raw_Data
-from Output.Curve import Curve
+
+import numpy as np
+import pandas as pd
+import wntrfr
from Output.Crew_Report import Crew_Report
+from Output.Curve import Curve
+
+# import Report_Reading
+from Output.Map import Helper, Map
+from Output.Raw_Data import Raw_Data
from Output.Result_Time import Result_Time
-import Input.Input_IO as io
-from Project import Project as MainProject
-class Project_Result(Map, Raw_Data, Curve, Crew_Report, Result_Time):
- def __init__(self, project_file_addr, result_directory=None, ignore_not_found=False, to_neglect_file=None, node_col='', result_file_dir = None, iObject=False):
-
- if iObject==False:
+
+class Project_Result(Map, Raw_Data, Curve, Crew_Report, Result_Time): # noqa: D101
+ def __init__(
+ self,
+ project_file_addr,
+ result_directory=None,
+ ignore_not_found=False, # noqa: FBT002
+ to_neglect_file=None,
+ node_col='',
+ result_file_dir=None,
+ iObject=False, # noqa: FBT002, N803
+ ):
+ if iObject == False: # noqa: E712
self.readPorjectFile(project_file_addr)
else:
self.project = copy.deepcopy(project_file_addr)
-
-
- if result_file_dir != None:
- self.project.project_settings.process.settings['result_directory'] = result_file_dir
- #self.project.scenario_list = io.read_damage_list(self.project.project_settings.process['pipe_damage_file_list' ], self.project.project_settings.process['pipe_damage_file_directory'])
- #print(self.project.scenario_list)
- self.project.scenario_list = self.project.scenario_list.set_index('Scenario Name')
-
- self.demand_node_name_list = []
- self._list = []
- self.pipe_damages = {}
- self.node_damages = {}
- self.pump_damages = {}
- self.tank_damages = {}
- #self.time_size = {}
- self.demand_node_size = {}
- self.index_to_scen_name = {}
- self.data = OrderedDict()
- self.registry = OrderedDict()
- self.scenario_prob = {}
- #self.scenario_set = {}
+
+ if result_file_dir != None: # noqa: E711
+ self.project.project_settings.process.settings['result_directory'] = (
+ result_file_dir
+ )
+ # self.project.scenario_list = io.read_damage_list(self.project.project_settings.process['pipe_damage_file_list' ], self.project.project_settings.process['pipe_damage_file_directory'])
+ # print(self.project.scenario_list)
+ self.project.scenario_list = self.project.scenario_list.set_index(
+ 'Scenario Name'
+ )
+
+ self.demand_node_name_list = []
+ self._list = []
+ self.pipe_damages = {}
+ self.node_damages = {}
+ self.pump_damages = {}
+ self.tank_damages = {}
+ # self.time_size = {}
+ self.demand_node_size = {}
+ self.index_to_scen_name = {}
+ self.data = OrderedDict()
+ self.registry = OrderedDict()
+ self.scenario_prob = {}
+ # self.scenario_set = {}
self.empty_scenario_name_list = set()
- self._delivery_data = None
- self._quality_data = None
- self._DLQNIndexPopulation = {}
- self._population_data = None
+ self._delivery_data = None
+ self._quality_data = None
+ self._DLQNIndexPopulation = {}
+ self._population_data = None
self.exceedance_map_intermediate_data = None
- self.rest_data = None
+ self.rest_data = None
self._RequiredDemandForAllNodesandtime = {}
- self.demand_ratio = self.project.project_settings.process['demand_ratio']
+ self.demand_ratio = self.project.project_settings.process['demand_ratio']
self.scn_name_list_that_result_file_not_found = []
- self.wn = wntrfr.network.WaterNetworkModel(self.project.project_settings.process['WN_INP'] )
-
- self.result_directory = self.project.project_settings.process['result_directory']
-
- if not isinstance(result_directory, type(None) ):
+ self.wn = wntrfr.network.WaterNetworkModel(
+ self.project.project_settings.process['WN_INP']
+ )
+
+ self.result_directory = self.project.project_settings.process[
+ 'result_directory'
+ ]
+
+ if result_directory is not None:
self.result_directory = result_directory
-
- to_neglect=[];
- if to_neglect_file != None and False: #sina hereeeee bug dadi amedane
- raise
+
+ to_neglect = []
+        # Sina: you introduced a bug here deliberately
+ if to_neglect_file != None and False: # noqa: SIM223, E711
+ raise # noqa: PLE0704
file_data = pd.read_excel(to_neglect_file)
to_neglect = file_data[node_col].to_list()
-
+
for node_name in self.wn.junction_name_list:
node = self.wn.get_node(node_name)
- if node.demand_timeseries_list[0].base_value > 0 and node_name not in to_neglect:
+ if (
+ node.demand_timeseries_list[0].base_value > 0
+ and node_name not in to_neglect
+ ):
self.demand_node_name_list.append(node_name)
-
- self.node_name_list = self.wn.node_name_list.copy()
+
+ self.node_name_list = self.wn.node_name_list.copy()
ret_val = self.checkForNotExistingFile(ignore_not_found)
self.prepareData()
- return ret_val
-
- def readPorjectFile(self, project_file_addr):
- print(project_file_addr)
- with open(project_file_addr, 'rb') as f:
- self.project = pickle.load(f)
-
- def loadPopulation(self, popuation_data, node_id_header, population_header):
+ return ret_val # noqa: PLE0101
+
+ def readPorjectFile(self, project_file_addr): # noqa: N802, D102
+ print(project_file_addr) # noqa: T201
+ with open(project_file_addr, 'rb') as f: # noqa: PTH123
+ self.project = pickle.load(f) # noqa: S301
+
+ def loadPopulation(self, popuation_data, node_id_header, population_header): # noqa: N802, D102
pop = popuation_data.copy()
pop = pop.set_index(node_id_header)
pop = pop[population_header]
self._population_data = pop
-
- def checkForNotExistingFile(self, ignore_not_found):
+
+ def checkForNotExistingFile(self, ignore_not_found): # noqa: N802, D102
self.scn_name_list_that_result_file_not_found = []
-
+
result_directory = self.result_directory
- #print(self.project.scenario_list)
- for scn_name, row in self.project.scenario_list.iterrows():
- scenario_registry_file_name = scn_name+"_registry.pkl"
- #print(result_directory)
- #print(scenario_registry_file_name)
- registry_file_data_addr = os.path.join(result_directory, scenario_registry_file_name)
- if not os.path.exists(registry_file_data_addr):
+ # print(self.project.scenario_list)
+ for scn_name, row in self.project.scenario_list.iterrows(): # noqa: B007
+ scenario_registry_file_name = scn_name + '_registry.pkl'
+ # print(result_directory)
+ # print(scenario_registry_file_name)
+ registry_file_data_addr = os.path.join( # noqa: PTH118
+ result_directory, scenario_registry_file_name
+ )
+ if not os.path.exists(registry_file_data_addr): # noqa: PTH110
self.scn_name_list_that_result_file_not_found.append(scn_name)
-
- if len( self.scn_name_list_that_result_file_not_found)> 0:
+
+ if len(self.scn_name_list_that_result_file_not_found) > 0:
if ignore_not_found:
- #print(str(len(self.scn_name_list_that_result_file_not_found)) +" out of "+ repr(len(self.project.scenario_list)) +" Result Files are not found and ignored" )
- #print(self.scn_name_list_that_result_file_not_found)
+ # print(str(len(self.scn_name_list_that_result_file_not_found)) +" out of "+ repr(len(self.project.scenario_list)) +" Result Files are not found and ignored" )
+ # print(self.scn_name_list_that_result_file_not_found)
pass
- #self.project.scenario_list.drop(self.scn_name_list_that_result_file_not_found, inplace=True)
+ # self.project.scenario_list.drop(self.scn_name_list_that_result_file_not_found, inplace=True)
else:
- raise ValueError("Res File Not Found: "+ repr(self.scn_name_list_that_result_file_not_found) +" in "+repr(result_directory))
-
-
- def prepareData(self):
- i=0
- #result_directory = self.project.project_settings.process['result_directory']
- #self.project.scenario_list = self.project.scenario_list.iloc[0:20]
- for scn_name, row in self.project.scenario_list.iterrows():
+ raise ValueError(
+ 'Res File Not Found: '
+ + repr(self.scn_name_list_that_result_file_not_found)
+ + ' in '
+ + repr(result_directory)
+ )
+
+ def prepareData(self): # noqa: N802, D102
+ i = 0
+ # result_directory = self.project.project_settings.process['result_directory']
+ # self.project.scenario_list = self.project.scenario_list.iloc[0:20]
+ for scn_name, row in self.project.scenario_list.iterrows(): # noqa: B007
self._RequiredDemandForAllNodesandtime[scn_name] = None
- #settings_file_name = scn_name+'.xlsx'
- #settings_file_addr = os.path.join(result_directory, settings_file_name)
- #scenario_set = pd.read_excel(settings_file_addr)
- #self.scenario_set[scn_name] = scenario_set
+ # settings_file_name = scn_name+'.xlsx'
+ # settings_file_addr = os.path.join(result_directory, settings_file_name)
+ # scenario_set = pd.read_excel(settings_file_addr)
+ # self.scenario_set[scn_name] = scenario_set
self.data[scn_name] = None
self.registry[scn_name] = None
-
- #self.time_size[scn_name] = len(self.data[scn_name].node['demand'].index)
- self.index_to_scen_name[i] = scn_name
- i+=1
-
- self.scenario_prob[scn_name] = self.project.scenario_list.loc[scn_name, 'Probability']
-
-
- '''
+
+ # self.time_size[scn_name] = len(self.data[scn_name].node['demand'].index)
+ self.index_to_scen_name[i] = scn_name
+ i += 1 # noqa: SIM113
+
+ self.scenario_prob[scn_name] = self.project.scenario_list.loc[
+ scn_name, 'Probability'
+ ]
+
+ """
        ATTENTION: We need probability for any probabilistic result
- '''
-
- def loadScneariodata(self, scn_name):
- if self.data[scn_name] != None:
+ """
+
+ def loadScneariodata(self, scn_name): # noqa: N802, D102
+ if self.data[scn_name] != None: # noqa: E711
return
- print("loading scenario "+str(scn_name))
+ print('loading scenario ' + str(scn_name)) # noqa: T201
result_directory = self.result_directory
- #scenario_registry_file_name = scn_name+"_registry.pkl"
- #registry_file_data_addr = os.path.join(result_directory, scenario_registry_file_name)
- scenario_registry_file_name = scn_name+"_registry.pkl"
- reg_addr = os.path.join(result_directory, scenario_registry_file_name)
+ # scenario_registry_file_name = scn_name+"_registry.pkl"
+ # registry_file_data_addr = os.path.join(result_directory, scenario_registry_file_name)
+ scenario_registry_file_name = scn_name + '_registry.pkl'
+ reg_addr = os.path.join(result_directory, scenario_registry_file_name) # noqa: PTH118
try:
- with open(reg_addr, 'rb') as f:
- #print(output_addr)
- reg_file_data = pickle.load(f)
+ with open(reg_addr, 'rb') as f: # noqa: PTH123
+ # print(output_addr)
+ reg_file_data = pickle.load(f) # noqa: S301
self.registry[scn_name] = reg_file_data
res_file_data = self.registry[scn_name].result
- except:
- scenario_registry_file_name = scn_name+".res"
- res_addr = os.path.join(result_directory, scenario_registry_file_name)
- with open(res_addr, 'rb') as f:
- res_file_data = pickle.load(f)
- #scenario_registry_file_name = scn_name+".res"
- #res_addr = os.path.join(result_directory, scenario_registry_file_name)
- #with open(res_addr, 'rb') as f:
- #print(output_addr)
- #res_file_data = pickle.load(f)
- #res_file_data.node['head'] = None
- #res_file_data.node['quality'] = None
- #res_file_data = self.registry[scn_name].result
+ except: # noqa: E722
+ scenario_registry_file_name = scn_name + '.res'
+ res_addr = os.path.join(result_directory, scenario_registry_file_name) # noqa: PTH118
+ with open(res_addr, 'rb') as f: # noqa: PTH123
+ res_file_data = pickle.load(f) # noqa: S301
+ # scenario_registry_file_name = scn_name+".res"
+ # res_addr = os.path.join(result_directory, scenario_registry_file_name)
+ # with open(res_addr, 'rb') as f:
+ # print(output_addr)
+ # res_file_data = pickle.load(f)
+ # res_file_data.node['head'] = None
+ # res_file_data.node['quality'] = None
+ # res_file_data = self.registry[scn_name].result
self.remove_maximum_trials(res_file_data)
- self.data[scn_name] = res_file_data
-
- def readData(self):
- #i=0
+ self.data[scn_name] = res_file_data
+
+ def readData(self): # noqa: N802, D102
+ # i=0
self.project.scenario_list = self.project.scenario_list.iloc[0:2]
result_directory = self.result_directory
-
- for scn_name, row in self.project.scenario_list.iterrows():
+
+ for scn_name, row in self.project.scenario_list.iterrows(): # noqa: B007
self._RequiredDemandForAllNodesandtime[scn_name] = None
- scenario_registry_file_name = scn_name+"_registry.pkl"
- registry_file_data_addr = os.path.join(result_directory, scenario_registry_file_name)
-
- with open(registry_file_data_addr, 'rb') as f:
- if not os.path.exists(registry_file_data_addr):
- raise ValueError("Registry File Not Found: "+ str(registry_file_data_addr))
- self.registry[scn_name] = pickle.load(f)
-
- #self.pipe_damages[scn_name] = current_scenario_registry.damage.pipe_all_damages
- #self.node_damages[scn_name] = current_scenario_registry.node_damage
- #self.pump_damages[scn_name] = current_scenario_registry.damaged_pumps
- #self.tank_damages[scn_name] = current_scenario_registry.tank_damage
-
- #res_addr = os.path.join(result_directory, scn_name+'.res')
-
- #with open(res_addr, 'rb') as f:
- #print(output_addr)
- #res_file_data = pickle.load(f)
-
- #settings_file_name = scn_name+'.xlsx'
- #settings_file_addr = os.path.join(result_directory, settings_file_name)
- #scenario_set = pd.read_excel(settings_file_addr)
- #self.scenario_set[scn_name] = scenario_set
-
- #res_file_data.node['head'] = None
- #res_file_data.node['quality'] = None
+ scenario_registry_file_name = scn_name + '_registry.pkl'
+ registry_file_data_addr = os.path.join( # noqa: PTH118
+ result_directory, scenario_registry_file_name
+ )
+
+ with open(registry_file_data_addr, 'rb') as f: # noqa: PTH123
+ if not os.path.exists(registry_file_data_addr): # noqa: PTH110
+ raise ValueError(
+ 'Registry File Not Found: ' + str(registry_file_data_addr)
+ )
+ self.registry[scn_name] = pickle.load(f) # noqa: S301
+
+ # self.pipe_damages[scn_name] = current_scenario_registry.damage.pipe_all_damages
+ # self.node_damages[scn_name] = current_scenario_registry.node_damage
+ # self.pump_damages[scn_name] = current_scenario_registry.damaged_pumps
+ # self.tank_damages[scn_name] = current_scenario_registry.tank_damage
+
+ # res_addr = os.path.join(result_directory, scn_name+'.res')
+
+ # with open(res_addr, 'rb') as f:
+ # print(output_addr)
+ # res_file_data = pickle.load(f)
+
+ # settings_file_name = scn_name+'.xlsx'
+ # settings_file_addr = os.path.join(result_directory, settings_file_name)
+ # scenario_set = pd.read_excel(settings_file_addr)
+ # self.scenario_set[scn_name] = scenario_set
+
+ # res_file_data.node['head'] = None
+ # res_file_data.node['quality'] = None
res_file_data = self.registry[scn_name]
self.remove_maximum_trials(res_file_data)
- self.data[scn_name] = res_file_data
- #self.time_size[scn_name] = len(self.data[scn_name].node['demand'].index)
- #self.index_to_scen_name[i] = scn_name
- #i+=1
-
- self.scenario_prob[scn_name] = self.project.scenario_list.loc[scn_name, 'Probability']
-
-
- '''
+ self.data[scn_name] = res_file_data
+ # self.time_size[scn_name] = len(self.data[scn_name].node['demand'].index)
+ # self.index_to_scen_name[i] = scn_name
+ # i+=1
+
+ self.scenario_prob[scn_name] = self.project.scenario_list.loc[
+ scn_name, 'Probability'
+ ]
+
+ """
        ATTENTION: We need probability for any probabilistic result
- '''
- print(str(scn_name) +" loaded")
-
- def remove_maximum_trials(self, data):
+ """
+ print(str(scn_name) + ' loaded') # noqa: T201
+ def remove_maximum_trials(self, data): # noqa: D102, PLR6301
all_time_list = data.maximum_trial_time
result_time_list = data.node['demand'].index.to_list()
- result_time_max_trailed_list = [ time for time in result_time_list if time in all_time_list]
-
+ result_time_max_trailed_list = [
+ time for time in result_time_list if time in all_time_list
+ ]
+
for att in data.node:
all_time_list = data.maximum_trial_time
result_time_list = data.node[att].index.to_list()
- result_time_max_trailed_list = list( set(result_time_list).intersection(set(all_time_list) ) )
+ result_time_max_trailed_list = list(
+ set(result_time_list).intersection(set(all_time_list))
+ )
result_time_max_trailed_list.sort()
if len(result_time_max_trailed_list) > 0:
- #print(result_time_max_trailed_list)
+ # print(result_time_max_trailed_list)
att_data = data.node[att]
- att_data.drop(result_time_max_trailed_list, inplace=True)
+ att_data.drop(result_time_max_trailed_list, inplace=True) # noqa: PD002
data.node[att] = att_data
-
+
for att in data.link:
all_time_list = data.maximum_trial_time
result_time_list = data.link[att].index.to_list()
- result_time_max_trailed_list = [ time for time in result_time_list if time in all_time_list]
+ result_time_max_trailed_list = [
+ time for time in result_time_list if time in all_time_list
+ ]
att_data = data.link[att]
- att_data.drop(result_time_max_trailed_list, inplace=True)
+ att_data.drop(result_time_max_trailed_list, inplace=True) # noqa: PD002
data.link[att] = att_data
-
+
flow_balance = data.node['demand'].sum(axis=1)
-
- time_to_drop = flow_balance[abs(flow_balance) >= 0.01 ].index
-
- #result_time_list = data.node['demand'].index.to_list()
+
+ time_to_drop = flow_balance[abs(flow_balance) >= 0.01].index # noqa: PLR2004
+
+ # result_time_list = data.node['demand'].index.to_list()
# = [ time for time in result_time_list if time in all_time_list]
-
+
for att in data.node:
- #all_time_list = data.maximum_trial_time
+ # all_time_list = data.maximum_trial_time
result_time_list = data.node[att].index.to_list()
- result_time_max_trailed_list = list( set(result_time_list).intersection(set(time_to_drop) ) )
+ result_time_max_trailed_list = list(
+ set(result_time_list).intersection(set(time_to_drop))
+ )
result_time_max_trailed_list.sort()
if len(result_time_max_trailed_list) > 0:
- #print(result_time_max_trailed_list)
- att_data = data.node[att]
- att_data.drop(result_time_max_trailed_list, inplace=True)
- data.node[att] = att_data
-
+ # print(result_time_max_trailed_list)
+ att_data = data.node[att]
+ att_data.drop(result_time_max_trailed_list, inplace=True) # noqa: PD002
+ data.node[att] = att_data
+
for att in data.link:
- #all_time_list = data.maximum_trial_time
+ # all_time_list = data.maximum_trial_time
result_time_list = data.link[att].index.to_list()
- result_time_max_trailed_list = list( set(result_time_list).intersection(set(time_to_drop) ) )
+ result_time_max_trailed_list = list(
+ set(result_time_list).intersection(set(time_to_drop))
+ )
result_time_max_trailed_list.sort()
if len(result_time_max_trailed_list) > 0:
att_data = data.link[att]
- att_data.drop(result_time_max_trailed_list, inplace=True)
+ att_data.drop(result_time_max_trailed_list, inplace=True) # noqa: PD002
data.link[att] = att_data
-
- def remove_maximum_trials_demand_flow(self, data):
+
+ def remove_maximum_trials_demand_flow(self, data): # noqa: D102, PLR6301
flow_balance = data.node['demand'].sum(axis=1)
-
- time_to_drop = flow_balance[abs(flow_balance) >= 0.01 ].index
-
- #result_time_list = data.node['demand'].index.to_list()
+
+ time_to_drop = flow_balance[abs(flow_balance) >= 0.01].index # noqa: PLR2004
+
+ # result_time_list = data.node['demand'].index.to_list()
# = [ time for time in result_time_list if time in all_time_list]
-
+
for att in data.node:
- #all_time_list = data.maximum_trial_time
+ # all_time_list = data.maximum_trial_time
result_time_list = data.node[att].index.to_list()
- result_time_max_trailed_list = [ time for time in result_time_list if time in time_to_drop]
- print(result_time_max_trailed_list)
+ result_time_max_trailed_list = [
+ time for time in result_time_list if time in time_to_drop
+ ]
+ print(result_time_max_trailed_list) # noqa: T201
att_data = data.node[att]
- att_data.drop(result_time_max_trailed_list, inplace=True)
+ att_data.drop(result_time_max_trailed_list, inplace=True) # noqa: PD002
data.node[att] = att_data
-
+
for att in data.link:
- #all_time_list = data.maximum_trial_time
+ # all_time_list = data.maximum_trial_time
result_time_list = data.link[att].index.to_list()
- result_time_max_trailed_list = [ time for time in result_time_list if time in time_to_drop]
+ result_time_max_trailed_list = [
+ time for time in result_time_list if time in time_to_drop
+ ]
att_data = data.link[att]
- att_data.drop(result_time_max_trailed_list, inplace=True)
+ att_data.drop(result_time_max_trailed_list, inplace=True) # noqa: PD002
data.link[att] = att_data
-
- def readPopulation(self, population_xlsx_addr = 'demandNode-Northridge.xlsx', demand_node_header='NodeID', population_header='#Customer'):
+
+ def readPopulation( # noqa: N802, D102
+ self,
+ population_xlsx_addr='demandNode-Northridge.xlsx',
+ demand_node_header='NodeID',
+ population_header='#Customer', # noqa: ARG002
+ ):
pop = pd.read_excel(population_xlsx_addr)
pop = pop.set_index(demand_node_header)
pop = pop['#Customer']
self._population_data = pop
- demand_node_without_population = [node_name for node_name in self.demand_node_name_list if node_name not in pop.index]
+ demand_node_without_population = [
+ node_name
+ for node_name in self.demand_node_name_list
+ if node_name not in pop.index
+ ]
if len(demand_node_without_population) > 0:
- raise ValueError("The following demand nodes are not population data: " + repr(demand_node_without_population))
+ raise ValueError(
+                'The following demand nodes have no population data: '
+ + repr(demand_node_without_population)
+ )
-
- def getRequiredDemandForAllNodesandtime(self, scn_name):
- """
+ def getRequiredDemandForAllNodesandtime(self, scn_name): # noqa: N802
+ """**********
+ ATTENTION: We Assume that all scenarios have the same time indexing
**********
- ATTENTION: We Assume that all scnearios have teh same time indexing
- **********
-
- Calculates and return required demands for all nodes in all the times steps
+
+        Calculates and returns required demands for all nodes in all time steps
Returns
-------
req_node_demand : Pandas DataFrame
Demand for all nodes and in all time
- """
+ """ # noqa: D205, D400
self.loadScneariodata(scn_name)
demand_ratio = self.demand_ratio
- if type(self._RequiredDemandForAllNodesandtime[scn_name])!=type(None):
+ if type(self._RequiredDemandForAllNodesandtime[scn_name]) != type(None): # noqa: E721
return self._RequiredDemandForAllNodesandtime[scn_name]
- undamaged_wn = self.wn
- time_index = self.data[scn_name].node['demand'].index
- #req_node_demand = pd.DataFrame(index=time_index.unique())
- default_pattern = undamaged_wn.options.hydraulic.pattern
- node_pattern_list = pd.Series(index=undamaged_wn.junction_name_list, dtype=str)
- _size=len(self.demand_node_name_list)
- i=0
- #req_node_demand = req_node_demand.transpose()
-
+ undamaged_wn = self.wn
+ time_index = self.data[scn_name].node['demand'].index
+ # req_node_demand = pd.DataFrame(index=time_index.unique())
+ default_pattern = undamaged_wn.options.hydraulic.pattern
+ node_pattern_list = pd.Series(
+ index=undamaged_wn.junction_name_list, dtype=str
+ )
+ _size = len(self.demand_node_name_list)
+ i = 0
+ # req_node_demand = req_node_demand.transpose()
+
all_base_demand = []
all_node_name_list = []
-
+
while i < _size:
node_name = self.demand_node_name_list[i]
- #print(i)
- i+=1
+ # print(i)
+ i += 1
node = undamaged_wn.get_node(node_name)
pattern_list = node.demand_timeseries_list.pattern_list()
- if pattern_list[0] != None:
+ if pattern_list[0] != None: # noqa: E711
node_pattern_list[node_name] = pattern_list[0].name
- elif pattern_list[0] == None and default_pattern != None:
+ elif pattern_list[0] == None and default_pattern != None: # noqa: E711
node_pattern_list[node_name] = str(default_pattern)
else:
node_pattern_list[node_name] = None
base_demand = node.base_demand * demand_ratio
all_base_demand.append([base_demand for i in time_index])
all_node_name_list.append(node_name)
- #temp=pd.DataFrame(data = base_demand, index = time_index, columns = [node_name])
- #req_node_demand = req_node_demand.append(temp.transpose())
- #constant_base_demand = [constant_base_demand for i in time_index]
+ # temp=pd.DataFrame(data = base_demand, index = time_index, columns = [node_name])
+ # req_node_demand = req_node_demand.append(temp.transpose())
+ # constant_base_demand = [constant_base_demand for i in time_index]
node_pattern_list = node_pattern_list.dropna()
patterns_list = node_pattern_list.unique()
- multiplier = pd.DataFrame(index=time_index, columns = patterns_list)
-
+ multiplier = pd.DataFrame(index=time_index, columns=patterns_list)
+
for pattern_name in iter(patterns_list):
cur_pattern = undamaged_wn.get_pattern(pattern_name)
time_index = time_index.unique()
for time in iter(time_index):
multiplier[pattern_name].loc[time] = cur_pattern.at(time)
-
- variable_base_demand = []
- variable_node_name_list = []
+
+ variable_base_demand = [] # noqa: F841
+ variable_node_name_list = [] # noqa: F841
for node_name, pattern_name in node_pattern_list.items():
- cur_node_req_demand = multiplier[pattern_name] * undamaged_wn.get_node(node_name).demand_timeseries_list[0].base_value * demand_ratio
-
+ cur_node_req_demand = (
+ multiplier[pattern_name]
+ * undamaged_wn.get_node(node_name)
+ .demand_timeseries_list[0]
+ .base_value
+ * demand_ratio
+ )
+
all_node_name_list.append(node_name)
all_base_demand.append(cur_node_req_demand.to_list())
- #cur_node_req_demand.name = node_name
- #cur_node_req_demand=pd.DataFrame(cur_node_req_demand).transpose()
- #req_node_demand = req_node_demand.append(cur_node_req_demand)
- #variable_base_demand = np.array(variable_base_demand).transpose().tolist()
- req_node_demand = pd.DataFrame(columns=time_index, index=all_node_name_list, data = all_base_demand)
+ # cur_node_req_demand.name = node_name
+ # cur_node_req_demand=pd.DataFrame(cur_node_req_demand).transpose()
+ # req_node_demand = req_node_demand.append(cur_node_req_demand)
+ # variable_base_demand = np.array(variable_base_demand).transpose().tolist()
+ req_node_demand = pd.DataFrame(
+ columns=time_index, index=all_node_name_list, data=all_base_demand
+ )
req_node_demand = req_node_demand.transpose()
- #constant_node_demand_df = pd.DataFrame(data = constant_base_demand, index = time_index, columns = constant_node_name_list)
- #variable_node_demand_df = pd.DataFrame(data = variable_base_demand, index = time_index, columns = variable_node_name_list)
- #if len(variable_base_demand) > 0 and len(variable_base_demand) == 0:
- #req_node_demand = constant_node_demand_df
- #elif len(variable_base_demand) == 0 and len(variable_base_demand) > 0:
- #req_node_demand = variable_base_demand
- #elif len(variable_base_demand) == 0 and len(variable_base_demand) == 0:
- #req_node_demand = constant_node_demand_df
- #else:
- #req_node_demand = pd.concat([constant_node_demand_df.transpose(), variable_node_demand_df.transpose()]).transpose()
-
- #print(len(all_node_name_list))
- #print(len(constant_base_demand))
- #print(len(variant_base_demand))
- #print("************************")
- #all_base_demand = constant_base_demand
-
-
- #req_node_demand = pd.DataFrame(index=time_index, columns=all_node_name_list, data=all_base_demand)
- #req_node_demand = req_node_demand.transpose()
- self._RequiredDemandForAllNodesandtime[scn_name] = req_node_demand.filter(self.demand_node_name_list)
+ # constant_node_demand_df = pd.DataFrame(data = constant_base_demand, index = time_index, columns = constant_node_name_list)
+ # variable_node_demand_df = pd.DataFrame(data = variable_base_demand, index = time_index, columns = variable_node_name_list)
+ # if len(variable_base_demand) > 0 and len(variable_base_demand) == 0:
+ # req_node_demand = constant_node_demand_df
+ # elif len(variable_base_demand) == 0 and len(variable_base_demand) > 0:
+ # req_node_demand = variable_base_demand
+ # elif len(variable_base_demand) == 0 and len(variable_base_demand) == 0:
+ # req_node_demand = constant_node_demand_df
+ # else:
+ # req_node_demand = pd.concat([constant_node_demand_df.transpose(), variable_node_demand_df.transpose()]).transpose()
+
+ # print(len(all_node_name_list))
+ # print(len(constant_base_demand))
+ # print(len(variant_base_demand))
+ # print("************************")
+ # all_base_demand = constant_base_demand
+
+ # req_node_demand = pd.DataFrame(index=time_index, columns=all_node_name_list, data=all_base_demand)
+ # req_node_demand = req_node_demand.transpose()
+ self._RequiredDemandForAllNodesandtime[scn_name] = req_node_demand.filter(
+ self.demand_node_name_list
+ )
return self._RequiredDemandForAllNodesandtime[scn_name]
- self._RequiredDemandForAllNodesandtime[scn_name] = req_node_demand.filter(self.demand_node_name_list)
+ self._RequiredDemandForAllNodesandtime[scn_name] = req_node_demand.filter(
+ self.demand_node_name_list
+ )
return self._RequiredDemandForAllNodesandtime[scn_name]
-
-
- def AS_getDLIndexPopulation(self, iPopulation="No", ratio=False, consider_leak=False, leak_ratio=0.75):
- scenario_list = list(self.data.keys() )
- all_scenario_DL_data = {}
+
+ def AS_getDLIndexPopulation( # noqa: N802, D102
+ self,
+ iPopulation='No', # noqa: N803
+ ratio=False, # noqa: FBT002
+ consider_leak=False, # noqa: FBT002
+ leak_ratio=0.75,
+ ):
+ scenario_list = list(self.data.keys())
+ all_scenario_DL_data = {} # noqa: N806
for scn_name in scenario_list:
- cur_scn_DL = self.getDLIndexPopulation_4(scn_name, iPopulation=iPopulation, ratio=ratio, consider_leak=consider_leak, leak_ratio=leak_ratio)
- cur_scn_DL = cur_scn_DL.to_dict()
+ cur_scn_DL = self.getDLIndexPopulation_4( # noqa: N806
+ scn_name,
+ iPopulation=iPopulation,
+ ratio=ratio,
+ consider_leak=consider_leak,
+ leak_ratio=leak_ratio,
+ )
+ cur_scn_DL = cur_scn_DL.to_dict() # noqa: N806
all_scenario_DL_data[scn_name] = cur_scn_DL
-
+
return pd.DataFrame.from_dict(all_scenario_DL_data)
-
- def AS_getQNIndexPopulation(self, iPopulation="No", ratio=False, consider_leak=False, leak_ratio=0.75):
- scenario_list = list(self.data.keys() )
- all_scenario_QN_data = {}
+
+ def AS_getQNIndexPopulation( # noqa: N802, D102
+ self,
+ iPopulation='No', # noqa: N803
+ ratio=False, # noqa: FBT002
+ consider_leak=False, # noqa: FBT002
+ leak_ratio=0.75,
+ ):
+ scenario_list = list(self.data.keys())
+ all_scenario_QN_data = {} # noqa: N806
for scn_name in scenario_list:
self.loadScneariodata(scn_name)
- cur_scn_QN = self.getQNIndexPopulation_4(scn_name, iPopulation=iPopulation, ratio=ratio, consider_leak=consider_leak, leak_ratio=leak_ratio)
- cur_scn_QN = cur_scn_QN.to_dict()
+ cur_scn_QN = self.getQNIndexPopulation_4( # noqa: N806
+ scn_name,
+ iPopulation=iPopulation,
+ ratio=ratio,
+ consider_leak=consider_leak,
+ leak_ratio=leak_ratio,
+ )
+ cur_scn_QN = cur_scn_QN.to_dict() # noqa: N806
all_scenario_QN_data[scn_name] = cur_scn_QN
-
+
return pd.DataFrame.from_dict(all_scenario_QN_data)
-
- def AS_getOutage_4(self, LOS='DL', iConsider_leak=False, leak_ratio=0, consistency_time_window=7200):
- scenario_list = list(self.data.keys() )
+
+ def AS_getOutage_4( # noqa: N802, D102
+ self,
+ LOS='DL', # noqa: N803
+ iConsider_leak=False, # noqa: FBT002, N803
+ leak_ratio=0,
+ consistency_time_window=7200,
+ ):
+ scenario_list = list(self.data.keys())
all_scenario_outage_data = {}
i = 0
for scn_name in scenario_list:
- cur_scn_outage = self.getOutageTimeGeoPandas_4(scn_name, LOS=LOS, iConsider_leak=iConsider_leak, leak_ratio=leak_ratio, consistency_time_window=consistency_time_window)
+ cur_scn_outage = self.getOutageTimeGeoPandas_4(
+ scn_name,
+ LOS=LOS,
+ iConsider_leak=iConsider_leak,
+ leak_ratio=leak_ratio,
+ consistency_time_window=consistency_time_window,
+ )
cur_scn_outage = cur_scn_outage['restoration_time'].to_dict()
all_scenario_outage_data[scn_name] = cur_scn_outage
- i+=1
+ i += 1 # noqa: SIM113
return pd.DataFrame.from_dict(all_scenario_outage_data)
-
- def PR_getBSCPercentageExcedanceCurce(self, data_frame, restoration_percentage):
+
+ def PR_getBSCPercentageExcedanceCurce(self, data_frame, restoration_percentage): # noqa: N802, D102
max_time = data_frame.max().max()
restore_time = {}
-
- if type(self._population_data) == type(None):
+
+ if type(self._population_data) == type(None): # noqa: E721
demand_node_name_list = data_frame.index
- population = pd.Series(index = demand_node_name_list, data=1)
+ population = pd.Series(index=demand_node_name_list, data=1)
else:
population = self._population_data
population = population.loc[data_frame.index]
- population_dataframe = dict(zip(data_frame.columns, [population.to_dict() for i in data_frame.columns] ) )
+ population_dataframe = dict(
+ zip(
+ data_frame.columns,
+ [population.to_dict() for i in data_frame.columns],
+ )
+ )
population_dataframe = pd.DataFrame.from_dict(population_dataframe)
total_population = population.sum()
for t in range(0, int(max_time), 3600):
satisfies_nodes_scnearios = data_frame <= t
- satisfies_nodes_scnearios = satisfies_nodes_scnearios * population_dataframe
- scenario_percentages = satisfies_nodes_scnearios.sum() / total_population * 100
- satisfied_scenarios = (scenario_percentages[scenario_percentages >= restoration_percentage]).index
- already_recorded_scenarios = set(restore_time.keys() )
+ satisfies_nodes_scnearios = ( # noqa: PLR6104
+ satisfies_nodes_scnearios * population_dataframe
+ )
+ scenario_percentages = (
+ satisfies_nodes_scnearios.sum() / total_population * 100
+ )
+ satisfied_scenarios = (
+ scenario_percentages[scenario_percentages >= restoration_percentage]
+ ).index
+ already_recorded_scenarios = set(restore_time.keys())
new_scenarios = set(satisfied_scenarios) - already_recorded_scenarios
-
- new_record = dict(zip(new_scenarios,[t for k in range(len(new_scenarios) )]))
+
+ new_record = dict(
+ zip(new_scenarios, [t for k in range(len(new_scenarios))])
+ )
restore_time.update(new_record)
-
- already_recorded_scenarios = set(restore_time.keys() )
- unsatisfied_scenarios = set(self.scenario_prob.keys() ) - already_recorded_scenarios
- new_record = dict(zip(unsatisfied_scenarios,[t for k in range(len(unsatisfied_scenarios) )]))
+
+ already_recorded_scenarios = set(restore_time.keys())
+ unsatisfied_scenarios = (
+ set(self.scenario_prob.keys()) - already_recorded_scenarios
+ )
+ new_record = dict(
+ zip(
+ unsatisfied_scenarios, [t for k in range(len(unsatisfied_scenarios))]
+ )
+ )
restore_time.update(new_record)
-
- restore_data = pd.DataFrame.from_dict({'restore_time': restore_time} )
- restore_data['restore_time'] = restore_data.loc[list(self.scenario_prob.keys() ) , 'restore_time']
- restore_data['prob' ] = list(self.scenario_prob.values() )
- restore_data.sort_values('restore_time', ascending = False, inplace = True)
- ep_mat = Helper.EPHelper(restore_data['prob'].to_numpy() )
+ restore_data = pd.DataFrame.from_dict({'restore_time': restore_time})
+
+ restore_data['restore_time'] = restore_data.loc[
+ list(self.scenario_prob.keys()), 'restore_time'
+ ]
+ restore_data['prob'] = list(self.scenario_prob.values())
+ restore_data.sort_values('restore_time', ascending=False, inplace=True) # noqa: PD002
+ ep_mat = Helper.EPHelper(restore_data['prob'].to_numpy())
restore_data['EP'] = ep_mat
-
+
return restore_data
-
- def PR_getCurveExcedence(self, data_frame, result_type='mean', daily=False, min_time=0, max_time=24*3600*1000):
+
+ def PR_getCurveExcedence( # noqa: C901, N802, D102
+ self,
+ data_frame,
+ result_type='mean',
+ daily=False, # noqa: FBT002
+ min_time=0,
+ max_time=24 * 3600 * 1000,
+ ):
data_size = len(data_frame.columns)
table_temp = []
-
+
for i in np.arange(data_size):
- scn_name = data_frame.columns[i]
- prob = self.scenario_prob[scn_name]
+ scn_name = data_frame.columns[i]
+ prob = self.scenario_prob[scn_name]
cur_scn_data = data_frame[scn_name]
dmg_index_list = []
-
+
cur_scn_data = cur_scn_data[cur_scn_data.index >= min_time]
cur_scn_data = cur_scn_data[cur_scn_data.index <= max_time]
-
- if daily == True:
+
+ if daily == True: # noqa: E712
cur_scn_data = self.getResultSeperatedDaily(cur_scn_data)
-
+
if result_type == 'mean':
cur_mean_res = cur_scn_data.mean()
- if type(cur_mean_res) != pd.core.series.Series:
- temp_res = {'mean_dmg' : cur_mean_res}
+ if type(cur_mean_res) != pd.core.series.Series: # noqa: E721
+ temp_res = {'mean_dmg': cur_mean_res}
dmg_index_list.append('mean_dmg')
else:
temp_res = {}
for day_time, value in cur_mean_res.iteritems():
- temp_dmg_index = 'mean_dmg_'+day_time
- temp_res.update({temp_dmg_index:value})
+ temp_dmg_index = 'mean_dmg_' + day_time
+ temp_res.update({temp_dmg_index: value})
dmg_index_list.append(temp_dmg_index)
elif result_type == 'min':
dmg_min_res = cur_scn_data.min()
- if type(dmg_min_res) != pd.core.series.Series:
- temp_res = {'min_dmg' : dmg_min_res}
+ if type(dmg_min_res) != pd.core.series.Series: # noqa: E721
+ temp_res = {'min_dmg': dmg_min_res}
dmg_index_list.append('min_dmg')
else:
temp_res = {}
for day_time, value in dmg_min_res.iteritems():
- temp_dmg_index = 'min_dmg_'+day_time
- temp_res.update({temp_dmg_index:value})
+ temp_dmg_index = 'min_dmg_' + day_time
+ temp_res.update({temp_dmg_index: value})
dmg_index_list.append(temp_dmg_index)
elif result_type == 'max':
dmg_max_res = cur_scn_data.min()
- if type(dmg_max_res) != pd.core.series.Series:
- temp_res = {'max_dmg' : dmg_max_res}
+ if type(dmg_max_res) != pd.core.series.Series: # noqa: E721
+ temp_res = {'max_dmg': dmg_max_res}
dmg_index_list.append('max_dmg')
else:
temp_res = {}
for day_time, value in dmg_max_res.iteritems():
- temp_dmg_index = 'max_dmg_'+day_time
- temp_res.update({temp_dmg_index:value})
+ temp_dmg_index = 'max_dmg_' + day_time
+ temp_res.update({temp_dmg_index: value})
dmg_index_list.append(temp_dmg_index)
else:
- raise ValueError("Unknown group method: "+repr(result_type))
-
- loop_res = {'prob':prob, 'index':scn_name}
- loop_res.update(temp_res)
+ raise ValueError('Unknown group method: ' + repr(result_type))
+
+ loop_res = {'prob': prob, 'index': scn_name}
+ loop_res.update(temp_res)
table_temp.append(loop_res)
-
+
table = pd.DataFrame.from_dict(table_temp).set_index('index')
- res = pd.DataFrame(index=[i for i in range(0, len(table.index))],
- dtype=np.float64)
+ res = pd.DataFrame(
+ index=[i for i in range(len(table.index))], # noqa: C416
+ dtype=np.float64,
+ )
for dmg_name in dmg_index_list:
select_columns = ['prob']
select_columns.extend([dmg_name])
loop_table = table[select_columns]
- loop_table.sort_values(dmg_name, inplace = True)
-
+ loop_table.sort_values(dmg_name, inplace=True) # noqa: PD002
+
ep_mat = Helper.EPHelper(loop_table['prob'].to_numpy())
res[dmg_name] = loop_table[dmg_name].to_numpy()
- res[dmg_name+'_EP'] = ep_mat
-
+ res[dmg_name + '_EP'] = ep_mat
+
return res
-
- def getResultSeperatedDaily(self, data, begin_time=0):
+
+ def getResultSeperatedDaily(self, data, begin_time=0): # noqa: D102, N802, PLR6301
data = data[data.index >= begin_time]
- data.index = (data.index - begin_time)/(24*3600)
-
+ data.index = (data.index - begin_time) / (24 * 3600)
+
res_data = []
- res_day = []
-
- for day_iter in range(0, np.int64(np.ceil(np.max(data.index)))):
- day_data = data[(data.index >= day_iter) & (data.index <= day_iter+1)]
+ res_day = []
+
+ for day_iter in range(np.int64(np.ceil(np.max(data.index)))):
+ day_data = data[(data.index >= day_iter) & (data.index <= day_iter + 1)]
res_data.append(day_data.to_list())
- res_day.append(str(day_iter)+'-'+str(day_iter+1))
-
- return pd.DataFrame(res_data, index = res_day).transpose()
-
+ res_day.append(str(day_iter) + '-' + str(day_iter + 1))
-
-
\ No newline at end of file
+ return pd.DataFrame(res_data, index=res_day).transpose()
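The `PR_getBSCPercentageExcedanceCurce` and `PR_getCurveExcedence` methods in the hunk above both sort a per-scenario metric, attach the scenario probabilities, and pass them to `Helper.EPHelper` to obtain exceedance probabilities (`EP`). A minimal self-contained sketch of that pattern follows; it assumes the exceedance probability is the cumulative sum of scenario probabilities after sorting by descending restoration time, which may differ from what `Helper.EPHelper` actually computes:

```python
# Sketch only (not part of the diff): building an exceedance curve from
# per-scenario restoration times and scenario probabilities. The cumulative-sum
# rule for 'EP' is an assumption; REWET's Helper.EPHelper may differ.
import pandas as pd


def exceedance_curve(restore_time: dict, scenario_prob: dict) -> pd.DataFrame:
    curve = pd.DataFrame(
        {'restore_time': pd.Series(restore_time), 'prob': pd.Series(scenario_prob)}
    )
    curve = curve.sort_values('restore_time', ascending=False)
    # P(restoration time >= t), accumulated from the slowest scenario downward
    curve['EP'] = curve['prob'].cumsum()
    return curve


# Example with three equally likely scenarios (hypothetical data)
print(
    exceedance_curve(
        {'scn1': 3600, 'scn2': 7200, 'scn3': 1800},
        {'scn1': 1 / 3, 'scn2': 1 / 3, 'scn3': 1 / 3},
    )
)
```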
diff --git a/modules/systemPerformance/REWET/REWET/Sim/Simulation.py b/modules/systemPerformance/REWET/REWET/Sim/Simulation.py
index cc923e325..b9f7adb1e 100644
--- a/modules/systemPerformance/REWET/REWET/Sim/Simulation.py
+++ b/modules/systemPerformance/REWET/REWET/Sim/Simulation.py
@@ -1,392 +1,627 @@
+import math # noqa: CPY001, D100, INP001
import os
-import math
-import pandas as pd
+
import numpy as np
+import pandas as pd
from EnhancedWNTR.sim.epanet import EpanetSimulator
from EnhancedWNTR.sim.results import SimulationResults
-import wntrfr
-class Hydraulic_Simulation():
- def __init__(self, wn, settings, current_stop_time, worker_rank, prev_isolated_junctions, prev_isolated_links):
- self.wn = wn
- self.nne_flow_criteria = settings.process['nne_flow_limit']
+class Hydraulic_Simulation: # noqa: D101
+ def __init__(
+ self,
+ wn,
+ settings,
+ current_stop_time,
+ worker_rank,
+ prev_isolated_junctions,
+ prev_isolated_links,
+ ):
+ self.wn = wn
+ self.nne_flow_criteria = settings.process['nne_flow_limit']
self.nne_pressure_criteria = settings.process['nne_pressure_limit']
- self.minimum_pressure = 8
- self.required_pressure = 25
- self.current_stop_time = current_stop_time
- self.worker_rank = worker_rank
-
- self.wn.options.hydraulic.demand_model = "PDA"
-
- temp_folder = settings['temp_directory']
- if type(temp_folder) != str:
- raise ValueError("temp folder type is not str")
-
- if settings['save_time_step'] == True:
- if temp_folder == '':
- self.temp_directory = str(worker_rank) + "_" + repr(current_stop_time)
+ self.minimum_pressure = 8
+ self.required_pressure = 25
+ self.current_stop_time = current_stop_time
+ self.worker_rank = worker_rank
+
+ self.wn.options.hydraulic.demand_model = 'PDA'
+
+ temp_folder = settings['temp_directory']
+ if type(temp_folder) != str: # noqa: E721
+ raise ValueError('temp folder type is not str') # noqa: EM101, TRY003
+
+ if settings['save_time_step'] == True: # noqa: E712
+ if temp_folder == '': # noqa: PLC1901
+ self.temp_directory = (
+ str(worker_rank) + '_' + repr(current_stop_time)
+ )
else:
- self.temp_directory = os.path.join(temp_folder, str(worker_rank) + "_" + repr(current_stop_time))
-
- elif settings['save_time_step'] == False:
- if temp_folder == '':
+ self.temp_directory = os.path.join( # noqa: PTH118
+ temp_folder, str(worker_rank) + '_' + repr(current_stop_time)
+ )
+
+ elif settings['save_time_step'] == False: # noqa: E712
+ if temp_folder == '': # noqa: PLC1901
self.temp_directory = str(worker_rank)
else:
- self.temp_directory = os.path.join(temp_folder, str(worker_rank))
+ self.temp_directory = os.path.join(temp_folder, str(worker_rank)) # noqa: PTH118
else:
- raise ValueError("Unknown value for settings 'save_time_step': " + repr())
+ raise ValueError(
+                "Unknown value for settings 'save_time_step': " + repr(settings['save_time_step'])
+ )
self._prev_isolated_junctions = prev_isolated_junctions
- self._prev_isolated_links = prev_isolated_links
-
- def removeNonDemandNegativeNodeByPythonMinorLoss(self, maximum_iteration):
+ self._prev_isolated_links = prev_isolated_links
+
+ def removeNonDemandNegativeNodeByPythonMinorLoss(self, maximum_iteration): # noqa: N802, D102
current_stop_time = self.current_stop_time
- minimum_pressure = self.minimum_pressure
- required_pressure = self.required_pressure
- temp_file_dest = self.temp_directory
- orginal_c_dict = {}
- for itrr in range(0, maximum_iteration):
- print(itrr)
- sim = EpanetSimulator(self.wn)
+ minimum_pressure = self.minimum_pressure # noqa: F841
+ required_pressure = self.required_pressure # noqa: F841
+ temp_file_dest = self.temp_directory
+ orginal_c_dict = {}
+ for itrr in range(maximum_iteration):
+ print(itrr) # noqa: T201
+ sim = EpanetSimulator(self.wn)
self.s = sim
- self._prev_isolated_junctions, self._prev_isolated_links = sim._get_isolated_junctions_and_links(self._prev_isolated_junctions, self._prev_isolated_links)
+ self._prev_isolated_junctions, self._prev_isolated_links = (
+ sim._get_isolated_junctions_and_links( # noqa: SLF001
+ self._prev_isolated_junctions, self._prev_isolated_links
+ )
+ )
sim.manipulateTimeOrder(current_stop_time, current_stop_time)
- rr, i_run_successful = sim.run_sim(file_prefix = temp_file_dest, start_time = current_stop_time, iModified=False)
- new_closed_pipes, ifinish = sim.now_temp_2(rr, self._prev_isolated_links, self._prev_isolated_junctions, self.nne_flow_criteria, self.nne_pressure_criteria)
-
+ rr, i_run_successful = sim.run_sim( # noqa: F841
+ file_prefix=temp_file_dest,
+ start_time=current_stop_time,
+ iModified=False,
+ )
+ new_closed_pipes, ifinish = sim.now_temp_2(
+ rr,
+ self._prev_isolated_links,
+ self._prev_isolated_junctions,
+ self.nne_flow_criteria,
+ self.nne_pressure_criteria,
+ )
+
if ifinish:
break
-
+
new_pipe_altered_name_list = new_closed_pipes.keys()
- #print(str(list(new_pipe_altered_name_list)[0]) + " " + str(new_closed_pipes[list(new_pipe_altered_name_list)[0]]))
- new_c_altered = [pipe_name for pipe_name in new_pipe_altered_name_list if pipe_name not in orginal_c_dict]
+ # print(str(list(new_pipe_altered_name_list)[0]) + " " + str(new_closed_pipes[list(new_pipe_altered_name_list)[0]]))
+ new_c_altered = [
+ pipe_name
+ for pipe_name in new_pipe_altered_name_list
+ if pipe_name not in orginal_c_dict
+ ]
for pipe_name in new_c_altered:
- if pipe_name not in orginal_c_dict: #in order not tohange orginal C to something very new in the last oteration
+ if (
+ pipe_name not in orginal_c_dict
+            ): # in order not to change the original C to something very new in the last iteration
orginal_c_dict[pipe_name] = new_closed_pipes[pipe_name]
return orginal_c_dict
-
- def isolateReservoirs(self, isolated_nodes):
+
+ def isolateReservoirs(self, isolated_nodes): # noqa: N802, D102
for reservoir_name, reservoir in self.wn.reservoirs():
- if self.wn._node_reg.get_usage(reservoir_name) == None:
- reservoir._is_isolated = True
+ if self.wn._node_reg.get_usage(reservoir_name) == None: # noqa: SLF001, E711
+ reservoir._is_isolated = True # noqa: SLF001
isolated_nodes.add(reservoir_name)
return isolated_nodes
-
-
- def isolateTanks(self, isolated_nodes):
+
+ def isolateTanks(self, isolated_nodes): # noqa: N802, D102
for tank_name, tank in self.wn.tanks():
- if self.wn._node_reg.get_usage(tank_name) == None:
- tank._is_isolated = True
+ if self.wn._node_reg.get_usage(tank_name) == None: # noqa: SLF001, E711
+ tank._is_isolated = True # noqa: SLF001
isolated_nodes.add(tank_name)
return isolated_nodes
-
- def removeNonDemandNegativeNodeByPythonClose(self, maximum_iteration):
+
+ def removeNonDemandNegativeNodeByPythonClose(self, maximum_iteration): # noqa: N802, D102
current_stop_time = self.current_stop_time
- minimum_pressure = self.minimum_pressure
- required_pressure = self.required_pressure
- temp_file_dest = self.temp_directory
+ minimum_pressure = self.minimum_pressure # noqa: F841
+ required_pressure = self.required_pressure # noqa: F841
+ temp_file_dest = self.temp_directory
self.closed_pipes = {}
- for itrr in range(0, maximum_iteration):
- print(itrr)
- sim = EpanetSimulator(self.wn)
- self._prev_isolated_junctions, self._prev_isolated_links = sim._get_isolated_junctions_and_links(self._prev_isolated_junctions, self._prev_isolated_links)
+ for itrr in range(maximum_iteration):
+ print(itrr) # noqa: T201
+ sim = EpanetSimulator(self.wn)
+ self._prev_isolated_junctions, self._prev_isolated_links = (
+ sim._get_isolated_junctions_and_links( # noqa: SLF001
+ self._prev_isolated_junctions, self._prev_isolated_links
+ )
+ )
sim.manipulateTimeOrder(current_stop_time, current_stop_time)
- rr, i_run_successful = sim.run_sim(file_prefix = temp_file_dest, start_time = current_stop_time, iModified=False)
- new_closed_pipes, ifinish = sim.closePipeNNN(rr, self._prev_isolated_links, self._prev_isolated_junctions, self.nne_flow_criteria, self.nne_pressure_criteria)
-
+ rr, i_run_successful = sim.run_sim( # noqa: F841
+ file_prefix=temp_file_dest,
+ start_time=current_stop_time,
+ iModified=False,
+ )
+ new_closed_pipes, ifinish = sim.closePipeNNN(
+ rr,
+ self._prev_isolated_links,
+ self._prev_isolated_junctions,
+ self.nne_flow_criteria,
+ self.nne_pressure_criteria,
+ )
+
if ifinish:
break
-
+
new_pipe_altered_name_list = new_closed_pipes.keys()
- new_c_altered = [pipe_name for pipe_name in new_pipe_altered_name_list if pipe_name not in self.closed_pipes]
+ new_c_altered = [
+ pipe_name
+ for pipe_name in new_pipe_altered_name_list
+ if pipe_name not in self.closed_pipes
+ ]
for pipe_name in new_c_altered:
self.closed_pipes[pipe_name] = new_closed_pipes[pipe_name]
- #self.closed_pipes = orginal_c_dict
- #return orginal_c_dict
-
- def rollBackPipeMinorLoss(self, altered_pipes):
+ # self.closed_pipes = orginal_c_dict
+ # return orginal_c_dict
+
+ def rollBackPipeMinorLoss(self, altered_pipes): # noqa: N802, D102
for pipe_name in altered_pipes:
self.wn.get_link(pipe_name).minor_loss = altered_pipes[pipe_name]
-
- def rollBackPipeClose(self):
+
+ def rollBackPipeClose(self): # noqa: N802, D102
altered_pipes = self.closed_pipes
for pipe_name in altered_pipes:
pipe = self.wn.get_link(pipe_name)
pipe.initial_status = altered_pipes[pipe_name]
-
- def performSimulation(self, next_event_time, iModified):
+
+ def performSimulation(self, next_event_time, iModified): # noqa: N802, N803, D102
current_stop_time = self.current_stop_time
- minimum_pressure = self.minimum_pressure
- required_pressure = self.required_pressure
- temp_file_dest = self.temp_directory
+ minimum_pressure = self.minimum_pressure # noqa: F841
+ required_pressure = self.required_pressure # noqa: F841
+ temp_file_dest = self.temp_directory
sim = EpanetSimulator(self.wn)
- #self.s=sim
- self._prev_isolated_junctions, self._prev_isolated_links = sim._get_isolated_junctions_and_links(self._prev_isolated_junctions, self._prev_isolated_links)
- self._prev_isolated_junctions = self.isolateReservoirs(self._prev_isolated_junctions)
- self._prev_isolated_junctions = self.isolateTanks(self._prev_isolated_junctions)
- print('***********')
- print(len(self._prev_isolated_junctions))
- print(len(self._prev_isolated_links))
- print('-----------')
- sim.manipulateTimeOrder(current_stop_time, next_event_time) #, change_time_step=True, min_correction_time_step=self._min_correction_time)
- rr, i_run_successful = sim.run_sim(file_prefix = temp_file_dest, start_time = current_stop_time,iModified=iModified)
+ # self.s=sim
+ self._prev_isolated_junctions, self._prev_isolated_links = (
+ sim._get_isolated_junctions_and_links( # noqa: SLF001
+ self._prev_isolated_junctions, self._prev_isolated_links
+ )
+ )
+ self._prev_isolated_junctions = self.isolateReservoirs(
+ self._prev_isolated_junctions
+ )
+ self._prev_isolated_junctions = self.isolateTanks(
+ self._prev_isolated_junctions
+ )
+ print('***********') # noqa: T201
+ print(len(self._prev_isolated_junctions)) # noqa: T201
+ print(len(self._prev_isolated_links)) # noqa: T201
+ print('-----------') # noqa: T201
+ sim.manipulateTimeOrder(
+ current_stop_time, next_event_time
+ ) # , change_time_step=True, min_correction_time_step=self._min_correction_time)
+ rr, i_run_successful = sim.run_sim(
+ file_prefix=temp_file_dest,
+ start_time=current_stop_time,
+ iModified=iModified,
+ )
return rr, i_run_successful
-
- def estimateRun(self, next_event_time, iModified):
+
+ def estimateRun(self, next_event_time, iModified): # noqa: N802, N803, D102
current_stop_time = self.current_stop_time
- minimum_pressure = self.minimum_pressure
- required_pressure = self.required_pressure
-
+ minimum_pressure = self.minimum_pressure # noqa: F841
+ required_pressure = self.required_pressure # noqa: F841
+
sim = EpanetSimulator(self.wn)
duration = self.wn.options.time.duration
- report_time_step = self.wn.options.time.report_timestep
+ report_time_step = self.wn.options.time.report_timestep
sim.manipulateTimeOrder(current_stop_time, current_stop_time)
-
- temp_file_dest = self.temp_directory
- self._prev_isolated_junctions, self._prev_isolated_links = sim._get_isolated_junctions_and_links(self._prev_isolated_junctions, self._prev_isolated_links)
- self._prev_isolated_junctions = self.isolateReservoirs(self._prev_isolated_junctions)
- self._prev_isolated_junctions = self.isolateTanks(self._prev_isolated_junctions)
- rr, i_run_successful = sim.run_sim(file_prefix= temp_file_dest, start_time = current_stop_time, iModified=iModified)
+
+ temp_file_dest = self.temp_directory
+ self._prev_isolated_junctions, self._prev_isolated_links = (
+ sim._get_isolated_junctions_and_links( # noqa: SLF001
+ self._prev_isolated_junctions, self._prev_isolated_links
+ )
+ )
+ self._prev_isolated_junctions = self.isolateReservoirs(
+ self._prev_isolated_junctions
+ )
+ self._prev_isolated_junctions = self.isolateTanks(
+ self._prev_isolated_junctions
+ )
+ rr, i_run_successful = sim.run_sim(
+ file_prefix=temp_file_dest,
+ start_time=current_stop_time,
+ iModified=iModified,
+ )
self.wn.options.time.duration = duration
self.wn.options.time.report_timestep = report_time_step
rr = self.approximateNewResult(rr, current_stop_time, next_event_time, 0)
-
+
return rr, i_run_successful
-
- def estimateWithoutRun(self, result, next_event_time):
+
+ def estimateWithoutRun(self, result, next_event_time): # noqa: N802, D102
current_stop_time = self.current_stop_time
- minimum_pressure = self.minimum_pressure
- required_pressure = self.required_pressure
-
- time = result.node["demand"].index.to_list()
+ minimum_pressure = self.minimum_pressure # noqa: F841
+ required_pressure = self.required_pressure # noqa: F841
+
+ time = result.node['demand'].index.to_list()
unreliable_time_list = result.maximum_trial_time
time.reverse()
-
- last_valid_time = int(-1)
+
+ last_valid_time = -1
for checked_last_time in time:
if checked_last_time not in unreliable_time_list:
last_valid_time = checked_last_time
break
-
- if last_valid_time == int(-1):
- raise ValueError("Last reliabale tiem is not found")
-
- time_step = min(self.wn.options.time.hydraulic_timestep, self.wn.options.time.report_timestep)
+
+ if last_valid_time == -1:
+            raise ValueError('Last reliable time is not found') # noqa: EM101, TRY003
+
+ time_step = min(
+ self.wn.options.time.hydraulic_timestep,
+ self.wn.options.time.report_timestep,
+ )
time_step = int(time_step)
- end_time = next_event_time
- end_time = int(end_time)
- #result_node_head = {}
- #result_node_demand = {}
- #result_node_pressure = {}
- result_node_status = {}
- result_node_setting = {}
+ end_time = next_event_time
+ end_time = int(end_time)
+ # result_node_head = {}
+ # result_node_demand = {}
+ # result_node_pressure = {}
+ result_node_status = {}
+ result_node_setting = {}
result_node_flowrate = {}
-
+
sim = EpanetSimulator(self.wn)
-
- self._prev_isolated_junctions, self._prev_isolated_links = sim._get_isolated_junctions_and_links(self._prev_isolated_junctions, self._prev_isolated_links)
- self._prev_isolated_junctions = self.isolateReservoirs(self._prev_isolated_junctions)
- self._prev_isolated_junctions = self.isolateTanks(self._prev_isolated_junctions)
-
- #available_node_list = [node_name for node_name in self.wn.node_name_list if self.wn.get_node(node_name)._is_isolated == False]
- #available_link_list = [link_name for link_name in self.wn.link_name_list if self.wn.get_link(link_name)._is_isolated == False]
-
- available_node_list = [node_name for node_name in self.wn.node_name_list]
- available_link_list = [link_name for link_name in self.wn.link_name_list]
-
- available_node_list = [node_name for node_name in available_node_list if node_name in result.node['demand'].columns]
- available_link_list = [link_name for link_name in available_link_list if link_name in result.link['flowrate'].columns]
-
- result_node_head = pd.DataFrame(columns= available_node_list)
- result_node_demand = pd.DataFrame(columns= available_node_list)
- result_node_pressure = pd.DataFrame(columns= available_node_list)
- result_node_leak = pd.DataFrame(columns= available_node_list)
-
- result_link_status = pd.DataFrame(columns= available_link_list)
- result_link_setting = pd.DataFrame(columns= available_link_list)
- result_link_flowrate = pd.DataFrame(columns= available_link_list)
-
+
+ self._prev_isolated_junctions, self._prev_isolated_links = (
+ sim._get_isolated_junctions_and_links( # noqa: SLF001
+ self._prev_isolated_junctions, self._prev_isolated_links
+ )
+ )
+ self._prev_isolated_junctions = self.isolateReservoirs(
+ self._prev_isolated_junctions
+ )
+ self._prev_isolated_junctions = self.isolateTanks(
+ self._prev_isolated_junctions
+ )
+
+ # available_node_list = [node_name for node_name in self.wn.node_name_list if self.wn.get_node(node_name)._is_isolated == False]
+ # available_link_list = [link_name for link_name in self.wn.link_name_list if self.wn.get_link(link_name)._is_isolated == False]
+
+ available_node_list = [node_name for node_name in self.wn.node_name_list] # noqa: C416
+ available_link_list = [link_name for link_name in self.wn.link_name_list] # noqa: C416
+
+ available_node_list = [
+ node_name
+ for node_name in available_node_list
+ if node_name in result.node['demand'].columns
+ ]
+ available_link_list = [
+ link_name
+ for link_name in available_link_list
+ if link_name in result.link['flowrate'].columns
+ ]
+
+ result_node_head = pd.DataFrame(columns=available_node_list)
+ result_node_demand = pd.DataFrame(columns=available_node_list)
+ result_node_pressure = pd.DataFrame(columns=available_node_list)
+ result_node_leak = pd.DataFrame(columns=available_node_list)
+
+ result_link_status = pd.DataFrame(columns=available_link_list)
+ result_link_setting = pd.DataFrame(columns=available_link_list)
+ result_link_flowrate = pd.DataFrame(columns=available_link_list)
+
first_step = True
- for time_step_iter in range(current_stop_time, end_time+1, time_step):
- #print(result.node['head'].loc[last_valid_time, available_node_list])
- result_node_head.loc[time_step_iter, available_node_list] = result.node['head'].loc[last_valid_time, available_node_list]
- result_node_demand.loc[time_step_iter, available_node_list] = result.node['demand'].loc[last_valid_time, available_node_list]
- result_node_pressure.loc[time_step_iter, available_node_list] = result.node['pressure'].loc[last_valid_time, available_node_list]
- result_node_leak.loc[time_step_iter, available_node_list] = result.node['leak'].loc[last_valid_time, result.node['leak'].columns]
-
- result_link_status.loc[time_step_iter] = result.link['status'].loc[last_valid_time, available_link_list]
- result_link_setting.loc[time_step_iter] = result.link['setting'].loc[last_valid_time, available_link_list]
- result_link_flowrate.loc[time_step_iter] = result.link['flowrate'].loc[last_valid_time, available_link_list]
- #print("---------------")
- #print(result_node_head)
- #print("---------------")
- if first_step==True:
+ for time_step_iter in range(current_stop_time, end_time + 1, time_step):
+ # print(result.node['head'].loc[last_valid_time, available_node_list])
+ result_node_head.loc[time_step_iter, available_node_list] = result.node[
+ 'head'
+ ].loc[last_valid_time, available_node_list]
+ result_node_demand.loc[time_step_iter, available_node_list] = (
+ result.node['demand'].loc[last_valid_time, available_node_list]
+ )
+ result_node_pressure.loc[time_step_iter, available_node_list] = (
+ result.node['pressure'].loc[last_valid_time, available_node_list]
+ )
+ result_node_leak.loc[time_step_iter, available_node_list] = result.node[
+ 'leak'
+ ].loc[last_valid_time, result.node['leak'].columns]
+
+ result_link_status.loc[time_step_iter] = result.link['status'].loc[
+ last_valid_time, available_link_list
+ ]
+ result_link_setting.loc[time_step_iter] = result.link['setting'].loc[
+ last_valid_time, available_link_list
+ ]
+ result_link_flowrate.loc[time_step_iter] = result.link['flowrate'].loc[
+ last_valid_time, available_link_list
+ ]
+ # print("---------------")
+ # print(result_node_head)
+ # print("---------------")
+ if first_step == True: # noqa: E712
first_step = False
else:
- self.updateTankHeadsAndPressure(result_node_demand, result_node_head, result_node_pressure, time_step_iter, time_step)
-
-
+ self.updateTankHeadsAndPressure(
+ result_node_demand,
+ result_node_head,
+ result_node_pressure,
+ time_step_iter,
+ time_step,
+ )
+
rr = SimulationResults()
- result_node_head = result_node_head.sort_index()
- result_node_demand = result_node_demand.sort_index()
+ result_node_head = result_node_head.sort_index()
+ result_node_demand = result_node_demand.sort_index()
result_node_pressure = result_node_pressure.sort_index()
- result_node_status = pd.DataFrame.from_dict(result_node_status).sort_index()
- result_node_setting = pd.DataFrame.from_dict(result_node_setting).sort_index()
- result_node_flowrate = pd.DataFrame.from_dict(result_node_flowrate).sort_index()
-
- rr.node = {'head': result_node_head, 'demand': result_node_demand, 'pressure': result_node_pressure, "leak": result_node_leak}
- rr.link = {'status': result_link_status, 'setting': result_link_setting, 'flowrate': result_link_flowrate}
+ result_node_status = pd.DataFrame.from_dict(result_node_status).sort_index()
+ result_node_setting = pd.DataFrame.from_dict(
+ result_node_setting
+ ).sort_index()
+ result_node_flowrate = pd.DataFrame.from_dict(
+ result_node_flowrate
+ ).sort_index()
+
+ rr.node = {
+ 'head': result_node_head,
+ 'demand': result_node_demand,
+ 'pressure': result_node_pressure,
+ 'leak': result_node_leak,
+ }
+ rr.link = {
+ 'status': result_link_status,
+ 'setting': result_link_setting,
+ 'flowrate': result_link_flowrate,
+ }
return rr, True
-
- def updateTankHeadsAndPressure(self, demand, head, pressure, sim_time, time_step): # Addapted from the latest version of wntrfr. Courtessy of WNTR: https://github.com/USEPA/WNTR
- """
- Parameters
+
+ def updateTankHeadsAndPressure( # noqa: N802
+ self,
+ demand,
+ head,
+ pressure,
+ sim_time,
+ time_step,
+ ): # Adapted from the latest version of wntrfr. Courtesy of WNTR: https://github.com/USEPA/WNTR
+ """Parameters
----------
wn: wntrfr.network.WaterNetworkModel
- """
+
+ """ # noqa: D205
dt = time_step
- #print(sim_time)
- demand_na = demand.loc[sim_time].isna()
- head_na = head.loc[sim_time].isna()
+ # print(sim_time)
+ demand_na = demand.loc[sim_time].isna()
+ head_na = head.loc[sim_time].isna()
pressure_na = pressure.loc[sim_time].isna()
-
+
for tank_name, tank in self.wn.tanks():
-
- #checks if the node is isolated.
- if tank._is_isolated == True:
+ # checks if the node is isolated.
+ if tank._is_isolated == True: # noqa: SLF001, E712
continue
-
- #checks of this node has been isolated at the last valid time. if
- #so, ignores this node, even though it is not isolated at this time
- #print(sim_time)
- #print(demand_na.loc[tank_name])
- #print(demand.loc[sim_time, tank_name])
- if demand_na.loc[tank_name] or head_na.loc[tank_name] or pressure_na.loc[tank_name]:
+
+ # checks if this node has been isolated at the last valid time. if
+ # so, ignores this node, even though it is not isolated at this time
+ # print(sim_time)
+ # print(demand_na.loc[tank_name])
+ # print(demand.loc[sim_time, tank_name])
+ if (
+ demand_na.loc[tank_name]
+ or head_na.loc[tank_name]
+ or pressure_na.loc[tank_name]
+ ):
continue
-
- #With formers checks, this "if statement" must not be needed.
- #Just leave it here for now
-
+
+ # With the former checks, this "if statement" should not be needed.
+ # Just leave it here for now
+
if tank_name in demand.columns:
q_net = demand.loc[sim_time, tank_name]
else:
q_net = 0.0
-
- dV = q_net * dt
-
- previous_head = head.loc[sim_time, tank_name]
- if tank.vol_curve is None:
- delta_h = 4.0 * dV / (math.pi * tank.diameter ** 2)
- new_level = previous_head + delta_h - tank.elevation
+
+ dV = q_net * dt # noqa: N806
+
+ previous_head = head.loc[sim_time, tank_name]
+ if tank.vol_curve is None:
+ delta_h = 4.0 * dV / (math.pi * tank.diameter**2)
+ new_level = previous_head + delta_h - tank.elevation
else:
vcurve = np.array(tank.vol_curve.points)
- level_x = vcurve[:,0]
- volume_y = vcurve[:,1]
-
+ level_x = vcurve[:, 0]
+ volume_y = vcurve[:, 1]
+
previous_level = previous_head - tank.elevation
-
- V0 = np.interp(previous_level,level_x,volume_y)
- V1 = V0 + dV
- new_level = np.interp(V1,volume_y,level_x)
+
+ V0 = np.interp(previous_level, level_x, volume_y) # noqa: N806
+ V1 = V0 + dV # noqa: N806
+ new_level = np.interp(V1, volume_y, level_x)
delta_h = new_level - previous_level
-
- #checks if the new levels and head are within the tanks limit.
- #It ignores the possibility of tank overflow and does not alter the
- #tank demand.
+
+ # checks if the new level and head are within the tank's limits.
+ # It ignores the possibility of tank overflow and does not alter the
+ # tank demand.
if new_level < tank.min_level:
new_level = tank.min_level
- new_head = tank.elevation + tank.min_level
+ new_head = tank.elevation + tank.min_level
elif new_level > tank.max_level:
new_level = tank.max_level
- new_head = tank.elevation + tank.max_level
-
+ new_head = tank.elevation + tank.max_level
+
new_head = previous_head + delta_h
- head.loc[sim_time, tank_name ] = new_head
+ head.loc[sim_time, tank_name] = new_head
pressure.loc[sim_time, tank_name] = new_head - tank.elevation
-
-
- def approximateNewResult(self, rr, current_stop_time, end_time, little_time_step):
- time_step = min(self.wn.options.time.hydraulic_timestep, self.wn.options.time.report_timestep)
+
+ def approximateNewResult( # noqa: N802, D102
+ self,
+ rr,
+ current_stop_time,
+ end_time,
+ little_time_step,
+ ):
+ time_step = min(
+ self.wn.options.time.hydraulic_timestep,
+ self.wn.options.time.report_timestep,
+ )
current_stop_time = int(current_stop_time)
- end_time = int(end_time)
- time_step = int(time_step)
- not_isolated_tanks = [tank_name for tank_name, tank in self.wn.tanks() if tank._is_isolated == False]
- #isolated_tanks = [tank_name for tank_name in self.tanks_name_list if tank_name in self._prev_isolated_junctions]
- #isolated_nodes = [node_name for node_name in self.node_name_list if node_name in self._prev_isolated_junctions]
+ end_time = int(end_time)
+ time_step = int(time_step)
+ not_isolated_tanks = [
+ tank_name
+ for tank_name, tank in self.wn.tanks()
+ if tank._is_isolated == False # noqa: SLF001, E712
+ ]
+ # isolated_tanks = [tank_name for tank_name in self.tanks_name_list if tank_name in self._prev_isolated_junctions]
+ # isolated_nodes = [node_name for node_name in self.node_name_list if node_name in self._prev_isolated_junctions]
tank_heads = rr.node['head'][not_isolated_tanks]
- #tank_demands=rr.node['demand'][self.wn.tank_name_list]
- if little_time_step==0:
- tank_elevation_list = [self.wn.get_node(e).elevation for e in not_isolated_tanks]
- tank_min_level_list = [self.wn.get_node(l).min_level for l in not_isolated_tanks]
- tank_max_level_list = [self.wn.get_node(l).max_level for l in not_isolated_tanks]
-
- tanks_min_heads = [tank_elevation_list[i]+tank_min_level_list[i] for i in range(len(tank_elevation_list))]
- tanks_max_heads = [tank_elevation_list[i]+tank_max_level_list[i] for i in range(len(tank_elevation_list))]
-
+ # tank_demands=rr.node['demand'][self.wn.tank_name_list]
+ if little_time_step == 0:
+ tank_elevation_list = [
+ self.wn.get_node(e).elevation for e in not_isolated_tanks
+ ]
+ tank_min_level_list = [
+ self.wn.get_node(l).min_level
+ for l in not_isolated_tanks # noqa: E741
+ ]
+ tank_max_level_list = [
+ self.wn.get_node(l).max_level
+ for l in not_isolated_tanks # noqa: E741
+ ]
+
+ tanks_min_heads = [
+ tank_elevation_list[i] + tank_min_level_list[i]
+ for i in range(len(tank_elevation_list))
+ ]
+ tanks_max_heads = [
+ tank_elevation_list[i] + tank_max_level_list[i]
+ for i in range(len(tank_elevation_list))
+ ]
+
tank_heads_diff = rr.node['demand'][not_isolated_tanks]
tank_heads_diff = tank_heads_diff.iloc[-1]
-
+
tanks_min_heads = pd.Series(tanks_min_heads, not_isolated_tanks)
tanks_max_heads = pd.Series(tanks_max_heads, not_isolated_tanks)
-
- print(current_stop_time)
- print(time_step)
- print(end_time)
- for time_step_iter in range(current_stop_time+time_step, end_time+1, time_step):
- rr.node['head'].loc[time_step_iter] = rr.node['head'].loc[current_stop_time]
- rr.node['demand'].loc[time_step_iter] = rr.node['demand'].loc[current_stop_time]
- rr.node['pressure'].loc[time_step_iter] = rr.node['pressure'].loc[current_stop_time]
- rr.link['status'].loc[time_step_iter] = rr.link['status'].loc[current_stop_time]
- rr.link['setting'].loc[time_step_iter] = rr.link['setting'].loc[current_stop_time]
- rr.link['flowrate'].loc[time_step_iter] = rr.link['flowrate'].loc[current_stop_time]
-
- new_tank_heads = tank_heads.loc[current_stop_time]+(tank_heads_diff * (time_step_iter-current_stop_time) )
-
- under_min_heads = new_tank_heads[new_tank_heads<tanks_min_heads]
- over_max_heads = new_tank_heads[new_tank_heads>tanks_max_heads]
-
- new_tank_heads.loc[under_min_heads.index] = tanks_min_heads.loc[under_min_heads.index]
- new_tank_heads.loc[over_max_heads.index] = tanks_min_heads.loc[over_max_heads.index]
-
- rr.node['head'].loc[time_step_iter, new_tank_heads.index] = new_tank_heads.to_list()
-
- #Future updates: updating tank levels based on Newer version of WNTR for tansks with curves
+
+ print(current_stop_time) # noqa: T201
+ print(time_step) # noqa: T201
+ print(end_time) # noqa: T201
+ for time_step_iter in range(
+ current_stop_time + time_step, end_time + 1, time_step
+ ):
+ rr.node['head'].loc[time_step_iter] = rr.node['head'].loc[
+ current_stop_time
+ ]
+ rr.node['demand'].loc[time_step_iter] = rr.node['demand'].loc[
+ current_stop_time
+ ]
+ rr.node['pressure'].loc[time_step_iter] = rr.node['pressure'].loc[
+ current_stop_time
+ ]
+ rr.link['status'].loc[time_step_iter] = rr.link['status'].loc[
+ current_stop_time
+ ]
+ rr.link['setting'].loc[time_step_iter] = rr.link['setting'].loc[
+ current_stop_time
+ ]
+ rr.link['flowrate'].loc[time_step_iter] = rr.link['flowrate'].loc[
+ current_stop_time
+ ]
+
+ new_tank_heads = tank_heads.loc[current_stop_time] + (
+ tank_heads_diff * (time_step_iter - current_stop_time)
+ )
+
+ under_min_heads = new_tank_heads[new_tank_heads < tanks_min_heads]
+ over_max_heads = new_tank_heads[new_tank_heads > tanks_max_heads]
+
+ new_tank_heads.loc[under_min_heads.index] = tanks_min_heads.loc[
+ under_min_heads.index
+ ]
+ new_tank_heads.loc[over_max_heads.index] = tanks_min_heads.loc[
+ over_max_heads.index
+ ]
+
+ rr.node['head'].loc[time_step_iter, new_tank_heads.index] = (
+ new_tank_heads.to_list()
+ )
+
+ # Future updates: updating tank levels based on newer versions of WNTR for tanks with curves
else:
- tank_elevation_list = [self.wn.get_node(e).elevation for e in not_isolated_tanks]
- tank_min_level_list = [self.wn.get_node(l).min_level for l in not_isolated_tanks]
- tank_max_level_list = [self.wn.get_node(l).max_level for l in not_isolated_tanks]
-
- tanks_min_heads = [tank_elevation_list[i]+tank_min_level_list[i] for i in range(len(tank_elevation_list))]
- tanks_max_heads = [tank_elevation_list[i]+tank_max_level_list[i] for i in range(len(tank_elevation_list))]
-
- tank_heads_diff = tank_heads.loc[current_stop_time + little_time_step]-tank_heads.loc[current_stop_time]
-
+ tank_elevation_list = [
+ self.wn.get_node(e).elevation for e in not_isolated_tanks
+ ]
+ tank_min_level_list = [
+ self.wn.get_node(l).min_level
+ for l in not_isolated_tanks # noqa: E741
+ ]
+ tank_max_level_list = [
+ self.wn.get_node(l).max_level
+ for l in not_isolated_tanks # noqa: E741
+ ]
+
+ tanks_min_heads = [
+ tank_elevation_list[i] + tank_min_level_list[i]
+ for i in range(len(tank_elevation_list))
+ ]
+ tanks_max_heads = [
+ tank_elevation_list[i] + tank_max_level_list[i]
+ for i in range(len(tank_elevation_list))
+ ]
+
+ tank_heads_diff = (
+ tank_heads.loc[current_stop_time + little_time_step]
+ - tank_heads.loc[current_stop_time]
+ )
+
tanks_min_heads = pd.Series(tanks_min_heads, not_isolated_tanks)
tanks_max_heads = pd.Series(tanks_max_heads, not_isolated_tanks)
- #print(repr(current_stop_time)+' '+repr(time_step)+' '+repr(end_time)+' '+repr(time_step)+' ')
- for time_step_iter in range(int(current_stop_time+time_step), int(end_time+1), int(time_step)):
- #print(time_step_iter)
- new_tank_heads = tank_heads.loc[current_stop_time]+(tank_heads_diff * (time_step_iter-current_stop_time) / little_time_step)
-
- under_min_heads = new_tank_heads[new_tank_heads<tanks_min_heads]
- over_max_heads = new_tank_heads[new_tank_heads>tanks_max_heads]
-
- new_tank_heads.loc[under_min_heads.index] = tanks_min_heads.loc[under_min_heads.index]
- new_tank_heads.loc[over_max_heads.index] = tanks_min_heads.loc[over_max_heads.index]
-
- rr.node['head'].loc[time_step_iter] = rr.node['head'].loc[current_stop_time]
- rr.node['head'].loc[time_step_iter, new_tank_heads.columns] = new_tank_heads
-
- rr.node['demand'].loc[time_step_iter] = rr.node['demand'].loc[current_stop_time]
- rr.node['pressure'].loc[time_step_iter] = rr.node['pressure'].loc[current_stop_time]
- rr.link['status'].loc[time_step_iter] = rr.link['status'].loc[current_stop_time]
- rr.link['setting'].loc[time_step_iter] = rr.link['setting'].loc[current_stop_time]
-
- rr.node['head']=rr.node['head'].drop(current_stop_time+little_time_step)
- rr.node['demand']=rr.node['demand'].drop(current_stop_time+little_time_step)
- rr.node['pressure']=rr.node['pressure'].drop(current_stop_time+little_time_step)
- rr.link['status']=rr.link['status'].drop(current_stop_time+little_time_step)
- rr.link['setting']=rr.link['setting'].drop(current_stop_time+little_time_step)
-
- return rr
\ No newline at end of file
+ # print(repr(current_stop_time)+' '+repr(time_step)+' '+repr(end_time)+' '+repr(time_step)+' ')
+ for time_step_iter in range(
+ int(current_stop_time + time_step), int(end_time + 1), int(time_step)
+ ):
+ # print(time_step_iter)
+ new_tank_heads = tank_heads.loc[current_stop_time] + (
+ tank_heads_diff
+ * (time_step_iter - current_stop_time)
+ / little_time_step
+ )
+
+ under_min_heads = new_tank_heads[new_tank_heads < tanks_min_heads]
+ over_max_heads = new_tank_heads[new_tank_heads > tanks_max_heads]
+
+ new_tank_heads.loc[under_min_heads.index] = tanks_min_heads.loc[
+ under_min_heads.index
+ ]
+ new_tank_heads.loc[over_max_heads.index] = tanks_min_heads.loc[
+ over_max_heads.index
+ ]
+
+ rr.node['head'].loc[time_step_iter] = rr.node['head'].loc[
+ current_stop_time
+ ]
+ rr.node['head'].loc[time_step_iter, new_tank_heads.columns] = (
+ new_tank_heads
+ )
+
+ rr.node['demand'].loc[time_step_iter] = rr.node['demand'].loc[
+ current_stop_time
+ ]
+ rr.node['pressure'].loc[time_step_iter] = rr.node['pressure'].loc[
+ current_stop_time
+ ]
+ rr.link['status'].loc[time_step_iter] = rr.link['status'].loc[
+ current_stop_time
+ ]
+ rr.link['setting'].loc[time_step_iter] = rr.link['setting'].loc[
+ current_stop_time
+ ]
+
+ rr.node['head'] = rr.node['head'].drop(
+ current_stop_time + little_time_step
+ )
+ rr.node['demand'] = rr.node['demand'].drop(
+ current_stop_time + little_time_step
+ )
+ rr.node['pressure'] = rr.node['pressure'].drop(
+ current_stop_time + little_time_step
+ )
+ rr.link['status'] = rr.link['status'].drop(
+ current_stop_time + little_time_step
+ )
+ rr.link['setting'] = rr.link['setting'].drop(
+ current_stop_time + little_time_step
+ )
+
+ return rr
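
Note on the tank update above: for a cylindrical tank with no volume curve, updateTankHeadsAndPressure applies a per-step mass balance, delta_h = 4 * q_net * dt / (pi * diameter**2), and keeps the resulting level within the tank's min/max limits. The following is a minimal standalone sketch of that calculation, simplified from the patched method; SimpleTank and updated_tank_head are hypothetical names for illustration only and are not part of this patch or of wntrfr.

import math
from dataclasses import dataclass


@dataclass
class SimpleTank:
    # Hypothetical stand-in for a cylindrical wntrfr tank without a volume curve.
    elevation: float  # tank bottom elevation, m
    diameter: float   # m
    min_level: float  # m
    max_level: float  # m


def updated_tank_head(tank, previous_head, q_net, dt):
    # Mass balance: dV = q_net * dt, so delta_h = dV / (pi r^2) = 4 dV / (pi D^2).
    delta_h = 4.0 * q_net * dt / (math.pi * tank.diameter**2)
    new_level = previous_head + delta_h - tank.elevation
    # Clamp the level to the tank's physical limits (overflow is ignored, as above).
    new_level = max(tank.min_level, min(tank.max_level, new_level))
    return tank.elevation + new_level


# Example: a 10 m diameter tank gaining 0.05 m^3/s over a 900 s step rises about 0.57 m.
tank = SimpleTank(elevation=100.0, diameter=10.0, min_level=0.5, max_level=6.0)
print(updated_tank_head(tank, previous_head=103.0, q_net=0.05, dt=900.0))  # ~103.57
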
diff --git a/modules/systemPerformance/REWET/REWET/StochasticModel.py b/modules/systemPerformance/REWET/REWET/StochasticModel.py
index fbb81da14..3dcd10522 100644
--- a/modules/systemPerformance/REWET/REWET/StochasticModel.py
+++ b/modules/systemPerformance/REWET/REWET/StochasticModel.py
@@ -1,64 +1,77 @@
-# -*- coding: utf-8 -*-
-"""
-Created on Wed Apr 8 20:19:10 2020
+"""Created on Wed Apr 8 20:19:10 2020
@author: snaeimi
-"""
-import os
-import pickle
-import wntrfr
-import Damage
-import pandas as pd
+""" # noqa: CPY001, D400, N999
+
import logging
-from timeline import Timeline
+import os
+import pickle # noqa: S403
import sys
-#from wntrplus import WNTRPlus
-from wntrfr.utils.ordered_set import OrderedSet
-from Sim.Simulation import Hydraulic_Simulation
+
+import Damage
import EnhancedWNTR.network.model
+import pandas as pd
+import wntrfr
from EnhancedWNTR.sim.results import SimulationResults
+from Sim.Simulation import Hydraulic_Simulation
+from timeline import Timeline
from wntrfr.network.model import LinkStatus
+# from wntrplus import WNTRPlus
+from wntrfr.utils.ordered_set import OrderedSet
logger = logging.getLogger(__name__)
-class StochasticModel():
- def __init__(self, water_network, damage_model , registry, simulation_end_time, restoration, mode='PDD', i_restoration=True):
- if type(water_network) != wntrfr.network.model.WaterNetworkModel and type(water_network) != EnhancedWNTR.network.model.WaterNetworkModel:
- raise ValueError('Water_network model is not legitimate water Network Model')
- if type(damage_model) != Damage.Damage:
- raise ValueError('damage_model is not a ligitimate Damage Model')
- self.wn = water_network
- self.damage_model = damage_model
+
+class StochasticModel: # noqa: D101
+ def __init__(
+ self,
+ water_network,
+ damage_model,
+ registry,
+ simulation_end_time,
+ restoration,
+ mode='PDD',
+ i_restoration=True, # noqa: FBT002
+ ):
+ if (
+ type(water_network) != wntrfr.network.model.WaterNetworkModel # noqa: E721
+ and type(water_network) != EnhancedWNTR.network.model.WaterNetworkModel # noqa: E721
+ ):
+ raise ValueError( # noqa: TRY003
+ 'Water_network model is not a legitimate Water Network Model' # noqa: EM101
+ )
+ if type(damage_model) != Damage.Damage: # noqa: E721
+ raise ValueError('damage_model is not a legitimate Damage Model') # noqa: EM101, TRY003
+ self.wn = water_network
+ self.damage_model = damage_model
self._simulation_time = simulation_end_time
- self.timeline = Timeline(simulation_end_time, restoration, registry)
- damage_distict_time = self.damage_model.get_damage_distinct_time()
+ self.timeline = Timeline(simulation_end_time, restoration, registry)
+ damage_distict_time = self.damage_model.get_damage_distinct_time()
self.timeline.addEventTime(damage_distict_time)
self.timeline.checkAndAmendTime()
-
- self.simulation_mode=None
- if mode=='PDD' or mode=='DD':
- self.simulation_mode=mode
+ self.simulation_mode = None
+ if mode == 'PDD' or mode == 'DD': # noqa: PLR1714
+ self.simulation_mode = mode
else:
- self.simulation_mode='PDD'
- self._linear_result = registry.result
- self.registry = registry
- #self.wp = WNTRPlus(restoration._registry)
- self.restoration = restoration
- self._min_correction_time = 900
- self.simulation_time = 0
- self.restoration_time = 0
- self.iRestoration = i_restoration
+ self.simulation_mode = 'PDD'
+ self._linear_result = registry.result
+ self.registry = registry
+ # self.wp = WNTRPlus(restoration._registry)
+ self.restoration = restoration
+ self._min_correction_time = 900
+ self.simulation_time = 0
+ self.restoration_time = 0
+ self.iRestoration = i_restoration
self._prev_isolated_junctions = OrderedSet()
- self._prev_isolated_links = OrderedSet()
- self.first_leak_flag = True
+ self._prev_isolated_links = OrderedSet()
+ self.first_leak_flag = True
- def runLinearScenario(self, damage, settings, worker_rank=None):
- """
- Runs a simple linear analysis of water damage scenario
+ def runLinearScenario(self, damage, settings, worker_rank=None): # noqa: C901, N802
+ """Runs a simple linear analysis of water damage scenario
Parameters
-
+
Water Network object (WN) shall not be altered in any object except restoration
----------
damage : Damage Object
@@ -67,397 +80,593 @@ def runLinearScenario(self, damage, settings, worker_rank=None):
-------
Result.
- """
-
- while self.timeline.iContinue():
+ """ # noqa: D205, D400, D401
+ while self.timeline.iContinue(): # noqa: PLR1702
sys.stdout.flush()
current_stop_time = self.timeline.getCurrentStopTime()
- print('--------------------------------------')
- print('At stop Time: ' + repr(current_stop_time/3600))
-# =============================================================================
- #Restoration event Block
- if self.timeline.iCurenttimeRestorationEvent() and self.iRestoration==True:
+ print('--------------------------------------') # noqa: T201
+ print('At stop Time: ' + repr(current_stop_time / 3600)) # noqa: T201
+ # =============================================================================
+ # Restoration event Block
+ if (
+ self.timeline.iCurenttimeRestorationEvent()
+ and self.iRestoration == True # noqa: E712
+ ):
logger.debug('\t Restoration Event ')
- event_time_list = self.restoration.perform_action(self.wn, current_stop_time)
+ event_time_list = self.restoration.perform_action(
+ self.wn, current_stop_time
+ )
self.timeline.addEventTime(event_time_list, event_type='rst')
-# =============================================================================
-# Damage (earthquake) event block
+ # =============================================================================
+ # Damage (earthquake) event block
if self.timeline.iCurrentTimeDamageEvent():
self.ttemp = pd.DataFrame()
self.registry.if_first_event_occured = True
logger.debug('\t DAMAGE EVENT')
- #pipe_list = self.restoration.getPipeListForHydraulicSignificant()
- if len(self.restoration.getHydSigPipeList() ) > 0:
+ # pipe_list = self.restoration.getPipeListForHydraulicSignificant()
+ if len(self.restoration.getHydSigPipeList()) > 0:
last_demand_node_pressure = None
pipe_list = damage.getPipeDamageListAt(current_stop_time)
for pipe_name in pipe_list:
- if type(last_demand_node_pressure) == type(None):
- time_index = self.registry.result.node["pressure"].index
- time_index = list(set(time_index) - set(self.registry.result.maximum_trial_time))
+ if last_demand_node_pressure is None:
+ time_index = self.registry.result.node['pressure'].index
+ time_index = list(
+ set(time_index)
+ - set(self.registry.result.maximum_trial_time)
+ )
time_index.sort()
if len(time_index) > 0:
time_index = time_index[-1]
else:
- self.registry.hydraulic_significance.loc[pipe_name] = -1000
+ self.registry.hydraulic_significance.loc[
+ pipe_name
+ ] = -1000
continue
time_index = current_stop_time
demand_node_list = self.registry.demand_node_name_list
- demand_node_list = set(demand_node_list).intersection(self.registry.result.node["pressure"].columns)
- last_demand_node_pressure = self.registry.result.node["pressure"].loc[time_index, list(demand_node_list)]
- last_demand_node_pressure.loc[last_demand_node_pressure[last_demand_node_pressure < 0].index] = 0
+ demand_node_list = set(demand_node_list).intersection(
+ self.registry.result.node['pressure'].columns
+ )
+ last_demand_node_pressure = self.registry.result.node[
+ 'pressure'
+ ].loc[time_index, list(demand_node_list)]
+ last_demand_node_pressure.loc[
+ last_demand_node_pressure[
+ last_demand_node_pressure < 0
+ ].index
+ ] = 0
pipe = self.wn.get_link(pipe_name)
initial_pipe_status = pipe.initial_status
if initial_pipe_status == LinkStatus.Closed:
continue
-
+
pipe.initial_status = LinkStatus.Closed
- hyd_sim = Hydraulic_Simulation(self.wn, settings, current_stop_time, worker_rank, self._prev_isolated_junctions, self._prev_isolated_links)
+ hyd_sim = Hydraulic_Simulation(
+ self.wn,
+ settings,
+ current_stop_time,
+ worker_rank,
+ self._prev_isolated_junctions,
+ self._prev_isolated_links,
+ )
self.hyd_temp = hyd_sim
- duration = self.wn.options.time.duration
- report_time_step = self.wn.options.time.report_timestep
- try: # Run with modified EPANET V2.2
- print("Performing method 1")
- rr, i_run_successful = hyd_sim.performSimulation(current_stop_time, True)
+ duration = self.wn.options.time.duration
+ report_time_step = self.wn.options.time.report_timestep
+ try: # Run with modified EPANET V2.2
+ print('Performing method 1') # noqa: T201
+ rr, i_run_successful = hyd_sim.performSimulation(
+ current_stop_time,
+ True, # noqa: FBT003
+ )
if current_stop_time in rr.maximum_trial_time:
pass
- #self.registry.hydraulic_significance.loc[pipe_name] = -20000
- #pipe.initial_status = initial_pipe_status
- #self._prev_isolated_junctions = hyd_sim._prev_isolated_junctions
- #self._prev_isolated_links = hyd_sim._prev_isolated_links
- #continue
- demand_node_list = self.registry.demand_node_name_list
- demand_node_list = set(demand_node_list).intersection(rr.node["pressure"].columns)
- new_node_pressure = rr.node["pressure"].loc[current_stop_time, list(demand_node_list)]
- new_node_pressure.loc[new_node_pressure[new_node_pressure < 0].index] = 0
-
- hydraulic_impact = (last_demand_node_pressure - new_node_pressure).mean()
- self.registry.hydraulic_significance.loc[pipe_name] = hydraulic_impact
-
- except Exception as epa_err_1:
+ # self.registry.hydraulic_significance.loc[pipe_name] = -20000
+ # pipe.initial_status = initial_pipe_status
+ # self._prev_isolated_junctions = hyd_sim._prev_isolated_junctions
+ # self._prev_isolated_links = hyd_sim._prev_isolated_links
+ # continue
+ demand_node_list = self.registry.demand_node_name_list
+ demand_node_list = set(demand_node_list).intersection(
+ rr.node['pressure'].columns
+ )
+ new_node_pressure = rr.node['pressure'].loc[
+ current_stop_time, list(demand_node_list)
+ ]
+ new_node_pressure.loc[
+ new_node_pressure[new_node_pressure < 0].index
+ ] = 0
+
+ hydraulic_impact = (
+ last_demand_node_pressure - new_node_pressure
+ ).mean()
+ self.registry.hydraulic_significance.loc[pipe_name] = (
+ hydraulic_impact
+ )
+
+ except Exception as epa_err_1: # noqa: TRY302
raise
if epa_err_1.args[0] == 'EPANET Error 110':
- print("Method 1 failed. Performing method 2")
- self.wn.options.time.duration = duration
- self.wn.options.time.report_timestep = report_time_step
- self.registry.hydraulic_significance.loc[pipe_name] = -1
+ print('Method 1 failed. Performing method 2') # noqa: T201
+ self.wn.options.time.duration = duration
+ self.wn.options.time.report_timestep = (
+ report_time_step
+ )
+ self.registry.hydraulic_significance.loc[
+ pipe_name
+ ] = -1
pipe.initial_status = initial_pipe_status
- self._prev_isolated_junctions = hyd_sim._prev_isolated_junctions
- self._prev_isolated_links = hyd_sim._prev_isolated_links
- self.wn.options.time.duration = duration
+ self._prev_isolated_junctions = (
+ hyd_sim._prev_isolated_junctions # noqa: SLF001
+ )
+ self._prev_isolated_links = hyd_sim._prev_isolated_links # noqa: SLF001
+ self.wn.options.time.duration = duration
self.wn.options.time.report_timestep = report_time_step
damage.applyPipeDamages(self.wn, current_stop_time)
damage.applyNodalDamage(self.wn, current_stop_time)
damage.applyPumpDamages(self.wn, current_stop_time)
damage.applyTankDamages(self.wn, current_stop_time)
-
- if self.iRestoration == True:
- event_time_list = self.restoration.initialize(self.wn, current_stop_time) # starts restoration
+
+ if self.iRestoration == True: # noqa: E712
+ event_time_list = self.restoration.initialize(
+ self.wn, current_stop_time
+ ) # starts restoration
self.timeline.addEventTime(event_time_list, event_type='rst')
-
-# =============================================================================
-# This is for updatng the pipe damage log
- if settings["record_damage_table_logs"] == True:
- self.restoration._registry.updatePipeDamageTableTimeSeries(current_stop_time)
- self.restoration._registry.updateNodeDamageTableTimeSeries(current_stop_time)
-# =============================================================================
-# runing the model
+
+ # =============================================================================
+ # This is for updating the pipe damage log
+ if settings['record_damage_table_logs'] == True: # noqa: E712
+ self.restoration._registry.updatePipeDamageTableTimeSeries( # noqa: SLF001
+ current_stop_time
+ )
+ self.restoration._registry.updateNodeDamageTableTimeSeries( # noqa: SLF001
+ current_stop_time
+ )
+ # =============================================================================
+ # running the model
next_event_time = self.timeline.getNextTime()
- logger.debug('next event time is: '+ repr(next_event_time))
+ logger.debug('next event time is: ' + repr(next_event_time)) # noqa: G003
self.wn.implicitLeakToExplicitReservoir(self.registry)
-
- print('***** Running hydraulic *****')
-
- if type(worker_rank) != str:
+
+ print('***** Running hydraulic *****') # noqa: T201
+
+ if type(worker_rank) != str: # noqa: E721
worker_rank = str(worker_rank)
-
- hyd_sim = Hydraulic_Simulation(self.wn, settings, current_stop_time, worker_rank, self._prev_isolated_junctions, self._prev_isolated_links)
+
+ hyd_sim = Hydraulic_Simulation(
+ self.wn,
+ settings,
+ current_stop_time,
+ worker_rank,
+ self._prev_isolated_junctions,
+ self._prev_isolated_links,
+ )
self.hyd_temp = hyd_sim
- duration = self.wn.options.time.duration
- report_time_step = self.wn.options.time.report_timestep
- try: # Run with modified EPANET V2.2
- print("Performing method 1")
- rr, i_run_successful = hyd_sim.performSimulation(next_event_time, True)
+ duration = self.wn.options.time.duration
+ report_time_step = self.wn.options.time.report_timestep
+ try: # Run with modified EPANET V2.2
+ print('Performing method 1') # noqa: T201
+ rr, i_run_successful = hyd_sim.performSimulation(
+ next_event_time,
+ True, # noqa: FBT003
+ )
except Exception as epa_err_1:
if epa_err_1.args[0] == 'EPANET Error 110':
- print("Method 1 failed. Performing method 2")
- try: # Remove Non-Demand Node by Python-Side iterative algorythm with closing
- #self.wn.options.time.duration = duration
- #self.wn.options.time.report_timestep = report_time_step
- #hyd_sim.removeNonDemandNegativeNodeByPythonClose(1000)
- #rr, i_run_successful = hyd_sim.performSimulation(next_event_time, False)
- #hyd_sim.rollBackPipeClose()
+ print('Method 1 failed. Performing method 2') # noqa: T201
+ try: # Remove Non-Demand Node by Python-Side iterative algorithm with closing
+ # self.wn.options.time.duration = duration
+ # self.wn.options.time.report_timestep = report_time_step
+ # hyd_sim.removeNonDemandNegativeNodeByPythonClose(1000)
+ # rr, i_run_successful = hyd_sim.performSimulation(next_event_time, False)
+ # hyd_sim.rollBackPipeClose()
raise
except Exception as epa_err_2:
- if True: #epa_err_2.args[0] == 'EPANET Error 110':
- try: # Extend result from teh reult at the begining of teh time step with modified EPANET V2.2
- #print("Method 2 failed. Performing method 3")
- self.wn.options.time.duration = duration
- self.wn.options.time.report_timestep = report_time_step
- #hyd_sim.rollBackPipeClose()
- rr, i_run_successful = hyd_sim.estimateRun(next_event_time, True)
+ if True: # epa_err_2.args[0] == 'EPANET Error 110':
+ try: # Extend result from the result at the beginning of the time step with modified EPANET V2.2
+ # print("Method 2 failed. Performing method 3")
+ self.wn.options.time.duration = duration
+ self.wn.options.time.report_timestep = (
+ report_time_step
+ )
+ # hyd_sim.rollBackPipeClose()
+ rr, i_run_successful = hyd_sim.estimateRun(
+ next_event_time,
+ True, # noqa: FBT003
+ )
except Exception as epa_err_3:
if epa_err_3.args[0] == 'EPANET Error 110':
- print("Method 3 failed. Performing method 4")
- try: # Extend result from teh reult at the begining of teh time step with modified EPANET V2.2
- self.wn.options.time.duration = duration
- self.wn.options.time.report_timestep = report_time_step
- rr, i_run_successful = hyd_sim.performSimulation(next_event_time, False)
+ print('Method 3 failed. Performing method 4') # noqa: T201
+ try: # Extend result from the result at the beginning of the time step with modified EPANET V2.2
+ self.wn.options.time.duration = duration
+ self.wn.options.time.report_timestep = (
+ report_time_step
+ )
+ rr, i_run_successful = (
+ hyd_sim.performSimulation(
+ next_event_time,
+ False, # noqa: FBT003
+ )
+ )
except Exception as epa_err_4:
if epa_err_4.args[0] == 'EPANET Error 110':
try:
- self.wn.options.time.duration = duration
+ self.wn.options.time.duration = (
+ duration
+ )
self.wn.options.time.report_timestep = report_time_step
- print("Method 4 failed. Performing method 5")
- # Extend result from teh reult at the begining of teh time step with modified EPANET V2.2
- rr, i_run_successful = hyd_sim.estimateRun(next_event_time, False)
+ print( # noqa: T201
+ 'Method 4 failed. Performing method 5'
+ )
+ # Extend result from the result at the beginning of the time step with modified EPANET V2.2
+ rr, i_run_successful = (
+ hyd_sim.estimateRun(
+ next_event_time,
+ False, # noqa: FBT003
+ )
+ )
except Exception as epa_err_5:
- if epa_err_5.args[0] == 'EPANET Error 110':
+ if (
+ epa_err_5.args[0]
+ == 'EPANET Error 110'
+ ):
try:
- print("Method 5 failed. Performing method 6")
- self.wn.options.time.duration = duration
+ print( # noqa: T201
+ 'Method 5 failed. Performing method 6'
+ )
+ self.wn.options.time.duration = duration
self.wn.options.time.report_timestep = report_time_step
- rr, i_run_successful = hyd_sim.estimateWithoutRun(self._linear_result, next_event_time)
- except Exception as epa_err_6:
- print("ERROR in rank="+repr(worker_rank)+" and time="+repr(current_stop_time))
- raise epa_err_6
+ rr, i_run_successful = (
+ hyd_sim.estimateWithoutRun(
+ self._linear_result,
+ next_event_time,
+ )
+ )
+ except Exception as epa_err_6:
+ print( # noqa: T201
+ 'ERROR in rank='
+ + repr(worker_rank)
+ + ' and time='
+ + repr(current_stop_time)
+ )
+ raise epa_err_6 # noqa: TRY201
else:
- raise epa_err_5
+ raise epa_err_5 # noqa: TRY201
else:
- raise epa_err_4
+ raise epa_err_4 # noqa: TRY201
else:
- raise epa_err_3
+ raise epa_err_3 # noqa: TRY201
else:
- raise epa_err_2
+ raise epa_err_2 # noqa: TRY201
else:
- raise epa_err_1
- self._prev_isolated_junctions = hyd_sim._prev_isolated_junctions
- self._prev_isolated_links = hyd_sim._prev_isolated_links
- print('***** Finish Running at time '+ repr(current_stop_time)+' '+repr(i_run_successful)+' *****')
-
- if i_run_successful==False:
+ raise epa_err_1 # noqa: TRY201
+ self._prev_isolated_junctions = hyd_sim._prev_isolated_junctions # noqa: SLF001
+ self._prev_isolated_links = hyd_sim._prev_isolated_links # noqa: SLF001
+ print( # noqa: T201
+ '***** Finish Running at time '
+ + repr(current_stop_time)
+ + ' '
+ + repr(i_run_successful)
+ + ' *****'
+ )
+
+ if i_run_successful == False: # noqa: E712
continue
- self.wn.updateWaterNetworkModelWithResult(rr, self.restoration._registry)
-
- self.KeepLinearResult(rr, self._prev_isolated_junctions, node_attributes=['pressure','head','demand', 'leak'], link_attributes=['status', 'setting', 'flowrate'])
- if self.registry.settings["limit_result_file_size"] > 0:
+ self.wn.updateWaterNetworkModelWithResult(rr, self.restoration._registry) # noqa: SLF001
+
+ self.KeepLinearResult(
+ rr,
+ self._prev_isolated_junctions,
+ node_attributes=['pressure', 'head', 'demand', 'leak'],
+ link_attributes=['status', 'setting', 'flowrate'],
+ )
+ if self.registry.settings['limit_result_file_size'] > 0:
self.dumpPartOfResult()
- #self.wp.unlinkBreackage(self.registry)
+ # self.wp.unlinkBreackage(self.registry)
self.wn.resetExplicitLeak()
-
-
-# =============================================================================
- #self.resoration._registry.updateTankTimeSeries(self.wn, current_stop_time)
- self.restoration._registry.updateRestorationIncomeWaterTimeSeries(self.wn, current_stop_time)
-
+ # =============================================================================
+ # self.resoration._registry.updateTankTimeSeries(self.wn, current_stop_time)
+ self.restoration._registry.updateRestorationIncomeWaterTimeSeries( # noqa: SLF001
+ self.wn, current_stop_time
+ )
+
return self._linear_result
-
- def KeepLinearResult(self, result, isolated_nodes, node_attributes=None, link_attributes=None, iCheck=False):#, iNeedTimeCorrection=False, start_time=None):
-
- if self.registry.if_first_event_occured == False:
- self.registry.pre_event_demand_met = self.registry.pre_event_demand_met.append(result.node['demand'])
-
- #if node_attributes == None:
- #node_attributes = ['pressure','head','demand','quality']
- #if link_attributes == None:
- #link_attributes = ['linkquality', 'flowrate', 'headloss', 'velocity', 'status', 'setting', 'frictionfact', 'rxnrate']
-
+
+ def KeepLinearResult( # noqa: C901, N802, D102
+ self,
+ result,
+ isolated_nodes,
+ node_attributes=None,
+ link_attributes=None,
+ iCheck=False, # noqa: FBT002, ARG002, N803
+ ): # , iNeedTimeCorrection=False, start_time=None):
+ if self.registry.if_first_event_occured == False: # noqa: E712
+ self.registry.pre_event_demand_met = (
+ self.registry.pre_event_demand_met.append(result.node['demand'])
+ )
+
+ # if node_attributes == None:
+ # node_attributes = ['pressure','head','demand','quality']
+ # if link_attributes == None:
+ # link_attributes = ['linkquality', 'flowrate', 'headloss', 'velocity', 'status', 'setting', 'frictionfact', 'rxnrate']
+
just_initialized_flag = False
- if self._linear_result == None:
+ if self._linear_result == None: # noqa: E711
just_initialized_flag = True
- self._linear_result = result
-
- self.restoration._registry.result = self._linear_result
- node_result_type_elimination_list = set( result.node.keys() ) - set(node_attributes)
- link_result_type_elimination_list = set( result.link.keys() ) - set(link_attributes)
-
+ self._linear_result = result
+
+ self.restoration._registry.result = self._linear_result # noqa: SLF001
+ node_result_type_elimination_list = set(result.node.keys()) - set(
+ node_attributes
+ )
+ link_result_type_elimination_list = set(result.link.keys()) - set(
+ link_attributes
+ )
+
for node_result_type in node_result_type_elimination_list:
self._linear_result.node.pop(node_result_type)
-
+
for link_result_type in link_result_type_elimination_list:
self._linear_result.link.pop(link_result_type)
-
+
self._linear_result.node['leak'] = pd.DataFrame(dtype=float)
-
- active_pipe_damages = self.restoration._registry.active_pipe_damages
-
+
+ active_pipe_damages = self.restoration._registry.active_pipe_damages # noqa: SLF001
+
temp_active = active_pipe_damages.copy()
for virtual_demand_node in active_pipe_damages:
- if virtual_demand_node in isolated_nodes or active_pipe_damages[virtual_demand_node] in isolated_nodes:
+ if (
+ virtual_demand_node in isolated_nodes
+ or active_pipe_damages[virtual_demand_node] in isolated_nodes
+ ):
temp_active.pop(virtual_demand_node)
-
- virtual_demand_nodes = list(temp_active.keys() )
- real_demand_nodes = list(temp_active.values() )
-
+
+ virtual_demand_nodes = list(temp_active.keys())
+ real_demand_nodes = list(temp_active.values())
+
if len(temp_active) > 0:
- #this must be here in the case that a node that is not isolated at
+ # this must be here in the case that a node that is not isolated at
# this step does not have a result. This can happen if the result is
- #simulated without run.. For example, in the latest vallid result
- #some nodes were isolated, but not in the current run.
- available_nodes_in_current_result = result.node['demand'].columns.to_list()
- not_available_virtual_node_names = set(virtual_demand_nodes) - set(available_nodes_in_current_result)
+ # simulated without a run. For example, in the latest valid result
+ # some nodes were isolated, but not in the current run.
+ available_nodes_in_current_result = result.node[
+ 'demand'
+ ].columns.to_list()
+ not_available_virtual_node_names = set(virtual_demand_nodes) - set(
+ available_nodes_in_current_result
+ )
if len(not_available_virtual_node_names):
- not_available_real_node_names = [temp_active[virtual_node_name] for virtual_node_name in not_available_virtual_node_names]
- virtual_demand_nodes = set(virtual_demand_nodes) - not_available_virtual_node_names
- real_demand_nodes = set(real_demand_nodes) - set(not_available_real_node_names)
+ not_available_real_node_names = [
+ temp_active[virtual_node_name]
+ for virtual_node_name in not_available_virtual_node_names
+ ]
+ virtual_demand_nodes = (
+ set(virtual_demand_nodes) - not_available_virtual_node_names
+ )
+ real_demand_nodes = set(real_demand_nodes) - set(
+ not_available_real_node_names
+ )
virtual_demand_nodes = list(virtual_demand_nodes)
- real_demand_nodes = list(real_demand_nodes)
-
- result.node['demand'][real_demand_nodes] = result.node['demand'][virtual_demand_nodes]
- result.node['demand'].drop(virtual_demand_nodes, axis =1, inplace=True)
-
- active_nodal_damages = self.restoration._registry.active_nodal_damages
+ real_demand_nodes = list(real_demand_nodes)
+
+ result.node['demand'][real_demand_nodes] = result.node['demand'][
+ virtual_demand_nodes
+ ]
+ result.node['demand'].drop(virtual_demand_nodes, axis=1, inplace=True) # noqa: PD002
+
+ active_nodal_damages = self.restoration._registry.active_nodal_damages # noqa: SLF001
temp_active = active_nodal_damages.copy()
for virtual_demand_node in active_nodal_damages:
- if virtual_demand_node in isolated_nodes or temp_active[virtual_demand_node] in isolated_nodes:
+ if (
+ virtual_demand_node in isolated_nodes
+ or temp_active[virtual_demand_node] in isolated_nodes
+ ):
temp_active.pop(virtual_demand_node)
-
- virtual_demand_nodes = list(temp_active.keys() )
- real_demand_nodes = list(temp_active.values() )
-
+
+ virtual_demand_nodes = list(temp_active.keys())
+ real_demand_nodes = list(temp_active.values())
+
if len(temp_active) > 0:
- #this must be here in the case that a node that is not isolated at
+ # this must be here in the case that a node that is not isolated at
# this step has not result. This can happen if the result is being
- #simulated without run.. For example, in the latest vallid result
- #some nodes were isolated, but not in the current run.
- available_nodes_in_current_result = result.node['demand'].columns.to_list()
- not_available_virtual_node_names = set(virtual_demand_nodes) - set(available_nodes_in_current_result)
+ # simulated without a run. For example, in the latest valid result
+ # some nodes were isolated, but not in the current run.
+ available_nodes_in_current_result = result.node[
+ 'demand'
+ ].columns.to_list()
+ not_available_virtual_node_names = set(virtual_demand_nodes) - set(
+ available_nodes_in_current_result
+ )
if len(not_available_virtual_node_names):
- not_available_real_node_names = [temp_active[virtual_node_name] for virtual_node_name in not_available_virtual_node_names]
- virtual_demand_nodes = set(virtual_demand_nodes) - not_available_virtual_node_names
- real_demand_nodes = set(real_demand_nodes) - set(not_available_real_node_names)
+ not_available_real_node_names = [
+ temp_active[virtual_node_name]
+ for virtual_node_name in not_available_virtual_node_names
+ ]
+ virtual_demand_nodes = (
+ set(virtual_demand_nodes) - not_available_virtual_node_names
+ )
+ real_demand_nodes = set(real_demand_nodes) - set(
+ not_available_real_node_names
+ )
virtual_demand_nodes = list(virtual_demand_nodes)
- real_demand_nodes = list(real_demand_nodes)
-
- non_isolated_pairs = dict(zip(virtual_demand_nodes, real_demand_nodes))
- result.node['leak'] = result.node['demand'][virtual_demand_nodes].rename(non_isolated_pairs, axis=1)
-
-
- if just_initialized_flag == False:
+ real_demand_nodes = list(real_demand_nodes)
+
+ non_isolated_pairs = dict(zip(virtual_demand_nodes, real_demand_nodes))
+ result.node['leak'] = result.node['demand'][virtual_demand_nodes].rename(
+ non_isolated_pairs, axis=1
+ )
+
+ if just_initialized_flag == False: # noqa: E712
self._linear_result.maximum_trial_time.extend(result.maximum_trial_time)
-
- saved_max_time = self._linear_result.node[list(self._linear_result.node.keys())[0]].index.max()
- to_be_saved_min_time = result.node[list(result.node.keys())[0]].index.min()
- if abs(to_be_saved_min_time - saved_max_time) != 0: #>= min(self.wn.options.time.hydraulic_timestep, self.wn.options.time.report_timestep):
- #logger.error(repr(to_be_saved_min_time)+ ' ' + repr(saved_max_time))
- raise ValueError("saved result and to be saved result are not the same. "+repr(saved_max_time) + " "+repr(to_be_saved_min_time))
+
+ saved_max_time = self._linear_result.node[
+ list(self._linear_result.node.keys())[0] # noqa: RUF015
+ ].index.max()
+ to_be_saved_min_time = result.node[
+ list(result.node.keys())[0] # noqa: RUF015
+ ].index.min()
+ if (
+ abs(to_be_saved_min_time - saved_max_time) != 0
+ ): # >= min(self.wn.options.time.hydraulic_timestep, self.wn.options.time.report_timestep):
+ # logger.error(repr(to_be_saved_min_time)+ ' ' + repr(saved_max_time))
+ raise ValueError(
+ 'saved result and to-be-saved result are not the same. '
+ + repr(saved_max_time)
+ + ' '
+ + repr(to_be_saved_min_time)
+ )
for att in node_attributes:
if len(active_nodal_damages) == 0 and att == 'leak':
continue
_leak_flag = False
leak_first_time_result = None
- if att == 'leak' and 'leak' in result.node: #the second condition is not needed. It's there only for assurance
-
+ if (
+ att == 'leak' and 'leak' in result.node
+ ): # the second condition is not needed. It's there only for assurance
former_nodes_list = set(self._linear_result.node['leak'].columns)
to_add_nodes_list = set(result.node[att].columns)
- complete_result_node_list = (to_add_nodes_list - former_nodes_list)
+ complete_result_node_list = to_add_nodes_list - former_nodes_list
if len(complete_result_node_list) > 0:
_leak_flag = True
-
- leak_first_time_result = result.node['leak'][complete_result_node_list].iloc[0]
-
+
+ leak_first_time_result = result.node['leak'][
+ complete_result_node_list
+ ].iloc[0]
+
if att in result.node:
- result.node[att].drop(result.node[att].index[0], inplace=True)
- self._linear_result.node[att] = self._linear_result.node[att].append(result.node[att])
-
+ result.node[att].drop(result.node[att].index[0], inplace=True) # noqa: PD002
+ self._linear_result.node[att] = self._linear_result.node[
+ att
+ ].append(result.node[att])
+
if _leak_flag:
- self._linear_result.node['leak'].loc[leak_first_time_result.name, leak_first_time_result.index] = leak_first_time_result
- self._linear_result.node['leak'] = self._linear_result.node['leak'].sort_index()
-
+ self._linear_result.node['leak'].loc[
+ leak_first_time_result.name, leak_first_time_result.index
+ ] = leak_first_time_result
+ self._linear_result.node['leak'] = self._linear_result.node[
+ 'leak'
+ ].sort_index()
+
for att in link_attributes:
- result.link[att].drop(result.link[att].index[0], inplace=True)
- self._linear_result.link[att] = self._linear_result.link[att].append(result.link[att])
-
- def dumpPartOfResult(self):
- limit_size = self.registry.settings["limit_result_file_size"]
+ result.link[att].drop(result.link[att].index[0], inplace=True) # noqa: PD002
+ self._linear_result.link[att] = self._linear_result.link[att].append(
+ result.link[att]
+ )
+
+ def dumpPartOfResult(self): # noqa: C901, N802, D102
+ limit_size = self.registry.settings['limit_result_file_size']
limit_size_byte = limit_size * 1024 * 1024
-
+
total_size = 0
-
+
for att in self._linear_result.node:
- att_size = sys.getsizeof(self._linear_result.node[att] )
+ att_size = sys.getsizeof(self._linear_result.node[att])
total_size += att_size
-
+
for att in self._linear_result.link:
- att_size = sys.getsizeof(self._linear_result.link[att] )
+ att_size = sys.getsizeof(self._linear_result.link[att])
total_size += att_size
-
- print("total size= "+repr(total_size/1024/1024))
-
+
+ print('total size= ' + repr(total_size / 1024 / 1024)) # noqa: T201
+
if total_size >= limit_size_byte:
dump_result = SimulationResults()
dump_result.node = {}
dump_result.link = {}
for att in self._linear_result.node:
- #just to make sure. it obly add tens of micro seconds for each
- #att
-
- self._linear_result.node[att].sort_index(inplace=True)
- att_result = self._linear_result.node[att]
+ # just to make sure. It only adds tens of microseconds for each
+ # att
+
+ self._linear_result.node[att].sort_index(inplace=True) # noqa: PD002
+ att_result = self._linear_result.node[att]
if att_result.empty:
continue
- #first_time_index = att_result.index[0]
- last_valid_time = []
- att_time_index = att_result.index.to_list()
- last_valid_time = [cur_time for cur_time in att_time_index if cur_time not in self._linear_result.maximum_trial_time]
+ # first_time_index = att_result.index[0]
+ last_valid_time = []
+ att_time_index = att_result.index.to_list()
+ last_valid_time = [
+ cur_time
+ for cur_time in att_time_index
+ if cur_time not in self._linear_result.maximum_trial_time
+ ]
last_valid_time.sort()
-
+
if len(last_valid_time) > 0:
last_valid_time = last_valid_time[-2]
else:
- print(att_time_index)
+ print(att_time_index) # noqa: T201
last_valid_time = att_time_index[-2]
-
+
dump_result.node[att] = att_result.loc[:last_valid_time]
- last_valid_time_index = att_result.index.searchsorted(last_valid_time)
- self._linear_result.node[att].drop(att_result.index[:last_valid_time_index+1], inplace=True)
-
+ last_valid_time_index = att_result.index.searchsorted(
+ last_valid_time
+ )
+ self._linear_result.node[att].drop(
+ att_result.index[: last_valid_time_index + 1],
+ inplace=True, # noqa: PD002
+ )
+
for att in self._linear_result.link:
- #just to make sure. it obly add tens of micro seconds for each
- #att
- self._linear_result.link[att].sort_index(inplace=True)
- att_result = self._linear_result.link[att]
+ # just to make sure. It only adds tens of microseconds for each
+ # att
+ self._linear_result.link[att].sort_index(inplace=True) # noqa: PD002
+ att_result = self._linear_result.link[att]
if att_result.empty:
continue
- #first_time_index = att_result.index[0]
- last_valid_time = []
- att_time_index = att_result.index.to_list()
- last_valid_time = [cur_time for cur_time in att_time_index if cur_time not in self._linear_result.maximum_trial_time]
+ # first_time_index = att_result.index[0]
+ last_valid_time = []
+ att_time_index = att_result.index.to_list()
+ last_valid_time = [
+ cur_time
+ for cur_time in att_time_index
+ if cur_time not in self._linear_result.maximum_trial_time
+ ]
last_valid_time.sort()
-
+
if len(last_valid_time) > 0:
last_valid_time = last_valid_time[-2]
else:
last_valid_time = att_time_index[-2]
-
+
dump_result.link[att] = att_result.loc[:last_valid_time]
- last_valid_time_index = att_result.index.searchsorted(last_valid_time)
- self._linear_result.link[att].drop(att_result.index[:last_valid_time_index+1], inplace=True)
-
+ last_valid_time_index = att_result.index.searchsorted(
+ last_valid_time
+ )
+ self._linear_result.link[att].drop(
+ att_result.index[: last_valid_time_index + 1],
+ inplace=True, # noqa: PD002
+ )
+
dump_file_index = len(self.registry.result_dump_file_list) + 1
-
+
if dump_file_index >= 1:
- list_file_opening_mode = "at"
+ list_file_opening_mode = 'at'
else:
- list_file_opening_mode = "wt"
-
- result_dump_file_name = self.registry.scenario_name + ".part"+str(dump_file_index)
- result_dump_file_dst = os.path.join(self.registry.settings.process['result_directory'], result_dump_file_name)
-
- with open(result_dump_file_dst, "wb") as resul_file:
+ list_file_opening_mode = 'wt'
+
+ result_dump_file_name = (
+ self.registry.scenario_name + '.part' + str(dump_file_index)
+ )
+ result_dump_file_dst = os.path.join( # noqa: PTH118
+ self.registry.settings.process['result_directory'],
+ result_dump_file_name,
+ )
+
+ with open(result_dump_file_dst, 'wb') as resul_file: # noqa: PTH123
pickle.dump(dump_result, resul_file)
-
- dump_list_file_name = self.registry.scenario_name + ".dumplist"
- list_file_dst = os.path.join(self.registry.settings.process['result_directory'], dump_list_file_name)
-
- with open(list_file_dst, list_file_opening_mode) as part_list_file:
+
+ dump_list_file_name = self.registry.scenario_name + '.dumplist'
+ list_file_dst = os.path.join( # noqa: PTH118
+ self.registry.settings.process['result_directory'],
+ dump_list_file_name,
+ )
+
+ with open(list_file_dst, list_file_opening_mode) as part_list_file: # noqa: PTH123
part_list_file.writelines([result_dump_file_name])
-
-
+
self.registry.result_dump_file_list.append(result_dump_file_name)
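
Note on the hydraulic-run fallbacks above: runLinearScenario retries the simulation with progressively coarser methods (full run, estimate run, run without negative-pressure handling, estimate without a run) whenever EPANET reports Error 110, and the nested try/except blocks act as a priority list. The sketch below shows the same fallback pattern in isolation; run_with_fallbacks and the callables in the usage comment are hypothetical illustrations under that assumption, not the REWET or wntrfr API.

def run_with_fallbacks(methods, is_retryable):
    # Try each zero-argument callable in order (most accurate first); fall back
    # only when is_retryable(err) is True, otherwise re-raise immediately.
    last_err = None
    for method in methods:
        try:
            return method()
        except Exception as err:  # broad on purpose, mirroring the handling above
            if not is_retryable(err):
                raise
            last_err = err
    if last_err is None:
        raise ValueError('no methods were supplied')
    raise last_err


# Usage sketch (hypothetical stand-ins for performSimulation/estimateRun/estimateWithoutRun):
# result = run_with_fallbacks(
#     [lambda: hyd_sim.performSimulation(t, True),
#      lambda: hyd_sim.estimateRun(t, True),
#      lambda: hyd_sim.performSimulation(t, False),
#      lambda: hyd_sim.estimateRun(t, False),
#      lambda: hyd_sim.estimateWithoutRun(linear_result, t)],
#     is_retryable=lambda e: bool(e.args) and e.args[0] == 'EPANET Error 110',
# )
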
diff --git a/modules/systemPerformance/REWET/REWET/__init__.py b/modules/systemPerformance/REWET/REWET/__init__.py
index a704185b0..2f9efa217 100644
--- a/modules/systemPerformance/REWET/REWET/__init__.py
+++ b/modules/systemPerformance/REWET/REWET/__init__.py
@@ -1,5 +1,3 @@
-from REWET import Input
+from REWET import Input # noqa: CPY001, D104, N999
__version__ = '0.1.1'
-
-
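
Note on dumpPartOfResult in StochasticModel.py above: once the accumulated node and link DataFrames exceed limit_result_file_size (in MB), the rows up to the last fully valid time are pickled to <scenario>.partN, dropped from memory, and the part name is appended to a .dumplist file. The sketch below illustrates the same spill-to-disk idea for a plain dict of DataFrames; spill_if_too_large is a hypothetical helper written for this note, not REWET code.

import pickle
import sys


def spill_if_too_large(frames, limit_mb, path_prefix, part_index):
    # frames maps attribute name -> DataFrame. When their combined in-memory size
    # exceeds limit_mb, pickle all but the newest row of each frame to a numbered
    # part file, trim the in-memory copies, and return the next part index.
    total_bytes = sum(sys.getsizeof(df) for df in frames.values())
    if total_bytes < limit_mb * 1024 * 1024:
        return part_index
    dump = {name: df.iloc[:-1] for name, df in frames.items()}
    with open(f'{path_prefix}.part{part_index}', 'wb') as dump_file:
        pickle.dump(dump, dump_file)
    for name in frames:
        frames[name] = frames[name].iloc[-1:]  # keep the latest row so appends continue
    return part_index + 1
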
diff --git a/modules/systemPerformance/REWET/REWET/initial.py b/modules/systemPerformance/REWET/REWET/initial.py
index 42b19a5e9..df8f2a479 100644
--- a/modules/systemPerformance/REWET/REWET/initial.py
+++ b/modules/systemPerformance/REWET/REWET/initial.py
@@ -1,40 +1,39 @@
-# -*- coding: utf-8 -*-
-"""
-Created on Tue Jun 1 21:04:18 2021
+"""Created on Tue Jun 1 21:04:18 2021
@author: snaeimi
-"""
+""" # noqa: CPY001, D400
-import StochasticModel
-import Damage
+import logging
import os
-import signal
-import pickle
+import pickle # noqa: S403
import time
-import pandas as pd
-import logging
+import Damage
import Input.Input_IO as io
-from Input.Settings import Settings
+import pandas as pd
+import StochasticModel
from EnhancedWNTR.network.model import WaterNetworkModel
-#from wntrfr.network.model import WaterNetworkModel #INote: chanaged from enhanced wntr to wntr 1. It may break EPANET compatibility
-from restoration.registry import Registry
-from restoration.model import Restoration
-from Project import Project
+from Input.Settings import Settings
+from Project import Project
+from restoration.model import Restoration
+
+# from wntrfr.network.model import WaterNetworkModel #INote: changed from enhanced wntr to wntr 1. It may break EPANET compatibility
+from restoration.registry import Registry
logging.basicConfig(level=50)
-class Starter():
-
- def createProjectFile(self, project_settings, damage_list, project_file_name):
+
+class Starter: # noqa: D101
+ def createProjectFile(self, project_settings, damage_list, project_file_name): # noqa: D102, N802, PLR6301
project = Project(project_settings, damage_list)
- project_file_addr = os.path.join(project_settings.process['result_directory'], project_file_name)
- with open(project_file_addr, 'wb') as f:
+ project_file_addr = os.path.join( # noqa: PTH118
+ project_settings.process['result_directory'], project_file_name
+ )
+ with open(project_file_addr, 'wb') as f: # noqa: PTH123
pickle.dump(project, f)
-
- def run(self, project_file=None):
- """
- Runs the ptogram. It initiates the Settings class and based on the
+
+ def run(self, project_file=None): # noqa: C901
+ """Runs the program. It initiates the Settings class and based on the
settings, run the program in either single scenario, multiple serial or
multiple parallel mode.
@@ -47,74 +46,105 @@ def run(self, project_file=None):
-------
None.
- """
+ """ # noqa: D205, D401
settings = Settings()
- if type(project_file) != type(None):
+ if project_file is not None:
project_file = str(project_file)
- if type(project_file) == str:
- if project_file.split(".")[-1].lower() == "prj":
+ if type(project_file) == str: # noqa: E721
+ if project_file.split('.')[-1].lower() == 'prj':
settings.importProject(project_file)
- elif project_file.split(".")[-1].lower() == "json":
+ elif project_file.split('.')[-1].lower() == 'json':
settings.importJsonSettings(project_file)
project_file = None
else:
- raise ValueError("The input file has an unrgnizable extension: {}".format(project_file.split(".")[-1].lower()) )
-# =============================================================================
-# else:
-# raise ValueError("project type unrecognized")
-# =============================================================================
-
-
- damage_list = io.read_damage_list(settings.process['pipe_damage_file_list'], settings.process['pipe_damage_file_directory'])
+ raise ValueError(
+ 'The input file has an unrecognizable extension: {}'.format( # noqa: EM103
+ project_file.split('.')[-1].lower()
+ )
+ )
+ # =============================================================================
+ # else:
+ # raise ValueError("project type unrecognized")
+ # =============================================================================
+
+ damage_list = io.read_damage_list(
+ settings.process['pipe_damage_file_list'],
+ settings.process['pipe_damage_file_directory'],
+ )
settings.process.settings['list'] = damage_list
- if type(project_file) == type(None):
- self.createProjectFile(settings, damage_list, "project.prj")
- #raise
- if settings.process['number_of_proccessor']==1: #Single mode
- #get damage list as Pandas Dataframe
+ if project_file is None:
+ self.createProjectFile(settings, damage_list, 'project.prj')
+ # raise
+ if settings.process['number_of_proccessor'] == 1: # Single mode
+ # get damage list as Pandas Dataframe
if settings.process['number_of_damages'] == 'multiple':
damage_list_size = len(damage_list)
- for i in range(0, damage_list_size):
- print(i, flush=True)
- settings.initializeScenarioSettings(i) #initialize scenario-specific settings for each list/usefule for sensitivity analysis
- scenario_name = damage_list.loc[i, 'Scenario Name']
+ for i in range(damage_list_size):
+ print(i, flush=True) # noqa: T201
+ settings.initializeScenarioSettings(
+ i
+ ) # initialize scenario-specific settings for each list/useful for sensitivity analysis
+ scenario_name = damage_list.loc[i, 'Scenario Name']
pipe_damage_name = damage_list.loc[i, 'Pipe Damage']
tank_damage_name = damage_list.loc[i, 'Tank Damage']
- self.run_local_single(pipe_damage_name, scenario_name, settings, nodal_damage_file_name = damage_list.loc[i,'Nodal Damage'], pump_damage_file_name = damage_list.loc[i,'Pump Damage'], tank_damage_file_name = tank_damage_name)
-
+ self.run_local_single(
+ pipe_damage_name,
+ scenario_name,
+ settings,
+ nodal_damage_file_name=damage_list.loc[i, 'Nodal Damage'],
+ pump_damage_file_name=damage_list.loc[i, 'Pump Damage'],
+ tank_damage_file_name=tank_damage_name,
+ )
+
elif settings.process['number_of_damages'] == 'single':
t1 = time.time()
- settings.initializeScenarioSettings(0) #initialize scenario-specific settings for the first line of damage list
+ settings.initializeScenarioSettings(
+ 0
+ ) # initialize scenario-specific settings for the first line of damage list
scenario_name = damage_list.loc[0, 'Scenario Name']
pipe_damage_name = damage_list.loc[0, 'Pipe Damage']
tank_damage_name = damage_list.loc[0, 'Tank Damage']
- self.run_local_single(pipe_damage_name, scenario_name, settings, nodal_damage_file_name = damage_list.loc[0,'Nodal Damage'], pump_damage_file_name = damage_list.loc[0,'Pump Damage'], tank_damage_file_name = tank_damage_name)
+ self.run_local_single(
+ pipe_damage_name,
+ scenario_name,
+ settings,
+ nodal_damage_file_name=damage_list.loc[0, 'Nodal Damage'],
+ pump_damage_file_name=damage_list.loc[0, 'Pump Damage'],
+ tank_damage_file_name=tank_damage_name,
+ )
t2 = time.time()
- print('Time of Single run is: ' + repr((t2-t1)/3600) + '(hr)')
+ print('Time of Single run is: ' + repr((t2 - t1) / 3600) + '(hr)') # noqa: T201
else:
- raise ValueError("Unknown value for settings['number_of_damages']")
-
- elif settings.process['number_of_proccessor']>1:
+ raise ValueError("Unknown value for settings['number_of_damages']") # noqa: EM101, TRY003
+
+ elif settings.process['number_of_proccessor'] > 1:
self.run_mpi(settings)
else:
- raise ValueError('Number of proccessor must be equal to or more than 1')
-
-
- def run_local_single(self, file_name, scenario_name, settings, worker_rank=None, nodal_damage_file_name=None, pump_damage_file_name=None, tank_damage_file_name=None):
- """
- Runs a single scenario on the local machine.
+ raise ValueError('Number of processors must be equal to or more than 1') # noqa: EM101, TRY003
+
+ def run_local_single( # noqa: C901
+ self,
+ file_name,
+ scenario_name,
+ settings,
+ worker_rank=None,
+ nodal_damage_file_name=None,
+ pump_damage_file_name=None,
+ tank_damage_file_name=None,
+ ):
+ """Runs a single scenario on the local machine.
Parameters
----------
file_name : str
File damage file name.
scenario_name : str
- scneario name.
+ scenario name.
settings : Settings
Settings object.
worker_rank : int, optional
- Specifies the rank of the currnet woker. If the scneario is being run as single or multiple-serial mode, the can be anything. It is used for naming temp files. The default is None.
+ Specifies the rank of the current worker. If the scenario is being run in single or multiple-serial mode, this can be anything. It is used for naming temp files. The default is None.
nodal_damage_file_name : str, optional
nodal damages file name. The default is None.
pump_damage_file_name : TYPE, optional
@@ -131,262 +161,445 @@ def run_local_single(self, file_name, scenario_name, settings, worker_rank=None,
-------
None.
- """
- print(scenario_name + ' - ' + file_name + ' - ' + nodal_damage_file_name + ' - ' + str(pump_damage_file_name), flush=True)
- if settings.process['number_of_proccessor'] > 1 and worker_rank == None:
- raise ValueError('for multiple proccessor analysis, worker_rank_must be provided')
-
- if type(file_name) != str:
- file_name = str(file_name) #for number-only names to convert from int/float to str
-
- if type(tank_damage_file_name) != str:
- tank_damage_file_name = str(tank_damage_file_name) #for number-only names to convert from int/float to str
-
- if type(nodal_damage_file_name)!=str:
- nodal_damage_file_name = str(nodal_damage_file_name) #for number-only names to convert from int/float to str
-
- if type(pump_damage_file_name) != str:
- pump_damage_file_name = str(pump_damage_file_name) #for number-only names to convert from int/float to str
-
- if settings.scenario['Pipe_damage_input_method' ] == 'pickle':
- pipe_damages = io.read_pipe_damage_seperate_pickle_file(settings.process['pipe_damage_file_directory'], file_name)
- node_damages = io.read_node_damage_seperate_pickle_file(settings.process['pipe_damage_file_directory'], nodal_damage_file_name)
- tank_damages = io.read_tank_damage_seperate_pickle_file(settings.process['pipe_damage_file_directory'], tank_damage_file_name)
- pump_damages = io.read_pump_damage_seperate_pickle_file(settings.process['pipe_damage_file_directory'], pump_damage_file_name)
+ """ # noqa: D401
+ print( # noqa: T201
+ scenario_name
+ + ' - '
+ + file_name
+ + ' - '
+ + nodal_damage_file_name
+ + ' - '
+ + str(pump_damage_file_name),
+ flush=True,
+ )
+ if settings.process['number_of_proccessor'] > 1 and worker_rank == None: # noqa: E711
+ raise ValueError( # noqa: TRY003
+ 'for multiple processor analysis, worker_rank must be provided' # noqa: EM101
+ )
+
+ if type(file_name) != str: # noqa: E721
+ file_name = str(
+ file_name
+ ) # for number-only names to convert from int/float to str
+
+ if type(tank_damage_file_name) != str: # noqa: E721
+ tank_damage_file_name = str(
+ tank_damage_file_name
+ ) # for number-only names to convert from int/float to str
+
+ if type(nodal_damage_file_name) != str: # noqa: E721
+ nodal_damage_file_name = str(
+ nodal_damage_file_name
+ ) # for number-only names to convert from int/float to str
+
+ if type(pump_damage_file_name) != str: # noqa: E721
+ pump_damage_file_name = str(
+ pump_damage_file_name
+ ) # for number-only names to convert from int/float to str
+
+ if settings.scenario['Pipe_damage_input_method'] == 'pickle':
+ pipe_damages = io.read_pipe_damage_seperate_pickle_file(
+ settings.process['pipe_damage_file_directory'], file_name
+ )
+ node_damages = io.read_node_damage_seperate_pickle_file(
+ settings.process['pipe_damage_file_directory'],
+ nodal_damage_file_name,
+ )
+ tank_damages = io.read_tank_damage_seperate_pickle_file(
+ settings.process['pipe_damage_file_directory'], tank_damage_file_name
+ )
+ pump_damages = io.read_pump_damage_seperate_pickle_file(
+ settings.process['pipe_damage_file_directory'], pump_damage_file_name
+ )
elif settings.scenario['Pipe_damage_input_method'] == 'excel':
- pipe_damages = io.read_pipe_damage_seperate_EXCEL_file(settings.process['pipe_damage_file_directory'], file_name)
- node_damages = io.read_node_damage_seperate_EXCEL_file(settings.process['pipe_damage_file_directory'], nodal_damage_file_name)
- tank_damages = io.read_tank_damage_seperate_EXCEL_file(settings.process['pipe_damage_file_directory'], tank_damage_file_name)
- pump_damages = io.read_pump_damage_seperate_EXCEL_file(settings.process['pipe_damage_file_directory'], pump_damage_file_name)
+ pipe_damages = io.read_pipe_damage_seperate_EXCEL_file(
+ settings.process['pipe_damage_file_directory'], file_name
+ )
+ node_damages = io.read_node_damage_seperate_EXCEL_file(
+ settings.process['pipe_damage_file_directory'],
+ nodal_damage_file_name,
+ )
+ tank_damages = io.read_tank_damage_seperate_EXCEL_file(
+ settings.process['pipe_damage_file_directory'], tank_damage_file_name
+ )
+ pump_damages = io.read_pump_damage_seperate_EXCEL_file(
+ settings.process['pipe_damage_file_directory'], pump_damage_file_name
+ )
else:
- raise ValueError("Unknown value for settings['Pipe_damage_input_method']")
-
- if pipe_damages.empty == True and node_damages.empty == True and tank_damages.empty == True and pump_damages.empty == True and settings.process['ignore_empty_damage']:
- return 2 #means it didn't run due to lack of any damage in pipe lines
-
+ raise ValueError( # noqa: TRY003
+ "Unknown value for settings['Pipe_damage_input_method']" # noqa: EM101
+ )
+
+ if (
+ pipe_damages.empty == True # noqa: E712
+ and node_damages.empty == True # noqa: E712
+ and tank_damages.empty == True # noqa: E712
+ and pump_damages.empty == True # noqa: E712
+ and settings.process['ignore_empty_damage']
+ ):
+ return 2 # means it didn't run due to lack of any damage in the pipes
+
"""
reads WDN definition and checks/sets the settings defined from settings
"""
wn = WaterNetworkModel(settings.process['WN_INP'])
-
+
delta_t_h = settings['hydraulic_time_step']
wn.options.time.hydraulic_timestep = int(delta_t_h)
- #wn.options.time.pattern_timestep = int(delta_t_h)
- #wn.options.time.pattern_timestep = int(delta_t_h)
- #Sina What about rule time step. Also one may want to change pattern time step
-
+ # wn.options.time.pattern_timestep = int(delta_t_h)
+ # wn.options.time.pattern_timestep = int(delta_t_h)
+ # Sina What about rule time step. Also one may want to change pattern time step
+
demand_node_name_list = []
for junction_name, junction in wn.junctions():
if junction.demand_timeseries_list[0].base_value > 0:
- junction.demand_timeseries_list[0].base_value = junction.demand_timeseries_list[0].base_value * settings.process['demand_ratio']
+ junction.demand_timeseries_list[0].base_value = ( # noqa: PLR6104
+ junction.demand_timeseries_list[0].base_value
+ * settings.process['demand_ratio']
+ )
demand_node_name_list.append(junction_name)
-
- registry = Registry(wn, settings, demand_node_name_list, scenario_name)
- self.registry = registry
- self.damage = Damage.Damage(registry, settings.scenario)
- ##All these data can immigrate to registry
- self.registry.damage = self.damage
- self.damage.pipe_all_damages = pipe_damages
- self.damage.node_damage = node_damages
- if tank_damages.empty == False:
- self.damage.tank_damage = tank_damages['Tank_ID']
- if pump_damages.empty == False:
- self.damage.damaged_pumps = pump_damages['Pump_ID']
-
- restoration = Restoration(settings.scenario['Restortion_config_file'], registry, self.damage)
+
+ registry = Registry(wn, settings, demand_node_name_list, scenario_name)
+ self.registry = registry
+ self.damage = Damage.Damage(registry, settings.scenario)
+ # All these data can migrate to the registry
+ self.registry.damage = self.damage
+ self.damage.pipe_all_damages = pipe_damages
+ self.damage.node_damage = node_damages
+ if tank_damages.empty == False: # noqa: E712
+ self.damage.tank_damage = tank_damages['Tank_ID']
+ if pump_damages.empty == False: # noqa: E712
+ self.damage.damaged_pumps = pump_damages['Pump_ID']
+
+ restoration = Restoration(
+ settings.scenario['Restortion_config_file'], registry, self.damage
+ )
restoration.pump_restoration = pump_damages
restoration.tank_restoration = tank_damages
-
- self.sm = StochasticModel.StochasticModel(wn, self.damage, self.registry, simulation_end_time=settings.process['RUN_TIME'] , restoration = restoration , mode='PDD', i_restoration=settings.process['Restoration_on'])
- result = self.sm.runLinearScenario(self.damage, settings, worker_rank)
+ self.sm = StochasticModel.StochasticModel(
+ wn,
+ self.damage,
+ self.registry,
+ simulation_end_time=settings.process['RUN_TIME'],
+ restoration=restoration,
+ mode='PDD',
+ i_restoration=settings.process['Restoration_on'],
+ )
+
+ result = self.sm.runLinearScenario(self.damage, settings, worker_rank)
self.res = result
io.save_single(settings, result, scenario_name, registry)
return 1
-
- def run_mpi(self, settings):
- from mpi4py import MPI
- import mpi4py
+
+ def run_mpi(self, settings): # noqa: C901, D102
+ import mpi4py # noqa: PLC0415
+ from mpi4py import MPI # noqa: PLC0415
+
comm = MPI.COMM_WORLD
mpi4py.rc.recv_mprobe = False
-
- pipe_damage_list= io.read_damage_list(settings.process['pipe_damage_file_list' ], settings.process['pipe_damage_file_directory'])
- if settings.process['mpi_resume'] == True:
+ pipe_damage_list = io.read_damage_list(
+ settings.process['pipe_damage_file_list'],
+ settings.process['pipe_damage_file_directory'],
+ )
+
+ if settings.process['mpi_resume'] == True: # noqa: E712
pipe_damage_list = pipe_damage_list.set_index('Scenario Name')
- #_done_file = pd.read_csv('done.csv')
- #_done_file = _done_file.transpose().reset_index().transpose().set_index(0)
+ # _done_file = pd.read_csv('done.csv')
+ # _done_file = _done_file.transpose().reset_index().transpose().set_index(0)
file_lists = os.listdir(settings.process['result_directory'])
- done_scenario_list=[]
+ done_scenario_list = []
for name in file_lists:
-
if name.split('.')[-1] != 'res':
continue
split_k = name.split('.res')[:-1]
- #print(split_k)
- kk = ""
+ # print(split_k)
+ kk = ''
for portiong in split_k:
kk += portiong
- if kk not in done_scenario_list and kk in pipe_damage_list.index:
+ if kk not in done_scenario_list and kk in pipe_damage_list.index:
done_scenario_list.append(kk)
-
+
pipe_damage_list = pipe_damage_list.drop(done_scenario_list)
pipe_damage_list = pipe_damage_list.reset_index()
-
+
if comm.rank == 0:
- scn_name_list = pipe_damage_list['Scenario Name'].to_list()
- file_name_list = pipe_damage_list['Pipe Damage'].to_list()
+ scn_name_list = pipe_damage_list['Scenario Name'].to_list()
+ file_name_list = pipe_damage_list['Pipe Damage'].to_list()
else:
- file_name_list=[]
-
- if comm.rank == 0:
+ file_name_list = []
+
+ if comm.rank == 0: # noqa: PLR1702
time_jobs_saved = time.time()
- jobs = pd.DataFrame(columns=['scenario_name', 'file_name', 'worker', 'Done', 'time_assigned', 'time_confirmed'])
+ jobs = pd.DataFrame(
+ columns=[
+ 'scenario_name',
+ 'file_name',
+ 'worker',
+ 'Done',
+ 'time_assigned',
+ 'time_confirmed',
+ ]
+ )
jobs['scenario_name'] = scn_name_list
- jobs['file_name'] = file_name_list
- jobs['worker'] = None
- jobs['Done'] = "False"
+ jobs['file_name'] = file_name_list
+ jobs['worker'] = None
+ jobs['Done'] = 'False'
jobs['time_assigned'] = None
- jobs['time_confirmed']= None
-
- workers = pd.Series(data=-1, index=[1+i for i in range(settings.process['number_of_proccessor']-1)])
-
- iContinue = True
+ jobs['time_confirmed'] = None
+
+ workers = pd.Series(
+ data=-1,
+ index=[
+ 1 + i
+ for i in range(settings.process['number_of_proccessor'] - 1)
+ ],
+ )
+
+ iContinue = True # noqa: N806
while iContinue:
-
- if (time.time()-time_jobs_saved)>120:
- jobs.to_excel('temp-jobs.xlsx') #only for more informaton about the latest job status for the user in teh real time
+ if (time.time() - time_jobs_saved) > 120: # noqa: PLR2004
+ jobs.to_excel(
+ 'temp-jobs.xlsx'
+ ) # only for more information about the latest job status for the user in real time
time_jobs_saved = time.time()
-
+
if comm.iprobe():
- status=MPI.Status()
+ status = MPI.Status()
recieved_msg = comm.recv(status=status)
- worker_rank = status.Get_source()
- if recieved_msg==1 or recieved_msg==2 or recieved_msg==3: #check if the job is done
+ worker_rank = status.Get_source()
+ if (
+ recieved_msg == 1 or recieved_msg == 2 or recieved_msg == 3 # noqa: PLR1714, PLR2004
+ ): # check if the job is done
msg_interpretation = None
- if recieved_msg==1:
+ if recieved_msg == 1:
msg_interpretation = 'done'
- elif recieved_msg==2:
+ elif recieved_msg == 2: # noqa: PLR2004
msg_interpretation = 'done w/o simulation'
- elif recieved_msg==3:
+ elif recieved_msg == 3: # noqa: PLR2004
msg_interpretation = 'exception happened'
-
- print('messaged recieved= '+repr(msg_interpretation)+' rank recivied= '+repr(worker_rank))
+
+ print( # noqa: T201
+ 'message received= '
+ + repr(msg_interpretation)
+ + ' rank received= '
+ + repr(worker_rank)
+ )
# In both cases it means the jobs is done, only in different ways
else:
- raise ValueError('Recieved message from worker is not recognized: ' + str(recieved_msg) + ', ' + str(worker_rank))
-
+ raise ValueError(
+ 'Received message from worker is not recognized: '
+ + str(recieved_msg)
+ + ', '
+ + str(worker_rank)
+ )
+
jobs_index = workers.loc[worker_rank]
- if recieved_msg==1:
- jobs.loc[jobs_index, 'Done']='True'
- elif recieved_msg==2:
- jobs.loc[jobs_index, 'Done']='No need'
- elif recieved_msg==3:
- jobs.loc[jobs_index, 'Done']='exception'
-
- jobs.loc[jobs_index,'time_confirmed']=time.time()
- workers.loc[worker_rank]=-1
-
- time_began = time.strftime('%Y-%m-%d %H:%M:%S', time.localtime(jobs.loc[jobs_index, 'time_assigned']))
- time_end = time.strftime('%Y-%m-%d %H:%M:%S', time.localtime(jobs.loc[jobs_index, 'time_confirmed']))
- time_lapsed = jobs.loc[jobs_index,'time_confirmed']-jobs.loc[jobs_index,'time_assigned']
- with open('done.csv', 'a', encoding='utf-8', buffering=1000000) as f: #shows teh order of done jobs
- f.write( jobs.loc[jobs_index, 'scenario_name'] + ',' + jobs.loc[jobs_index, 'file_name']+','+str(jobs.loc[jobs_index, 'worker'])+','+str(time_lapsed)+','+str(time_began)+','+str(time_end)+'\n')
-
- binary_vector =(jobs['worker'].isna())
+ if recieved_msg == 1:
+ jobs.loc[jobs_index, 'Done'] = 'True'
+ elif recieved_msg == 2: # noqa: PLR2004
+ jobs.loc[jobs_index, 'Done'] = 'No need'
+ elif recieved_msg == 3: # noqa: PLR2004
+ jobs.loc[jobs_index, 'Done'] = 'exception'
+
+ jobs.loc[jobs_index, 'time_confirmed'] = time.time()
+ workers.loc[worker_rank] = -1
+
+ time_began = time.strftime(
+ '%Y-%m-%d %H:%M:%S',
+ time.localtime(jobs.loc[jobs_index, 'time_assigned']),
+ )
+ time_end = time.strftime(
+ '%Y-%m-%d %H:%M:%S',
+ time.localtime(jobs.loc[jobs_index, 'time_confirmed']),
+ )
+ time_lapsed = (
+ jobs.loc[jobs_index, 'time_confirmed']
+ - jobs.loc[jobs_index, 'time_assigned']
+ )
+ with open( # noqa: PTH123
+ 'done.csv', 'a', encoding='utf-8', buffering=1000000
+ ) as f: # shows the order of done jobs
+ f.write(
+ jobs.loc[jobs_index, 'scenario_name']
+ + ','
+ + jobs.loc[jobs_index, 'file_name']
+ + ','
+ + str(jobs.loc[jobs_index, 'worker'])
+ + ','
+ + str(time_lapsed)
+ + ','
+ + str(time_began)
+ + ','
+ + str(time_end)
+ + '\n'
+ )
+
+ binary_vector = jobs['worker'].isna()
not_assigned_data = jobs[binary_vector]
- free_workers = workers[workers==-1]
- time_constraint=False
-
- if len(not_assigned_data)>0 and len(free_workers)>0 and time_constraint==False:
- jobs_index = not_assigned_data.index[0]
- worker_rank = free_workers.index[0]
- print('trying to send '+repr(jobs_index)+' to '+repr(worker_rank), flush=True)
+ free_workers = workers[workers == -1]
+ time_constraint = False
+
+ if (
+ len(not_assigned_data) > 0
+ and len(free_workers) > 0
+ and time_constraint == False # noqa: E712
+ ):
+ jobs_index = not_assigned_data.index[0]
+ worker_rank = free_workers.index[0]
+ print( # noqa: T201
+ 'trying to send '
+ + repr(jobs_index)
+ + ' to '
+ + repr(worker_rank),
+ flush=True,
+ )
comm.isend(jobs_index, worker_rank, tag=0)
-
- workers.loc[worker_rank]=jobs_index
- jobs.loc[jobs_index, 'worker']=worker_rank
- jobs.loc[jobs_index, 'time_assigned']=time.time()
-
- time_began = time.strftime('%Y-%m-%d %H:%M:%S', time.localtime(jobs.loc[jobs_index, 'time_assigned']))
- with open('runing.csv', 'a', encoding='utf-8', buffering=1000000) as f:
- f.write( jobs.loc[jobs_index, 'scenario_name'] + ',' + jobs.loc[jobs_index, 'file_name']+','+str(jobs.loc[jobs_index, 'worker'])+','+str(time_began)+'\n')
-
- binary_vector = (jobs['Done']=='False')
- iContinue = (binary_vector.any() and (not time_constraint) )
-
- #Finish workers with sending them a dummy data with tag=100 (death tag)
+
+ workers.loc[worker_rank] = jobs_index
+ jobs.loc[jobs_index, 'worker'] = worker_rank
+ jobs.loc[jobs_index, 'time_assigned'] = time.time()
+
+ time_began = time.strftime(
+ '%Y-%m-%d %H:%M:%S',
+ time.localtime(jobs.loc[jobs_index, 'time_assigned']),
+ )
+ with open( # noqa: PTH123
+ 'runing.csv', 'a', encoding='utf-8', buffering=1000000
+ ) as f:
+ f.write(
+ jobs.loc[jobs_index, 'scenario_name']
+ + ','
+ + jobs.loc[jobs_index, 'file_name']
+ + ','
+ + str(jobs.loc[jobs_index, 'worker'])
+ + ','
+ + str(time_began)
+ + '\n'
+ )
+
+ binary_vector = jobs['Done'] == 'False'
+ iContinue = binary_vector.any() and (not time_constraint) # noqa: N806
+
+ # Finish workers with sending them a dummy data with tag=100 (death tag)
for i in range(1, settings.process['number_of_proccessor']):
- print('Death msg (tag=100) is sent to all workers. RIP!', flush=True)
- comm.send('None',dest=i ,tag=100)
- jobs['time_lapsed']=jobs['time_confirmed']-jobs['time_assigned']
- jobs['time_assigned']=jobs.apply(lambda x: time.strftime('%Y-%m-%d %H:%M:%S', time.localtime(x.loc['time_assigned'])), axis=1)
- jobs['time_confirmed']=jobs.apply(lambda x: time.strftime('%Y-%m-%d %H:%M:%S', time.localtime(x.loc['time_confirmed'])), axis=1)
+ print('Death msg (tag=100) is sent to all workers. RIP!', flush=True) # noqa: T201
+ comm.send('None', dest=i, tag=100)
+ jobs['time_lapsed'] = jobs['time_confirmed'] - jobs['time_assigned']
+ jobs['time_assigned'] = jobs.apply(
+ lambda x: time.strftime(
+ '%Y-%m-%d %H:%M:%S', time.localtime(x.loc['time_assigned'])
+ ),
+ axis=1,
+ )
+ jobs['time_confirmed'] = jobs.apply(
+ lambda x: time.strftime(
+ '%Y-%m-%d %H:%M:%S', time.localtime(x.loc['time_confirmed'])
+ ),
+ axis=1,
+ )
jobs.to_excel('jobs.xlsx')
- print('MAIN NODE FINISHED. Going under!', flush=True)
-
+ print('MAIN NODE FINISHED. Going under!', flush=True) # noqa: T201
+
else:
- worker_exit_flag=None
+ worker_exit_flag = None
while True:
if comm.iprobe(source=0):
status = MPI.Status()
- print('trying to recieve msg. -> rank= '+repr(comm.rank),flush=True)
+ print( # noqa: T201
+ 'trying to receive msg. -> rank= ' + repr(comm.rank),
+ flush=True,
+ )
scenario_index = comm.recv(source=0, status=status)
-
- if status.Get_tag()!=100:
- scenario_name = pipe_damage_list.loc[scenario_index,'Scenario Name']
+
+ if status.Get_tag() != 100: # noqa: PLR2004
+ scenario_name = pipe_damage_list.loc[
+ scenario_index, 'Scenario Name'
+ ]
settings.initializeScenarioSettings(scenario_index)
- print('Rank= '+repr(comm.rank)+' is assigned to '+str(scenario_index)+' : '+str(scenario_name), flush=True)
- #row = pipe_damage_list[pipe_damage_list['scenario_name']==scenario_name]
- row = pipe_damage_list.loc[scenario_index]
- file_name = row['Pipe Damage']
- nodal_name = row['Nodal Damage']
- pump_damage = row['Pump Damage']
+ print( # noqa: T201
+ 'Rank= '
+ + repr(comm.rank)
+ + ' is assigned to '
+ + str(scenario_index)
+ + ' : '
+ + str(scenario_name),
+ flush=True,
+ )
+ # row = pipe_damage_list[pipe_damage_list['scenario_name']==scenario_name]
+ row = pipe_damage_list.loc[scenario_index]
+ file_name = row['Pipe Damage']
+ nodal_name = row['Nodal Damage']
+ pump_damage = row['Pump Damage']
tank_damage_name = row['Tank Damage']
try:
- run_flag = self.run_local_single(file_name, scenario_name, settings, worker_rank=repr(scenario_name)+'_'+repr(comm.rank), nodal_damage_file_name=nodal_name, pump_damage_file_name = pump_damage, tank_damage_file_name = tank_damage_name)
- print('run_flag for worker: '+ repr(comm.rank)+' --> '+repr(run_flag))
+ run_flag = self.run_local_single(
+ file_name,
+ scenario_name,
+ settings,
+ worker_rank=repr(scenario_name)
+ + '_'
+ + repr(comm.rank),
+ nodal_damage_file_name=nodal_name,
+ pump_damage_file_name=pump_damage,
+ tank_damage_file_name=tank_damage_name,
+ )
+ print( # noqa: T201
+ 'run_flag for worker: '
+ + repr(comm.rank)
+ + ' --> '
+ + repr(run_flag)
+ )
comm.isend(run_flag, dest=0)
- except Exception as e:
-
+ except Exception: # noqa: BLE001
error_dump_file = None
- if type(scenario_name) == str:
- error_dump_file = "dump_"+scenario_name+".pkl"
+ if type(scenario_name) == str: # noqa: E721
+ error_dump_file = 'dump_' + scenario_name + '.pkl'
else:
- error_dump_file = "dump_"+repr(scenario_name)+".pkl"
-
- with open(error_dump_file, "wb") as f:
+ error_dump_file = (
+ 'dump_' + repr(scenario_name) + '.pkl'
+ )
+
+ with open(error_dump_file, 'wb') as f: # noqa: PTH123
pickle.dump(self, f)
-
+
comm.isend(3, dest=0)
- last_time_message_recv=time.time()
+ last_time_message_recv = time.time()
else:
- worker_exit_flag='Death message recieved!'
+ worker_exit_flag = 'Death message received!'
break
-
- if (time.time()-last_time_message_recv) > settings.process['maximun_worker_idle_time']:
- worker_exit_flag='Maximum time reached.'
+
+ if (time.time() - last_time_message_recv) > settings.process[
+ 'maximun_worker_idle_time'
+ ]:
+ worker_exit_flag = 'Maximum time reached.'
break
- print(repr(worker_exit_flag)+" I'm OUT -> Rank= "+repr(comm.rank), flush=True)
-
- def checkArgument(self, argv):
- if len(argv) > 2:
- print("REWET USAGE is as [./REWET Project.prj: optional]")
- if len(argv) == 1:
+ print( # noqa: T201
+ repr(worker_exit_flag) + " I'm OUT -> Rank= " + repr(comm.rank),
+ flush=True,
+ )
+
+ def checkArgument(self, argv): # noqa: D102, N802, PLR6301
+ if len(argv) > 2: # noqa: PLR2004
+ print('REWET USAGE is as [./REWET Project.prj: optional]') # noqa: T201
+ if len(argv) == 1: # noqa: SIM103
return False
- else:
+ else: # noqa: RET505
return True
-if __name__ == "__main__":
+
+if __name__ == '__main__':
import sys
+
start = Starter()
if_project = start.checkArgument(sys.argv)
if if_project:
- if os.path.exists(sys.argv[1]):
+ if os.path.exists(sys.argv[1]): # noqa: PTH110
tt = start.run(sys.argv[1])
else:
- print("Project file address is not valid: " + repr(sys.argv[1]) )
+ print('Project file address is not valid: ' + repr(sys.argv[1])) # noqa: T201
else:
tt = start.run()
diff --git a/modules/systemPerformance/REWET/REWET/main.py b/modules/systemPerformance/REWET/REWET/main.py
index 28a5ce460..440a40336 100644
--- a/modules/systemPerformance/REWET/REWET/main.py
+++ b/modules/systemPerformance/REWET/REWET/main.py
@@ -1,70 +1,81 @@
-# -*- coding: utf-8 -*-
-"""
-Created on Wed Jan 10 14:23:09 2024
+"""Created on Wed Jan 10 14:23:09 2024
@author: snaeimi
-This is the main final to run REWET. This file superscedes intital.py to run
+This is the main file to run REWET. This file supersedes initial.py to run
REWET. In order to keep the backward compatibility, the initial.py is kept,
so one can run initial.py to run REWET. Currently, REWET's GUI still works with
initial.py. Main.py is going to be the most developed tool.
-"""
+""" # noqa: CPY001, D400
-import sys
-import os
import argparse
+import os
+import sys
+
from initial import Starter
-if __name__ == "__main__":
- argParser = argparse.ArgumentParser(prog="REWET V0.2",
- description="REstoration tool for Restoration of Water after Event Tool is a package for modeling damages and restoration in water network. You can specify settings in with providing a JSON. An exampel JSON file is provided in example folder. Modify the exampel folder and provide its path as an input. If not provided, the default settings valeus from the input/settings.py will be ran. thus, you can alterbatively modify values in settings for a single run."
- )
-
- argParser.add_argument("--json", "-j", default=None,
- help="json settings file")
-
- argParser.add_argument("--project", "-p", default=None,
- help="REWET project file")
-
+if __name__ == '__main__':
+ argParser = argparse.ArgumentParser( # noqa: N816
+ prog='REWET V0.2',
+ description='REstoration tool for Restoration of Water after Event Tool is a package for modeling damages and restoration in water networks. You can specify settings by providing a JSON file. An example JSON file is provided in the example folder. Modify the example file and provide its path as an input. If not provided, the default settings values from input/settings.py will be used. Thus, you can alternatively modify values in settings for a single run.',
+ )
+
+ argParser.add_argument('--json', '-j', default=None, help='json settings file')
+
+ argParser.add_argument(
+ '--project', '-p', default=None, help='REWET project file'
+ )
+
parse_namespace = argParser.parse_args()
-
+
starter = Starter()
- # No file is pecified, so the default values in settinsg file is going to
+ # No file is specified, so the default values in the settings file are going to
# be used.
-
- if parse_namespace.json == None and parse_namespace.project == None:
+
+ if parse_namespace.json == None and parse_namespace.project == None: # noqa: E711
import warnings
+
with warnings.catch_warnings():
warnings.simplefilter(action='ignore', category=FutureWarning)
starter.run()
sys.exit(0)
- elif parse_namespace.json != None and parse_namespace.project == None:
- if parse_namespace.json.split(".")[-1].upper() != "JSON":
- print("ERROR in json file name: ", parse_namespace.json,
- "The json file must have json extention")
+ elif parse_namespace.json != None and parse_namespace.project == None: # noqa: E711
+ if parse_namespace.json.split('.')[-1].upper() != 'JSON':
+ print( # noqa: T201
+ 'ERROR in json file name: ',
+ parse_namespace.json,
+ 'The json file must have a json extension',
+ )
sys.exit(0)
- elif not os.path.exists(parse_namespace.json):
- print("ERROR in json file: ", parse_namespace.json,
- "does not exist")
+ elif not os.path.exists(parse_namespace.json): # noqa: PTH110
+ print('ERROR in json file: ', parse_namespace.json, 'does not exist') # noqa: T201
else:
starter.run(parse_namespace.json)
-
- elif parse_namespace.json == None and parse_namespace.project != None:
- if parse_namespace.project.split(".")[-1].upper() != "PRJ":
- print("ERROR in project file name: ", parse_namespace.project,
- "The project file must have PRJ extention")
+
+ elif parse_namespace.json == None and parse_namespace.project != None: # noqa: E711
+ if parse_namespace.project.split('.')[-1].upper() != 'PRJ':
+ print( # noqa: T201
+ 'ERROR in project file name: ',
+ parse_namespace.project,
+ 'The project file must have a PRJ extension',
+ )
sys.exit(0)
- elif not os.path.exists(parse_namespace.project):
- print("ERROR in project file: ", parse_namespace.project,
- "does not exist")
+ elif not os.path.exists(parse_namespace.project): # noqa: PTH110
+ print( # noqa: T201
+ 'ERROR in project file: ', parse_namespace.project, 'does not exist'
+ )
else:
starter.run(parse_namespace.project)
-
+
else:
- print("ERROR in arguments\n",
- "Either of the json or project file arguments must be used")
-
+ print( # noqa: T201
+ 'ERROR in arguments\n',
+ 'Either of the json or project file arguments must be used',
+ )
+
else:
- print("Main File has been ran with not being the main module (i.e.,\
- __name__ is not \"__main__\"")
\ No newline at end of file
+ print( # noqa: T201
+ 'Main file has been run without being the main module (i.e.,\
+ __name__ is not "__main__")'
+ )
diff --git a/modules/systemPerformance/REWET/REWET/repair.py b/modules/systemPerformance/REWET/REWET/repair.py
index 4eae8177a..cff05efe2 100644
--- a/modules/systemPerformance/REWET/REWET/repair.py
+++ b/modules/systemPerformance/REWET/REWET/repair.py
@@ -1,360 +1,578 @@
-# -*- coding: utf-8 -*-
-"""
-Created on Tue Feb 2 20:22:09 2021
+"""Created on Tue Feb 2 20:22:09 2021
@author: snaeimi
-"""
+""" # noqa: CPY001, D400
+
+import math
+from collections import OrderedDict
from wntrfr.network.model import LinkStatus
-from collections import OrderedDict
-LINK_TYPE_COLLECTIVES = {"BYPASS_PIPE", "ADDED_PIPE_A", "ADDED_PIPE_B", "ADDED_PIPE_C", "ADDED_PIPE_D", "ADDED_PUMP_A", "ADDED_PUMP_B", "PIPE_CLOSED_FROM_OPEN", "PIPE_CLOSED_FROM_CV"}
-NODE_TYPE_COLLECTIVES = {"MIDDLE_NODE_A", "MIDDLE_NODE_B", "ADDED_RESERVOIR_A", "ADDED_RESERVOIR_B"}
-NON_COLLECTIVES = {"NODE_DEMAND_AFTER","NODE_DEMAND_BEFORE", "NODE_A_DEMAND_BEFORE", "NODE_A_DEMAND_AFTER", "NODE_B_DEMAND_BEFORE", "NODE_B_DEMAND_AFTER", "NODE_A", "NODE_B", "NON_COL_ADDED_PIPE", "NON_COL_PIPE_CLOSED_FROM_OPEN", "NON_COL_PIPE_CLOSED_FROM_CV"}
-NC_FALSE_FLAG = {'NODE_A', 'NODE_B', 'NODE_A_DEMAND_BEFORE', 'NODE_A_DEMAND_AFTER', 'NODE_B_DEMAND_BEFORE', 'NODE_B_DEMAND_AFTER', "NODE_DEMAND_AFTER","NODE_DEMAND_BEFORE"}
+LINK_TYPE_COLLECTIVES = {
+ 'BYPASS_PIPE',
+ 'ADDED_PIPE_A',
+ 'ADDED_PIPE_B',
+ 'ADDED_PIPE_C',
+ 'ADDED_PIPE_D',
+ 'ADDED_PUMP_A',
+ 'ADDED_PUMP_B',
+ 'PIPE_CLOSED_FROM_OPEN',
+ 'PIPE_CLOSED_FROM_CV',
+}
+NODE_TYPE_COLLECTIVES = {
+ 'MIDDLE_NODE_A',
+ 'MIDDLE_NODE_B',
+ 'ADDED_RESERVOIR_A',
+ 'ADDED_RESERVOIR_B',
+}
+NON_COLLECTIVES = {
+ 'NODE_DEMAND_AFTER',
+ 'NODE_DEMAND_BEFORE',
+ 'NODE_A_DEMAND_BEFORE',
+ 'NODE_A_DEMAND_AFTER',
+ 'NODE_B_DEMAND_BEFORE',
+ 'NODE_B_DEMAND_AFTER',
+ 'NODE_A',
+ 'NODE_B',
+ 'NON_COL_ADDED_PIPE',
+ 'NON_COL_PIPE_CLOSED_FROM_OPEN',
+ 'NON_COL_PIPE_CLOSED_FROM_CV',
+}
+NC_FALSE_FLAG = {
+ 'NODE_A',
+ 'NODE_B',
+ 'NODE_A_DEMAND_BEFORE',
+ 'NODE_A_DEMAND_AFTER',
+ 'NODE_B_DEMAND_BEFORE',
+ 'NODE_B_DEMAND_AFTER',
+ 'NODE_DEMAND_AFTER',
+ 'NODE_DEMAND_BEFORE',
+}
+
-class Repair():
+class Repair: # noqa: D101
def __init__(self, registry):
self._registry = registry
-
-
-
- def closeSecondLeakingPipe(self, damage_node_name, wn):
- if self._registry.getDamageData('PIPE',False).loc[damage_node_name,'damage_type'] != 'leak':
- raise ValueError('Damage type is not leak in node '+damage_node_name)
-
- pipe_A_name, pipe_B_name, orginal_pipe_name = self._registry.getLeakData(damage_node_name)
- pipe_B = wn.get_link(pipe_B_name)
-
- pipe_B.status = LinkStatus.Closed
+
+ def closeSecondLeakingPipe(self, damage_node_name, wn): # noqa: N802, D102
+ if (
+ self._registry.getDamageData('PIPE', False).loc[ # noqa: FBT003
+ damage_node_name, 'damage_type'
+ ]
+ != 'leak'
+ ):
+ raise ValueError('Damage type is not leak in node ' + damage_node_name)
+
+ pipe_A_name, pipe_B_name, orginal_pipe_name = self._registry.getLeakData( # noqa: F841, N806
+ damage_node_name
+ )
+ pipe_B = wn.get_link(pipe_B_name) # noqa: N806
+
+ pipe_B.status = LinkStatus.Closed
pipe_B.initial_status = LinkStatus.Closed
-
-
-
- def bypassPipe(self, damage_node_name, middle_pipe_size, damage_type, wn, length=None, friction=None):
- #if self._registry.getDamageData('PIPE',False).loc[damage_node_name,'damage_type'] != 'leak':
- #raise ValueError('Damage type is not leak in node '+damage_node_name)
-
- if damage_type=='leak':
- pipe_A_name, pipe_B_name, orginal_pipe_name = self._registry.getLeakData(damage_node_name)
- elif damage_type=='break':
- pipe_A_name, pipe_B_name, orginal_pipe_name, node_A_name, node_B_name = self._registry.getBreakData(damage_node_name)
+
+ def bypassPipe( # noqa: N802, D102
+ self,
+ damage_node_name,
+ middle_pipe_size,
+ damage_type,
+ wn,
+ length=None,
+ friction=None,
+ ):
+ # if self._registry.getDamageData('PIPE',False).loc[damage_node_name,'damage_type'] != 'leak':
+ # raise ValueError('Damage type is not leak in node '+damage_node_name)
+
+ if damage_type == 'leak':
+ pipe_A_name, pipe_B_name, orginal_pipe_name = self._registry.getLeakData( # noqa: N806
+ damage_node_name
+ )
+ elif damage_type == 'break':
+ pipe_A_name, pipe_B_name, orginal_pipe_name, node_A_name, node_B_name = ( # noqa: F841, N806
+ self._registry.getBreakData(damage_node_name)
+ )
org_pipe_data = self._registry.getOriginalPipenodes(orginal_pipe_name)
-
- orginal_node_A_name = org_pipe_data['start_node_name']
- orginal_node_B_name = org_pipe_data['end_node_name']
- orginal_pipe_length = org_pipe_data['length']
+
+ orginal_node_A_name = org_pipe_data['start_node_name'] # noqa: N806
+ orginal_node_B_name = org_pipe_data['end_node_name'] # noqa: N806
+ orginal_pipe_length = org_pipe_data['length']
orginal_pipe_roughness = org_pipe_data['roughness']
-
- if length != None:
+
+ if length != None: # noqa: E711
pipe_length = length
else:
pipe_length = orginal_pipe_length
-
- if friction != None:
+
+ if friction != None: # noqa: E711
pipe_friction = friction
else:
pipe_friction = orginal_pipe_roughness
- new_pipe_name = orginal_pipe_name+'-Byp'
+ new_pipe_name = orginal_pipe_name + '-Byp'
if middle_pipe_size > 0:
- wn.add_pipe(new_pipe_name, orginal_node_A_name, orginal_node_B_name, length=pipe_length, diameter=middle_pipe_size, roughness=pipe_friction)
-
- #For the sake of multiple damages in one pipe the following line is marked the the line after it is added
-
-
-
- damage_data = self._registry.getDamageData('pipe' ,iCopy=False)
- redefined_damage_data = damage_data[damage_data['Orginal_element']==orginal_pipe_name]
-
+ wn.add_pipe(
+ new_pipe_name,
+ orginal_node_A_name,
+ orginal_node_B_name,
+ length=pipe_length,
+ diameter=middle_pipe_size,
+ roughness=pipe_friction,
+ )
+
+ # For the sake of multiple damages in one pipe, the following line is marked and the line after it is added
+
+ damage_data = self._registry.getDamageData('pipe', iCopy=False)
+ redefined_damage_data = damage_data[
+ damage_data['Orginal_element'] == orginal_pipe_name
+ ]
+
for cur_damage_node_name, cur_damage in redefined_damage_data.iterrows():
- history=OrderedDict()
+ history = OrderedDict()
if middle_pipe_size > 0:
- history['BYPASS_PIPE'] = new_pipe_name #Bypass pipe doesn't get removed unless all damages in the orginal pipe is removed
+ history['BYPASS_PIPE'] = (
+ new_pipe_name # Bypass pipe doesn't get removed unless all damages in the original pipe are removed
+ )
cur_damage_type = cur_damage['damage_type']
- if cur_damage_type=='leak':
- pipe_A_name, pipe_B_name, orginal_pipe_name = self._registry.getLeakData(cur_damage_node_name)
-
- pipe_B = wn.get_link(pipe_B_name)
+ if cur_damage_type == 'leak':
+ pipe_A_name, pipe_B_name, orginal_pipe_name = ( # noqa: F841, N806
+ self._registry.getLeakData(cur_damage_node_name)
+ )
- elif cur_damage_type=='break':
+ pipe_B = wn.get_link(pipe_B_name) # noqa: N806, F841
+
+ elif cur_damage_type == 'break':
pass
-
+
else:
- raise ValueError('Unrecognozed damaged type: '+ cur_damage_type)
-
- self._registry.addFunctionDataToRestorationRegistry(cur_damage_node_name, history, 'bypass')
-
- #local reconnection, for instance, for fire truck reconnection
- def reconnectPipe(self, damage_node_name, middle_pipe_size, damage_type, wn):
- history=OrderedDict()
-
- if damage_type=='leak':
- pipe_A_name, pipe_B_name, orginal_pipe_name = self._registry.getLeakData(damage_node_name)
-
- pipe_A = wn.get_link(pipe_A_name)
- pipe_B = wn.get_link(pipe_B_name)
+ raise ValueError('Unrecognized damage type: ' + cur_damage_type)
+
+ self._registry.addFunctionDataToRestorationRegistry(
+ cur_damage_node_name, history, 'bypass'
+ )
+
+ # local reconnection, for instance, for fire truck reconnection
+ def reconnectPipe(self, damage_node_name, middle_pipe_size, damage_type, wn): # noqa: N802, D102
+ history = OrderedDict()
+
+ if damage_type == 'leak':
+ pipe_A_name, pipe_B_name, orginal_pipe_name = self._registry.getLeakData( # noqa: N806
+ damage_node_name
+ )
+
+ pipe_A = wn.get_link(pipe_A_name) # noqa: N806
+ pipe_B = wn.get_link(pipe_B_name) # noqa: N806
if pipe_A.status == 1:
history['NON_COL_PIPE_CLOSED_FROM_OPEN'] = pipe_A_name
- elif pipe_A.status == 3:
- history['NON_COL_PIPE_CLOSED_FROM_CV'] = pipe_A_name
- pipe_A.initial_status=LinkStatus(0)
-
- if middle_pipe_size==None:
- middle_pipe_size=pipe_A.diameter
-
- beg_node_of_pipe_A = pipe_A.start_node
- end_node_of_pipe_B = pipe_B.end_node
+ elif pipe_A.status == 3: # noqa: PLR2004
+ history['NON_COL_PIPE_CLOSED_FROM_CV'] = pipe_A_name
+ pipe_A.initial_status = LinkStatus(0)
+
+ if middle_pipe_size == None: # noqa: E711
+ middle_pipe_size = pipe_A.diameter
+
+ beg_node_of_pipe_A = pipe_A.start_node # noqa: N806
+ end_node_of_pipe_B = pipe_B.end_node # noqa: N806
new_length = pipe_A.length + pipe_B.length
-
- #For the sake of multiple damages in one pipe the following line is marked the the line after it is added
- new_pipe_name = pipe_B_name+'-Red'
-
- wn.add_pipe(new_pipe_name, beg_node_of_pipe_A.name, end_node_of_pipe_B.name, length=new_length, diameter=middle_pipe_size, roughness=pipe_A.roughness)
-
- history['NON_COL_ADDED_PIPE']=new_pipe_name
-
- elif damage_type=='break':
- pipe_A_name, pipe_B_name, orginal_pipe_name, node_A_name, node_B_name = self._registry.getBreakData(damage_node_name)
-
- pipe_A = wn.get_link(pipe_A_name)
- pipe_B = wn.get_link(pipe_B_name)
-
- if middle_pipe_size==None:
- middle_pipe_size=pipe_A.diameter
-
- beg_node_of_pipe_A = pipe_A.start_node
- end_node_of_pipe_B = pipe_B.end_node
+
+ # For the sake of multiple damages in one pipe, the following line is marked and the line after it is added
+ new_pipe_name = pipe_B_name + '-Red'
+
+ wn.add_pipe(
+ new_pipe_name,
+ beg_node_of_pipe_A.name,
+ end_node_of_pipe_B.name,
+ length=new_length,
+ diameter=middle_pipe_size,
+ roughness=pipe_A.roughness,
+ )
+
+ history['NON_COL_ADDED_PIPE'] = new_pipe_name
+
+ elif damage_type == 'break':
+ pipe_A_name, pipe_B_name, orginal_pipe_name, node_A_name, node_B_name = ( # noqa: F841, N806
+ self._registry.getBreakData(damage_node_name)
+ )
+
+ pipe_A = wn.get_link(pipe_A_name) # noqa: N806
+ pipe_B = wn.get_link(pipe_B_name) # noqa: N806
+
+ if middle_pipe_size == None: # noqa: E711
+ middle_pipe_size = pipe_A.diameter
+
+ beg_node_of_pipe_A = pipe_A.start_node # noqa: N806
+ end_node_of_pipe_B = pipe_B.end_node # noqa: N806
new_length = pipe_A.length + pipe_B.length
-
- #For the sake of multiple damages in one pipe the following line is marked the the line after it is added
- new_pipe_name = pipe_B_name+'-Red'
-
- wn.add_pipe(new_pipe_name, beg_node_of_pipe_A.name, end_node_of_pipe_B.name, length=new_length, diameter=middle_pipe_size, roughness=pipe_A.roughness)
- history['NON_COL_ADDED_PIPE']=new_pipe_name
-
+
+ # For the sake of multiple damages in one pipe, the following line is marked and the line after it is added
+ new_pipe_name = pipe_B_name + '-Red'
+
+ wn.add_pipe(
+ new_pipe_name,
+ beg_node_of_pipe_A.name,
+ end_node_of_pipe_B.name,
+ length=new_length,
+ diameter=middle_pipe_size,
+ roughness=pipe_A.roughness,
+ )
+ history['NON_COL_ADDED_PIPE'] = new_pipe_name
+
else:
- raise ValueError('Unrecognozed damaged type: '+ damage_type)
-
- self._registry.addFunctionDataToRestorationRegistry(damage_node_name, history, 'reconnect')
-
- def removeLeak(self, damage_node_name, damage_type, wn, factor=1):
+ raise ValueError('Unrecognized damage type: ' + damage_type)
+
+ self._registry.addFunctionDataToRestorationRegistry(
+ damage_node_name, history, 'reconnect'
+ )
+
+ def removeLeak(self, damage_node_name, damage_type, wn, factor=1): # noqa: C901, N802, D102
history = OrderedDict()
-
+
opening = 1 - factor
-
+
damage_data = self._registry.getDamageData('pipe', iCopy=False)
- orginal_pipe_name = damage_data.loc[damage_node_name,'Orginal_element']
- refined_damage_data = damage_data[damage_data['Orginal_element']==orginal_pipe_name]
-
+ orginal_pipe_name = damage_data.loc[damage_node_name, 'Orginal_element']
+ refined_damage_data = damage_data[
+ damage_data['Orginal_element'] == orginal_pipe_name
+ ]
+
damaged_node_name_list_on_orginal_pipe = refined_damage_data.index.to_list()
damage_type_list = refined_damage_data['damage_type'].to_dict()
-
+
for cur_damage_node_name in damaged_node_name_list_on_orginal_pipe:
cur_damage_type = damage_type_list[cur_damage_node_name]
-
+
if cur_damage_type == 'leak':
- pipe_A_name, pipe_B_name, orginal_pipe_name = self._registry.getLeakData(cur_damage_node_name)
-
- node_A = wn.get_node(cur_damage_node_name)
-
+ pipe_A_name, pipe_B_name, orginal_pipe_name = ( # noqa: N806
+ self._registry.getLeakData(cur_damage_node_name)
+ )
+
+ node_A = wn.get_node(cur_damage_node_name) # noqa: N806
+
if pipe_B_name in wn.pipe_name_list:
- pipe_B = wn.get_link(pipe_B_name)
-
+ pipe_B = wn.get_link(pipe_B_name) # noqa: N806
+
if pipe_B.status == 1:
- history['PIPE_CLOSED_FROM_OPEN']=pipe_B_name
- elif pipe_B.status == 3:
- history['PIPE_CLOSED_FROM_CV']=pipe_B_name
-
+ history['PIPE_CLOSED_FROM_OPEN'] = pipe_B_name
+ elif pipe_B.status == 3: # noqa: PLR2004
+ history['PIPE_CLOSED_FROM_CV'] = pipe_B_name
+
pipe_B.initial_status = LinkStatus(0)
- history['NODE_A_DEMAND_BEFORE'] = node_A._leak_area
- node_A_leak_area = opening * node_A._leak_area
+ history['NODE_A_DEMAND_BEFORE'] = node_A._leak_area # noqa: SLF001
+ node_A_leak_area = opening * node_A._leak_area # noqa: SLF001, N806
node_A.add_leak(wn, node_A_leak_area, discharge_coeff=1)
- history['NODE_A_DEMAND_AFTER'] = node_A._leak_area
-
- if abs(opening) < 0.001:
+ history['NODE_A_DEMAND_AFTER'] = node_A._leak_area # noqa: SLF001
+
+ if abs(opening) < 0.001: # noqa: PLR2004
node_A.remove_leak(wn)
history['NODE_A'] = 'REMOVED'
else:
history['NODE_A'] = 'REDUCED'
-
+
elif cur_damage_type == 'break':
- pipe_A_name, pipe_B_name, orginal_pipe_name, node_A_name, node_B_name = self._registry.getBreakData(cur_damage_node_name)
+ (
+ pipe_A_name, # noqa: F841, N806
+ pipe_B_name, # noqa: N806
+ orginal_pipe_name,
+ node_A_name, # noqa: N806
+ node_B_name, # noqa: N806
+ ) = self._registry.getBreakData(cur_damage_node_name)
if cur_damage_node_name != node_A_name:
- raise ValueError("Cur damage and pipe_name are not the same: " +repr(cur_damage_node_name) +" - "+repr(node_A_name))
-
- node_A = wn.get_node(cur_damage_node_name)
-
- history['NODE_A_DEMAND_BEFORE'] = node_A._leak_area
- node_A_leak_area = opening * node_A._leak_area
+ raise ValueError(
+ 'Cur damage and pipe_name are not the same: '
+ + repr(cur_damage_node_name)
+ + ' - '
+ + repr(node_A_name)
+ )
+
+ node_A = wn.get_node(cur_damage_node_name) # noqa: N806
+
+ history['NODE_A_DEMAND_BEFORE'] = node_A._leak_area # noqa: SLF001
+ node_A_leak_area = opening * node_A._leak_area # noqa: SLF001, N806
node_A.add_leak(wn, node_A_leak_area, discharge_coeff=1)
-
- history['NODE_A_DEMAND_AFTER'] = node_A._leak_area
-
- if abs(opening) < 0.001:
+
+ history['NODE_A_DEMAND_AFTER'] = node_A._leak_area # noqa: SLF001
+
+ if abs(opening) < 0.001: # noqa: PLR2004
node_A.remove_leak(wn)
- node_A._leak_area = 0
+ node_A._leak_area = 0 # noqa: SLF001
history['NODE_A'] = 'REMOVED'
else:
history['NODE_A'] = 'REDUCED'
- node_B = wn.get_node(node_B_name)
-
- history['NODE_B_DEMAND_BEFORE']=node_B._leak_area
- node_B_leak_area = opening * node_B._leak_area
+ node_B = wn.get_node(node_B_name) # noqa: N806
+
+ history['NODE_B_DEMAND_BEFORE'] = node_B._leak_area # noqa: SLF001
+ node_B_leak_area = opening * node_B._leak_area # noqa: SLF001, N806
node_B.add_leak(wn, node_B_leak_area, discharge_coeff=1)
- history['NODE_B_DEMAND_AFTER']=node_B._leak_area
-
- if abs(opening) < 0.001:
+ history['NODE_B_DEMAND_AFTER'] = node_B._leak_area # noqa: SLF001
+
+ if abs(opening) < 0.001: # noqa: PLR2004
node_B.remove_leak(wn)
- node_B._leak_area = 0
+ node_B._leak_area = 0 # noqa: SLF001
history['NODE_B'] = 'REMOVED'
else:
history['NODE_B'] = 'REDUCED'
-
+
else:
- raise ValueError('Unknown Damage type:'+repr(damage_type))
-
- self._registry.addFunctionDataToRestorationRegistry(damage_node_name, history, 'removeLeak')
-
- def addReservoir(self, damage_node_name, damage_type, _type, pump, wn):
+ raise ValueError('Unknown damage type: ' + repr(damage_type))
+
+ self._registry.addFunctionDataToRestorationRegistry(
+ damage_node_name, history, 'removeLeak'
+ )
+
+ def addReservoir(self, damage_node_name, damage_type, _type, pump, wn): # noqa: C901, N802, D102
history = OrderedDict()
- if damage_type=='leak':
- pipe_A_name, pipe_B_name, orginal_pipe_name = self._registry.getLeakData(damage_node_name)
- elif damage_type=='break':
- pipe_A_name, pipe_B_name, orginal_pipe_name, node_A_name, node_B_name = self._registry.getBreakData(damage_node_name)
+ if damage_type == 'leak':
+ pipe_A_name, pipe_B_name, orginal_pipe_name = self._registry.getLeakData( # noqa: N806
+ damage_node_name
+ )
+ elif damage_type == 'break':
+ pipe_A_name, pipe_B_name, orginal_pipe_name, node_A_name, node_B_name = ( # noqa: F841, N806
+ self._registry.getBreakData(damage_node_name)
+ )
else:
- raise ValueError('Unknown damage type in '+damage_node_name+', '+damage_type)
-
- pipe_A = wn.get_link(pipe_A_name)
- pipe_B = wn.get_link(pipe_B_name)
- first_node_pipe_A = pipe_A.start_node
- second_node_pipe_B = pipe_B.end_node
-
- _coord_A = (first_node_pipe_A.coordinates[0]+10, first_node_pipe_A.coordinates[1]+10)
- new_reservoir_A = first_node_pipe_A.name + '-added'
- wn.add_reservoir(new_reservoir_A , base_head = first_node_pipe_A.elevation, coordinates=_coord_A)
-
- _coord_B = (second_node_pipe_B.coordinates[0]+10, second_node_pipe_B.coordinates[1]+10)
- new_reservoir_B = second_node_pipe_B.name + '-added'
- wn.add_reservoir(new_reservoir_B, base_head = second_node_pipe_B.elevation, coordinates=_coord_B)
+ raise ValueError(
+ 'Unknown damage type in ' + damage_node_name + ', ' + damage_type
+ )
+
+ pipe_A = wn.get_link(pipe_A_name) # noqa: N806
+ pipe_B = wn.get_link(pipe_B_name) # noqa: N806
+ first_node_pipe_A = pipe_A.start_node # noqa: N806
+ second_node_pipe_B = pipe_B.end_node # noqa: N806
+
+ _coord_A = ( # noqa: N806
+ first_node_pipe_A.coordinates[0] + 10,
+ first_node_pipe_A.coordinates[1] + 10,
+ )
+ new_reservoir_A = first_node_pipe_A.name + '-added' # noqa: N806
+ wn.add_reservoir(
+ new_reservoir_A,
+ base_head=first_node_pipe_A.elevation,
+ coordinates=_coord_A,
+ )
+
+ _coord_B = ( # noqa: N806
+ second_node_pipe_B.coordinates[0] + 10,
+ second_node_pipe_B.coordinates[1] + 10,
+ )
+ new_reservoir_B = second_node_pipe_B.name + '-added' # noqa: N806
+ wn.add_reservoir(
+ new_reservoir_B,
+ base_head=second_node_pipe_B.elevation,
+ coordinates=_coord_B,
+ )
history['ADDED_RESERVOIR_A'] = new_reservoir_A
history['ADDED_RESERVOIR_B'] = new_reservoir_B
-
- if _type==None:
- _pipe_size = pipe_A.diameter
+
+ if _type == None: # noqa: E711
+ _pipe_size = pipe_A.diameter
new_pipe_name_1 = damage_node_name + '-lK1'
new_pipe_name_2 = damage_node_name + '-lK2'
- wn.add_pipe(new_pipe_name_1, new_reservoir_A, first_node_pipe_A.name, diameter = _pipe_size, length=5, check_valve=True)
- history['ADDED_PIPE_A'] = new_pipe_name_1 #ٌIt Added Pipe is collective now. Won't be removed till all damaegs in the pipe is removed
- wn.add_pipe(new_pipe_name_2, new_reservoir_B, second_node_pipe_B.name, diameter = _pipe_size, length=5, check_valve=True)
- history['ADDED_PIPE_B'] = new_pipe_name_2 #ٌIt Added Pipe is collective now. Won't be removed till all damaegs in the pipe is removed
-
- elif _type=='PUMP':
+ wn.add_pipe(
+ new_pipe_name_1,
+ new_reservoir_A,
+ first_node_pipe_A.name,
+ diameter=_pipe_size,
+ length=5,
+ check_valve=True,
+ )
+ history['ADDED_PIPE_A'] = (
+ new_pipe_name_1 # The added pipe is collective now. It won't be removed until all damages in the pipe are removed
+ )
+ wn.add_pipe(
+ new_pipe_name_2,
+ new_reservoir_B,
+ second_node_pipe_B.name,
+ diameter=_pipe_size,
+ length=5,
+ check_valve=True,
+ )
+ history['ADDED_PIPE_B'] = (
+ new_pipe_name_2 # The added pipe is collective now. It won't be removed until all damages in the pipe are removed
+ )
+
+ elif _type == 'PUMP':
if 'POWER' in pump:
_power = pump['POWER']
new_pump_name_1 = damage_node_name + '-RP1'
new_pump_name_2 = damage_node_name + '-RP2'
- wn.add_pump(new_pump_name_1, new_reservoir_A, first_node_pipe_A.name, pump_parameter = _power)
- wn.add_pump(new_pump_name_2, new_reservoir_B, second_node_pipe_B.name, pump_parameter = _power)
- history['ADDED_PUMP_A'] = new_pump_name_1 #ٌIt Added Pumps is collective now. Won;t be removed till all damaegs in the pipe is removed
- history['ADDED_PUMP_B'] = new_pump_name_2 #ٌIt Added Pumps is collective now. Won;t be removed till all damaegs in the pipe is removed
+ wn.add_pump(
+ new_pump_name_1,
+ new_reservoir_A,
+ first_node_pipe_A.name,
+ pump_parameter=_power,
+ )
+ wn.add_pump(
+ new_pump_name_2,
+ new_reservoir_B,
+ second_node_pipe_B.name,
+ pump_parameter=_power,
+ )
+ history['ADDED_PUMP_A'] = (
+ new_pump_name_1 # The added pumps are collective now. They won't be removed until all damages in the pipe are removed
+ )
+ history['ADDED_PUMP_B'] = (
+ new_pump_name_2 # The added pumps are collective now. They won't be removed until all damages in the pipe are removed
+ )
else:
- raise ValueError('Invalid Pump Type: '+repr(pump.keys()))
+ raise ValueError('Invalid Pump Type: ' + repr(pump.keys()))
elif _type == 'ADDEDELEVATION':
- _pipe_size = pipe_A.diameter
+ _pipe_size = pipe_A.diameter
new_pipe_name_1 = damage_node_name + '-RP1'
new_pipe_name_2 = damage_node_name + '-RP2'
-
+
new_valve_name_1 = damage_node_name + '-RV1'
new_valve_name_2 = damage_node_name + '-RV2'
-
- new_RP_middle_name1 = damage_node_name + '-mn1'
- new_RP_middle_name2 = damage_node_name + '-mn2'
-
- coord1 = (first_node_pipe_A.coordinates[0]+5 , first_node_pipe_A.coordinates[1]+5 )
- coord2 = (second_node_pipe_B.coordinates[0]+5, second_node_pipe_B.coordinates[1]+5)
-
+
+ new_RP_middle_name1 = damage_node_name + '-mn1' # noqa: N806
+ new_RP_middle_name2 = damage_node_name + '-mn2' # noqa: N806
+
+ coord1 = (
+ first_node_pipe_A.coordinates[0] + 5,
+ first_node_pipe_A.coordinates[1] + 5,
+ )
+ coord2 = (
+ second_node_pipe_B.coordinates[0] + 5,
+ second_node_pipe_B.coordinates[1] + 5,
+ )
+
elavation1 = first_node_pipe_A.elevation
elavation2 = second_node_pipe_B.elevation
-
- wn.add_junction(new_RP_middle_name1, elevation=elavation1, coordinates= coord1)
- wn.add_junction(new_RP_middle_name2, elevation=elavation2, coordinates= coord2)
-
- wn.add_pipe(new_pipe_name_1, new_reservoir_A, new_RP_middle_name1, diameter = _pipe_size, length = 1, roughness =100000000, minor_loss = 7, check_valve = True)
- wn.add_pipe(new_pipe_name_2, new_reservoir_B, new_RP_middle_name2, diameter = _pipe_size, length = 1, roughness =100000000, minor_loss = 7, check_valve = True)
-
- wn.add_valve(new_valve_name_1, new_RP_middle_name1, first_node_pipe_A.name, valve_type = 'FCV', setting=0.2500)
- wn.add_valve(new_valve_name_2, new_RP_middle_name2, second_node_pipe_B.name, valve_type = 'FCV', setting=0.2500)
-
- res_A = wn.get_node(new_reservoir_A)
- res_B = wn.get_node(new_reservoir_B)
-
- res_A.base_head = res_A.base_head + 20
- res_B.base_head = res_B.base_head + 20
-
- history['MIDDLE_NODE_A'] = new_RP_middle_name1 #ٌIt Added Pipe is collective now. Won't be removed till all damaegs in the pipe is removed
- history['MIDDLE_NODE_B'] = new_RP_middle_name2 #ٌIt Added Pipe is collective now. Won't be removed till all damaegs in the pipe is removed
- history['ADDED_PIPE_A'] = new_pipe_name_1 #ٌIt Added Pipe is collective now. Won't be removed till all damaegs in the pipe is removed
- history['ADDED_PIPE_B'] = new_pipe_name_2 #ٌIt Added Pipe is collective now. Won't be removed till all damaegs in the pipe is removed
- history['ADDED_PIPE_C'] = new_valve_name_1 #ٌIt Added Pipe is collective now. Won't be removed till all damaegs in the pipe is removed
- history['ADDED_PIPE_D'] = new_valve_name_2 #ٌIt Added Pipe is collective now. Won't be removed till all damaegs in the pipe is removed
-
+
+ wn.add_junction(
+ new_RP_middle_name1, elevation=elavation1, coordinates=coord1
+ )
+ wn.add_junction(
+ new_RP_middle_name2, elevation=elavation2, coordinates=coord2
+ )
+
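+            # Explanatory note: the two helper pipes below are 1 m long with a very
+            # large roughness coefficient, so their friction loss is effectively
+            # negligible; flow out of each added reservoir is governed mainly by the
+            # check valve and the minor-loss coefficient of 7.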
+ wn.add_pipe(
+ new_pipe_name_1,
+ new_reservoir_A,
+ new_RP_middle_name1,
+ diameter=_pipe_size,
+ length=1,
+ roughness=100000000,
+ minor_loss=7,
+ check_valve=True,
+ )
+ wn.add_pipe(
+ new_pipe_name_2,
+ new_reservoir_B,
+ new_RP_middle_name2,
+ diameter=_pipe_size,
+ length=1,
+ roughness=100000000,
+ minor_loss=7,
+ check_valve=True,
+ )
+
+ wn.add_valve(
+ new_valve_name_1,
+ new_RP_middle_name1,
+ first_node_pipe_A.name,
+ valve_type='FCV',
+ setting=0.2500,
+ )
+ wn.add_valve(
+ new_valve_name_2,
+ new_RP_middle_name2,
+ second_node_pipe_B.name,
+ valve_type='FCV',
+ setting=0.2500,
+ )
+
+ res_A = wn.get_node(new_reservoir_A) # noqa: N806
+ res_B = wn.get_node(new_reservoir_B) # noqa: N806
+
+ res_A.base_head = res_A.base_head + 20 # noqa: PLR6104
+ res_B.base_head = res_B.base_head + 20 # noqa: PLR6104
+
+            history['MIDDLE_NODE_A'] = (
+                new_RP_middle_name1  # Added middle nodes are collective; they are not removed until all damages in the pipe are removed
+            )
+            history['MIDDLE_NODE_B'] = (
+                new_RP_middle_name2  # Added middle nodes are collective; they are not removed until all damages in the pipe are removed
+            )
+            history['ADDED_PIPE_A'] = (
+                new_pipe_name_1  # Added pipes are collective; they are not removed until all damages in the pipe are removed
+            )
+            history['ADDED_PIPE_B'] = (
+                new_pipe_name_2  # Added pipes are collective; they are not removed until all damages in the pipe are removed
+            )
+            history['ADDED_PIPE_C'] = (
+                new_valve_name_1  # Added valves are collective; they are not removed until all damages in the pipe are removed
+            )
+            history['ADDED_PIPE_D'] = (
+                new_valve_name_2  # Added valves are collective; they are not removed until all damages in the pipe are removed
+            )
+
else:
- raise ValueError('Unknown Reservoir type')
-
- damage_data = self._registry.getDamageData('pipe' ,iCopy=False)
- redefined_damage_data = damage_data[damage_data['Orginal_element']==orginal_pipe_name]
-
+ raise ValueError('Unknown Reservoir type') # noqa: EM101, TRY003
+
+ damage_data = self._registry.getDamageData('pipe', iCopy=False)
+ redefined_damage_data = damage_data[
+ damage_data['Orginal_element'] == orginal_pipe_name
+ ]
+
for cur_damage_node_name, cur_damage in redefined_damage_data.iterrows():
cur_damage_type = cur_damage['damage_type']
- if cur_damage_type=='leak':
- pipe_A_name, pipe_B_name, orginal_pipe_name = self._registry.getLeakData(cur_damage_node_name)
-
- pipe_B = wn.get_link(pipe_B_name)
-
+ if cur_damage_type == 'leak':
+ pipe_A_name, pipe_B_name, orginal_pipe_name = ( # noqa: N806
+ self._registry.getLeakData(cur_damage_node_name)
+ )
+
+ pipe_B = wn.get_link(pipe_B_name) # noqa: N806
+
if pipe_B.status == 1:
history['PIPE_CLOSED_FROM_OPEN'] = pipe_B_name
- elif pipe_B.status == 3:
- history['PIPE_CLOSED_FROM_CV'] = pipe_B_name
-
+ elif pipe_B.status == 3: # noqa: PLR2004
+ history['PIPE_CLOSED_FROM_CV'] = pipe_B_name
+
pipe_B.initial_status = LinkStatus(0)
elif cur_damage_type == 'break':
pass
else:
- raise ValueError('Unrecognozed damaged type: '+ cur_damage_type)
-
- self._registry.addFunctionDataToRestorationRegistry(damage_node_name, history, 'addReservoir')
-
- def removeDemand(self, node_name, factor, wn):
- history=OrderedDict()
-
+                raise ValueError('Unrecognized damage type: ' + cur_damage_type)
+
+ self._registry.addFunctionDataToRestorationRegistry(
+ damage_node_name, history, 'addReservoir'
+ )
+
+ def removeDemand(self, node_name, factor, wn): # noqa: N802, D102
+ history = OrderedDict()
+
if factor < 0 or factor > 1:
- raise ValueError('In node '+node_name+' factor is not valid: '+repr(factor))
-
- demand_after_removal_factor = (1-factor)
+ raise ValueError(
+ 'In node ' + node_name + ' factor is not valid: ' + repr(factor)
+ )
+
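+        # `factor` is the fraction of the current base demand to remove, so the node
+        # keeps (1 - factor) of its current base demand after this operation.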
+ demand_after_removal_factor = 1 - factor
node = wn.get_node(node_name)
cur_demand = node.demand_timeseries_list[0].base_value
- #self._registry.getDamageData('DISTNODE', iCopy=False).loc[node_name,'Demand1'] = cur_demand
+ # self._registry.getDamageData('DISTNODE', iCopy=False).loc[node_name,'Demand1'] = cur_demand
history['NODE_DEMAND_BEFORE'] = cur_demand
- if abs(cur_demand)<0:
- return ValueError('Node '+repr(node_name)+' is has zerovalue: '+repr(cur_demand))
-
+ if abs(cur_demand) < 0:
+ return ValueError(
+                'Node ' + repr(node_name) + ' has zero demand: ' + repr(cur_demand)
+ )
+
new_demand = demand_after_removal_factor * cur_demand
node.demand_timeseries_list[0].base_value = new_demand
history['NODE_DEMAND_AFTER'] = new_demand
-
- self._registry.addFunctionDataToRestorationRegistry(node_name, history, 'removeDemand')
-
- def removeExplicitNodalLeak(self, node_name, factor, wn):
+
+ self._registry.addFunctionDataToRestorationRegistry( # noqa: RET503
+ node_name, history, 'removeDemand'
+ )
+
+ def removeExplicitNodalLeak(self, node_name, factor, wn): # noqa: N802, D102
history = OrderedDict()
damage_data = self._registry.getEquavalantDamageHistory(node_name)
pipe_name = damage_data['new_pipe_name']
-
+
current_number_of_damages = damage_data['current_number_of_damage']
- if factor ==1:
+ if factor == 1:
pipe = wn.get_link(pipe_name)
pipe.cv = False
pipe.initial_status = LinkStatus(0)
@@ -363,262 +581,356 @@ def removeExplicitNodalLeak(self, node_name, factor, wn):
ned = self._registry.nodal_equavalant_diameter
pipe = wn.get_link(pipe_name)
diameter = pipe.diameter
- diameter = (factor**0.5)*current_number_of_damages*ned
+ diameter = (factor**0.5) * current_number_of_damages * ned
history['EXPLICIT_PIPE_DIAMETER_CHANAGED'] = diameter
pipe.diameter = diameter
-
- self._registry.addFunctionDataToRestorationRegistry(node_name, history, 'removeExplicitLeak')
-
- def removeNodeTemporaryRepair(self, damage_node_name, wn):
+
+ self._registry.addFunctionDataToRestorationRegistry(
+ node_name, history, 'removeExplicitLeak'
+ )
+
+ def removeNodeTemporaryRepair(self, damage_node_name, wn): # noqa: N802, D102
if_damage_removed = False
-
- restoration_table = self._registry._restoration_table
- selected_restoration_table = restoration_table[restoration_table['node_name']==damage_node_name]
-
- for ind, rec_id in selected_restoration_table.record_index.items():
- change_list = self._registry._record_registry[rec_id]
-
- for change, name in ((k, change_list[k]) for k in reversed(change_list)):
-
- if 'removeExplicitLeak' == change:
+
+ restoration_table = self._registry._restoration_table # noqa: SLF001
+ selected_restoration_table = restoration_table[
+ restoration_table['node_name'] == damage_node_name
+ ]
+
+ for ind, rec_id in selected_restoration_table.record_index.items(): # noqa: B007, PERF102
+ change_list = self._registry._record_registry[rec_id] # noqa: SLF001
+
+ for change, name in ((k, change_list[k]) for k in reversed(change_list)): # noqa: B007
+ if change == 'removeExplicitLeak':
pass
- elif 'NODE_DEMAND_AFTER' == change or 'NODE_DEMAND_BEFORE' == change:
- if self._registry.settings['damage_node_model'] == 'Predefined_demand':
+ elif change == 'NODE_DEMAND_AFTER' or change == 'NODE_DEMAND_BEFORE': # noqa: PLR1714
+ if (
+ self._registry.settings['damage_node_model']
+ == 'Predefined_demand'
+ ):
self.repair.reduceDemand()
- elif self._registry.settings['damage_node_model'] == 'equal_diameter_emitter':
- self.restoreDistributionOrginalDemand(damage_node_name, wn)
- elif self._registry.settings['damage_node_model'] == 'equal_diameter_reservoir':
+ elif (
+ self._registry.settings['damage_node_model']
+ == 'equal_diameter_emitter'
+ ) or (
+ self._registry.settings['damage_node_model']
+ == 'equal_diameter_reservoir'
+ ):
self.restoreDistributionOrginalDemand(damage_node_name, wn)
else:
- raise ValueError("unknow method")
-
-
- if if_damage_removed == False:
+                        raise ValueError('unknown method')  # noqa: EM101, TRY003
+
+ if if_damage_removed == False: # noqa: E712
self.removeDISTNodeExplicitLeak(damage_node_name, wn)
-
- def removePipeRepair(self, damaged_node_name, wn, action):
- restoration_table = self._registry._restoration_table
- selected_restoration_table = restoration_table[restoration_table['node_name']==damaged_node_name]
-
- for ind, rec_id in selected_restoration_table.record_index.items():
- change_list = self._registry._record_registry[rec_id]
-
- to_pop_list=[]
-
-
+
+ def removePipeRepair(self, damaged_node_name, wn, action): # noqa: C901, N802, D102
+ restoration_table = self._registry._restoration_table # noqa: SLF001
+ selected_restoration_table = restoration_table[
+ restoration_table['node_name'] == damaged_node_name
+ ]
+
+ for ind, rec_id in selected_restoration_table.record_index.items(): # noqa: PLR1702
+ change_list = self._registry._record_registry[rec_id] # noqa: SLF001
+
+ to_pop_list = []
+
for change, name in ((k, change_list[k]) for k in reversed(change_list)):
- flag=True
- if 'ADDED_PIPE' == change or 'ADDED_PUMP' == change:
+ flag = True
+ if change == 'ADDED_PIPE' or change == 'ADDED_PUMP': # noqa: PLR1714
wn.remove_link(name)
-
+
i_link_collective = False
i_node_collective = False
if change in LINK_TYPE_COLLECTIVES:
i_link_collective = True
if change in NODE_TYPE_COLLECTIVES:
i_node_collective = True
-
+
if i_link_collective or i_node_collective:
- damage_data = self._registry.getDamageData('pipe', iCopy=False)
- orginal_pipe_name = damage_data.loc[damaged_node_name, 'Orginal_element']
- refined_damage_data = damage_data[(damage_data['Orginal_element']==orginal_pipe_name) & (damage_data['discovered']==True)]
- if (refined_damage_data[action]==True).all():
-
+ damage_data = self._registry.getDamageData('pipe', iCopy=False)
+ orginal_pipe_name = damage_data.loc[
+ damaged_node_name, 'Orginal_element'
+ ]
+ refined_damage_data = damage_data[
+ (damage_data['Orginal_element'] == orginal_pipe_name)
+ & (damage_data['discovered'] == True) # noqa: E712
+ ]
+ if (refined_damage_data[action] == True).all(): # noqa: E712
if i_link_collective:
- if change == 'BYPASS_PIPE':
- wn.remove_link(name)
- elif change == 'ADDED_PIPE_A':
- wn.remove_link(name)
- elif change == 'ADDED_PIPE_B':
- wn.remove_link(name)
- elif change == 'ADDED_PIPE_C':
- wn.remove_link(name)
- elif change == 'ADDED_PIPE_D':
- wn.remove_link(name)
- elif change == 'ADDED_PUMP_A':
- wn.remove_link(name)
- elif change == 'ADDED_PUMP_B':
+ if (
+ change == 'BYPASS_PIPE' # noqa: PLR0916, PLR1714
+ or change == 'ADDED_PIPE_A'
+ or (
+ change == 'ADDED_PIPE_B' # noqa: PLR1714
+ or change == 'ADDED_PIPE_C'
+ )
+ or (
+ change == 'ADDED_PIPE_D' # noqa: PLR1714
+ or change == 'ADDED_PUMP_A'
+ or change == 'ADDED_PUMP_B'
+ )
+ ):
wn.remove_link(name)
elif change == 'PIPE_CLOSED_FROM_OPEN':
if name in wn.pipe_name_list:
- wn.get_link(name).initial_status=LinkStatus(1)
+ wn.get_link(name).initial_status = LinkStatus(1)
elif change == 'PIPE_CLOSED_FROM_CV':
if name in wn.pipe_name_list:
- wn.get_link(name).initial_status=LinkStatus(3)
+ wn.get_link(name).initial_status = LinkStatus(3)
else:
- raise ValueError('Unknown change indicator in restoration registry: '+repr(change))
-
+ raise ValueError(
+ 'Unknown change indicator in restoration registry: '
+ + repr(change)
+ )
+
elif i_node_collective:
wn.remove_node(name)
else:
- raise ValueError('Unknown change indicator in restoration registry: '+repr(change))
-
+ raise ValueError(
+ 'Unknown change indicator in restoration registry: '
+ + repr(change)
+ )
+
elif change in NON_COLLECTIVES:
-
if change == 'NON_COL_ADDED_PIPE':
wn.remove_link(name)
elif change == 'NON_COL_PIPE_CLOSED_FROM_OPEN':
- wn.get_link(name).initial_status=LinkStatus(1)
+ wn.get_link(name).initial_status = LinkStatus(1)
elif change in NC_FALSE_FLAG:
- flag=False
+ flag = False
else:
- raise ValueError('Unknown change indicator in restoration registry: '+repr(change))
-
+ raise ValueError(
+ 'Unknown change indicator in restoration registry: '
+ + repr(change)
+ )
+
else:
- raise ValueError('Unknown change indicator in restoration registry: '+repr(change))
-
+ raise ValueError(
+ 'Unknown change indicator in restoration registry: '
+ + repr(change)
+ )
+
if flag:
to_pop_list.append(change)
-
+
for pop_key in to_pop_list:
change_list.pop(pop_key)
-
- if len(change_list)==0:
- restoration_table.drop(ind, inplace=True)
-
-
- def repairPipe(self, damage_node_name, damage_type, wn):
- if damage_type=='leak':
-
- pipe_A_name, pipe_B_name = self._registry.getCertainLeakData(damage_node_name, wn)
-
- pipe_A = wn.get_link(pipe_A_name)
- pipe_B = wn.get_link(pipe_B_name)
-
- end_node_of_pipe_B = pipe_B.end_node
+
+ if len(change_list) == 0:
+ restoration_table.drop(ind, inplace=True) # noqa: PD002
+
+ def repairPipe(self, damage_node_name, damage_type, wn): # noqa: N802, D102
+ if damage_type == 'leak':
+ pipe_A_name, pipe_B_name = self._registry.getCertainLeakData( # noqa: N806
+ damage_node_name, wn
+ )
+
+ pipe_A = wn.get_link(pipe_A_name) # noqa: N806
+ pipe_B = wn.get_link(pipe_B_name) # noqa: N806
+
+ end_node_of_pipe_B = pipe_B.end_node # noqa: N806
new_length = pipe_A.length + pipe_B.length
-
- pipe_A.length = new_length
+
+ pipe_A.length = new_length
pipe_A.end_node = end_node_of_pipe_B
-
+
wn.remove_link(pipe_B_name)
- wn.remove_node(damage_node_name,with_control=True)
-
-
- elif damage_type=='break':
- pipe_A_name, pipe_B_name, node_A_name, node_B_name = self._registry.getCertainBreakData(damage_node_name, wn)
-
- pipe_A = wn.get_link(pipe_A_name)
- pipe_B = wn.get_link(pipe_B_name)
-
- end_node_of_pipe_B = pipe_B.end_node
+ wn.remove_node(damage_node_name, with_control=True)
+
+ elif damage_type == 'break':
+ pipe_A_name, pipe_B_name, node_A_name, node_B_name = ( # noqa: N806
+ self._registry.getCertainBreakData(damage_node_name, wn)
+ )
+
+ pipe_A = wn.get_link(pipe_A_name) # noqa: N806
+ pipe_B = wn.get_link(pipe_B_name) # noqa: N806
+
+ end_node_of_pipe_B = pipe_B.end_node # noqa: N806
new_length = pipe_A.length + pipe_B.length
-
- pipe_A.length = new_length
+
+ pipe_A.length = new_length
pipe_A.end_node = end_node_of_pipe_B
-
+
wn.remove_link(pipe_B_name)
- wn.remove_node(node_A_name,with_control=True)
- wn.remove_node(node_B_name,with_control=True)
-
- def restorePumps(self, pump_name_list, wn):
+ wn.remove_node(node_A_name, with_control=True)
+ wn.remove_node(node_B_name, with_control=True)
+
+ def restorePumps(self, pump_name_list, wn): # noqa: D102, N802, PLR6301
for pump_name in pump_name_list:
- wn.get_link(pump_name).initial_status=LinkStatus(1)
-
- def restoreTanks(self, tank_name_list, wn):
+ wn.get_link(pump_name).initial_status = LinkStatus(1)
+
+ def restoreTanks(self, tank_name_list, wn): # noqa: D102, N802, PLR6301
for tank_name in tank_name_list:
- made_up_mid_node_name = tank_name+'_tank_mid'
- made_up_pipe_name = tank_name+'_tank_mid_pipe'
-
+ made_up_mid_node_name = tank_name + '_tank_mid'
+ made_up_pipe_name = tank_name + '_tank_mid_pipe'
+
wn.remove_link(made_up_pipe_name)
-
- link_name_list_connected_to_node = wn.get_links_for_node(made_up_mid_node_name)
-
+
+ link_name_list_connected_to_node = wn.get_links_for_node(
+ made_up_mid_node_name
+ )
+
tank_node = wn.get_node(tank_name)
for link_name in link_name_list_connected_to_node:
-
link = wn.get_link(link_name)
if made_up_mid_node_name == link.start_node.name:
link.start_node = tank_node
elif made_up_mid_node_name == link.end_node.name:
link.end_node = tank_node
-
- wn.remove_node(made_up_mid_node_name,with_control=True)
-
-
-
-
- def removeDISTNodeIsolation(self, damaged_node_name, wn):
- post_incident_node_demand = self._registry.getDamageData('DISTNODE').loc[damaged_node_name,'Demand2']
-
+
+ wn.remove_node(made_up_mid_node_name, with_control=True)
+
+ def removeDISTNodeIsolation(self, damaged_node_name, wn): # noqa: N802, D102
+ post_incident_node_demand = self._registry.getDamageData('DISTNODE').loc[
+ damaged_node_name, 'Demand2'
+ ]
+
node = wn.get_node(damaged_node_name)
node.demand_timeseries_list[0].base_value = post_incident_node_demand
-
- def restoreDistributionOrginalDemand(self, damaged_node_name, wn):
+
+ def restoreDistributionOrginalDemand(self, damaged_node_name, wn): # noqa: N802, D102
if self._registry.settings['damage_node_model'] == 'Predefined_demand':
- pre_incident_node_demand = self._registry.getDamageData('DISTNODE', iCopy=False).loc[damaged_node_name,'Demand1']
- elif self._registry.settings['damage_node_model'] == 'equal_diameter_emitter' or self._registry.settings['damage_node_model'] == 'equal_diameter_reservoir':
+ pre_incident_node_demand = self._registry.getDamageData(
+ 'DISTNODE', iCopy=False
+ ).loc[damaged_node_name, 'Demand1']
+ elif (
+ self._registry.settings['damage_node_model'] == 'equal_diameter_emitter'
+ or self._registry.settings['damage_node_model']
+ == 'equal_diameter_reservoir'
+ ):
damage_table = self._registry.getDamageData('DISTNODE', iCopy=False)
- virtual_nodes_damage_tabel = damage_table[damage_table['virtual_of'] == damaged_node_name]
+ virtual_nodes_damage_tabel = damage_table[
+ damage_table['virtual_of'] == damaged_node_name
+ ]
pre_incident_node_demand = virtual_nodes_damage_tabel.iloc[0]['Demand1']
else:
- raise ValueError("unknow method")
-
+            raise ValueError('unknown method')  # noqa: EM101, TRY003
+
node = wn.get_node(damaged_node_name)
node.demand_timeseries_list[0].base_value = pre_incident_node_demand
-
- def removeDISTNodeExplicitLeak(self, damaged_node_name, wn):
+
+ def removeDISTNodeExplicitLeak(self, damaged_node_name, wn): # noqa: N802, D102
temp = self._registry.active_nodal_damages
- value_key = {v:k for k, v in temp.items()}
+ value_key = {v: k for k, v in temp.items()}
_key = value_key[damaged_node_name]
self._registry.active_nodal_damages.pop(_key)
-
+
temp = self._registry.getEquavalantDamageHistory(damaged_node_name)
- pipe_name = temp['new_pipe_name']
- reservoir_name = temp['new_node_name']
+ pipe_name = temp['new_pipe_name']
+ reservoir_name = temp['new_node_name']
wn.remove_link(pipe_name)
wn.remove_node(reservoir_name, with_control=True)
if reservoir_name in wn.node_name_list:
- raise
-
+ raise # noqa: PLE0704
+
self._registry.removeEquavalantDamageHistory(damaged_node_name)
-
- def modifyDISTNodeDemandLinearMode(self, damage_node_name, real_node_name, wn, repaired_number, total_number):
+
+ def modifyDISTNodeDemandLinearMode( # noqa: N802, D102
+ self,
+ damage_node_name,
+ real_node_name,
+ wn,
+ repaired_number,
+ total_number,
+ ):
damage_table = self._registry.getDamageData('DISTNODE', iCopy=False)
- pre_incident_demand = damage_table.loc[damage_node_name, 'Demand1']
+ pre_incident_demand = damage_table.loc[damage_node_name, 'Demand1']
post_incident_demand = damage_table.loc[damage_node_name, 'Demand2']
- delta = (total_number - repaired_number)/total_number * (post_incident_demand - pre_incident_demand)
+ delta = (
+ (total_number - repaired_number)
+ / total_number
+ * (post_incident_demand - pre_incident_demand)
+ )
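+        # As repairs progress, the demand moves linearly from the post-incident value
+        # back toward the pre-incident value; once all damages are repaired the
+        # pre-incident demand is fully restored.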
new_demand = pre_incident_demand + delta
node = wn.get_node(real_node_name)
node.demand_timeseries_list[0].base_value = new_demand
-
- def modifyDISTNodeExplicitLeakEmitter(self, damage_node_name, real_node_name, wn, repaired_number, total_number):
- nodal_data = self._registry._nodal_data[real_node_name]
+
+ def modifyDISTNodeExplicitLeakEmitter( # noqa: N802, D102
+ self,
+ damage_node_name,
+ real_node_name,
+ wn,
+ repaired_number,
+ total_number,
+ ):
+ nodal_data = self._registry._nodal_data[real_node_name] # noqa: SLF001
pipe_length = nodal_data['pipe_length']
mean_pressure = nodal_data['mean_pressure']
new_node_name = nodal_data['new_node_name']
- orginal_flow = nodal_data['orginal_flow']
+ orginal_flow = nodal_data['orginal_flow']
number_of_damages = total_number - repaired_number
- cd, mp0 = self._registry.damage.getEmitterCdAndElevation(real_node_name, wn, number_of_damages, pipe_length, mean_pressure, orginal_flow)
+ cd, mp0 = self._registry.damage.getEmitterCdAndElevation( # noqa: F841
+ real_node_name,
+ wn,
+ number_of_damages,
+ pipe_length,
+ mean_pressure,
+ orginal_flow,
+ )
node = wn.get_node(new_node_name)
-
- #print(real_node_name)
- if cd >= node._emitter_coefficient:
- raise ValueError("something wrong here: "+repr(cd)+" - "+repr(node._emitter_coefficient)+" "+str(damage_node_name)+" "+str(real_node_name))
-
- node._emitter_coefficient = cd
-
- def modifyDISTNodeExplicitLeakReservoir(self, damage_node_name, real_node_name, wn, repaired_number, total_number):
- nodal_data = self._registry._nodal_data[real_node_name]
+
+ # print(real_node_name)
+ if cd >= node._emitter_coefficient: # noqa: SLF001
+ raise ValueError(
+                'New emitter coefficient is not smaller than the current one: '
+ + repr(cd)
+ + ' - '
+ + repr(node._emitter_coefficient) # noqa: SLF001
+ + ' '
+ + str(damage_node_name)
+ + ' '
+ + str(real_node_name)
+ )
+
+ node._emitter_coefficient = cd # noqa: SLF001
+
+ def modifyDISTNodeExplicitLeakReservoir( # noqa: N802, D102
+ self,
+ damage_node_name, # noqa: ARG002
+ real_node_name,
+ wn,
+ repaired_number,
+ total_number,
+ ):
+ nodal_data = self._registry._nodal_data[real_node_name] # noqa: SLF001
pipe_length = nodal_data['pipe_length']
mean_pressure = nodal_data['mean_pressure']
pipe_name = nodal_data['new_pipe_name']
- orginal_flow = nodal_data['orginal_flow']
+ orginal_flow = nodal_data['orginal_flow']
number_of_damages = total_number - repaired_number
- cd, mp0 = self._registry.damage.getEmitterCdAndElevation(real_node_name, wn, number_of_damages, pipe_length, mean_pressure, orginal_flow)
- node = wn.get_node(real_node_name)
+ cd, mp0 = self._registry.damage.getEmitterCdAndElevation( # noqa: F841
+ real_node_name,
+ wn,
+ number_of_damages,
+ pipe_length,
+ mean_pressure,
+ orginal_flow,
+ )
+ node = wn.get_node(real_node_name) # noqa: F841
q = orginal_flow
- nd = self._registry.damage.getNd(mean_pressure, number_of_damages, total_number)
- equavalant_pipe_diameter = ( ((nd-1)*q)**2 /(0.125*9.81*3.14**2 * mean_pressure) )**(1/4) * 1
+ nd = self._registry.damage.getNd(
+ mean_pressure, number_of_damages, total_number
+ )
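+        # Note on the expression below: it solves the orifice relation
+        # Q = (pi * d**2 / 4) * sqrt(2 * g * H) for the diameter d, i.e.
+        # d = (Q**2 / (0.125 * g * pi**2 * H)) ** (1 / 4), with Q taken as
+        # (nd - 1) * orginal_flow and H as mean_pressure (unit discharge
+        # coefficient assumed).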
+ equavalant_pipe_diameter = ( # noqa: F841
+ ((nd - 1) * q) ** 2 / (0.125 * 9.81 * math.pi**2 * mean_pressure)
+ ) ** (1 / 4) * 1
pipe = wn.get_link(pipe_name)
- #if equavalant_pipe_diameter >= pipe.diameter:
- #raise ValueError("something wrong here: "+repr(equavalant_pipe_diameter)+" - "+repr(pipe.diameter))
- pipe.diameter = pipe.diameter / 2
-
- def modifyDISTNodeExplicitLeak(self, real_damage_node_name, virtual_node_name, wn, method, damaged_number):
- if method=='equal_diameter':
- emitter_name = self._registry.virtual_node_data[virtual_node_name]['emitter_node']
- node = wn.get_node(emitter_name)
-
\ No newline at end of file
+ # if equavalant_pipe_diameter >= pipe.diameter:
+ # raise ValueError("something wrong here: "+repr(equavalant_pipe_diameter)+" - "+repr(pipe.diameter))
+ pipe.diameter = pipe.diameter / 2 # noqa: PLR6104
+
+ def modifyDISTNodeExplicitLeak( # noqa: N802, D102
+ self,
+ real_damage_node_name, # noqa: ARG002
+ virtual_node_name,
+ wn,
+ method,
+ damaged_number, # noqa: ARG002
+ ):
+ if method == 'equal_diameter':
+ emitter_name = self._registry.virtual_node_data[virtual_node_name][
+ 'emitter_node'
+ ]
+ node = wn.get_node(emitter_name) # noqa: F841
diff --git a/modules/systemPerformance/REWET/REWET/restoration/base.py b/modules/systemPerformance/REWET/REWET/restoration/base.py
index d3471f7f5..fba5a594f 100644
--- a/modules/systemPerformance/REWET/REWET/restoration/base.py
+++ b/modules/systemPerformance/REWET/REWET/restoration/base.py
@@ -1,95 +1,111 @@
-# -*- coding: utf-8 -*-
-"""
-Created on Fri Dec 25 04:00:43 2020
+"""Created on Fri Dec 25 04:00:43 2020
@author: snaeimi
-"""
+""" # noqa: CPY001, D400, INP001
+
+import copy
+import logging
+import random
+
import networkx as nx
-import pandas as pd
import numpy as np
-import random
-import logging
-import copy
+import pandas as pd
logger = logging.getLogger(__name__)
-def get_node_name(node_name, table):
- if "virtual_of" in table.columns:
- real_node_name = table.loc[node_name, "virtual_of"]
- if real_node_name == None or real_node_name==np.nan: #SINA: probably NP.NAN does not work here. Correct it.
+
+def get_node_name(node_name, table): # noqa: D103
+ if 'virtual_of' in table.columns:
+ real_node_name = table.loc[node_name, 'virtual_of']
+ if (
+ real_node_name == None or real_node_name == np.nan # noqa: E711, PLR1714, PLW0177
+        ):  # SINA: comparison with np.nan via == is always False; use pd.isna() instead. Correct it.
real_node_name = node_name
return real_node_name
- else:
+ else: # noqa: RET505
return node_name
-class Coordination():
- def __init__(self,X=None, Y=None, system=None):
- self.x = X
- self.y = Y
+
+class Coordination: # noqa: D101
+ def __init__(self, X=None, Y=None, system=None): # noqa: N803
+ self.x = X
+ self.y = Y
self.system = system
-
- def set_coord(self, X,Y, system=None):
+
+ def set_coord(self, X, Y, system=None): # noqa: ARG002, N803, D102
self.x = X
self.y = Y
-
- def get_coord(self):
- return (self.x , self.y)
-
- def set_system(self, system):
+
+ def get_coord(self): # noqa: D102
+ return (self.x, self.y)
+
+ def set_system(self, system): # noqa: D102
self.system = system
-
-class Location():
+
+
+class Location: # noqa: D101
def __init__(self, name, x, y):
self.name = name
- self.coord=Coordination(x,y)
-
+ self.coord = Coordination(x, y)
+
+
# =============================================================================
# class restoration_base():
# def __init__(self):
# self.coord = coordination()
# self.ID = None
# self.Object_typ = None
-#
+#
# =============================================================================
-
-
-class AgentData():
- def __init__(self, agent_name, agent_type, cur_x, cur_y, shift_name, base_name, base_x, base_y, shift_obj , agent_speed):
-
- if type(agent_type) != str:
- raise ValueError('agent type must be string')
- #if type(definition) != pd.Series:
- #raise ValueError('definiton must be a Pandas series')
-
- if type(cur_x) != float:
- raise ValueError("cur_x must be float")
- if type(cur_y) != float:
- raise ValueError("cur_y must be float")
- if type(base_x) != float:
- raise ValueError("base_x must be float")
- if type(base_y) != float:
- raise ValueError("base_y must be float")
-
- self.name = agent_name
- self.agent_type = agent_type
- self.current_location = Location('current', cur_x, cur_y)
- self.base_location = Location(base_name, base_x, base_y)
- self.shift = AgentShift(self.name, shift_name)
- self._shifting = shift_obj
- self._avg_speed = agent_speed #20*3/3.6
- self.isWorking = False
- self.cur_job_location = None
- self.cur_job_action = None
- self.cur_job_entity = None
- self._time_of_arival = None
+
+
+class AgentData: # noqa: D101
+ def __init__(
+ self,
+ agent_name,
+ agent_type,
+ cur_x,
+ cur_y,
+ shift_name,
+ base_name,
+ base_x,
+ base_y,
+ shift_obj,
+ agent_speed,
+ ):
+ if type(agent_type) != str: # noqa: E721
+ raise ValueError('agent type must be string') # noqa: EM101, TRY003
+ # if type(definition) != pd.Series:
+ # raise ValueError('definiton must be a Pandas series')
+
+ if type(cur_x) != float: # noqa: E721
+ raise ValueError('cur_x must be float') # noqa: EM101, TRY003
+ if type(cur_y) != float: # noqa: E721
+ raise ValueError('cur_y must be float') # noqa: EM101, TRY003
+ if type(base_x) != float: # noqa: E721
+ raise ValueError('base_x must be float') # noqa: EM101, TRY003
+ if type(base_y) != float: # noqa: E721
+ raise ValueError('base_y must be float') # noqa: EM101, TRY003
+
+ self.name = agent_name
+ self.agent_type = agent_type
+ self.current_location = Location('current', cur_x, cur_y)
+ self.base_location = Location(base_name, base_x, base_y)
+ self.shift = AgentShift(self.name, shift_name)
+ self._shifting = shift_obj
+ self._avg_speed = agent_speed # 20*3/3.6
+ self.isWorking = False
+ self.cur_job_location = None
+ self.cur_job_action = None
+ self.cur_job_entity = None
+ self._time_of_arival = None
self._time_of_job_done = None
- self.cur_job_ongoing = None
+ self.cur_job_ongoing = None
self.cur_job_effect_definition_name = None
- self.cur_job_method_name = None
-
- def isOnShift(self, time):
- """
- Checks if a time is on an agent's shift
+ self.cur_job_method_name = None
+
+ def isOnShift(self, time): # noqa: N802
+ """Checks if a time is on an agent's shift
Parameters
----------
@@ -101,104 +117,109 @@ def isOnShift(self, time):
bool
Is true if the time is on the agent's shift.
- """
-
- shift_name = self.shift._shift_name
+ """ # noqa: D400, D401
+ shift_name = self.shift._shift_name # noqa: SLF001
(time_start, time_finish) = self._shifting.getShiftTimes(shift_name)
-
- if type(time) != int and type(time) != float:
- raise ValueError('time must be integer ' + type(time))
-
+
+ if type(time) != int and type(time) != float: # noqa: E721
+            raise ValueError('time must be a number: ' + str(type(time)))  # noqa: DOC501
+
time = int(time)
- time = time % (24*3600)
-
+ time = time % (24 * 3600) # noqa: PLR6104
+
if time_start > time_finish:
- new_time_finish = time_finish + 24*3600
+ new_time_finish = time_finish + 24 * 3600
time_finish = new_time_finish
if time < time_start:
- time = time + 24*3600
-
- if time >= time_start and time < time_finish:
+ time = time + 24 * 3600 # noqa: PLR6104
+
+ if time >= time_start and time < time_finish: # noqa: SIM103
return True
- else:
+ else: # noqa: RET505
return False
-
- def getDistanceFromCoordinate(self, destination_coordination):
- coord = self.current_location.coord.get_coord()
- cur_x = coord[0]
- cur_y = coord[1]
-
- dest_x = destination_coordination[0]
- dest_y = destination_coordination[1]
-
- distance = ((cur_x-dest_x)**2+(cur_y-dest_y)**2)**0.5
- return distance
-
- def _estimateTimeOfArival(self, destination_coordination):
- distance_with_method_of_choice = self.getDistanceFromCoordinate(destination_coordination)
+
+ def getDistanceFromCoordinate(self, destination_coordination): # noqa: N802, D102
+ coord = self.current_location.coord.get_coord()
+ cur_x = coord[0]
+ cur_y = coord[1]
+
+ dest_x = destination_coordination[0]
+ dest_y = destination_coordination[1]
+
+ distance = ((cur_x - dest_x) ** 2 + (cur_y - dest_y) ** 2) ** 0.5
+ return distance # noqa: RET504
+
+ def _estimateTimeOfArival(self, destination_coordination): # noqa: N802
+ distance_with_method_of_choice = self.getDistanceFromCoordinate(
+ destination_coordination
+ )
time = distance_with_method_of_choice / self._avg_speed
-
- return time
-
- def getAgentShiftEndTime(self, cur_time):
- num_of_days = int(cur_time/(24*3600))
-
- shift_name = self.shift._shift_name
+
+ return time # noqa: RET504
+
+ def getAgentShiftEndTime(self, cur_time): # noqa: N802, D102
+ num_of_days = int(cur_time / (24 * 3600))
+
+ shift_name = self.shift._shift_name # noqa: SLF001
(time_start, time_finish) = self._shifting.getShiftTimes(shift_name)
-
- if time_start < time_finish:
- return time_finish+24*3600*num_of_days
- else:
- if cur_time%(24*3600)<=time_finish:
- return time_finish+24*3600*(num_of_days)
- else:
- return time_finish+24*3600*(num_of_days+1)
-
- def getShiftLength(self):
-
- shift_name = self.shift._shift_name
+
+ if time_start < time_finish or cur_time % (24 * 3600) <= time_finish:
+ return time_finish + 24 * 3600 * num_of_days
+ else: # noqa: RET505
+ return time_finish + 24 * 3600 * (num_of_days + 1)
+
+ def getShiftLength(self): # noqa: N802, D102
+ shift_name = self.shift._shift_name # noqa: SLF001
(time_start, time_finish) = self._shifting.getShiftTimes(shift_name)
-
- if time_start temp.index.max():
return temp[temp.index.max()]
- else:
+ else: # noqa: RET505
temp[time] = np.nan
- temp.sort_index(inplace = True)
- temp.interpolate(method='index', inplace = True)
+ temp.sort_index(inplace=True) # noqa: PD002
+ temp.interpolate(method='index', inplace=True) # noqa: PD002
return temp[time]
- def getDefaultAvailabilityRatio(agent_type, self):
- if agent_type == "WQOperator" or agent_type == "WQWorker":
+
+ def getDefaultAvailabilityRatio(agent_type, self): # noqa: ARG002, N802, N805, D102
+ if agent_type == 'WQOperator' or agent_type == 'WQWorker': # noqa: PLR1714
return 0
- else:
+ else: # noqa: RET505
return 1
-
- def assignsJobToAgent(self, agent_name, node_name, entity, action, time, wn, reminded_time , number_of_damages, orginal_element):
- if self._agents.loc[agent_name, 'active'] != True:
+
+ def assignsJobToAgent( # noqa: C901, N802, D102
+ self,
+ agent_name,
+ node_name,
+ entity,
+ action,
+ time,
+ wn,
+ reminded_time,
+ number_of_damages,
+ orginal_element,
+ ):
+ if self._agents.loc[agent_name, 'active'] != True: # noqa: E712
raise ValueError('Agent ' + agent_name + ' is not active')
- if self._agents.loc[agent_name, 'ready'] != True:
+ if self._agents.loc[agent_name, 'ready'] != True: # noqa: E712
raise ValueError('Agent ' + agent_name + ' is not ready')
-
- if self._agents.loc[agent_name, 'data'].isOnShift(time) != True:
+
+ if self._agents.loc[agent_name, 'data'].isOnShift(time) != True: # noqa: E712
raise ValueError('Agent ' + agent_name + ' is not on shift')
-
- if self._agents.loc[agent_name, 'data'].isWorking == True:
+
+ if self._agents.loc[agent_name, 'data'].isWorking == True: # noqa: E712
raise ValueError('Agent ' + agent_name + ' is working')
-
- #logger.debug('Assiging job to '+agent_name)
+
+ # logger.debug('Assiging job to '+agent_name)
real_node_name = node_name
- if self._jobs._rm.entity[entity]=='DISTNODE':
- damage_data = self._jobs._rm._registry.getDamageData('DISTNODE', iCopy=False)
+ if self._jobs._rm.entity[entity] == 'DISTNODE': # noqa: SLF001
+ damage_data = self._jobs._rm._registry.getDamageData( # noqa: SLF001
+ 'DISTNODE', iCopy=False
+ )
if 'virtual_of' in damage_data.columns:
real_node_name = get_node_name(node_name, damage_data)
-
+
coord = wn.get_node(real_node_name).coordinates
agent_type = self._agents.loc[agent_name, 'type']
-
- _ETA = self._agents.loc[agent_name, 'data']._estimateTimeOfArival(coord)
- effect_definition_name = self._jobs.getEffectDefinitionName(agent_type, action, entity)
- method_name = self._jobs.chooseMethodForCurrentJob(node_name, effect_definition_name, entity)
-
- if method_name==None:
- raise ValueError("No method is applicale for " + repr(effect_definition_name))
-
- if reminded_time==None:
- _ETJ = self._jobs.getAJobEstimate(orginal_element, agent_type, entity, action, method_name, number_of_damages)
+
+ _ETA = self._agents.loc[agent_name, 'data']._estimateTimeOfArival(coord) # noqa: SLF001, N806
+ effect_definition_name = self._jobs.getEffectDefinitionName(
+ agent_type, action, entity
+ )
+ method_name = self._jobs.chooseMethodForCurrentJob(
+ node_name, effect_definition_name, entity
+ )
+
+ if method_name == None: # noqa: E711
+ raise ValueError(
+                'No method is applicable for ' + repr(effect_definition_name)
+ )
+
+ if reminded_time == None: # noqa: E711
+ _ETJ = self._jobs.getAJobEstimate( # noqa: N806
+ orginal_element,
+ agent_type,
+ entity,
+ action,
+ method_name,
+ number_of_damages,
+ )
else:
- _ETJ=int(reminded_time)
- if reminded_time<0:
- raise ValueError('Something wrong here: '+repr(reminded_time))
-
-
-
- if effect_definition_name!='CHECK':
- method_line = self._jobs._effect_data[effect_definition_name][method_name]
+ _ETJ = int(reminded_time) # noqa: N806
+ if reminded_time < 0:
+                raise ValueError('Negative remaining job time: ' + repr(reminded_time))
+
+ if effect_definition_name != 'CHECK':
+ method_line = self._jobs._effect_data[effect_definition_name][ # noqa: SLF001
+ method_name
+ ]
else:
- method_line = [{'EFFECT':'CHECK'}]
-
+ method_line = [{'EFFECT': 'CHECK'}]
+
effects_only = [i['EFFECT'] for i in method_line]
-
+
collective = None
if 'SKIP' in effects_only:
- return (False, "SKIP", None, collective)
- elif 'FASTCHECK' in effects_only:
- return (False, "FASTCHECK", None, collective)
+ return (False, 'SKIP', None, collective)
+ elif 'FASTCHECK' in effects_only: # noqa: RET505
+ return (False, 'FASTCHECK', None, collective)
elif 'RECONNECT' in effects_only:
- collective='BYPASS'
+ collective = 'BYPASS'
elif 'ADD_RESERVOIR' in effects_only:
- collective='ADD_RESERVOIR'
+ collective = 'ADD_RESERVOIR'
elif 'REMOVE_LEAK' in effects_only:
- collective='REMOVE_LEAK'
- elif "ISOLATE_DN" in effects_only:
- collective='ISOLATE_DN'
-
+ collective = 'REMOVE_LEAK'
+ elif 'ISOLATE_DN' in effects_only:
+ collective = 'ISOLATE_DN'
+
if _ETA < 0 or _ETJ <= 0:
- print(str(_ETA)+ ' '+str(effect_definition_name)+ ' '+str(orginal_element))
- print(str(method_name)+ ' '+str(_ETJ))
- raise ValueError('Subzero ETA or sub-equal-zero ETJ')
-
- end_time = time + _ETA + _ETJ
- agent_shift_change_time = self._agents.loc[agent_name,'data'].getAgentShiftEndTime(time)
- shift_length = self._agents.loc[agent_name,'data'].getShiftLength()
-
- minimum_job_time = self._jobs._rm._registry.settings['minimum_job_time']
- if end_time<=agent_shift_change_time:
- iget="INSIDE_SHIFT"
- iOnGoing=False
- elif end_time>agent_shift_change_time and _ETJ > (shift_length-2*3600) and (time + _ETA + 2*3600) < agent_shift_change_time:
- iget="OUTSIDE_SHIFT"
- iOnGoing=True
+ print( # noqa: T201
+ str(_ETA)
+ + ' '
+ + str(effect_definition_name)
+ + ' '
+ + str(orginal_element)
+ )
+ print(str(method_name) + ' ' + str(_ETJ)) # noqa: T201
+ raise ValueError('Subzero ETA or sub-equal-zero ETJ') # noqa: EM101, TRY003
+
+ end_time = time + _ETA + _ETJ
+ agent_shift_change_time = self._agents.loc[
+ agent_name, 'data'
+ ].getAgentShiftEndTime(time)
+ shift_length = self._agents.loc[agent_name, 'data'].getShiftLength()
+
+ minimum_job_time = self._jobs._rm._registry.settings['minimum_job_time'] # noqa: SLF001, F841
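+        # Scheduling outcome: the job is accepted inside the shift if it can finish
+        # before the agent's shift ends; it is accepted as an ongoing (carried-over)
+        # job only when the job alone exceeds the shift length minus two hours and
+        # the agent would still arrive more than two hours before the shift end;
+        # otherwise the agent is reported as short of time.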
+ if end_time <= agent_shift_change_time:
+ iget = 'INSIDE_SHIFT'
+ iOnGoing = False # noqa: N806
+ elif (
+ end_time > agent_shift_change_time
+ and (shift_length - 2 * 3600) < _ETJ
+ and (time + _ETA + 2 * 3600) < agent_shift_change_time
+ ):
+ iget = 'OUTSIDE_SHIFT'
+ iOnGoing = True # noqa: N806
else:
- #logger.warning(agent_name+', '+node_name+', '+repr(end_time))
- iget="ShortOfTime"
-
+ # logger.warning(agent_name+', '+node_name+', '+repr(end_time))
+ iget = 'ShortOfTime'
+
if iget == 'ShortOfTime':
return (False, iget, None, collective)
- self._agents.loc[agent_name, 'data'].current_location.coord.set_coord(coord[0],coord[1])
- self._agents.loc[agent_name, 'data'].setJob(node_name, action, entity, effect_definition_name, method_name, time+_ETA,time+_ETA+_ETJ, iOnGoing)
+ self._agents.loc[agent_name, 'data'].current_location.coord.set_coord(
+ coord[0], coord[1]
+ )
+ self._agents.loc[agent_name, 'data'].setJob(
+ node_name,
+ action,
+ entity,
+ effect_definition_name,
+ method_name,
+ time + _ETA,
+ time + _ETA + _ETJ,
+ iOnGoing,
+ )
self._agents.loc[agent_name, 'ready'] = False
- self.restoration_log_book.addAgentActionToLogBook(agent_name, node_name, entity, action, time, end_time, _ETA, effect_definition_name ,method_name, iFinished=not iOnGoing)
+ self.restoration_log_book.addAgentActionToLogBook(
+ agent_name,
+ node_name,
+ entity,
+ action,
+ time,
+ end_time,
+ _ETA,
+ effect_definition_name,
+ method_name,
+ iFinished=not iOnGoing,
+ )
return (True, iget, _ETJ, collective)
-
- def getJobEndTime(self, agent_name, icheck=True):
+
+ def getJobEndTime(self, agent_name, icheck=True): # noqa: FBT002, N802, D102
end_time = self._agents.loc[agent_name, 'data'].job_end_time
- if icheck == True and end_time == None:
- raise ValueError('No Time is assigned to agent')
- if icheck == True and self._agents.loc[agent_name, 'data'].isWorking == False:
- raise ValueError('The agent is not working')
+ if icheck == True and end_time == None: # noqa: E711, E712
+ raise ValueError('No Time is assigned to agent') # noqa: EM101, TRY003
+ if (
+ icheck == True # noqa: E712
+ and self._agents.loc[agent_name, 'data'].isWorking == False # noqa: E712
+ ):
+ raise ValueError('The agent is not working') # noqa: EM101, TRY003
return end_time
-
- def getJobArivalTime(self, agent_name, icheck=True):
- arival_time = self._agents.loc[agent_name, 'data']._time_of_arival
- if icheck == True and arival_time == None:
- raise ValueError('No Time is assigned to agent')
- if icheck == True and self._agents.loc[agent_name, 'data'].isWorking == False:
- raise ValueError('The agent is not working')
+
+ def getJobArivalTime(self, agent_name, icheck=True): # noqa: FBT002, N802, D102
+ arival_time = self._agents.loc[agent_name, 'data']._time_of_arival # noqa: SLF001
+ if icheck == True and arival_time == None: # noqa: E711, E712
+ raise ValueError('No Time is assigned to agent') # noqa: EM101, TRY003
+ if (
+ icheck == True # noqa: E712
+ and self._agents.loc[agent_name, 'data'].isWorking == False # noqa: E712
+ ):
+ raise ValueError('The agent is not working') # noqa: EM101, TRY003
return arival_time
-
- def releaseAgent(self, agent_name):
- if self._agents.loc[agent_name, 'ready'] == True:
+
+ def releaseAgent(self, agent_name): # noqa: N802, D102
+ if self._agents.loc[agent_name, 'ready'] == True: # noqa: E712
raise ValueError(agent_name + ' is already ready')
- if self._agents.loc[agent_name, 'active'] != True:
+ if self._agents.loc[agent_name, 'active'] != True: # noqa: E712
raise ValueError(agent_name + ' is not active')
- if self._agents.loc[agent_name, 'data'].isWorking == False:
- raise ValueError(agent_name+' is not working')
-
+ if self._agents.loc[agent_name, 'data'].isWorking == False: # noqa: E712
+ raise ValueError(agent_name + ' is not working')
self._agents.loc[agent_name, 'ready'] = True
-
- self._agents.loc[agent_name, 'data'].isWorking = False
- self._agents.loc[agent_name, 'data'].cur_job_location = None
- self._agents.loc[agent_name, 'data'].cur_job_action = None
- self._agents.loc[agent_name, 'data'].cur_job_entity = None
- self._agents.loc[agent_name, 'data']._time_of_arival = None
+
+ self._agents.loc[agent_name, 'data'].isWorking = False
+ self._agents.loc[agent_name, 'data'].cur_job_location = None
+ self._agents.loc[agent_name, 'data'].cur_job_action = None
+ self._agents.loc[agent_name, 'data'].cur_job_entity = None
+ self._agents.loc[agent_name, 'data']._time_of_arival = None # noqa: SLF001
self._agents.loc[agent_name, 'data'].cur_job_effect_definition_name = None
- self._agents.loc[agent_name, 'data'].cur_job_method_name = None
- self._agents.loc[agent_name, 'data'].job_end_time = None
- self._agents.loc[agent_name, 'data'].cur_job_ongoing = None
-
-
-
-class AgentShift():
- def __init__(self, agent_name, name):#, shifting_obj):
+ self._agents.loc[agent_name, 'data'].cur_job_method_name = None
+ self._agents.loc[agent_name, 'data'].job_end_time = None
+ self._agents.loc[agent_name, 'data'].cur_job_ongoing = None
+
+
+class AgentShift: # noqa: D101
+ # , shifting_obj):
+ def __init__(self, agent_name, name):
self._agent_name = agent_name
- self._shift_name = name
- #shifting_obj.addAgentShift(self._agent_name, self._shift_name)
-
+ self._shift_name = name
+ # shifting_obj.addAgentShift(self._agent_name, self._shift_name)
-
-
-class Shifting():
+
+class Shifting: # noqa: D101
def __init__(self):
self._all_agent_shift_data = {}
- self._shift_data = pd.DataFrame(columns=['begining', 'end'])
-
- def addShift(self, name, begining, ending):
- """
- Adds a shift to shift registry
+ self._shift_data = pd.DataFrame(columns=['begining', 'end'])
+
+ def addShift(self, name, beginning, ending): # noqa: N802
+ """Adds a shift to shift registry
Parameters
----------
name : str
Shift's name.
- begining : int
- shift's begining time.
+ beginning : int
+ shift's beginning time.
ending : int
shifts ending time.
@@ -486,400 +591,537 @@ def addShift(self, name, begining, ending):
ValueError
if shift name is already in the registry,
if the name is not string,
- if the begining time is not int,
+ if the beginning time is not int,
if the ending time is not int,
- if begining time is bigger than 24*3600,
+ if beginning time is bigger than 24*3600,
if ending time is bigger than 24*3600.
Returns
-------
None.
- """
+ """ # noqa: D400, D401
if name in self._shift_data:
- raise ValueError('Shift name already registered')
- if type(begining) != int and type(begining) != float:
- raise ValueError('Begining time must be integer: ' + str(type(begining)) )
- if type(ending) != int and type(ending) != float:
- raise ValueError('Ending time must be integer: ' + str(type(ending)) )
- if begining > 24*3600:
- raise ValueError("begining time is bigger than 24*3600" + str(begining))
- if ending > 24*3600:
+ raise ValueError('Shift name already registered') # noqa: EM101, TRY003
+ if type(beginning) != int and type(beginning) != float: # noqa: E721
+ raise ValueError(
+ 'Beginning time must be integer: ' + str(type(beginning))
+ )
+ if type(ending) != int and type(ending) != float: # noqa: E721
+ raise ValueError('Ending time must be integer: ' + str(type(ending)))
+ if beginning > 24 * 3600:
+ raise ValueError(
+                'Beginning time is bigger than 24*3600: ' + str(beginning)
+ )
+ if ending > 24 * 3600:
+            raise ValueError('Ending time is bigger than 24*3600: ' + str(ending))
- begining = int(begining)
+ beginning = int(beginning)
ending = int(ending)
-
- self._shift_data.loc[name] = [begining, ending]
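+        # Note: shifts that wrap past midnight (beginning > ending) are accepted
+        # here; isOnShift() and getAgentShiftEndTime() handle the wrap-around
+        # explicitly.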
-
- def getShiftTimes(self, name):
- return (self._shift_data['begining'].loc[name], self._shift_data['end'].loc[name])
-
-
-
- def getNextShiftTime(self, time):
- daily_time = time%(24*3600)
- num_of_days = int(time/(24*3600 ))
-
- next_shift_candidate=pd.Series()
-
+
+ self._shift_data.loc[name] = [beginning, ending]
+
+ def getShiftTimes(self, name): # noqa: N802, D102
+ return (
+ self._shift_data['begining'].loc[name],
+ self._shift_data['end'].loc[name],
+ )
+
+ def getNextShiftTime(self, time): # noqa: N802, D102
+ daily_time = time % (24 * 3600)
+ num_of_days = int(time / (24 * 3600))
+
+ next_shift_candidate = pd.Series()
+
for shift_name, shift_data in self._shift_data.iterrows():
beg_time = shift_data[0]
end_time = shift_data[1]
-
+
if beg_time > end_time and daily_time < end_time:
- beg_time -= 24*3600
- elif beg_time > end_time and daily_time >= end_time:
- #beg_time += 24*3600*num_of_days
- end_time += 24*3600
-
+ beg_time -= 24 * 3600
+ elif beg_time > end_time and daily_time >= end_time:
+ # beg_time += 24*3600*num_of_days
+ end_time += 24 * 3600
+
if daily_time < end_time and daily_time >= beg_time:
- next_shift_candidate.loc[shift_name]=end_time+24*3600*num_of_days
+ next_shift_candidate.loc[shift_name] = (
+ end_time + 24 * 3600 * num_of_days
+ )
change_shift_time = next_shift_candidate.min()
- #if beg_time > end_time:
- #next_shift_time = time +(change_shift_time - daily_time)
- #else:
-
- return change_shift_time
-
-
-
- def assignShiftToAgent(self, agent_ID, shift_name):
- """
- Assigns shoft to agent
+ # if beg_time > end_time:
+ # next_shift_time = time +(change_shift_time - daily_time)
+ # else:
+
+ return change_shift_time # noqa: RET504
+
+ def assignShiftToAgent(self, agent_ID, shift_name): # noqa: N802, N803
+ """Assigns shift to agent
Parameters
----------
agent_ID : str
Agent's ID.
shift_name : str
- Shoft's name
+ Shift's name
Raises
------
ValueError
- if agent_ID is already in agent's shoft data(probably already assigned,
+            if agent_ID is already in the agent shift data (probably already assigned),
if shift name does not exist in shift registry.
Returns
-------
None.
- """
+ """ # noqa: D400, D401
if agent_ID in self._all_agent_shift_data:
- raise ValueError("The agent ID currently in Agent ALl Shifts")
+            raise ValueError('The agent ID is already in the agent shift registry')  # noqa: EM101, TRY003
if shift_name not in self._shift_data:
- raise ValueError("shift data is not in registered as shifts")
-
+            raise ValueError('shift name is not registered as a shift')  # noqa: EM101, TRY003
+
self._all_agent_shift_data[agent_ID] = shift_name
-
-
-class DispatchRule():
- def __init__(self, settings, method = "deterministic", exclude=None):
- self.settings = settings
- self._rules = {}
+class DispatchRule: # noqa: D101
+ def __init__(self, settings, method='deterministic', exclude=None):
+ self.settings = settings
+ self._rules = {}
self._cumulative = {}
-
- if "PIPE" not in exclude:
-
- self._rules["PIPE"] = self.settings['pipe_damage_discovery_model']['time_discovery_ratio']
- #data2=pd.Series([0.90, 0.01, 0.01, 0.04, 0.04, 0, 0], index = [3600*n for n in [0, 12, 24, 36, 48, 60, 72]])
-
- if "DISTNODE" not in exclude:
- self._rules["DISTNODE"] = self.settings['node_damage_discovery_model']['time_discovery_ratio']
- #data=pd.Series([0, 0.67, 0.07, 0.07, 0.07, 0.07, 0.05], index = [3600*n for n in [0, 12, 24, 36, 48, 60, 72]])
-
-
- self._rules['GNODE'] = self.settings['Gnode_damage_discovery_model']['time_discovery_ratio']
- self._rules['TANK'] = self.settings['tank_damage_discovery_model']['time_discovery_ratio']
- self._rules['PUMP'] = self.settings['pump_damage_discovery_model']['time_discovery_ratio']
- self._rules['RESERVOIR'] = self.settings['reservoir_damage_discovery_model']['time_discovery_ratio']
-
- if method == "deterministic":
+
+ if 'PIPE' not in exclude:
+ self._rules['PIPE'] = self.settings['pipe_damage_discovery_model'][
+ 'time_discovery_ratio'
+ ]
+ # data2=pd.Series([0.90, 0.01, 0.01, 0.04, 0.04, 0, 0], index = [3600*n for n in [0, 12, 24, 36, 48, 60, 72]])
+
+ if 'DISTNODE' not in exclude:
+ self._rules['DISTNODE'] = self.settings['node_damage_discovery_model'][
+ 'time_discovery_ratio'
+ ]
+ # data=pd.Series([0, 0.67, 0.07, 0.07, 0.07, 0.07, 0.05], index = [3600*n for n in [0, 12, 24, 36, 48, 60, 72]])
+
+ self._rules['GNODE'] = self.settings['Gnode_damage_discovery_model'][
+ 'time_discovery_ratio'
+ ]
+ self._rules['TANK'] = self.settings['tank_damage_discovery_model'][
+ 'time_discovery_ratio'
+ ]
+ self._rules['PUMP'] = self.settings['pump_damage_discovery_model'][
+ 'time_discovery_ratio'
+ ]
+ self._rules['RESERVOIR'] = self.settings['reservoir_damage_discovery_model'][
+ 'time_discovery_ratio'
+ ]
+
+ if method == 'deterministic':
pass
else:
- raise ValueError("Unknown dispatch Rule: " + method)
-
- #for key in exclude:
- #self._rules.pop(key)
-
- for key, d in self._rules.items():
+            raise ValueError('Unknown dispatch rule: ' + method)
+
+ # for key in exclude:
+ # self._rules.pop(key)
+
+ for key, d in self._rules.items(): # noqa: B007, PERF102
self._cumulative[key] = self._rules[key].cumsum()
-
-
- def getDiscoveredPrecentage(self, time):
+
+ def getDiscoveredPrecentage(self, time): # noqa: N802, D102
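+        # For each element type, return the cumulative fraction of damages assumed
+        # to be discovered by `time`; values between the tabulated discovery times
+        # are interpolated linearly on the time index, and values outside the range
+        # are clamped to the first/last tabulated entry.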
res = {}
for key in self._cumulative:
temp = self._cumulative[key].copy()
if time in temp.index:
- res[key]=temp[time]
+ res[key] = temp[time]
elif time < temp.index.min():
res[key] = temp[temp.index.min()]
elif time > temp.index.max():
res[key] = temp[temp.index.max()]
else:
temp[time] = np.nan
- temp.sort_index(inplace = True)
- temp.interpolate(method='index', inplace = True)
+ temp.sort_index(inplace=True) # noqa: PD002
+ temp.interpolate(method='index', inplace=True) # noqa: PD002
res[key] = temp[time]
return res
-
-class Dispatch():
- def __init__(self, restoration, settings, discovery_interval = 0, method='old'):
+
+
+class Dispatch: # noqa: D101
+ def __init__(self, restoration, settings, discovery_interval=0, method='old'):
self.settings = settings
self.method = method
self.discovery_interval = discovery_interval
self._rm = restoration
- self._discovered_entity={}
+ self._discovered_entity = {}
self._init_time = self._rm.restoration_start_time
-
+
exclude = []
-
+
if settings['pipe_damage_discovery_model']['method'] == 'leak_based':
exclude.append('PIPE')
elif settings['pipe_damage_discovery_model']['method'] == 'time_based':
pass
else:
- raise ValueError("Unknown pipe damage discovery method in settings: " + repr(settings['pipe_damage_discovery_model']['method']))
-
+ raise ValueError(
+ 'Unknown pipe damage discovery method in settings: '
+ + repr(settings['pipe_damage_discovery_model']['method'])
+ )
+
if settings['node_damage_discovery_model']['method'] == 'leak_based':
exclude.append('DISTNODE')
elif settings['node_damage_discovery_model']['method'] == 'time_based':
pass
else:
- raise ValueError("Unknown Node damage discovery method in settings: " + repr(settings['node_damage_discovery_model']['method']))
-
+ raise ValueError(
+ 'Unknown Node damage discovery method in settings: '
+ + repr(settings['node_damage_discovery_model']['method'])
+ )
+
self._rules = DispatchRule(settings, exclude=exclude)
self._last_discovered_number = {}
for el in self._rm.ELEMENTS:
if el in exclude:
continue
self._last_discovered_number[el] = 0
-
- self._rm._registry.addAttrToPipeDamageTable('discovered', False)
- self._rm._registry.addAttrToDistNodeDamageTable('discovered', False)
-
- def updateDiscovery(self, time):
+
+ self._rm._registry.addAttrToPipeDamageTable('discovered', False) # noqa: FBT003, SLF001
+ self._rm._registry.addAttrToDistNodeDamageTable('discovered', False) # noqa: FBT003, SLF001
+
+ def updateDiscovery(self, time): # noqa: C901, N802, D102
if time < self._rm.restoration_start_time:
- print("Time is less than init time")
-
+ print('Time is less than init time') # noqa: T201
+
else:
- #if self.method == 'old':
- #time_since_dispatch_activity = time - self._rm.restoration_start_time
- #discovered_ratios = self._rules.getDiscoveredPrecentage(time_since_dispatch_activity)
- #discovered_damage_numbers = self._getDamageNumbers(discovered_ratios)
- #self._updateDamagesNumbers(discovered_damage_numbers)
-
- if self.settings['pipe_damage_discovery_model']['method'] == 'leak_based':
-
- pipe_leak_criteria = self.settings['pipe_damage_discovery_model']['leak_amount']
- pipe_leak_time_span = self.settings['pipe_damage_discovery_model']['leak_time' ]
-
- pipe_damage_table = self._rm._registry._pipe_damage_table
- not_discovered_pipe_damage_table = pipe_damage_table[pipe_damage_table['discovered']==False]
- to_be_checked_node_list = list(not_discovered_pipe_damage_table.index)
- breaks_not_discovered_pipe_damage_table = pipe_damage_table[(pipe_damage_table['discovered']==False) & (pipe_damage_table['damage_type']=='break')]
- not_discovered_break_node_B = self._rm._registry._pipe_break_history.loc[breaks_not_discovered_pipe_damage_table.index, 'Node_B']
- not_dicovered_node_B_list = not_discovered_break_node_B.to_list()
+ # if self.method == 'old':
+ # time_since_dispatch_activity = time - self._rm.restoration_start_time
+ # discovered_ratios = self._rules.getDiscoveredPrecentage(time_since_dispatch_activity)
+ # discovered_damage_numbers = self._getDamageNumbers(discovered_ratios)
+ # self._updateDamagesNumbers(discovered_damage_numbers)
+
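+            # Leak-based pipe discovery: a pipe damage is marked as discovered once
+            # the leak flow observed at its damage node (or, for a break, at either
+            # break node) reaches leak_amount at any reported time step within the
+            # last leak_time seconds of results.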
+ if (
+ self.settings['pipe_damage_discovery_model']['method']
+ == 'leak_based'
+ ):
+ pipe_leak_criteria = self.settings['pipe_damage_discovery_model'][
+ 'leak_amount'
+ ]
+ pipe_leak_time_span = self.settings['pipe_damage_discovery_model'][
+ 'leak_time'
+ ]
+
+ pipe_damage_table = self._rm._registry._pipe_damage_table # noqa: SLF001
+ not_discovered_pipe_damage_table = pipe_damage_table[
+ pipe_damage_table['discovered'] == False # noqa: E712
+ ]
+ to_be_checked_node_list = list(
+ not_discovered_pipe_damage_table.index
+ )
+ breaks_not_discovered_pipe_damage_table = pipe_damage_table[
+ (pipe_damage_table['discovered'] == False) # noqa: E712
+ & (pipe_damage_table['damage_type'] == 'break')
+ ]
+ not_discovered_break_node_B = ( # noqa: N806
+ self._rm._registry._pipe_break_history.loc[ # noqa: SLF001
+ breaks_not_discovered_pipe_damage_table.index, 'Node_B'
+ ]
+ )
+ not_dicovered_node_B_list = not_discovered_break_node_B.to_list() # noqa: N806
to_be_checked_node_list.extend(not_dicovered_node_B_list)
- #break_pair = zip(breaks_not_discovered_pipe_damage_table, not_discovered_break_node_B)
- #not_discovered_pipe_damage_name_list = list(not_discovered_pipe_damage_table.index)
- #breaks_not_discovered_pipe_damage_table
- #all_nodes_name_list = set(self._rm._registry.result.columns)
- available_nodes = set(self._rm._registry.result.node['demand'].columns)
+ # break_pair = zip(breaks_not_discovered_pipe_damage_table, not_discovered_break_node_B)
+ # not_discovered_pipe_damage_name_list = list(not_discovered_pipe_damage_table.index)
+ # breaks_not_discovered_pipe_damage_table
+ # all_nodes_name_list = set(self._rm._registry.result.columns)
+ available_nodes = set(
+ self._rm._registry.result.node['demand'].columns # noqa: SLF001
+ )
to_be_checked_node_list = set(to_be_checked_node_list)
- shared_nodes_name_list = to_be_checked_node_list.union(available_nodes) - (available_nodes - to_be_checked_node_list) - (to_be_checked_node_list - available_nodes)
+ shared_nodes_name_list = (
+ to_be_checked_node_list.union(available_nodes)
+ - (available_nodes - to_be_checked_node_list)
+ - (to_be_checked_node_list - available_nodes)
+ )
if len(shared_nodes_name_list) > 0:
- leaking_nodes_result = self._rm._registry.result.node['demand'][list(shared_nodes_name_list)]
-
- leaking_nodes_result = leaking_nodes_result.loc[(leaking_nodes_result.index > (time - pipe_leak_time_span) )]
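+ # Only demand samples within the last `leak_time` seconds are considered; any sample at or above `leak_amount` marks the damage as discovered.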
+ leaking_nodes_result = self._rm._registry.result.node['demand'][ # noqa: SLF001
+ list(shared_nodes_name_list)
+ ]
+
+ leaking_nodes_result = leaking_nodes_result.loc[
+ (leaking_nodes_result.index > (time - pipe_leak_time_span))
+ ]
discovered_bool = leaking_nodes_result >= pipe_leak_criteria
discovered_bool_temp = discovered_bool.any()
- discovered_bool_temp = discovered_bool_temp[discovered_bool_temp==True]
+ discovered_bool_temp = discovered_bool_temp[
+ discovered_bool_temp == True # noqa: E712
+ ]
to_be_discoverd = discovered_bool_temp.index.to_list()
-
- #time1 = leaking_nodes_result.index[1:]
- #time2 = leaking_nodes_result.index[0:-1]
- #time_dif = (pd.Series(time1) - pd.Series(time2))
- #leaking_nodes_result = leaking_nodes_result.drop(leaking_nodes_result.index[-1])
-
- #leaking_nodes_result.index = time_dif.to_numpy()
- #leaking_nodes_result = leaking_nodes_result.apply(lambda x: x.values * x.index)
- #summed_water_loss = leaking_nodes_result.sum()
- #to_be_discoverd = summed_water_loss[summed_water_loss > 3600*2*0.2]
- discovery_list = set()
- #to_be_discoverd = list(to_be_discoverd.index)
+
+ # time1 = leaking_nodes_result.index[1:]
+ # time2 = leaking_nodes_result.index[0:-1]
+ # time_dif = (pd.Series(time1) - pd.Series(time2))
+ # leaking_nodes_result = leaking_nodes_result.drop(leaking_nodes_result.index[-1])
+
+ # leaking_nodes_result.index = time_dif.to_numpy()
+ # leaking_nodes_result = leaking_nodes_result.apply(lambda x: x.values * x.index)
+ # summed_water_loss = leaking_nodes_result.sum()
+ # to_be_discoverd = summed_water_loss[summed_water_loss > 3600*2*0.2]
+ discovery_list = set()
+ # to_be_discoverd = list(to_be_discoverd.index)
for discovery_candidate in to_be_discoverd:
if discovery_candidate in not_dicovered_node_B_list:
- candidate_break_A = not_discovered_break_node_B[not_discovered_break_node_B == discovery_candidate].index[0]
+ candidate_break_A = not_discovered_break_node_B[ # noqa: N806
+ not_discovered_break_node_B == discovery_candidate
+ ].index[0]
discovery_list.add(candidate_break_A)
else:
discovery_list.add(discovery_candidate)
- #discovery_list = list(discovery_list)
+ # discovery_list = list(discovery_list)
pipe_damage_table.loc[discovery_list, 'discovered'] = True
-
- if self.settings['node_damage_discovery_model']['method'] == 'leak_based':
-
- node_leak_criteria = self.settings['node_damage_discovery_model']['leak_amount']
- node_leak_time_span = self.settings['node_damage_discovery_model']['leak_time' ]
-
-
- nodal_damage_table = self._rm._registry._node_damage_table
- not_discovered_nodal_damage_table = nodal_damage_table[nodal_damage_table['discovered']==False]
+
+ if (
+ self.settings['node_damage_discovery_model']['method']
+ == 'leak_based'
+ ):
+ node_leak_criteria = self.settings['node_damage_discovery_model'][
+ 'leak_amount'
+ ]
+ node_leak_time_span = self.settings['node_damage_discovery_model'][
+ 'leak_time'
+ ]
+
+ nodal_damage_table = self._rm._registry._node_damage_table # noqa: SLF001
+ not_discovered_nodal_damage_table = nodal_damage_table[
+ nodal_damage_table['discovered'] == False # noqa: E712
+ ]
if 'virtual_of' in not_discovered_nodal_damage_table.columns:
- to_be_checked_node_list = list(not_discovered_nodal_damage_table['virtual_of'])
+ to_be_checked_node_list = list(
+ not_discovered_nodal_damage_table['virtual_of']
+ )
else:
- to_be_checked_node_list = list(not_discovered_nodal_damage_table.index)
- available_leak_nodes = set(self._rm._registry.result.node['leak'].columns)
+ to_be_checked_node_list = list(
+ not_discovered_nodal_damage_table.index
+ )
+ available_leak_nodes = set(
+ self._rm._registry.result.node['leak'].columns # noqa: SLF001
+ )
to_be_checked_node_list = set(to_be_checked_node_list)
- shared_nodes_name_list = to_be_checked_node_list.union(available_leak_nodes) - (available_leak_nodes - to_be_checked_node_list) - (to_be_checked_node_list - available_leak_nodes)
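+ # Again, reduce to the intersection of undiscovered damage nodes and nodes that have leak results.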
+ shared_nodes_name_list = (
+ to_be_checked_node_list.union(available_leak_nodes)
+ - (available_leak_nodes - to_be_checked_node_list)
+ - (to_be_checked_node_list - available_leak_nodes)
+ )
if len(shared_nodes_name_list) > 0:
shared_nodes_name_list = list(shared_nodes_name_list)
- leaking_nodes_result = self._rm._registry.result.node['leak'][shared_nodes_name_list]
+ leaking_nodes_result = self._rm._registry.result.node['leak'][ # noqa: SLF001
+ shared_nodes_name_list
+ ]
leaking_nodes_result = leaking_nodes_result.sort_index()
-
+
if 'virtual_of' in not_discovered_nodal_damage_table.columns:
- leaking_number_of_damages = not_discovered_nodal_damage_table.groupby('virtual_of')["Number_of_damages"].sum()
+ leaking_number_of_damages = (
+ not_discovered_nodal_damage_table.groupby('virtual_of')[
+ 'Number_of_damages'
+ ].sum()
+ )
else:
- leaking_number_of_damages = not_discovered_nodal_damage_table.loc[shared_nodes_name_list, "Number_of_damages"]
-
- leaking_nodes_result = leaking_nodes_result.loc[(leaking_nodes_result.index > (time - node_leak_time_span) )]
- normalized_summed_water_loss = leaking_nodes_result / leaking_number_of_damages
- discovered_bool = normalized_summed_water_loss >= node_leak_criteria
+ leaking_number_of_damages = (
+ not_discovered_nodal_damage_table.loc[
+ shared_nodes_name_list, 'Number_of_damages'
+ ]
+ )
+
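+ # Normalize the leak flow by the number of damages at each node before comparing against `leak_amount` within the `leak_time` window.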
+ leaking_nodes_result = leaking_nodes_result.loc[
+ (leaking_nodes_result.index > (time - node_leak_time_span))
+ ]
+ normalized_summed_water_loss = (
+ leaking_nodes_result / leaking_number_of_damages
+ )
+ discovered_bool = (
+ normalized_summed_water_loss >= node_leak_criteria
+ )
discovered_bool_temp = discovered_bool.any()
- discovered_bool_temp = discovered_bool_temp[discovered_bool_temp==True]
+ discovered_bool_temp = discovered_bool_temp[
+ discovered_bool_temp == True # noqa: E712
+ ]
discovered_list = discovered_bool_temp.index.to_list()
if 'virtual_of' in not_discovered_nodal_damage_table.columns:
- discovered_list = (nodal_damage_table[nodal_damage_table['virtual_of'].isin(discovered_list)]).index
+ discovered_list = (
+ nodal_damage_table[
+ nodal_damage_table['virtual_of'].isin(
+ discovered_list
+ )
+ ]
+ ).index
nodal_damage_table.loc[discovered_list, 'discovered'] = True
- #else:
-
-
+ # else:
+
time_since_dispatch_activity = time - self._rm.restoration_start_time
- discovered_ratios = self._rules.getDiscoveredPrecentage(time_since_dispatch_activity)
- discovered_damage_numbers = self._getDamageNumbers(discovered_ratios)
+ discovered_ratios = self._rules.getDiscoveredPrecentage(
+ time_since_dispatch_activity
+ )
+ discovered_damage_numbers = self._getDamageNumbers(discovered_ratios)
self._updateDamagesNumbers(discovered_damage_numbers)
- #else:
- #raise ValueError('Unknown method: '+repr(self.method))
-
-
- def _getDamageNumbers(self, discovered_ratios):
-
+ # else:
+ # raise ValueError('Unknown method: '+repr(self.method))
+
+ def _getDamageNumbers(self, discovered_ratios): # noqa: N802
num_damaged_entity = {}
-
+
for el in discovered_ratios:
- if discovered_ratios[el]-1>0:
- if discovered_ratios[el]-1.000001>0:
- raise ValueError('ratio is bigger than 1: ' + str(discovered_ratios[el]) + ' in element = ' + el)
- else:
- discovered_ratios[el]=int(1)
- temp = len(self._rm._registry.getDamageData(el))
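+ # Ratios may exceed 1.0 only by floating-point error (up to 1e-6); those are clamped to 1, anything larger is an error.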
+ if discovered_ratios[el] - 1 > 0:
+ if discovered_ratios[el] - 1.000001 > 0:
+ raise ValueError(
+ 'ratio is bigger than 1: '
+ + str(discovered_ratios[el])
+ + ' in element = '
+ + el
+ )
+ else: # noqa: RET506
+ discovered_ratios[el] = 1
+ temp = len(self._rm._registry.getDamageData(el)) # noqa: SLF001
num_damaged_entity[el] = int(np.round(temp * discovered_ratios[el]))
return num_damaged_entity
-
- def _updateDamagesNumbers(self, discovered_numbers):
+
+ def _updateDamagesNumbers(self, discovered_numbers): # noqa: N802
for el in discovered_numbers:
-
if self._last_discovered_number[el] > discovered_numbers[el]:
- raise ValueError('Discovered number is less than what it used to be in element ' + el)
- elif self._last_discovered_number[el] < discovered_numbers[el]:
-
- refined_damaged_table = self._rm._registry.getDamageData(el)
+ raise ValueError(
+ 'Discovered number is less than what it used to be in element '
+ + el
+ )
+ elif self._last_discovered_number[el] < discovered_numbers[el]: # noqa: RET506
+ refined_damaged_table = self._rm._registry.getDamageData(el) # noqa: SLF001
if len(refined_damaged_table) < discovered_numbers[el]:
- raise ValueError('discovered number is bigger than all damages in element' + el)
-
- discovered_damage_table = refined_damaged_table[refined_damaged_table['discovered'] == True]
+ raise ValueError(
+ 'discovered number is bigger than all damages in element '
+ + el
+ )
+
+ discovered_damage_table = refined_damaged_table[
+ refined_damaged_table['discovered'] == True # noqa: E712
+ ]
if discovered_numbers[el] <= len(discovered_damage_table):
continue
- undiscovered_damage_table = refined_damaged_table[refined_damaged_table['discovered'] == False]
-
-# =============================================================================
-# used_number = []
-# i = 0
-# while i < (discovered_numbers[el] - self._last_discovered_number[el]):
-# picked_number = random.randint(0,len(undiscovered_damage_table)-1)
-# if picked_number not in used_number:
-# used_number.append(picked_number)
-# i += 1
-# else:
-# pass
-# =============================================================================
+ undiscovered_damage_table = refined_damaged_table[
+ refined_damaged_table['discovered'] == False # noqa: E712
+ ]
+
+ # =============================================================================
+ # used_number = []
+ # i = 0
+ # while i < (discovered_numbers[el] - self._last_discovered_number[el]):
+ # picked_number = random.randint(0,len(undiscovered_damage_table)-1)
+ # if picked_number not in used_number:
+ # used_number.append(picked_number)
+ # i += 1
+ # else:
+ # pass
+ # =============================================================================
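+ # Randomly pick which still-undiscovered damages become discovered so the total matches the target count.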
if len(undiscovered_damage_table) > 0:
- used_number = random.sample(range(0, len(undiscovered_damage_table) ),discovered_numbers[el] - len(discovered_damage_table))
+ used_number = random.sample(
+ range(len(undiscovered_damage_table)),
+ discovered_numbers[el] - len(discovered_damage_table),
+ )
else:
used_number = []
for i in used_number:
temp_index = undiscovered_damage_table.index[i]
- self._rm._registry.updateElementDamageTable(el, 'discovered', temp_index, True, icheck=True)
-
- if el =="PIPE":
- refined_damaged_table = self._rm._registry.getDamageData(el)
- discovered_damage_table = refined_damaged_table[refined_damaged_table['discovered'] == True]
+ self._rm._registry.updateElementDamageTable( # noqa: SLF001
+ el,
+ 'discovered',
+ temp_index,
+ True, # noqa: FBT003
+ icheck=True,
+ )
+
+ if el == 'PIPE':
+ refined_damaged_table = self._rm._registry.getDamageData(el) # noqa: SLF001
+ discovered_damage_table = refined_damaged_table[
+ refined_damaged_table['discovered'] == True # noqa: E712
+ ]
self._last_discovered_number[el] = discovered_numbers[el]
-
-
-class Priority():
+
+
+class Priority: # noqa: D101
def __init__(self, restoration):
- self._data = {}
- self._rm = restoration
-
- def addData(self, agent_type,priority, order):
+ self._data = {}
+ self._rm = restoration
+
+ def addData(self, agent_type, priority, order): # noqa: N802, D102
if agent_type not in self._data:
self._data[agent_type] = pd.Series(index=[priority], data=[order])
else:
temp = self._data[agent_type]
if priority in temp.index:
- raise ValueError('Prority redefiend. type: '+ agent_type + ' & priority: ' + str(priority))
- self._data[agent_type].loc[priority]= order
-
- def getPriority(self, agent_type, priority):
+ raise ValueError(
+ 'Priority redefined. type: '
+ + agent_type
+ + ' & priority: '
+ + str(priority)
+ )
+ self._data[agent_type].loc[priority] = order
+
+ def getPriority(self, agent_type, priority): # noqa: N802, D102
if agent_type not in self._data:
- raise ValueError("The agent type("+repr(agent_type)+") is not defined in the prioity:"+repr(priority))
-
+ raise ValueError(
+ 'The agent type('
+ + repr(agent_type)
+ ') is not defined in the priority: '
+ + repr(priority)
+ )
+
temp = self._data[agent_type]
-
+
if priority not in temp.index:
- raise ValueError('prioirty not in priority data. Agent_type: ' + agent_type + ' & PriorityL ' + priority)
-
+ raise ValueError(
+ 'Priority not in priority data. Agent_type: '
+ + agent_type
+ + ' & Priority: '
+ + str(priority)
+ )
+
return temp.loc[priority]
-
- def getHydSigDamageGroups(self):
+
+ def getHydSigDamageGroups(self): # noqa: N802, D102
damage_group_list = set()
for crew_type in self._data:
whole_priority_list = self._data[crew_type]
- primary_priority_list = whole_priority_list.loc[1]
+ primary_priority_list = whole_priority_list.loc[1]
secondary_priority_list = whole_priority_list.loc[2]
i = 0
for cur_second_priority in secondary_priority_list:
- if cur_second_priority.upper() == "HYDSIG":
+ if cur_second_priority.upper() == 'HYDSIG':
cur_damage_group = primary_priority_list[i][1]
damage_group_list.add(cur_damage_group)
- i += 1
+ i += 1 # noqa: SIM113
return damage_group_list
-
- def sortDamageTable(self, wn, entity_data, entity, agent_type, target_priority_index, order_index, target_priority = None):
- all_priority_data = self._data[agent_type]
+
+ def sortDamageTable( # noqa: C901, N802, D102
+ self,
+ wn,
+ entity_data,
+ entity,
+ agent_type,
+ target_priority_index,
+ order_index,
+ target_priority=None,
+ ):
+ all_priority_data = self._data[agent_type]
target_priority_list = all_priority_data.loc[target_priority_index]
-
+
if len(target_priority_list) == 0:
return entity_data
- name_sugest = 'Priority_'+str(target_priority_index)+'_dist'
-
- if target_priority == None:
+ name_sugest = 'Priority_' + str(target_priority_index) + '_dist'
+
+ if target_priority == None: # noqa: E711
target_priority = target_priority_list[order_index]
-
- if target_priority == None:
+
+ if target_priority == None: # noqa: E711
return entity_data
- elif target_priority in self._rm.proximity_points:
- Proximity_list = self._rm.proximity_points[target_priority]
+ elif target_priority in self._rm.proximity_points: # noqa: RET505
+ Proximity_list = self._rm.proximity_points[target_priority] # noqa: N806
node_name_list = list(entity_data.index)
for node_name in node_name_list:
- #Sina: you can enhance the run time speed with having x, y coordinates in the damage table and not producing and droping them each time
+ # Sina: you can enhance the run time speed with having x, y coordinates in the damage table and not producing and dropping them each time
node_name_vir = get_node_name(node_name, entity_data)
coord = wn.get_node(node_name_vir).coordinates
entity_data.loc[node_name, 'X_COORD'] = coord[0]
@@ -887,218 +1129,320 @@ def sortDamageTable(self, wn, entity_data, entity, agent_type, target_priority_i
counter = 1
columns_to_drop = []
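+ # Straight-line distance from every damage location to each proximity point; locations are later sorted by the minimum of these distances.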
for x, y in Proximity_list:
- name_sug_c = name_sugest+'_'+str(counter)
+ name_sug_c = name_sugest + '_' + str(counter)
columns_to_drop.append(name_sug_c)
- entity_data[name_sug_c] = ( (entity_data['X_COORD']-x)**2 + (entity_data['Y_COORD']-y)**2 ) ** 0.5
+ entity_data[name_sug_c] = (
+ (entity_data['X_COORD'] - x) ** 2
+ + (entity_data['Y_COORD'] - y) ** 2
+ ) ** 0.5
counter += 1
- dist_only_entity_table = entity_data[columns_to_drop]
- min_dist_entity_table = dist_only_entity_table.min(axis=1)
+ dist_only_entity_table = entity_data[columns_to_drop]
+ min_dist_entity_table = dist_only_entity_table.min(axis=1)
entity_data.loc[:, name_sugest] = min_dist_entity_table
- entity_data.sort_values(by=name_sugest, ascending=True, inplace =True)
- columns_to_drop.append(name_sugest)
+ entity_data.sort_values(by=name_sugest, ascending=True, inplace=True) # noqa: PD002
+ columns_to_drop.append(name_sugest) # noqa: FURB113
columns_to_drop.append('X_COORD')
columns_to_drop.append('Y_COORD')
- entity_data.drop(columns=columns_to_drop, inplace= True)
-
- #Sina: It does nothing. When there are less damage location within
- #the priority definition for the crew type, thsi works fine, but
- #when there are more damage location within the priority definiton,
- #it does not gurantee that only teh cloest damage locations to the
- #crew-type agents are matched to jobs
- elif target_priority.upper() == "CLOSEST":
+ entity_data.drop(columns=columns_to_drop, inplace=True) # noqa: PD002
+
+ # Sina: It does nothing. When there are fewer damage locations within
+ # the priority definition for the crew type, this works fine, but
+ # when there are more damage locations within the priority definition,
+ # it does not guarantee that only the closest damage locations to the
+ # crew-type agents are matched to jobs
+ elif target_priority.upper() == 'CLOSEST':
pass
- elif target_priority.upper() == "HYDSIGLASTFLOW":
+ elif target_priority.upper() == 'HYDSIGLASTFLOW':
element_type = self._rm.entity[entity]
- if element_type != "PIPE":
- entity_data = self.sortDamageTable(entity_data, entity, agent_type, target_priority_index, order_index, target_priority = "CLOSEST")
+ if element_type != 'PIPE':
+ entity_data = self.sortDamageTable(
+ wn,
+ entity_data,
+ entity,
+ agent_type,
+ target_priority_index,
+ order_index,
+ target_priority='CLOSEST',
+ )
else:
- all_time_index = self._rm._registry.result.link["flowrate"].index[:self._rm.restoration_start_time+1]
- pipe_name_list = entity_data.loc[:,"Orginal_element"]
- last_valid_time = [cur_time for cur_time in all_time_index if cur_time not in self._rm._registry.result.maximum_trial_time]
+ all_time_index = self._rm._registry.result.link[ # noqa: SLF001
+ 'flowrate'
+ ].index[: self._rm.restoration_start_time + 1]
+ pipe_name_list = entity_data.loc[:, 'Orginal_element']
+ last_valid_time = [
+ cur_time
+ for cur_time in all_time_index
+ if cur_time not in self._rm._registry.result.maximum_trial_time # noqa: SLF001
+ ]
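+ # Use the most recent result time (before restoration starts) that is not listed in maximum_trial_time; fall back to the restoration start time if none exists.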
last_valid_time.sort()
if len(last_valid_time) == 0:
last_valid_time = self._rm.restoration_start_time
else:
last_valid_time = last_valid_time[-1]
-
- name_sugest = 'Priority_'+str(target_priority_index)+'_dist'
- flow_rate = self._rm._registry.result.link["flowrate"].loc[last_valid_time, pipe_name_list].abs()
+
+ name_sugest = 'Priority_' + str(target_priority_index) + '_dist'
+ flow_rate = (
+ self._rm._registry.result.link['flowrate'] # noqa: SLF001
+ .loc[last_valid_time, pipe_name_list]
+ .abs()
+ )
entity_data.loc[:, name_sugest] = flow_rate.to_list()
- entity_data.sort_values(name_sugest, ascending=False, inplace=True)
- entity_data.drop(columns=name_sugest, inplace=True)
-
- elif target_priority in self._rm.proximity_points and target_priority != "WaterSource2":
+ entity_data.sort_values(name_sugest, ascending=False, inplace=True) # noqa: PD002
+ entity_data.drop(columns=name_sugest, inplace=True) # noqa: PD002
- all_node_table = self._rm._registry.all_node_table
- Proximity_list = self._rm.proximity_points[target_priority]
+ elif (
+ target_priority in self._rm.proximity_points
+ and target_priority != 'WaterSource2'
+ ):
+ all_node_table = self._rm._registry.all_node_table # noqa: SLF001
+ Proximity_list = self._rm.proximity_points[target_priority] # noqa: N806
node_name_list = list(entity_data.index)
for node_name in node_name_list:
- #Sina: you can enhance the run time speed with having x, y coordinates in the damage table and not producing and droping them each time
+ # Sina: you can enhance the run time speed with having x, y coordinates in the damage table and not producing and dropping them each time
node_name_vir = get_node_name(node_name, entity_data)
coord = wn.get_node(node_name_vir).coordinates
entity_data.loc[node_name, 'X_COORD'] = coord[0]
entity_data.loc[node_name, 'Y_COORD'] = coord[1]
counter = 1
columns_to_drop = []
-
+
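+ # Build a weighted graph of the network; edge weights roughly follow a Hazen-Williams head-loss term, so 'closeness' is measured along the pipe network rather than as straight-line distance.
+ # Links without length/diameter (e.g. pumps and valves) fall back to a small constant weight.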
g = nx.MultiDiGraph()
-
+
for name, node in wn.nodes():
g.add_node(name)
- nx.set_node_attributes(g, name='pos', values={name: node.coordinates})
+ nx.set_node_attributes(
+ g, name='pos', values={name: node.coordinates}
+ )
nx.set_node_attributes(g, name='type', values={name: node.node_type})
-
+
for name, link in wn.links():
start_node = link.start_node_name
end_node = link.end_node_name
g.add_edge(start_node, end_node, key=name)
- nx.set_edge_attributes(g, name='type',
- values={(start_node, end_node, name): link.link_type})
-
+ nx.set_edge_attributes(
+ g,
+ name='type',
+ values={(start_node, end_node, name): link.link_type},
+ )
+
try:
- length = link.length
- d = link.diameter
+ length = link.length
+ d = link.diameter
roughness = link.roughness
- cost = length / np.power(d, 4.8655) / np.power(roughness, 1.852) + 1 / d
- except:
+ cost = (
+ length / np.power(d, 4.8655) / np.power(roughness, 1.852)
+ + 1 / d
+ )
+ except: # noqa: E722
cost = 0.00001
-
- weight = cost
-
- nx.set_edge_attributes(g, name='weight',
- values={(start_node, end_node, name): weight})
-
+
+ weight = cost
+
+ nx.set_edge_attributes(
+ g, name='weight', values={(start_node, end_node, name): weight}
+ )
+
g = g.to_undirected()
-
+
for x, y in Proximity_list:
- point_length_vector = np.square(all_node_table['X_COORD'] - x) + np.square(all_node_table['Y_COORD'] - y)
- point_length_vector = np.sqrt(point_length_vector )
+ point_length_vector = np.square(
+ all_node_table['X_COORD'] - x
+ ) + np.square(all_node_table['Y_COORD'] - y)
+ point_length_vector = np.sqrt(point_length_vector)
closest_node_name = point_length_vector.idxmin()
-
- #print("closest_node_name= "+str(closest_node_name))
-
- orginal_pipe_name_list = entity_data["Orginal_element"]
- damaged_pipe_node_list = [self._rm._registry.undamaged_link_node_list[link_node_names] for link_node_names in orginal_pipe_name_list]
+
+ # print("closest_node_name= "+str(closest_node_name))
+
+ orginal_pipe_name_list = entity_data['Orginal_element']
+ damaged_pipe_node_list = [
+ self._rm._registry.undamaged_link_node_list[link_node_names] # noqa: SLF001
+ for link_node_names in orginal_pipe_name_list
+ ]
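+ # A damaged pipe's distance is the shorter weighted path from the proximity point's nearest node to either end of the pipe; unreachable ends fall back to a large sentinel distance.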
try:
- shortest_path_length = [min(nx.shortest_path_length(g, closest_node_name, pipe_nodes_name[0], "weight"), nx.shortest_path_length(g, closest_node_name, pipe_nodes_name[1], "weight")) for pipe_nodes_name in damaged_pipe_node_list]
+ shortest_path_length = [
+ min(
+ nx.shortest_path_length(
+ g, closest_node_name, pipe_nodes_name[0], 'weight'
+ ),
+ nx.shortest_path_length(
+ g, closest_node_name, pipe_nodes_name[1], 'weight'
+ ),
+ )
+ for pipe_nodes_name in damaged_pipe_node_list
+ ]
except nx.NetworkXNoPath:
shortest_path_length = []
for pipe_nodes_name in damaged_pipe_node_list:
start_node_name = pipe_nodes_name[0]
- end_node_name = pipe_nodes_name[1]
-
+ end_node_name = pipe_nodes_name[1]
+
try:
- closest_path_from_start = nx.shortest_path_length(g, closest_node_name, start_node_name, "weight")
+ closest_path_from_start = nx.shortest_path_length(
+ g, closest_node_name, start_node_name, 'weight'
+ )
except nx.NetworkXNoPath:
closest_path_from_start = 10000000000000.0
-
+
try:
- closest_path_from_end = nx.shortest_path_length(g, closest_node_name, end_node_name, "weight")
+ closest_path_from_end = nx.shortest_path_length(
+ g, closest_node_name, end_node_name, 'weight'
+ )
except nx.NetworkXNoPath:
closest_path_from_end = 10000000000000.0
-
- cur_shortest_path_length = min(closest_path_from_start, closest_path_from_end)
+
+ cur_shortest_path_length = min(
+ closest_path_from_start, closest_path_from_end
+ )
shortest_path_length.append(cur_shortest_path_length)
- #print(shortest_path_length)
-
- name_sug_c = name_sugest+'_'+str(counter)
+ # print(shortest_path_length)
+
+ name_sug_c = name_sugest + '_' + str(counter)
columns_to_drop.append(name_sug_c)
entity_data[name_sug_c] = shortest_path_length
counter += 1
- dist_only_entity_table = entity_data[columns_to_drop]
- min_dist_entity_table = dist_only_entity_table.min(axis=1)
+ dist_only_entity_table = entity_data[columns_to_drop]
+ min_dist_entity_table = dist_only_entity_table.min(axis=1)
entity_data.loc[:, name_sugest] = min_dist_entity_table
- entity_data.sort_values(by=name_sugest, ascending=True, inplace =True)
+ entity_data.sort_values(by=name_sugest, ascending=True, inplace=True) # noqa: PD002
columns_to_drop.append(name_sugest)
columns_to_drop.append('X_COORD')
columns_to_drop.append('Y_COORD')
- entity_data.drop(columns=columns_to_drop, inplace= True)
- #print(entity_data)
- #print("+++++++++++++++++++++++++++++++++++++++")
-
- #Sina: It does nothing. When there are less damage location within
- #the priority definition for the crew type, thsi works fine, but
- #when there are more damage location within the priority definiton,
- #it does not gurantee that only teh cloest damage locations to the
- #crew-type agents are matched to jobs
-
- elif target_priority.upper() == "HYDSIG":
+ entity_data.drop(columns=columns_to_drop, inplace=True) # noqa: PD002
+ # print(entity_data)
+ # print("+++++++++++++++++++++++++++++++++++++++")
+
+ # Sina: It does nothing. When there are fewer damage locations within
+ # the priority definition for the crew type, this works fine, but
+ # when there are more damage locations within the priority definition,
+ # it does not guarantee that only the closest damage locations to the
+ # crew-type agents are matched to jobs
+
+ elif target_priority.upper() == 'HYDSIG':
element_type = self._rm.entity[entity]
- if element_type != "PIPE":
- entity_data = self.sortDamageTable(entity_data, entity, agent_type, target_priority_index, order_index, target_priority = "CLOSEST")
+ if element_type != 'PIPE':
+ entity_data = self.sortDamageTable(
+ wn,
+ entity_data,
+ entity,
+ agent_type,
+ target_priority_index,
+ order_index,
+ target_priority='CLOSEST',
+ )
else:
- name_sugest = 'Priority_'+str(target_priority_index)+'_dist'
- hyd_sig = self._rm._registry.hydraulic_significance[entity_data["Orginal_element"] ]
-
+ name_sugest = 'Priority_' + str(target_priority_index) + '_dist'
+ hyd_sig = self._rm._registry.hydraulic_significance[ # noqa: SLF001
+ entity_data['Orginal_element']
+ ]
+
entity_data.loc[:, name_sugest] = hyd_sig.to_list()
- entity_data.sort_values(name_sugest, ascending=False, inplace=True)
- entity_data.drop(columns=name_sugest, inplace=True)
-
- #If element type is not leakable, it does nothing. IF nodes are not
- #Checked (i.e. check is not at the sequnce before the current action)
- #the leak data is real time leak for the damage location.
- elif target_priority.upper() == "MOSTLEAKATCHECK":
- #real_node_name_list = []
+ entity_data.sort_values(name_sugest, ascending=False, inplace=True) # noqa: PD002
+ entity_data.drop(columns=name_sugest, inplace=True) # noqa: PD002
+
+ # If element type is not leakable, it does nothing. IF nodes are not
+ # Checked (i.e. check is not at the sequence before the current action)
+ # the leak data is real time leak for the damage location.
+ elif target_priority.upper() == 'MOSTLEAKATCHECK':
+ # real_node_name_list = []
node_name_list = list(entity_data.index)
- name_sugest = 'Priority_'+str(target_priority_index)+'_leak_sina' #added sina so the possibility of a conflic of name is minimized
- #for node_name in node_name_list:
- #node_name_vir = get_node_name(node_name, entity_data)
- #real_node_name_list.append(node_name_vir)
+ name_sugest = (
+ 'Priority_' + str(target_priority_index) + '_leak_sina'
+ ) # 'sina' added to the name so the possibility of a column-name conflict is minimized
+ # for node_name in node_name_list:
+ # node_name_vir = get_node_name(node_name, entity_data)
+ # real_node_name_list.append(node_name_vir)
element_type = self._rm.entity[entity]
- leak_data = self._rm._registry.getMostLeakAtCheck(node_name_list, element_type)
- if type(leak_data) != type(None):
+ leak_data = self._rm._registry.getMostLeakAtCheck( # noqa: SLF001
+ node_name_list, element_type
+ )
+ if leak_data is not None:
entity_data.loc[node_name_list, name_sugest] = leak_data
- entity_data.sort_values(by=name_sugest, ascending=True, inplace =True)
- entity_data.drop(columns=[name_sugest], inplace= True)
+ entity_data.sort_values(by=name_sugest, ascending=True, inplace=True) # noqa: PD002
+ entity_data.drop(columns=[name_sugest], inplace=True) # noqa: PD002
else:
- entity_data = self.sortDamageTable(entity_data, entity, agent_type, target_priority_index, order_index, target_priority = "CLOSEST")
+ entity_data = self.sortDamageTable(
+ wn,
+ entity_data,
+ entity,
+ agent_type,
+ target_priority_index,
+ order_index,
+ target_priority='CLOSEST',
+ )
else:
- raise ValueError('Unrcognized Secondary Primary: ' + repr(target_priority) )
-
+ raise ValueError(
+ 'Unrecognized secondary priority: ' + repr(target_priority)
+ )
+
return entity_data
-
- def isAgentTypeInPriorityData(self, agent_type):
+
+ def isAgentTypeInPriorityData(self, agent_type): # noqa: N802, D102
return agent_type in self._data
-
-class Jobs():
+
+
+class Jobs: # noqa: D101
def __init__(self, restoration):
- self._rm = restoration
- self._job_list = pd.DataFrame(columns=['agent_type','entity', 'action', 'time_argument'])
- self._effect_defualts = {} #pd.DataFrame(columns=['effect_definition_name', 'method_name','argument','value'])
- self._effect_data = {}
- self._time_overwrite = {}
- self._final_method = {}
- self._once = {}
- def addEffect(self, effect_name, method_name, def_data):
-
+ self._rm = restoration
+ self._job_list = pd.DataFrame(
+ columns=['agent_type', 'entity', 'action', 'time_argument']
+ )
+ self._effect_defualts = {} # pd.DataFrame(columns=['effect_definition_name', 'method_name','argument','value'])
+ self._effect_data = {}
+ self._time_overwrite = {}
+ self._final_method = {}
+ self._once = {}
+
+ def addEffect(self, effect_name, method_name, def_data): # noqa: N802, D102
if effect_name not in self._effect_data:
- self._effect_data[effect_name]=None
-
- if self._effect_data[effect_name]!= None:
+ self._effect_data[effect_name] = None
+
+ if self._effect_data[effect_name] != None: # noqa: E711
if method_name in self._effect_data[effect_name]:
- raise ValueError('Dupplicate method_name is given. Effect name: ' + str(effect_name) + ', ' + str(method_name) )
-
- if self._effect_data[effect_name]==None:
+ raise ValueError(
+ 'Duplicate method_name is given. Effect name: '
+ + str(effect_name)
+ + ', '
+ + str(method_name)
+ )
+
+ if self._effect_data[effect_name] == None: # noqa: E711
temp = {}
temp[method_name] = def_data
- self._effect_data[effect_name]=temp
+ self._effect_data[effect_name] = temp
else:
- self._effect_data[effect_name][method_name]=def_data
-
- def setJob(self, jobs_definition):
+ self._effect_data[effect_name][method_name] = def_data
+
+ def setJob(self, jobs_definition): # noqa: N802, D102
self._job_list = pd.DataFrame.from_records(jobs_definition)
-
+
def _filter(self, agent_type, entity, action):
temp = self._job_list
- temp = temp[(temp[['agent_type', 'entity', 'action']]==[agent_type, entity, action]).all(1)]
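+ # Keep only the job rows whose agent_type, entity, and action all match; exactly one matching row is expected.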
+ temp = temp[
+ (
+ temp[['agent_type', 'entity', 'action']]
+ == [agent_type, entity, action]
+ ).all(1)
+ ]
temp_length = len(temp)
- if temp_length >1:
- raise ValueError('We have more than one job description')
- elif temp_length==0:
- raise ValueError('We have Zero one job description for agent type= '+ repr(agent_type)+', entity= '+repr(entity)+', action= '+repr(action))
+ if temp_length > 1:
+ raise ValueError('We have more than one job description') # noqa: EM101, TRY003
+ elif temp_length == 0: # noqa: RET506
+ raise ValueError(
+ 'We have zero job descriptions for agent type= '
+ + repr(agent_type)
+ + ', entity= '
+ + repr(entity)
+ + ', action= '
+ + repr(action)
+ )
return temp
-
- def getAJobEstimate(self, orginal_element, agent_type, entity, action, method_name, number):
+
+ def getAJobEstimate( # noqa: N802, D102
+ self,
+ orginal_element,
+ agent_type,
+ entity,
+ action,
+ method_name,
+ number,
+ ):
temp = self._filter(agent_type, entity, action)
time_arg = temp['time_argument'].iloc[0]
operation_name = temp['effect'].iloc[0]
@@ -1108,252 +1452,310 @@ def getAJobEstimate(self, orginal_element, agent_type, entity, action, method_na
if 'FIXED_TIME_OVERWRITE' in overwrite_data:
time_arg = overwrite_data['FIXED_TIME_OVERWRITE']
else:
- raise ValueError('Unknown Time Data')
+ raise ValueError('Unknown Time Data') # noqa: EM101, TRY003
time = int(time_arg)
- #try:
- #time_arg = int(time_arg):
- #time = time_arg
- #except:
- #raise ValueError('Unknow time argument: '+str(type(time_arg)))
-
+ # try:
+ # time_arg = int(time_arg):
+ # time = time_arg
+ # except:
+ # raise ValueError('Unknow time argument: '+str(type(time_arg)))
+
once_flag = False
if operation_name in self._once:
if method_name in self._once[operation_name]:
once_flag = True
-
- if once_flag == False:
- time = int(time*number)
-
- #IMPORTANT/sina
- if (method_name==2 or method_name==1) and action == 'reroute':
+
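+ # Unless the effect/method is registered as a one-time ('once') job, the estimated duration scales with the number of damages.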
+ if once_flag == False: # noqa: E712
+ time = int(time * number)
+
+ # IMPORTANT/sina
+ if (method_name == 2 or method_name == 1) and action == 'reroute': # noqa: PLR1714, PLR2004
pass
-
+
return time
-
-
- def getMeanJobTime(self, agent_type, entity, action):
+
+ def getMeanJobTime(self, agent_type, entity, action): # noqa: N802, D102
temp = self._filter(agent_type, entity, action)
time_arg = temp['time_argument'].iloc[0]
- if type(time_arg) == int:
+ if type(time_arg) == int: # noqa: E721
time = time_arg
else:
- raise ValueError('Unknow time argument: '+str(type(time_arg)))
+ raise ValueError('Unknown time argument: ' + str(type(time_arg)))
return time
-
- def getAllEffectByJobData(self, agent_type, action, entity, iWithout_data=True, iOnlyData=False):
+
+ def getAllEffectByJobData( # noqa: N802, D102
+ self,
+ agent_type,
+ action,
+ entity,
+ iWithout_data=True, # noqa: FBT002, ARG002, N803
+ iOnlyData=False, # noqa: FBT002, N803
+ ):
temp = self._filter(agent_type, entity, action)
- all_effect_name = temp['effect'].iloc[0]
-
- if iOnlyData==True:
- return
-
- def addEffectDefaultValue(self,input_dict):
- _key = (input_dict['effect_definition_name'], input_dict['method_name'], input_dict["argument"])
-
+ all_effect_name = temp['effect'].iloc[0] # noqa: F841
+
+ if iOnlyData == True: # noqa: E712
+ return
+
+ def addEffectDefaultValue(self, input_dict): # noqa: N802, D102
+ _key = (
+ input_dict['effect_definition_name'],
+ input_dict['method_name'],
+ input_dict['argument'],
+ )
+
if _key in self._effect_defualts:
- raise ValueError("Duplicate effects definition: {0}, {1}, {2}".format( repr(input_dict['effect_definition_name']), repr(input_dict['method_name']), repr(input_dict["argument"]) ))
-
- self._effect_defualts[_key] = input_dict['value'] #self._effect_defualts.append(temp_s, ignore_index=True)
-
-
- def getEffectsList(self, effect_definition_name, method_name):
- if effect_definition_name==None:
+ raise ValueError(
+ 'Duplicate effects definition: {0}, {1}, {2}'.format( # noqa: EM103, UP030
+ repr(input_dict['effect_definition_name']),
+ repr(input_dict['method_name']),
+ repr(input_dict['argument']),
+ )
+ )
+
+ self._effect_defualts[_key] = input_dict[
+ 'value'
+ ] # self._effect_defualts.append(temp_s, ignore_index=True)
+
+ def getEffectsList(self, effect_definition_name, method_name): # noqa: N802, D102
+ if effect_definition_name == None: # noqa: E711
return []
- if effect_definition_name=='CHECK':
+ if effect_definition_name == 'CHECK':
return [{'EFFECT': 'CHECK'}]
- all_methods = self._effect_data[effect_definition_name]
+ all_methods = self._effect_data[effect_definition_name]
effects_list = all_methods[method_name]
- return effects_list
-
- def getEffectDefinition(self, effect_definition_name, iWithout_data=True):
- all_methods = self._effect_data[effect_definition_name]
-
- if iWithout_data==True and 'DATA' in all_methods:
+ return effects_list # noqa: RET504
+
+ def getEffectDefinition(self, effect_definition_name, iWithout_data=True): # noqa: FBT002, N802, N803, D102
+ all_methods = self._effect_data[effect_definition_name]
+
+ if iWithout_data == True and 'DATA' in all_methods: # noqa: E712
all_methods = copy.deepcopy(all_methods)
all_methods.pop('DATA')
-
+
return all_methods
-
- def getEffectDefinitionName(self, agent_type, action, entity):
+
+ def getEffectDefinitionName(self, agent_type, action, entity): # noqa: N802, D102
temp = self._filter(agent_type, entity, action)
effects_definition_name = temp['effect'].iloc[0]
- return effects_definition_name
-
-
- def chooseMethodForCurrentJob(self, node_name, effects_definition_name, entity):
+ return effects_definition_name # noqa: RET504
+
+ def chooseMethodForCurrentJob(self, node_name, effects_definition_name, entity): # noqa: N802, D102
returned_method = None
- if effects_definition_name==None:
+ if effects_definition_name == None: # noqa: E711
return None
- elif effects_definition_name=='CHECK' or effects_definition_name=='FASTCHECK' or effects_definition_name=='SKIP':
+ elif ( # noqa: RET505
+ effects_definition_name == 'CHECK' # noqa: PLR1714
+ or effects_definition_name == 'FASTCHECK'
+ or effects_definition_name == 'SKIP'
+ ):
return effects_definition_name
else:
- effects_definition = self.getEffectDefinition(effects_definition_name) #self._effect_data[effects_definition_name]
- for method_name, effect_list in effects_definition.items():
- prob_applicability = self.iEffectApplicableByProbability(effects_definition_name, method_name, node_name, entity)
- condition_applicability = self.iEffectApplicableByOtherConditions(effects_definition_name, method_name, node_name, entity)
+ effects_definition = self.getEffectDefinition(
+ effects_definition_name
+ ) # self._effect_data[effects_definition_name]
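+ # Methods are tried in definition order; the first one that passes both the probability draw and the condition check is selected.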
+ for method_name, effect_list in effects_definition.items(): # noqa: B007, PERF102
+ prob_applicability = self.iEffectApplicableByProbability(
+ effects_definition_name, method_name, node_name, entity
+ )
+ condition_applicability = self.iEffectApplicableByOtherConditions(
+ effects_definition_name, method_name, node_name, entity
+ )
if prob_applicability and condition_applicability:
returned_method = method_name
break
-
- if returned_method == None:
- try:
+
+ if returned_method == None: # noqa: E711
+ try: # noqa: SIM105
returned_method = self._final_method[effects_definition_name]
- except:
+ except: # noqa: S110, E722
pass
return returned_method
-
-
- def _getProbability(self, method, iCondition, element_type):
-
- if iCondition==True:
- if 'METHOD_PROBABILITY' in method:
+
+ def _getProbability(self, method, iCondition, element_type): # noqa: ARG002, N802, N803, PLR6301
+ if iCondition == True: # noqa: E712
+ if 'METHOD_PROBABILITY' in method: # noqa: SIM401
probability = method['METHOD_PROBABILITY']
else:
- probability = 1
- #else:
- #if 'METHOD_PROBABILITY' in method:
-
- def _iConditionHolds(self, val1, con, val2):
- if con=='BG':
- if val1 > val2:
+ probability = 1 # noqa: F841
+ # else:
+ # if 'METHOD_PROBABILITY' in method:
+
+ def _iConditionHolds(self, val1, con, val2): # noqa: C901, N802, PLR6301
+ if con == 'BG':
+ if val1 > val2: # noqa: SIM103
return True
- else:
+ else: # noqa: RET505
return False
- elif con=='BG-EQ':
- if val1 >= val2:
+ elif con == 'BG-EQ':
+ if val1 >= val2: # noqa: SIM103
return True
- else:
+ else: # noqa: RET505
return False
- elif con=='LT':
- if val1 < val2:
+ elif con == 'LT':
+ if val1 < val2: # noqa: SIM103
return True
- else:
+ else: # noqa: RET505
return False
- elif con=='LT-IF':
- if val1 <= val2:
+ elif con == 'LT-IF':
+ if val1 <= val2: # noqa: SIM103
return True
- else:
+ else: # noqa: RET505
return False
- elif con=='EQ':
- if val1 == val2:
+ elif con == 'EQ':
+ if val1 == val2: # noqa: SIM103
return True
- else:
+ else: # noqa: RET505
return False
else:
- raise ValueError('Unrecognized condition: '+repr(con))
-
-
- def getDefualtValue(self, effects_definition_name, method_name, argument):
+ raise ValueError('Unrecognized condition: ' + repr(con))
+
+ def getDefualtValue(self, effects_definition_name, method_name, argument): # noqa: N802, D102
_default = self._effect_defualts
value = _default.get((effects_definition_name, method_name, argument), None)
-
- return value
-
- def iEffectApplicableByOtherConditions(self, effects_definition_name, method_name, damaged_node_name, entity):
+
+ return value # noqa: RET504
+
+ def iEffectApplicableByOtherConditions( # noqa: N802, D102
+ self,
+ effects_definition_name,
+ method_name,
+ damaged_node_name,
+ entity,
+ ):
element_type = self._rm.entity[entity]
effects_definition = self.getEffectDefinition(effects_definition_name)
if element_type == 'DISTNODE':
for single_effect in effects_definition[method_name]:
- if "PIDR" in single_effect:
-
+ if 'PIDR' in single_effect:
condition = single_effect['PIDR']
_con = condition[0]
_con_val = condition[1]
- _PIDR_type = self.getDefualtValue(effects_definition_name, method_name, 'PIDR_TYPE')
- if _PIDR_type==None or _PIDR_type=='ASSIGNED_DEMAND':
- old_demand = self._rm._registry._node_damage_table.loc[damaged_node_name, 'Demand1']
- new_demand = self._rm._registry._node_damage_table.loc[damaged_node_name, 'Demand2']
+ _PIDR_type = self.getDefualtValue( # noqa: N806
+ effects_definition_name, method_name, 'PIDR_TYPE'
+ )
+ if _PIDR_type == None or _PIDR_type == 'ASSIGNED_DEMAND': # noqa: E711, PLR1714
+ old_demand = self._rm._registry._node_damage_table.loc[ # noqa: SLF001
+ damaged_node_name, 'Demand1'
+ ]
+ new_demand = self._rm._registry._node_damage_table.loc[ # noqa: SLF001
+ damaged_node_name, 'Demand2'
+ ]
else:
- raise ValueError('unrecognized Setting: '+_PIDR_type)
-
- _PIDR=new_demand/old_demand
-
- iHold = self._iConditionHolds(_PIDR, _con, _con_val)
-
- return iHold
-
+ raise ValueError('Unrecognized setting: ' + _PIDR_type)
+
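+ # PIDR is the ratio of post-damage to pre-damage assigned demand (Demand2 / Demand1); the effect applies only if the stated condition holds for this ratio.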
+ _PIDR = new_demand / old_demand # noqa: N806
+
+ iHold = self._iConditionHolds(_PIDR, _con, _con_val) # noqa: N806
+
+ return iHold # noqa: RET504
+
return True
-
-
-
-
- def iEffectApplicableByProbability(self, effects_definition_name, method_name, damaged_node_name, entity):
-
- _prob=0
- temp=self.getDefualtValue(effects_definition_name, method_name, 'METHOD_PROBABILITY')
- if temp!=None:
- _prob=temp
+
+ def iEffectApplicableByProbability( # noqa: N802, D102
+ self,
+ effects_definition_name,
+ method_name,
+ damaged_node_name,
+ entity,
+ ):
+ _prob = 0
+ temp = self.getDefualtValue(
+ effects_definition_name, method_name, 'METHOD_PROBABILITY'
+ )
+ if temp != None: # noqa: E711
+ _prob = temp
try:
self._check_probability(_prob)
- except Exception as e:
- print('in Method bsaed Probability of method ' +str(method_name)+ ', and definition_name '+str(effects_definition_name)+', :'+str(_prob))
- raise ValueError(e)
-
-# =============================================================================
-# if 'DEFAULT' in self._effect_data[effects_definition_name]:
-# data = self._effect_data[effects_definition_name]['DEFAULT']
-# if 'METHOD_PROBABILITY' in data:
-# if method_name in data['METHOD_PROBABILITY']:
-# _prob=data['METHOD_PROBABILITY'][method_name]
-# try:
-# _check_probability(_prob)
-# except Exception as e:
-# print('in Method bsaed Probability of method ' +method_name+ ', and definition_name '+effects_definition_name)
-# raise ValueError(e)
-# =============================================================================
+ except Exception as e: # noqa: BLE001
+ print( # noqa: T201
+ 'in Method based Probability of method '
+ + str(method_name)
+ + ', and definition_name '
+ + str(effects_definition_name)
+ + ', :'
+ + str(_prob)
+ )
+ raise ValueError(e) # noqa: B904
+
+ # =============================================================================
+ # if 'DEFAULT' in self._effect_data[effects_definition_name]:
+ # data = self._effect_data[effects_definition_name]['DEFAULT']
+ # if 'METHOD_PROBABILITY' in data:
+ # if method_name in data['METHOD_PROBABILITY']:
+ # _prob=data['METHOD_PROBABILITY'][method_name]
+ # try:
+ # _check_probability(_prob)
+ # except Exception as e:
+ # print('in Method bsaed Probability of method ' +method_name+ ', and definition_name '+effects_definition_name)
+ # raise ValueError(e)
+ # =============================================================================
if 'DATA' in self._effect_data[effects_definition_name]:
data = self._effect_data[effects_definition_name]['DATA']
if 'METHOD_PROBABILITY' in data.columns:
-
- element_name = self._rm._registry.getOrginalElement(damaged_node_name, self._rm.entity[entity])
-
- #temp =data[(data[['ELEMENT_NAME','METHOD_NAME']]==[element_name, method_name]).all(1)]
- element_data = data[data['ELEMENT_NAME']==element_name]
+ element_name = self._rm._registry.getOrginalElement( # noqa: SLF001
+ damaged_node_name, self._rm.entity[entity]
+ )
+
+ # temp =data[(data[['ELEMENT_NAME','METHOD_NAME']]==[element_name, method_name]).all(1)]
+ element_data = data[data['ELEMENT_NAME'] == element_name]
if len(element_data) == 0:
pass
else:
- element_method_data = element_data[element_data['METHOD_NAME']==method_name]
+ element_method_data = element_data[
+ element_data['METHOD_NAME'] == method_name
+ ]
if len(element_method_data) == 0:
_prob = 0
elif len(element_method_data) == 1:
_prob = element_method_data['METHOD_PROBABILITY'].iloc[0]
else:
- raise ValueError('Number of probability found for element '+element_name + ', : '+str(len(temp)))
+ raise ValueError(
+ 'More than one probability found for element '
+ + element_name
+ + ', : '
+ + str(len(element_method_data))
+ )
try:
self._check_probability(_prob)
- except Exception as e:
- print('in LIST of method ' +method_name + ', and definition_name '+effects_definition_name)
- raise ValueError(e)
-
+ except Exception as e: # noqa: BLE001
+ print( # noqa: T201
+ 'in LIST of method '
+ + method_name
+ + ', and definition_name '
+ + effects_definition_name
+ )
+ raise ValueError(e) # noqa: B904
+
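+ # Draw a uniform random number; the method is applicable when the draw falls below its (possibly element-specific) probability.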
_rand = random.random()
- #if effects_definition_name == 'MJTRreroute':
- #print(str(method_name) + ' - ' + repr(_prob))
+ # if effects_definition_name == 'MJTRreroute':
+ # print(str(method_name) + ' - ' + repr(_prob))
logger.debug(_prob)
- if _rand<_prob:
+ if _rand < _prob: # noqa: SIM103
return True
return False
-
- def _check_probability(self, _prob):
- mes=None
- _prob=float(_prob)
- if _prob<0:
- raise ValueError("probability cannot be less than 0.")
- elif _prob>1:
- res=False
- raise ValueError("probability cannot be more than 1.")
-
+
+ def _check_probability(self, _prob): # noqa: PLR6301
+ mes = None # noqa: F841
+ _prob = float(_prob)
+ if _prob < 0:
+ raise ValueError('probability cannot be less than 0.') # noqa: EM101, TRY003
+ elif _prob > 1: # noqa: RET506
+ res = False # noqa: F841
+ raise ValueError('probability cannot be more than 1.') # noqa: EM101, TRY003
+
+
# =============================================================================
# class Effects():
# def __init__(self, restoration_model):
# #self._data_table = pd.DataFrame(columns=['effect', 'method_name', 'data_index'])
-#
-#
-#
-#
-#
+#
+#
+#
+#
+#
# #self._data_table.loc[effect_name, 'method'] = method
# #self._data_table.loc[effect_name, 'effect'] = effect
# #self._data_table.loc[effect_name, 'connection'] = connection
diff --git a/modules/systemPerformance/REWET/REWET/restoration/io.py b/modules/systemPerformance/REWET/REWET/restoration/io.py
index 4aeb9fae9..c03595b62 100644
--- a/modules/systemPerformance/REWET/REWET/restoration/io.py
+++ b/modules/systemPerformance/REWET/REWET/restoration/io.py
@@ -1,19 +1,18 @@
-# -*- coding: utf-8 -*-
-"""
-Created on Wed Dec 19 19:10:35 2020
+"""Created on Wed Dec 19 19:10:35 2020
@author: snaeimi
-"""
+""" # noqa: CPY001, D400, INP001
-import io
import logging
-import pandas as pd
from collections import OrderedDict
from pathlib import Path
+import pandas as pd
+
logger = logging.getLogger(__name__)
-#the follwing function is borrowed from WNTR
+
+# the following function is borrowed from WNTR
def _split_line(line):
_vc = line.split(';', 1)
_cmnt = None
@@ -22,17 +21,17 @@ def _split_line(line):
pass
elif len(_vc) == 1:
_vals = _vc[0].split()
- elif _vc[0] == '':
+ elif _vc[0] == '': # noqa: PLC1901
_cmnt = _vc[1]
else:
_vals = _vc[0].split()
_cmnt = _vc[1]
return _vals, _cmnt
-class RestorationIO():
+
+class RestorationIO: # noqa: D101
def __init__(self, restoration_model, definition_file_name):
- """
- Needs a file that contains:
+ """Needs a file that contains:
Parameters
----------
@@ -45,61 +44,75 @@ def __init__(self, restoration_model, definition_file_name):
-------
None.
- """
-
- #some of the following lines have been addopted from WNTR
- self.rm= restoration_model
+ """ # noqa: D400
+ # some of the following lines have been adopted from WNTR
+ self.rm = restoration_model
self.crew_data = {}
- expected_sections=['[FILES]','[ENTITIES]', '[JOBS]','[AGENTS]','[GROUPS]','[PRIORITIES]', '[SHIFTS]','[SEQUENCES]', '[DEFINE]', '[POINTS]']
-
+ expected_sections = [
+ '[FILES]',
+ '[ENTITIES]',
+ '[JOBS]',
+ '[AGENTS]',
+ '[GROUPS]',
+ '[PRIORITIES]',
+ '[SHIFTS]',
+ '[SEQUENCES]',
+ '[DEFINE]',
+ '[POINTS]',
+ ]
+
self.config_file_comment = []
- self.edata = []
-
+ self.edata = []
+
self.sections = OrderedDict()
for sec in expected_sections:
self.sections[sec] = []
-
+
section = None
lnum = 0
edata = {'fname': definition_file_name}
- #Sprint(definition_file_name)
+ # Sprint(definition_file_name)
config_file_path = Path(definition_file_name)
-
+
if config_file_path.is_absolute():
pass
else:
config_file_path = config_file_path.resolve()
-
+
self.config_file_dir = config_file_path.parent
-
- with io.open(definition_file_name, 'r', encoding='utf-8') as f:
+
+ with open(definition_file_name, encoding='utf-8') as f: # noqa: PTH123
for line in f:
lnum += 1
edata['lnum'] = lnum
- line = line.strip()
+ line = line.strip() # noqa: PLW2901
nwords = len(line.split())
if len(line) == 0 or nwords == 0:
# Blank line
continue
- elif line.startswith('['):
+ elif line.startswith('['): # noqa: RET507
vals = line.split()
sec = vals[0].upper()
edata['sec'] = sec
if sec in expected_sections:
section = sec
continue
- else:
- raise RuntimeError('%(fname)s:%(lnum)d: Invalid section "%(sec)s"' % edata)
+ else: # noqa: RET507
+ raise RuntimeError( # noqa: DOC501
+ '%(fname)s:%(lnum)d: Invalid section "%(sec)s"' % edata
+ )
elif section is None and line.startswith(';'):
self.config_file_comment.append(line[1:])
continue
elif section is None:
- raise RuntimeError('%(fname)s:%(lnum)d: Non-comment outside of valid section!' % edata)
+ raise RuntimeError( # noqa: DOC501
+ '%(fname)s:%(lnum)d: Non-comment outside of valid section!'
+ % edata
+ )
# We have text, and we are in a section
self.sections[section].append((lnum, line))
-
# Parse each of the sections
self._read_files()
self._read_shifts()
@@ -111,692 +124,1051 @@ def __init__(self, restoration_model, definition_file_name):
self._read_priorities()
self._read_jobs()
self._read_define()
- #self._read_config()
-
+ # self._read_config()
+
def _read_files(self):
edata = OrderedDict()
- self.file_name=[]
- self._file_data={}
+ self.file_name = []
+ self._file_data = {}
self._file_handle_address = {}
for lnum, line in self.sections['[FILES]']:
edata['lnum'] = lnum
- words, comments = _split_line(line)
+ words, comments = _split_line(line) # noqa: F841
if words is not None and len(words) > 0:
- if len(words) != 2:
+ if len(words) != 2: # noqa: PLR2004
edata['key'] = words[0]
- raise RuntimeError('%(fname)s:%(lnum)-6d %(sec)13s no value provided for %(key)s' % edata)
- file_handle = words[0]
- file_address = words[1]
-
+ raise RuntimeError(
+ '%(fname)s:%(lnum)-6d %(sec)13s no value provided for %(key)s'
+ % edata
+ )
+ file_handle = words[0]
+ file_address = words[1]
+
self._file_handle_address[file_handle] = file_address
-
+
for file_handle, file_address in self._file_handle_address.items():
self._file_data[file_handle] = self._read_each_file(file_address)
- self.rm._files=self._file_data
-
+ self.rm._files = self._file_data # noqa: SLF001
+
def _read_each_file(self, file_address, method=0):
lnum = 0
- iTitle = True
+ iTitle = True # noqa: N806
data_temp = None
- if method==0:
+ if method == 0: # noqa: PLR1702
try:
- raise
- with io.open(file_address, 'r', encoding='utf-8') as f:
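+ # The bare raise forces the except branch below, so in practice the file is always read with pandas (method=1).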
+ raise # noqa: PLE0704
+ with open(file_address, encoding='utf-8') as f: # noqa: PTH123
for line in f:
- line = line.strip()
+ line = line.strip() # noqa: PLW2901
nwords = len(line.split())
if len(line) == 0 or nwords == 0:
# Blank line
continue
- elif line.startswith(';'):
+ elif line.startswith(';'): # noqa: RET507
# comment
continue
else:
lnum += 1
vals = line.split()
- if iTitle == True:
- iTitle = False
+ if iTitle == True: # noqa: E712
+ iTitle = False # noqa: N806
data_temp = pd.DataFrame(columns=vals)
else:
- data_temp.loc[lnum-2] = vals
- except:
+ data_temp.loc[lnum - 2] = vals
+ except: # noqa: E722
data_temp = self._read_each_file(file_address, method=1)
- elif method==1:
+ elif method == 1:
file_address = self.config_file_dir / file_address
data_temp = pd.read_csv(file_address)
else:
- raise ValueError('Uknown method: '+str(method))
+ raise ValueError('Unknown method: ' + str(method))
return data_temp
-
+
def _read_shifts(self):
- #self._shift_data=pd.DataFrame()
- #self._file_handle_address = {}
- for lnum, line in self.sections['[SHIFTS]']:
- #edata['lnum'] = lnum
- words, comments = _split_line(line)
+ # self._shift_data=pd.DataFrame()
+ # self._file_handle_address = {}
+ for lnum, line in self.sections['[SHIFTS]']: # noqa: B007
+ # edata['lnum'] = lnum
+ words, comments = _split_line(line) # noqa: F841
if words is not None and len(words) > 0:
- if len(words) != 3:
- raise RuntimeError('%(fname)s:%(lnum)-6d %(sec)13s no value provided for %(key)s')
- shift_name = words[0]
- shift_begining = int(words[1])*3600
- shift_ending = int(words[2])*3600
-
+ if len(words) != 3: # noqa: PLR2004
+ raise RuntimeError( # noqa: TRY003
+ '%(fname)s:%(lnum)-6d %(sec)13s no value provided for %(key)s' # noqa: EM101
+ )
+ shift_name = words[0]
+ shift_begining = int(words[1]) * 3600
+ shift_ending = int(words[2]) * 3600
+
self.rm.shifting.addShift(shift_name, shift_begining, shift_ending)
-
- def _read_entities(self):
+
+ def _read_entities(self): # noqa: C901
for lnum, line in self.sections['[ENTITIES]']:
arg1 = None
arg2 = None
- words, comments = _split_line(line)
+ words, comments = _split_line(line) # noqa: F841
if words is not None and len(words) > 0:
- if len(words) != 2 and len(words)!=4:
- raise RuntimeError('%(fname)s:%(lnum)-6d %(sec)13s no value provided for %(key)s')
- entity_name = words[0]
- element = words[1].upper()
-
+ if len(words) != 2 and len(words) != 4: # noqa: PLR2004
+ raise RuntimeError( # noqa: TRY003
+ '%(fname)s:%(lnum)-6d %(sec)13s no value provided for %(key)s' # noqa: EM101
+ )
+ entity_name = words[0]
+ element = words[1].upper()
+
if element not in self.rm.ELEMENTS:
raise ValueError('Unknown element line number ' + str(lnum))
-
- #if entity_name in self.rm.entity:
- #raise ValueError('Entity already defined')
-
- if len(words) == 4:
+
+ # if entity_name in self.rm.entity:
+ # raise ValueError('Entity already defined')
+
+ if len(words) == 4: # noqa: PLR2004
arg1 = words[2]
arg2 = words[3]
-
- if (element=='PIPE' and arg1 not in self.rm._registry._pipe_damage_table.columns and arg1!='FILE' and arg1!='NOT_IN_FILE') and (element=='DISTNODE' and arg1 not in self.rm._registry._node_damage_table.columns):
- raise ValueError('Argument 1('+arg1+') is not recognized in line number: ' + str(lnum))
-
- if arg1 == None:
+
+ if ( # noqa: PLR0916
+ element == 'PIPE' # noqa: PLR1714
+ and arg1 not in self.rm._registry._pipe_damage_table.columns # noqa: SLF001
+ and arg1 != 'FILE'
+ and arg1 != 'NOT_IN_FILE'
+ ) and (
+ element == 'DISTNODE'
+ and arg1 not in self.rm._registry._node_damage_table.columns # noqa: SLF001
+ ):
+ raise ValueError(
+ 'Argument 1('
+ + arg1
+ + ') is not recognized in line number: '
+ + str(lnum)
+ )
+
+ if arg1 == None: # noqa: E711
self.rm.entity[entity_name] = element
- ent_rule = [('ALL',None, None)]
-
+ ent_rule = [('ALL', None, None)]
+
if entity_name not in self.rm.entity_rule:
self.rm.entity_rule[entity_name] = ent_rule
else:
self.rm.entity_rule[entity_name].append(ent_rule[0])
-
- self.rm._registry.addAttrToElementDamageTable(element ,entity_name , True)
-
- elif arg1=='FILE' or arg1=='NOT_IN_FILE':
- name_list=self.rm._files[arg2]['ElementID'].unique().tolist()
+
+ self.rm._registry.addAttrToElementDamageTable( # noqa: SLF001
+ element,
+ entity_name,
+ True, # noqa: FBT003
+ )
+
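+ # FILE / NOT_IN_FILE rules reference element IDs listed in an external file read in the [FILES] section.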
+ elif arg1 == 'FILE' or arg1 == 'NOT_IN_FILE': # noqa: PLR1714
+ name_list = self.rm._files[arg2]['ElementID'].unique().tolist() # noqa: SLF001
ent_rule = [(arg1, None, name_list)]
self.rm.entity[entity_name] = element
-
+
if entity_name not in self.rm.entity_rule:
self.rm.entity_rule[entity_name] = ent_rule
- self.rm._registry.addAttrToElementDamageTable(element ,entity_name , True)
+ self.rm._registry.addAttrToElementDamageTable( # noqa: SLF001
+ element,
+ entity_name,
+ True, # noqa: FBT003
+ )
else:
self.rm.entity_rule[entity_name].append(ent_rule[0])
-
+
else:
-
if ':' in arg2:
split_arg = arg2.split(':')
-
- if len(split_arg)!=2:
- raise ValueError('There must be two parts: PART1:PART2. Now there are '+repr(len(split_arg)+' parts. Line number is '+repr(lnum)))
- if split_arg[0]=='':
- raise ValueError('The first part is Empty in line '+repr(lnum))
- if split_arg[1]=='':
- raise ValueError('The second part is Empty in line '+repr(lnum))
+
+ if len(split_arg) != 2: # noqa: PLR2004
+                            raise ValueError(
+                                'There must be two parts: PART1:PART2. Now there are '
+                                + repr(len(split_arg))
+                                + ' parts. Line number is '
+                                + repr(lnum)
+                            )
+ if split_arg[0] == '': # noqa: PLC1901
+ raise ValueError(
+ 'The first part is Empty in line ' + repr(lnum)
+ )
+ if split_arg[1] == '': # noqa: PLC1901
+ raise ValueError(
+ 'The second part is Empty in line ' + repr(lnum)
+ )
else:
- raise ValueError('There must be two parts as a conditio, separted with ":". Example: PART1:PART2 \nPart1 can be one of teh following: EQ, BG, LT, BG-EQ, and LT-EQ. Line number: '+repr(lnum))
-
+ raise ValueError(
+ 'There must be two parts as a condition, separated with ":". Example: PART1:PART2 \nPart1 can be one of the following: EQ, BG, LT, BG-EQ, and LT-EQ. Line number: '
+ + repr(lnum)
+ )
+
rest_of_args = arg2.split(':')
- arg2=rest_of_args[0]
- arg3=rest_of_args[1]
-
+ arg2 = rest_of_args[0]
+ arg3 = rest_of_args[1]
+
try:
temp_arg3 = float(arg3)
- except:
+ except: # noqa: E722
temp_arg3 = str(arg3)
-
- arg3=temp_arg3
+
+ arg3 = temp_arg3
ent_rule = [(arg1, arg2, arg3)]
if entity_name not in self.rm.entity:
-
self.rm.entity[entity_name] = element
self.rm.entity_rule[entity_name] = ent_rule
- self.rm._registry.addAttrToElementDamageTable(element ,entity_name , True)
+ self.rm._registry.addAttrToElementDamageTable( # noqa: SLF001
+ element,
+ entity_name,
+ True, # noqa: FBT003
+ )
else:
if self.rm.entity[entity_name] != element:
- raise ValueError('Element must not chanage in an added condition. Line '+str(lnum))
+ raise ValueError(
+ 'Element must not change in an added condition. Line '
+ + str(lnum)
+ )
self.rm.entity_rule[entity_name].append(ent_rule[0])
-# =============================================================================
-# if element == 'PIPE':
-# #candidate = self.getDamageData(element)
-# #candidate.index.tolist()
-# ent_rule = [element,'ALL']
-# #self.rm.entity[entity_name] = element
-# self.rm.entity_rule[entity_name] = ent_rule
-# self.rm._registry.addAttrToPipeDamageTable(entity_name, True)
-# elif element == 'DISTNODE':
-# ent_rule = [element,'ALL']
-# #self.rm.entity[entity_name]
-# self.rm.entity_rule[entity_name] = ent_rule
-# self.rm._registry.AttrToDistNodeDamageTable(entity_name, True)
-# else:
-# raise ValueError('Element type is not recognized')
-# =============================================================================
-
+
+ # =============================================================================
+ # if element == 'PIPE':
+ # #candidate = self.getDamageData(element)
+ # #candidate.index.tolist()
+ # ent_rule = [element,'ALL']
+ # #self.rm.entity[entity_name] = element
+ # self.rm.entity_rule[entity_name] = ent_rule
+ # self.rm._registry.addAttrToPipeDamageTable(entity_name, True)
+ # elif element == 'DISTNODE':
+ # ent_rule = [element,'ALL']
+ # #self.rm.entity[entity_name]
+ # self.rm.entity_rule[entity_name] = ent_rule
+ # self.rm._registry.AttrToDistNodeDamageTable(entity_name, True)
+ # else:
+ # raise ValueError('Element type is not recognized')
+ # =============================================================================
+
def _read_sequences(self):
- for lnum, line in self.sections['[SEQUENCES]']:
- words, comments = _split_line(line)
+ for lnum, line in self.sections['[SEQUENCES]']: # noqa: B007
+ words, comments = _split_line(line) # noqa: F841
if words is not None and len(words) > 0:
- #if len(words) != 2 or len(words)!=4:
- #raise RuntimeError('%(fname)s:%(lnum)-6d %(sec)13s no value provided for %(key)s' % edata)
- element = words[0].upper()
+ # if len(words) != 2 or len(words)!=4:
+ # raise RuntimeError('%(fname)s:%(lnum)-6d %(sec)13s no value provided for %(key)s' % edata)
+ element = words[0].upper()
seq = []
for arg in words[1:]:
- seq.append(arg)
+ seq.append(arg) # noqa: PERF402
if element in self.rm.sequence:
- raise ValueError('Element already in sequences')
+ raise ValueError('Element already in sequences') # noqa: EM101, TRY003
self.rm.sequence[element] = seq
for el in self.rm.sequence:
if el in self.rm.ELEMENTS:
for action in self.rm.sequence[el]:
- self.rm._registry.addAttrToElementDamageTable(el, action, None)
-
+ self.rm._registry.addAttrToElementDamageTable(el, action, None) # noqa: SLF001
+
def _read_agents(self):
- agent_file_handle={}
- group_names = {}
+ agent_file_handle = {}
+ group_names = {}
group_column = {}
-
- for lnum, line in self.sections['[AGENTS]']:
- #edata['lnum'] = lnum
- words, comments = _split_line(line)
+
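+        # Each [AGENTS] line: <CrewType> FILE <FileHandle> [<GroupName>:<GroupColumn>]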
+ for lnum, line in self.sections['[AGENTS]']: # noqa: B007
+ # edata['lnum'] = lnum
+ words, comments = _split_line(line) # noqa: F841
if words is not None and len(words) > 0:
- _group_name = None
+ _group_name = None
_group_column = None
-
- if len(words) < 3:
- raise RuntimeError('%(fname)s:%(lnum)-6d %(sec)13s no value provided for %(key)s')
+
+ if len(words) < 3: # noqa: PLR2004
+ raise RuntimeError( # noqa: TRY003
+ '%(fname)s:%(lnum)-6d %(sec)13s no value provided for %(key)s' # noqa: EM101
+ )
agent_type = words[0]
- if words[1].upper() == "FILE":
+ if words[1].upper() == 'FILE':
agent_file_handle[words[0]] = words[2]
else:
- raise ValueError("Unknown key")
- if len(words)>=4:
- group_data = words[3]
- _group_name = group_data.split(':')[0]
+ raise ValueError('Unknown key') # noqa: EM101, TRY003
+ if len(words) >= 4: # noqa: PLR2004
+ group_data = words[3]
+ _group_name = group_data.split(':')[0]
_group_column = group_data.split(':')[1]
-
-
- group_names[agent_type] = _group_name
+
+ group_names[agent_type] = _group_name
group_column[agent_type] = _group_column
-
+
for agent_type, file_handle in agent_file_handle.items():
data = self._file_data[file_handle]
-
- #print(file_handle)
- #print(self._file_data[file_handle])
-
+
+ # print(file_handle)
+ # print(self._file_data[file_handle])
+
agent_number = data['Number']
- j=0
- for lnum, line in data.iterrows():
- #try:
+ j = 0
+ for lnum, line in data.iterrows(): # noqa: B007
+ # try:
num = int(agent_number[j])
- #except :
- #print('exception')
- #pass
+ # except :
+ # print('exception')
+ # pass
_r = range(num)
-
+
for i in _r:
agent_name = agent_type + str(j) + str(i)
predefinitions = line.to_dict()
definitions = {}
- definitions['cur_x'] = predefinitions['Curr.-X-Coord']
- definitions['cur_y'] = predefinitions['Curr.-Y-Coord']
- definitions['base_x'] = predefinitions['Home-X-Coord']
- definitions['base_y'] = predefinitions['Home-Y-Coord']
+ definitions['cur_x'] = predefinitions['Curr.-X-Coord']
+ definitions['cur_y'] = predefinitions['Curr.-Y-Coord']
+ definitions['base_x'] = predefinitions['Home-X-Coord']
+ definitions['base_y'] = predefinitions['Home-Y-Coord']
definitions['shift_name'] = predefinitions['Shift']
-
- group_name_temp=None
- if group_names[agent_type] !=None:
- definitions['group'] = predefinitions[group_column[agent_type]]
+
+ group_name_temp = None
+ if group_names[agent_type] != None: # noqa: E711
+ definitions['group'] = predefinitions[
+ group_column[agent_type]
+ ]
group_name_temp = group_names[agent_type]
else:
group_name_temp = 'default'
- definitions['group'] = 'Default'
-
+ definitions['group'] = 'Default'
+
definitions['group_name'] = group_name_temp
- self.rm.agents.addAgent(agent_name ,agent_type, definitions)
- j += 1
-
+ self.rm.agents.addAgent(agent_name, agent_type, definitions)
+ j += 1 # noqa: SIM113
+
def _read_groups(self):
-
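+        # Each [GROUPS] line: <GroupName> <ElementType> FILE <FileHandle> <ElementIDColumn> <GroupColumn>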
for lnum, line in self.sections['[GROUPS]']:
- words, comments = _split_line(line)
-
+ words, comments = _split_line(line) # noqa: F841
+
if words is not None and len(words) > 0:
- if len(words) != 6:
- raise ValueError('error in line: ' + str(lnum) + ": "+repr(len(words)))
- group_name = words[0]
- element_type = words[1]
- arguement = words[2]
- file_handler = words[3]
- element_col_ID = words[4]
- pipe_col_ID = words[5]
-
+ if len(words) != 6: # noqa: PLR2004
+ raise ValueError(
+ 'error in line: ' + str(lnum) + ': ' + repr(len(words))
+ )
+ group_name = words[0]
+ element_type = words[1]
+ argument = words[2]
+ file_handler = words[3]
+ element_col_ID = words[4] # noqa: N806
+ pipe_col_ID = words[5] # noqa: N806
+
if element_type not in self.rm.ELEMENTS:
- raise ValueError('Unknown element type: '+repr(element_type)+', in line: '+repr(lnum))
- if arguement!='FILE':
- raise ValueError('the Only acceptable argument is FILE. Line: '+repr(lnum))
-
- data = self.rm._files[file_handler]
-
+ raise ValueError(
+ 'Unknown element type: '
+ + repr(element_type)
+ + ', in line: '
+ + repr(lnum)
+ )
+ if argument != 'FILE':
+ raise ValueError(
+                        'The only acceptable argument is FILE. Line: ' + repr(lnum)
+ )
+
+ data = self.rm._files[file_handler] # noqa: SLF001
+
if pipe_col_ID not in data:
- raise ValueError(repr(pipe_col_ID) + "not in file handle="+repr(file_handler) )
-
+ raise ValueError(
+ repr(pipe_col_ID)
+                        + ' not in file handle='
+ + repr(file_handler)
+ )
+
if element_col_ID not in data:
- raise ValueError(repr(element_col_ID) + "not in file handle="+repr(file_handler) )
-
- group_list = data[pipe_col_ID]
+ raise ValueError(
+ repr(element_col_ID)
+                        + ' not in file handle='
+ + repr(file_handler)
+ )
+
+ group_list = data[pipe_col_ID]
group_list.index = data[element_col_ID]
-
+
if element_type not in self.rm.group:
- raise ValueError('This error must never happen: '+repr(element_type))
-
+ raise ValueError(
+ 'This error must never happen: ' + repr(element_type)
+ )
+
if group_name in self.rm.group[element_type]:
- raise ValueError('The Group is already identified: '+repr(group_name)+' in line: '+repr(lnum))
-
- self.rm.group[element_type][group_name]=group_list
-
- def _read_points(self):
+ raise ValueError(
+ 'The Group is already identified: '
+ + repr(group_name)
+ + ' in line: '
+ + repr(lnum)
+ )
+
+ self.rm.group[element_type][group_name] = group_list
+
+ def _read_points(self): # noqa: C901
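+        # Each [POINTS] line: <PointGroupName> X1:Y1 [X2:Y2 ...]; repeating a group name on later lines extends that group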
for lnum, line in self.sections['[POINTS]']:
- words, comments = _split_line(line)
-
- if words is None or len(words) < 1: #Empty Line
+ words, comments = _split_line(line) # noqa: F841
+
+ if words is None or len(words) < 1: # Empty Line
continue
-
- if not len(words) >= 2: #Syntax Error
- raise ValueError('Syntax error in line: ' + str(lnum) + "\n" + "Each Point Group must have at least one name and one point coordinate sperated by a ':'" + "\n" + "Example= 'PointGroupName X1:Y1 [X2:Y2 ...]'")
-
- group_name = words[0]
+
+ if not len(words) >= 2: # Syntax Error # noqa: PLR2004
+ raise ValueError(
+ 'Syntax error in line: '
+ + str(lnum)
+ + '\n'
+                    + "Each Point Group must have at least one name and one point coordinate separated by a ':'"
+ + '\n'
+ + "Example= 'PointGroupName X1:Y1 [X2:Y2 ...]'"
+ )
+
+ group_name = words[0]
current_group_point_list = []
-
+
if group_name.upper() in self.rm.reserved_priority_names:
- raise ValueError('Syntax error in line: ' + str(lnum) + "\n" + "Group name " + "'" + group_name + "'"+ " is ambiguous. " + "'" + group_name + " is a reserved priority")
+ raise ValueError(
+ 'Syntax error in line: '
+ + str(lnum)
+ + '\n'
+ + 'Group name '
+ + "'"
+ + group_name
+ + "'"
+ + ' is ambiguous. '
+ + "'"
+ + group_name
+                    + "' is a reserved priority name"
+ )
for word in words[1:]:
- if ":" not in word:
- raise ValueError('Syntax error in line: ' + str(lnum) + "\n" + "'" + word + "'" + " is not an accpetable point coordinate. It must be point coordinate sperated by a ':'" + "\n" + "Example= 'X1:Y1'")
-
- x_y_coord = word.split(":")
- if len(x_y_coord) > 2:
- raise ValueError('Syntax error in line: ' + str(lnum) + "\n" + "Multiple devider (':') in "+ "'" + word + "'" + "It must be point coordinate sperated by a ':'" + "\n" + "Example= 'X1:Y1'")
-
+ if ':' not in word:
+ raise ValueError(
+ 'Syntax error in line: '
+ + str(lnum)
+ + '\n'
+ + "'"
+ + word
+ + "'"
+ + " is not an acceptable point coordinate. It must be point coordinate separated by a ':'"
+ + '\n'
+ + "Example= 'X1:Y1'"
+ )
+
+ x_y_coord = word.split(':')
+ if len(x_y_coord) > 2: # noqa: PLR2004
+ raise ValueError(
+ 'Syntax error in line: '
+ + str(lnum)
+ + '\n'
+ + "Multiple divider (':') in "
+ + "'"
+ + word
+ + "'"
+                        + ". It must be a point coordinate separated by a ':'"
+ + '\n'
+ + "Example= 'X1:Y1'"
+ )
+
x_coord = x_y_coord[0]
y_coord = x_y_coord[1]
-
+
try:
x_coord = float(x_coord)
- except:
- raise ValueError('Syntax error in line: ' + str(lnum) + "\n" + "'" + x_coord + "'" + " in " "'" + word + "'" +" is not a number")
-
+ except: # noqa: E722
+ raise ValueError( # noqa: B904
+ 'Syntax error in line: '
+ + str(lnum)
+ + '\n'
+ + "'"
+ + x_coord
+ + "'"
+ + ' in '
+ "'" + word + "'" + ' is not a number'
+ )
+
try:
y_coord = float(y_coord)
- except:
- raise ValueError('Syntax error in line: ' + str(lnum) + "\n" + "'" + y_coord + "'" + " in " "'" + word + "'" +" is not a number")
-
- current_group_point_list.append((x_coord, y_coord) )
- #print(group_name)
- #print(words[1:])
- if group_name in self.rm.proximity_points: #To Support mutiple line assigment of the same group
+ except: # noqa: E722
+ raise ValueError( # noqa: B904
+ 'Syntax error in line: '
+ + str(lnum)
+ + '\n'
+ + "'"
+ + y_coord
+ + "'"
+ + ' in '
+ "'" + word + "'" + ' is not a number'
+ )
+
+ current_group_point_list.append((x_coord, y_coord))
+ # print(group_name)
+ # print(words[1:])
+ if (
+ group_name in self.rm.proximity_points
+ ): # To Support multiple line assignment of the same group
self.rm.proximity_points[group_name].extend(current_group_point_list)
else:
self.rm.proximity_points[group_name] = current_group_point_list
-
- def _read_priorities(self):
+
+ def _read_priorities(self): # noqa: C901
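+        # Each [PRIORITIES] line: <CrewType> <1|2> <args...>
+        # Type 1 (primary) args are Action:DamageGroup pairs; type 2 (secondary) args are point group or reserved priority names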
agent_type_list = self.rm.agents.getAllAgentTypes()
for lnum, line in self.sections['[PRIORITIES]']:
- words, comments = _split_line(line)
+ words, comments = _split_line(line) # noqa: F841
if words is None or len(words) < 1:
continue
-
- if not len(words) >= 3:
- raise ValueError('Syntax error in line: ' + str(lnum) + "\n" + "Inadequate parametrs to define priority. There must be at least three parametrs, " + repr(len(words))+ " is given." + "\n" + "Example= 'CREW TYPE PriorityType[1 or 2], Action:DamageGroup")
-
- agent_type = words[0]
-
+
+ if not len(words) >= 3: # noqa: PLR2004
+ raise ValueError(
+ 'Syntax error in line: '
+ + str(lnum)
+ + '\n'
+ + 'Inadequate parameters to define priority. There must be at least three parameters, '
+ + repr(len(words))
+                    + ' were given.'
+ + '\n'
+ + "Example= 'CREW TYPE PriorityType[1 or 2], Action:DamageGroup"
+ )
+
+ agent_type = words[0]
+
if agent_type not in agent_type_list:
- raise ValueError('Logical error in line: ' + str(lnum) + "\n" + "Crew type " + "'" + agent_type + "'" + " is not defiend in the crew section.")
-
+ raise ValueError(
+ 'Logical error in line: '
+ + str(lnum)
+ + '\n'
+ + 'Crew type '
+ + "'"
+ + agent_type
+ + "'"
+ + ' is not defined in the crew section.'
+ )
+
try:
priority_type = int(words[1])
- except:
+ except: # noqa: E722
try:
- priority_type = int(float(words[1]) )
- except:
- raise ValueError('Syntax error in line: ' + str(lnum) + "\n" + "'" + priority_type + "'" + " is not an acceptable priority type. Priority type must be either 1 or 2 to define the first or secondary prioirty consecutively." + "\n" + "Example= 'CREW TYPE Prioritytype[1 or 2], Action:DamageGroup")
-
- if priority_type not in [1,2]:
- raise ValueError('Syntax error in line: ' + str(lnum) + "\n" + "'" + priority_type + "'" + " is not an acceptable priority type. Priority type must be either 1 or 2 to define the first or secondary prioirty consecutively." + "\n" + "Example= 'CREW TYPE Prioritytype[1 or 2], Action:DamageGroup")
+ priority_type = int(float(words[1]))
+ except: # noqa: E722
+ raise ValueError( # noqa: B904
+ 'Syntax error in line: '
+ + str(lnum)
+ + '\n'
+ + "'"
+                        + words[1]
+                        + "'"
+                        + ' is not an acceptable priority type. Priority type must be either 1 or 2 to define the primary or secondary priority, respectively.'
+ + '\n'
+ + "Example= 'CREW TYPE Prioritytype[1 or 2], Action:DamageGroup"
+ )
+
+ if priority_type not in [1, 2]: # noqa: PLR6201
+ raise ValueError(
+ 'Syntax error in line: '
+ + str(lnum)
+ + '\n'
+ + "'"
+                    + str(priority_type)
+                    + "'"
+                    + ' is not an acceptable priority type. Priority type must be either 1 or 2 to define the primary or secondary priority, respectively.'
+ + '\n'
+ + "Example= 'CREW TYPE Prioritytype[1 or 2], Action:DamageGroup"
+ )
arg = []
for word in words[2:]:
if priority_type == 1:
if word.find(':') == -1:
- raise ValueError('Syntax error in line: ' + str(lnum) + "\n" + "The devider (':') is lacking. The primary priority " + "'" + word + "'" + " is not an acceptable Primary Priority. A Priority Priority is a consisted of an Action:DamageGroup." + "\n" + "Example= 'CREW TYPE Prioritytype[1], Action:DamageGroup")
+ raise ValueError(
+ 'Syntax error in line: '
+ + str(lnum)
+ + '\n'
+                            + "The divider (':') is missing. The primary priority "
+ + "'"
+ + word
+ + "'"
+                            + ' is not an acceptable Primary Priority. A Primary Priority consists of an Action:DamageGroup.'
+ + '\n'
+ + "Example= 'CREW TYPE Prioritytype[1], Action:DamageGroup"
+ )
split_temp = word.split(':')
-
- if len(split_temp) > 2:
- raise ValueError('Syntax error in line: ' + str(lnum) + "\n" + "More than one devider (':') In the Primary Priority. The primary priority " + "'" + word + "'" + " is not an acceptable Primary Priority. A Priority Priority is a consisted of an Action:DamageGroup." + "\n" + "Example= 'CREW TYPE Prioritytype[1], Action:DamageGroup")
-
- action = split_temp[0]
+
+ if len(split_temp) > 2: # noqa: PLR2004
+ raise ValueError(
+ 'Syntax error in line: '
+ + str(lnum)
+ + '\n'
+                            + "More than one divider (':') in the Primary Priority. The primary priority "
+ + "'"
+ + word
+ + "'"
+                            + ' is not an acceptable Primary Priority. A Primary Priority consists of an Action:DamageGroup.'
+ + '\n'
+ + "Example= 'CREW TYPE Prioritytype[1], Action:DamageGroup"
+ )
+
+ action = split_temp[0]
damage_group = split_temp[1]
-
+
if damage_group not in self.rm.entity:
- raise ValueError('Logical error in line: ' + str(lnum) + "\n" + "DamageGroup " + "'" + damage_group + "'" + " is not an defined. A Priority Priority is a consisted of an Action:DamageGroup." + "\n" + "Example= 'CREW TYPE Prioritytype[1], Action:DamageGroup")
-
+ raise ValueError(
+ 'Logical error in line: '
+ + str(lnum)
+ + '\n'
+ + 'DamageGroup '
+ + "'"
+ + damage_group
+ + "'"
+                            + ' is not defined. A Primary Priority consists of an Action:DamageGroup.'
+ + '\n'
+ + "Example= 'CREW TYPE Prioritytype[1], Action:DamageGroup"
+ )
+
if action not in self.rm.sequence[self.rm.entity[damage_group]]:
- raise ValueError('Logical error in line: ' + str(lnum) + "\n" + "Action " + "'" + action + "'" + " is not an defined in Action Sequence. A Priority Priority is a consisted of an Action:DamageGroup." + "\n" + "Example= 'CREW TYPE Prioritytype[1], Action:DamageGroup")
-
- arg.append((action, damage_group) )
-
- elif priority_type == 2:
- if word not in self.rm.proximity_points and word not in self.rm.reserved_priority_names:
- raise ValueError('Logical error in line: ' + str(lnum) + "\n" + "Secondary Priority " + "'" + word + "'" + " is not defined as a Point Group and is not a Reserved Secondary Priority." + "\n" + "Example= 'CREW TYPE Prioritytype[2] ['Point Group' or 'Reserved Secondary Priority']")
+ raise ValueError(
+ 'Logical error in line: '
+ + str(lnum)
+ + '\n'
+ + 'Action '
+ + "'"
+ + action
+ + "'"
+                            + ' is not defined in the Action Sequence. A Primary Priority consists of an Action:DamageGroup.'
+ + '\n'
+ + "Example= 'CREW TYPE Prioritytype[1], Action:DamageGroup"
+ )
+
+ arg.append((action, damage_group))
+
+ elif priority_type == 2: # noqa: PLR2004
+ if (
+ word not in self.rm.proximity_points
+ and word not in self.rm.reserved_priority_names
+ ):
+ raise ValueError(
+ 'Logical error in line: '
+ + str(lnum)
+ + '\n'
+ + 'Secondary Priority '
+ + "'"
+ + word
+ + "'"
+ + ' is not defined as a Point Group and is not a Reserved Secondary Priority.'
+ + '\n'
+ + "Example= 'CREW TYPE Prioritytype[2] ['Point Group' or 'Reserved Secondary Priority']"
+ )
arg.append(word)
else:
- raise ValueError("Uknown Priority type: "+repr(priority_type))
-
+                    raise ValueError('Unknown Priority type: ' + repr(priority_type))
+
self.rm.priority.addData(agent_type, priority_type, arg)
-
- for crew_type in self.rm.priority._data:
- priority_list = self.rm.priority._data[crew_type]
+
+ for crew_type in self.rm.priority._data: # noqa: SLF001
+ priority_list = self.rm.priority._data[crew_type] # noqa: SLF001
primary_priority_order_list = priority_list[1]
secondary_priority_order_list = priority_list[2]
- if len(primary_priority_order_list) != len(secondary_priority_order_list):
- raise ValueError("Logical error. The number of Primary Priority and Secondary Primary does not match for Crew Trye: " + repr(crew_type) )
-
- not_defined=[]
+ if len(primary_priority_order_list) != len(
+ secondary_priority_order_list
+ ):
+ raise ValueError(
+                    'Logical error. The number of Primary Priorities and Secondary Priorities does not match for Crew Type: '
+ + repr(crew_type)
+ )
+
+ not_defined = []
for agent_type in agent_type_list:
if not self.rm.priority.isAgentTypeInPriorityData(agent_type):
- not_defined.append(agent_type)
-
- if len(not_defined)>0:
- raise ValueError('Logical error. The following agent types are not defined in the prioirty sections:\n'+repr(not_defined))
-
+ not_defined.append(agent_type) # noqa: PERF401
+
+ if len(not_defined) > 0:
+ raise ValueError(
+ 'Logical error. The following agent types are not defined in the priority sections:\n'
+ + repr(not_defined)
+ )
+
def _read_jobs(self):
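+        # Each [JOBS] line: <CrewType> <Action>:<Entity> <TimeDefiner>:<Argument> [<Effect>]; only FIXED is accepted as the time definer here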
jobs_definition = []
for lnum, line in self.sections['[JOBS]']:
-
cur_job_definition = {}
- words, comments = _split_line(line)
-
+ words, comments = _split_line(line) # noqa: F841
+
if words is not None and len(words) > 0:
- if not len(words) >= 3:
- raise ValueError('Not enough arguments. error in line: ' + str(lnum))
- agent_type = words[0]
-
+ if not len(words) >= 3: # noqa: PLR2004
+ raise ValueError(
+ 'Not enough arguments. error in line: ' + str(lnum)
+ )
+ agent_type = words[0]
+
action_entity = words[1]
- if not action_entity.find(':')!=-1:
- raise ValueError('There must be an action and entity seprated by : in line '+str(lnum))
+ if not action_entity.find(':') != -1:
+ raise ValueError(
+ 'There must be an action and entity separated by : in line '
+ + str(lnum)
+ )
split_temp = action_entity.split(':')
action = split_temp[0]
entity = split_temp[1]
-
+
definer_arg = words[2]
- if not definer_arg.find(':')!=-1:
- raise ValueError('There must be an Time Definer and Argument seprated by : in line '+str(lnum))
+ if not definer_arg.find(':') != -1:
+ raise ValueError(
+                        'There must be a Time Definer and Argument separated by : in line '
+ + str(lnum)
+ )
split_temp = definer_arg.split(':')
- definer = split_temp[0]
+ definer = split_temp[0]
argument = split_temp[1]
-
+
if definer.upper() == 'FIXED':
try:
- argument = int(argument)
- except:
- print('exeption handled in _read_jobs')
+ argument = int(argument)
+ except: # noqa: E722
+                        print('exception handled in _read_jobs')  # noqa: T201
else:
- raise ValueError('Definer is not recognized: '+definer)
-
+ raise ValueError('Definer is not recognized: ' + definer)
+
effect = None
- if len(words)>=4:
+ if len(words) >= 4: # noqa: PLR2004
effect = words[3]
-
- cur_job_definition = {'agent_type':agent_type,
- 'entity':entity,
- 'action':action,
- 'time_argument':argument,
- 'effect':effect}
+
+ cur_job_definition = {
+ 'agent_type': agent_type,
+ 'entity': entity,
+ 'action': action,
+ 'time_argument': argument,
+ 'effect': effect,
+ }
jobs_definition.append(cur_job_definition)
self.rm.jobs.setJob(jobs_definition)
-
- def _read_define(self):
- job={}
- used_jobs = self.rm.jobs._job_list.effect.unique().tolist()
+
+ def _read_define(self): # noqa: C901, PLR0912
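+        # Each [DEFINE] line: <EffectName> <MethodName|FILE|DEFAULT> <MainArg:Arg:Val[:Arg:Val...]> ...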
+ job = {} # noqa: F841
+ used_jobs = self.rm.jobs._job_list.effect.unique().tolist() # noqa: SLF001
if None in used_jobs:
used_jobs.remove(None)
-
- #for key in used_effect:
- #job[key]=[]
- for lnum, line in self.sections['[DEFINE]']:
- words, comments = _split_line(line)
+
+ # for key in used_effect:
+ # job[key]=[]
+ for lnum, line in self.sections['[DEFINE]']: # noqa: PLR1702
+ words, comments = _split_line(line) # noqa: F841
if words is not None and len(words) > 0:
- #if not len(words) >= 3:
- #raise ValueError('Not enough arguments. error in line: ' + str(lnum))
+ # if not len(words) >= 3:
+ # raise ValueError('Not enough arguments. error in line: ' + str(lnum))
job_name = words[0]
if job_name not in used_jobs:
- raise ValueError('Effect name not recognized in line '+str(lnum)+' : '+job_name)
- try:
+ raise ValueError(
+ 'Effect name not recognized in line '
+ + str(lnum)
+ + ' : '
+ + job_name
+ )
+ try:
method_name = float(words[1])
- except:
+ except: # noqa: E722
method_name = words[1]
-
- res_list=[]
- flag=False
-
+
+ res_list = []
+ flag = False
+
if method_name == 'FILE':
- file_data = self._read_file_effect(words[2:], job_name)
- self.rm.jobs.addEffect(job_name, 'DATA', file_data)
- continue
-
+ file_data = self._read_file_effect(words[2:], job_name)
+ self.rm.jobs.addEffect(job_name, 'DATA', file_data)
+ continue
+
method_data_list = words[2:]
for method_data in method_data_list:
- res={}
+ res = {}
definition = method_data.split(':')
-
- i=0
- if len(definition)%2!=1:
- raise ValueError('Error in line '+str(lnum))
-
+
+ i = 0
+ if len(definition) % 2 != 1:
+ raise ValueError('Error in line ' + str(lnum))
+
main_arg = None
-
+
while i < len(definition):
arg = definition[i].upper()
- if i==0:
+ if i == 0:
main_arg = arg
i += 1
- res['EFFECT']=main_arg
+ res['EFFECT'] = main_arg
continue
- val = definition[i+1].upper()
-
+ val = definition[i + 1].upper()
+
if main_arg == 'RECONNECT':
if arg == 'PIPESIZE':
if 'PIPESIZEFACTOR' in res:
- raise ValueError('Either pipe size or pipe size factor can be defined')
+ raise ValueError( # noqa: TRY003
+ 'Either pipe size or pipe size factor can be defined' # noqa: EM101
+ )
res['PIPESIZE'] = float(val)
-
+
elif arg == 'PIPESIZEFACTOR':
if 'PIPESIZE' in res:
- raise ValueError('Either pipe size or pipe size factor can be defined')
+ raise ValueError( # noqa: TRY003
+ 'Either pipe size or pipe size factor can be defined' # noqa: EM101
+ )
val = float(val)
- if val>1 or val<0:
- raise ValueError('Pipe Size Factor must be bigger than 0 and less than or eqal to 1: '+str(val))
+ if val > 1 or val < 0:
+ raise ValueError(
+                                        'Pipe Size Factor must be bigger than 0 and less than or equal to 1: '
+ + str(val)
+ )
res['PIPESIZEFACTOR'] = float(val)
elif arg == 'CV':
- if val=='TRUE' or val=='1':
- val=True
- elif val=='FALSE' or val=='0':
- val=False
+ if val == 'TRUE' or val == '1': # noqa: PLR1714
+ val = True
+ elif val == 'FALSE' or val == '0': # noqa: PLR1714
+ val = False
else:
- raise ValueError('Unrecognized value for CV in line '+str(lnum)+': '+val+('Value for CV must be either True or False'))
- res['CV']=val
+ raise ValueError(
+ 'Unrecognized value for CV in line '
+ + str(lnum)
+ + ': '
+ + val
+ + (
+ 'Value for CV must be either True or False'
+ )
+ )
+ res['CV'] = val
elif arg == 'PIPELENGTH':
try:
- val == float(val)
+ val == float(val) # noqa: B015
except Exception as e:
- print("The value for PIPELENGTH must be a number")
- raise e
- res['PIPELENGTH']=val
+ print( # noqa: T201
+ 'The value for PIPELENGTH must be a number'
+ )
+ raise e # noqa: TRY201
+ res['PIPELENGTH'] = val
elif arg == 'PIPEFRICTION':
try:
- val == float(val)
+ val == float(val) # noqa: B015
except Exception as e:
- print("The value for PIPEFRICTION must be a number")
- raise e
- res['PIPEFRICTION']=val
+ print( # noqa: T201
+ 'The value for PIPEFRICTION must be a number'
+ )
+ raise e # noqa: TRY201
+ res['PIPEFRICTION'] = val
else:
- raise ValueError('Unrecognized argument: '+arg+ ', in effect: '+main_arg)
+ raise ValueError(
+ 'Unrecognized argument: '
+ + arg
+ + ', in effect: '
+ + main_arg
+ )
elif main_arg == 'ADD_RESERVOIR':
if arg == 'PUMP':
res['PUMP'] = float(val)
-
+
elif arg == 'CV':
- if val=='TRUE' or val=='1':
- val=True
- elif val=='FALSE' or val=='0':
- val=False
+ if val == 'TRUE' or val == '1': # noqa: PLR1714
+ val = True
+ elif val == 'FALSE' or val == '0': # noqa: PLR1714
+ val = False
else:
- raise ValueError('Unrecognized value for CV in line '+str(lnum)+': '+val+('Value for CV must be either True or False'))
- res['CV']=val
+ raise ValueError(
+ 'Unrecognized value for CV in line '
+ + str(lnum)
+ + ': '
+ + val
+ + (
+ 'Value for CV must be either True or False'
+ )
+ )
+ res['CV'] = val
elif arg == 'ADDEDELEVATION':
val = float(val)
res['ADDEDELEVATION'] = float(val)
else:
- raise ValueError('Unrecognized argument: '+arg+ ', in effect: '+main_arg)
+ raise ValueError(
+ 'Unrecognized argument: '
+ + arg
+ + ', in effect: '
+ + main_arg
+ )
elif main_arg == 'REMOVE_LEAK':
if arg == 'LEAKFACTOR':
val = float(val)
- if val>1 or val<=0:
- raise ValueError('Leak factor must be bigger than 0 and less than or eqal to 1: '+str(val))
+ if val > 1 or val <= 0:
+ raise ValueError(
+                                        'Leak factor must be bigger than 0 and less than or equal to 1: '
+ + str(val)
+ )
res['LEAKFACTOR'] = val
else:
- raise ValueError('Unrecognized argument: '+arg+ ', in effect: '+main_arg)
-
+ raise ValueError(
+ 'Unrecognized argument: '
+ + arg
+ + ', in effect: '
+ + main_arg
+ )
+
elif main_arg == 'COL_CLOSE_PIPE':
- raise ValueError('REPAIR at this stage does not accept any argument')
-
+ raise ValueError( # noqa: TRY003
+                                'COL_CLOSE_PIPE at this stage does not accept any argument'  # noqa: EM101
+ )
+
elif main_arg == 'ISOLATE_DN':
- if arg == 'PIDR': #Post Incident Demand Ratio
-
- if val[0]!='(' or val[-1]!=')' or val.find(',')==-1:
- ValueError("After PIDR the format must be like (CONDIION,VALUE)")
-
+ if arg == 'PIDR': # Post Incident Demand Ratio
+ if (
+ val[0] != '('
+ or val[-1] != ')'
+ or val.find(',') == -1
+ ):
+ ValueError( # noqa: PLW0133
+                                        'After PIDR the format must be like (CONDITION,VALUE)'
+ )
+
val = val.strip('(').strip(')')
- val_split=val.split(',')
- _con=val_split[0].upper()
- _con_val=float(val_split[1])
-
- if not (_con=='BG' or _con=='EQ' or _con=='LT' or _con=='BG-EQ' or _con=='LT-EQ'):
- raise ValueError('Condition is not recognized:' + str(_con))
-
+ val_split = val.split(',')
+ _con = val_split[0].upper()
+ _con_val = float(val_split[1])
+
+ if not (
+ _con == 'BG' # noqa: PLR1714
+ or _con == 'EQ'
+ or _con == 'LT'
+ or _con == 'BG-EQ'
+ or _con == 'LT-EQ'
+ ):
+ raise ValueError(
+ 'Condition is not recognized:' + str(_con)
+ )
+
if _con_val < 0:
- raise ValueError('PIDR condition value cannot be less than zero-->'+repr(_con_val))
-
- res['PIDR']=(_con,_con_val)
-
+ raise ValueError(
+ 'PIDR condition value cannot be less than zero-->'
+ + repr(_con_val)
+ )
+
+ res['PIDR'] = (_con, _con_val)
+
elif main_arg == 'REPAIR':
- raise ValueError('REPAIR at this stage does not accept any argument')
-
+ raise ValueError( # noqa: TRY003
+ 'REPAIR at this stage does not accept any argument' # noqa: EM101
+ )
+
elif method_name.upper() == 'DEFAULT':
-
- try:
- arg=int(arg)
- except:
+ try: # noqa: SIM105
+ arg = int(arg)
+ except: # noqa: S110, E722
pass
-
- if main_arg=='METHOD_PROBABILITY':
- val=float(val)
-
- if val<0:
- raise ValueError('Probability cannot be less than zero. '+' In line '+lnum+' probability: '+val)
- elif val>1:
- raise ValueError('Probability cannot be bigger than 1. ' +' In line '+lnum+' probability: '+val)
- temp={'effect_definition_name':job_name, 'method_name':arg,'argument':main_arg,'value':val}
+
+ if main_arg == 'METHOD_PROBABILITY':
+ val = float(val)
+
+ if val < 0:
+ raise ValueError(
+ 'Probability cannot be less than zero. ' # noqa: ISC003
+ + ' In line '
+                                        + str(lnum)
+ + ' probability: '
+                                        + str(val)
+ )
+ elif val > 1: # noqa: RET506
+ raise ValueError(
+ 'Probability cannot be bigger than 1. ' # noqa: ISC003
+ + ' In line '
+                                        + str(lnum)
+ + ' probability: '
+                                        + str(val)
+ )
+ temp = {
+ 'effect_definition_name': job_name,
+ 'method_name': arg,
+ 'argument': main_arg,
+ 'value': val,
+ }
self.rm.jobs.addEffectDefaultValue(temp)
- #temp={'effect_definition_name':effect_name, 'method_name':arg,'argument':'METHOD_PROBABILITY','value':val}
- elif main_arg=='FINALLY':
+ # temp={'effect_definition_name':effect_name, 'method_name':arg,'argument':'METHOD_PROBABILITY','value':val}
+ elif main_arg == 'FINALLY':
if val.upper() == 'NULL':
val = None
else:
val = None
- print('WARNING: At default line in FINALL section, the third argument is not NULL: ' + str(val) + 'The value is ignored antywhere')
- self.rm.jobs._final_method[job_name] = arg
- elif main_arg=='ONLYONCE':
- try:
+ print( # noqa: T201
+ 'WARNING: At default line in FINAL section, the third argument is not NULL: '
+ + str(val)
+                                        + '. The value is ignored anyway.'
+ )
+ self.rm.jobs._final_method[job_name] = arg # noqa: SLF001
+ elif main_arg == 'ONLYONCE':
+ try: # noqa: SIM105
val = float(val)
- except:
+ except: # noqa: S110, E722
pass
-
- if job_name in self.rm.jobs._once:
- self.rm.jobs._once[job_name].append(val)
+
+ if job_name in self.rm.jobs._once: # noqa: SLF001
+ self.rm.jobs._once[job_name].append(val) # noqa: SLF001
else:
- self.rm.jobs._once[job_name]=[val]
+ self.rm.jobs._once[job_name] = [val] # noqa: SLF001
else:
- raise ValueError('Unrecognized argument in line ' + str(lnum) + ': ' + arg)
-
- flag=True
+ raise ValueError(
+ 'Unrecognized argument in line '
+ + str(lnum)
+ + ': '
+ + arg
+ )
+
+ flag = True
else:
- raise ValueError('Unrecognized argument in line ' + str(lnum) + ': ' + arg)
-
+ raise ValueError(
+ 'Unrecognized argument in line '
+ + str(lnum)
+ + ': '
+ + arg
+ )
+
i += 2
res_list.append(res)
- if flag==False:
+ if flag == False: # noqa: E712
self.rm.jobs.addEffect(job_name, method_name, res_list)
-
-
- #for self.rm.effects.pruneData()
-
+
+ # for self.rm.effects.pruneData()
+
def _read_file_effect(self, file_info, effect_name):
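+        # file_info: [<FileHandle>, '<ARG>:<ColumnName>', ...] with ARG in {ELEMENT_NAME, METHOD_NAME, METHOD_PROBABILITY, FIXED_TIME_OVERWRITE}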
res = {}
-
+
file_handle = file_info[0]
file_data = file_info[1:]
-
- data = self.rm._files[file_handle]
-
- #columns_to_remove = data.columns.tolist()
+
+ data = self.rm._files[file_handle] # noqa: SLF001
+
+ # columns_to_remove = data.columns.tolist()
aliases = {}
for pair in file_data:
if not pair.find(':'):
- raise ValueError('Error in file info. Not Pair: '+pair)
+ raise ValueError('Error in file info. Not Pair: ' + pair)
_arg, val = pair.split(':')
arg = _arg.upper()
-
+
if arg in res:
- raise ValueError('Argument already added: '+_arg)
-
+ raise ValueError('Argument already added: ' + _arg)
+
if val not in data.columns:
- raise ValueError('Value not in file: '+ val)
- if arg == 'ELEMENT_NAME' or arg == 'METHOD_NAME' or arg == 'METHOD_PROBABILITY':
+ raise ValueError('Value not in file: ' + val)
+ if (
+ arg == 'ELEMENT_NAME' # noqa: PLR1714
+ or arg == 'METHOD_NAME'
+ or arg == 'METHOD_PROBABILITY'
+ ):
aliases[arg] = val
- res[arg]= data[val].to_dict()
-
+ res[arg] = data[val].to_dict()
+
elif arg == 'FIXED_TIME_OVERWRITE':
time_overwrite_data = data[val].to_list()
- #self.rm.jobs._job_list[self.rm.jobs._job_list['effect']==effect_name]
- temp_list_for_effect_name = [effect_name]*data[val].size
- _key = list(zip(temp_list_for_effect_name, data[aliases['METHOD_NAME'] ], data[aliases['ELEMENT_NAME'] ]) )
-
- time_overwrite_data = [{'FIXED_TIME_OVERWRITE':int(time_overwrite_data[i]*3600)} for i in range(len(time_overwrite_data))]
- self.rm.jobs._time_overwrite.update(pd.Series(index=_key, data = time_overwrite_data).to_dict())
-
+ # self.rm.jobs._job_list[self.rm.jobs._job_list['effect']==effect_name]
+ temp_list_for_effect_name = [effect_name] * data[val].size
+ _key = list(
+ zip(
+ temp_list_for_effect_name,
+ data[aliases['METHOD_NAME']],
+ data[aliases['ELEMENT_NAME']],
+ )
+ )
+
+ time_overwrite_data = [
+ {'FIXED_TIME_OVERWRITE': int(time_overwrite_data[i] * 3600)}
+ for i in range(len(time_overwrite_data))
+ ]
+ self.rm.jobs._time_overwrite.update( # noqa: SLF001
+ pd.Series(index=_key, data=time_overwrite_data).to_dict()
+ )
+
else:
- raise ValueError('Unrecognized argument in pair: '+_arg)
+ raise ValueError('Unrecognized argument in pair: ' + _arg)
res = pd.DataFrame(res)
- #print(res)
- return res
-
-
+ # print(res)
+ return res # noqa: RET504
+
def _read_config(self):
- """
- reads config files which contains general specification of
+        """Reads config files which contain the general specification of
configurations
Raises
@@ -808,76 +1180,92 @@ def _read_config(self):
-------
None.
-        """
+ """ # noqa: D205, D400, D401
edata = OrderedDict()
- self._crew_file_name=[]
- self._crew_file_type=[]
+ self._crew_file_name = []
+ self._crew_file_type = []
for lnum, line in self.sections['[FILES]']:
edata['lnum'] = lnum
- words, comments = _split_line(line)
+ words, comments = _split_line(line) # noqa: F841
if words is not None and len(words) > 0:
- if len(words) < 2:
+ if len(words) < 2: # noqa: PLR2004
edata['key'] = words[0]
- raise RuntimeError('%(fname)s:%(lnum)-6d %(sec)13s no value provided for %(key)s' % edata)
+ raise RuntimeError(
+ '%(fname)s:%(lnum)-6d %(sec)13s no value provided for %(key)s'
+ % edata
+ )
key = words[0].upper()
-
- if key == "DEMAND_NODES":
+
+ if key == 'DEMAND_NODES':
self._demand_Node_file_name = words[1]
self._read_demand_nodes()
- if key == "CREW":
+ if key == 'CREW':
self._crew_file_type.append(words[1])
self._crew_file_name.append(words[2])
self._read_crew()
-
-
+
def _read_demand_nodes(self):
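+        # Whitespace-delimited file; the header row starts with NODEID and every data row must match the header's column count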
- titles = []
+ titles = [] # noqa: F841
ntitle = 0
lnum = 0
- dtemp=[]
- with io.open(self._demand_Node_file_name, 'r', encoding='utf-8') as f:
+ dtemp = []
+ with open(self._demand_Node_file_name, encoding='utf-8') as f: # noqa: PTH123
for line in f:
lnum += 1
- line = line.strip()
+ line = line.strip() # noqa: PLW2901
nwords = len(line.split())
words = line.split()
if len(line) == 0 or nwords == 0:
# Blank line
continue
- elif line.upper().startswith('NODEID'):
+ elif line.upper().startswith('NODEID'): # noqa: RET507
title = words.copy()
- ntitle = len(words) #we need this to confirm that every line has data for every title(column)
+ ntitle = len(
+ words
+ ) # we need this to confirm that every line has data for every title(column)
continue
elif nwords != ntitle:
- raise ValueError('%{fname}s:%(lnum)d: Number of data does not match number of titles')
+ raise ValueError( # noqa: TRY003
+ '%{fname}s:%(lnum)d: Number of data does not match number of titles' # noqa: EM101
+ )
elif nwords == ntitle:
dtemp.append(words)
else:
- raise ValueError('%{fname}s:%(lnum)d:This error must nnever happen')
+ raise ValueError( # noqa: TRY003
+                        '%{fname}s:%(lnum)d: This error must never happen'  # noqa: EM101
+ )
self.demand_node = pd.DataFrame(dtemp, columns=title)
-
+
def _read_crew(self):
- titles = []
+ titles = [] # noqa: F841
ntitle = 0
lnum = 0
- dtemp=[]
- with io.open(self._crew_file_name[-1], 'r', encoding='utf-8') as f:
+ dtemp = []
+ with open(self._crew_file_name[-1], encoding='utf-8') as f: # noqa: PTH123
for line in f:
lnum += 1
- line = line.strip()
+ line = line.strip() # noqa: PLW2901
nwords = len(line.split())
words = line.split()
if len(line) == 0 or nwords == 0:
# Blank line
continue
- elif line.upper().startswith('DISTYARDID'):
+ elif line.upper().startswith('DISTYARDID'): # noqa: RET507
title = words.copy()
- ntitle = len(words) #we need this to confirm that every line has data for every title(column)
+ ntitle = len(
+ words
+ ) # we need this to confirm that every line has data for every title(column)
continue
elif nwords != ntitle:
- raise ValueError('%{fname}s:%(lnum)d: Number of data does not match number of titles')
+ raise ValueError( # noqa: TRY003
+ '%{fname}s:%(lnum)d: Number of data does not match number of titles' # noqa: EM101
+ )
elif nwords == ntitle:
dtemp.append(words)
else:
- raise ValueError('%{fname}s:%(lnum)d:This error must nnever happen')
- self.crew_data[self._crew_file_type[-1]]=pd.DataFrame(dtemp, columns=title)
\ No newline at end of file
+ raise ValueError( # noqa: TRY003
+                        '%{fname}s:%(lnum)d: This error must never happen'  # noqa: EM101
+ )
+ self.crew_data[self._crew_file_type[-1]] = pd.DataFrame(
+ dtemp, columns=title
+ )
diff --git a/modules/systemPerformance/REWET/REWET/restoration/model.py b/modules/systemPerformance/REWET/REWET/restoration/model.py
index ae72bb518..71f61cad2 100644
--- a/modules/systemPerformance/REWET/REWET/restoration/model.py
+++ b/modules/systemPerformance/REWET/REWET/restoration/model.py
@@ -1,500 +1,801 @@
-# -*- coding: utf-8 -*-
-"""
-Created on Fri Dec 25 05:09:25 2020
+"""Created on Fri Dec 25 05:09:25 2020
@author: snaeimi
-"""
+""" # noqa: CPY001, D400, INP001
import logging
-#import warnings
-import restoration.io as rio
-import restoration.base as base
-import pandas as pd
-import numpy as np
-import copy
import random
+from collections import OrderedDict
+
+import numpy as np
+import pandas as pd
+
+# import warnings
+import restoration.io as rio
+from repair import Repair
+from restoration import base
from restoration.base import get_node_name
-from repair import Repair
-from collections import OrderedDict
logger = logging.getLogger(__name__)
-class Restoration():
+
+class Restoration: # noqa: D101, PLR0904
def __init__(self, conifg_file_name, registry, damage):
- self.ELEMENTS = ['PIPE', 'DISTNODE', 'GNODE', 'TANK','PUMP', 'RESERVOIR']
- self._CONDITIONS = ['EQ','BG','LT','BG-EQ','LT-EQ','NOTEQ']
- self.reserved_priority_names = ["CLOSEST", "MOSTLEAKATCHECK", "HYDSIG", "HYDSIGLASTFLOW"]
- self._hard_event_table = pd.DataFrame(columns=['Requester', 'New', 'Detail'])
+ self.ELEMENTS = ['PIPE', 'DISTNODE', 'GNODE', 'TANK', 'PUMP', 'RESERVOIR']
+ self._CONDITIONS = ['EQ', 'BG', 'LT', 'BG-EQ', 'LT-EQ', 'NOTEQ']
+ self.reserved_priority_names = [
+ 'CLOSEST',
+ 'MOSTLEAKATCHECK',
+ 'HYDSIG',
+ 'HYDSIGLASTFLOW',
+ ]
+ self._hard_event_table = pd.DataFrame(columns=['Requester', 'New', 'Detail'])
self._reminder_time_hard_event = {}
- self.shifting = base.Shifting()
- self.jobs = base.Jobs(self)
- self.agents = base.Agents(registry, self.shifting, self.jobs, registry.restoration_log_book)
- self.proximity_points = {}
- self.priority = base.Priority(self)
- self.repair = Repair(registry)
- self.eq_time = None
- self.restoration_start_time = None
- self.earthquake = None
- self.if_initiated = False
- self.sequence = {}
- self.entity = {}
- self.entity_rule = {}
- self.group = {}
- self.pump_restoration = pd.DataFrame()
- self._damage = damage
- #self.temp =[]
-
+ self.shifting = base.Shifting()
+ self.jobs = base.Jobs(self)
+ self.agents = base.Agents(
+ registry, self.shifting, self.jobs, registry.restoration_log_book
+ )
+ self.proximity_points = {}
+ self.priority = base.Priority(self)
+ self.repair = Repair(registry)
+ self.eq_time = None
+ self.restoration_start_time = None
+ self.earthquake = None
+ self.if_initiated = False
+ self.sequence = {}
+ self.entity = {}
+ self.entity_rule = {}
+ self.group = {}
+ self.pump_restoration = pd.DataFrame()
+ self._damage = damage
+ # self.temp =[]
+
for el in self.ELEMENTS:
- self.group[el]=OrderedDict()
-
+ self.group[el] = OrderedDict()
+
self._registry = registry
- self.dispatch = base.Dispatch(self, registry.settings, method='new')
-
+ self.dispatch = base.Dispatch(self, registry.settings, method='new')
+
rio.RestorationIO(self, conifg_file_name)
- retoration_data = {}
+ retoration_data = {}
retoration_data['sequence'] = self.sequence
- retoration_data['entity' ] = self.entity
- retoration_data['group' ] = self.group
- registry.retoration_data = retoration_data
-
+ retoration_data['entity'] = self.entity
+ retoration_data['group'] = self.group
+ registry.retoration_data = retoration_data
+
self.ApplyOverrides()
-
- def ApplyOverrides(self):
+
+ def ApplyOverrides(self): # noqa: N802, D102
overrides = self._registry.settings.overrides
-
- if "POINTS" in overrides:
- points_overrides = overrides["POINTS"]
+
+ if 'POINTS' in overrides:
+ points_overrides = overrides['POINTS']
for point_group_name in points_overrides:
if point_group_name not in self.proximity_points:
- logger.warning("CAUTION!" + "\n" + "Override Point Group " + repr(point_group_name) + " is not a defined point group in the restoration plan." )
- self.proximity_points[point_group_name] = points_overrides[point_group_name]
-
- def perform_action(self, wn, stop_time):
+ logger.warning(
+ 'CAUTION!' # noqa: ISC003, G003
+ + '\n'
+ + 'Override Point Group '
+ + repr(point_group_name)
+ + ' is not a defined point group in the restoration plan.'
+ )
+ self.proximity_points[point_group_name] = points_overrides[
+ point_group_name
+ ]
+
+ def perform_action(self, wn, stop_time): # noqa: C901, D102
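+        # Advance the restoration state to stop_time: reveal newly discovered damage, restore scheduled
+        # pumps/tanks, release crews that finished their jobs, then dispatch ready crews by priority.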
logger.debug(stop_time)
-
- #checks if the restoration is started
- if self.eq_time == None or self.restoration_start_time == None:
- raise ValueError("restoration is not initiated")
-
- #checks if the stop time is a hard event
- if not self._isHardEvent(stop_time):
- raise RuntimeError("stop time is not a hard event")
-
- #gets the latest damage revealed and reported to the damage board registery
- self.dispatch.updateDiscovery(stop_time)
-
+
+ # checks if the restoration is started
+ if self.eq_time == None or self.restoration_start_time == None: # noqa: E711
+ raise ValueError('restoration is not initiated') # noqa: EM101, TRY003
+
+ # checks if the stop time is a hard event
+ if not self._isHardEvent(stop_time):
+ raise RuntimeError('stop time is not a hard event') # noqa: EM101, TRY003
+
+ # gets the latest damage revealed and reported to the damage board registry
+ self.dispatch.updateDiscovery(stop_time)
+
if self._isHardEvent(stop_time, 'pump'):
- pump_list = (self.pump_restoration[self.pump_restoration['Restore_time']==stop_time])['Pump_ID'].tolist()
-
- #logger.warning(pump_list)
+ pump_list = (
+ self.pump_restoration[
+ self.pump_restoration['Restore_time'] == stop_time
+ ]
+ )['Pump_ID'].tolist()
+
+ # logger.warning(pump_list)
self.repair.restorePumps(pump_list, wn)
-
+
if self._isHardEvent(stop_time, 'tank'):
- tank_list = (self.tank_restoration[self.tank_restoration['Restore_time']==stop_time])['Tank_ID'].tolist()
-
- #logger.warning(tank_list)
+ tank_list = (
+ self.tank_restoration[
+ self.tank_restoration['Restore_time'] == stop_time
+ ]
+ )['Tank_ID'].tolist()
+
+ # logger.warning(tank_list)
self.repair.restoreTanks(tank_list, wn)
-
- if self._isHardEvent(stop_time, 'agent'):
-
- ##logger.debug('INSIDE RELEASE')
+
+ if self._isHardEvent(stop_time, 'agent'): # noqa: PLR1702
+ # logger.debug('INSIDE RELEASE')
released_agents = self.getHardEventDetails(stop_time, 'agent')
logger.warning('-----------------')
-
+
for r_agent in released_agents:
- agent_type = self.agents._agents.loc[r_agent, 'type']
- action = self.agents._agents.loc[r_agent, 'data'].cur_job_action
- entity = self.agents._agents.loc[r_agent, 'data'].cur_job_entity
- effect_definition_name = self.agents._agents.loc[r_agent, 'data'].cur_job_effect_definition_name
- method_name = self.agents._agents.loc[r_agent, 'data'].cur_job_method_name
- damaged_node_name = self.agents._agents.loc[r_agent, 'data'].cur_job_location
- iOngoing = self.agents._agents.loc[r_agent, 'data'].cur_job_ongoing
- element_type = self.entity[entity]
-
- effects_list = self.jobs.getEffectsList(effect_definition_name, method_name)
-
- if iOngoing==False:
- #This must be before apply effect because if not, bypass pipe will not be removed/Sina
- _damage_data = self._registry.getDamageData(element_type , iCopy=False)
- if self.entity[entity]=='PIPE' or self.entity[entity]=='DISTNODE':
-
- orginal_name = _damage_data.loc[damaged_node_name, 'Orginal_element']
-
- collective_damage_data = _damage_data[_damage_data['Orginal_element']==orginal_name]
- collective_damage_data = collective_damage_data[collective_damage_data[action]=='Collective']
- collective_damage_data_name_list = collective_damage_data.index.to_list()
- if len(collective_damage_data_name_list)>0:
- next_action=self.getNextSequence(element_type, action)
- if next_action!=None:
- self._registry.setDamageDataByRowAndColumn(element_type, collective_damage_data_name_list, next_action, False)
- self._registry.setDamageDataByRowAndColumn(element_type, collective_damage_data_name_list, 'discovered', True)
-
-
-
- self._registry.updateElementDamageTable(element_type, action, damaged_node_name, True)
+ agent_type = self.agents._agents.loc[r_agent, 'type'] # noqa: SLF001
+ action = self.agents._agents.loc[r_agent, 'data'].cur_job_action # noqa: SLF001
+ entity = self.agents._agents.loc[r_agent, 'data'].cur_job_entity # noqa: SLF001
+ effect_definition_name = self.agents._agents.loc[ # noqa: SLF001
+ r_agent, 'data'
+ ].cur_job_effect_definition_name
+ method_name = self.agents._agents.loc[ # noqa: SLF001
+ r_agent, 'data'
+ ].cur_job_method_name
+ damaged_node_name = self.agents._agents.loc[ # noqa: SLF001
+ r_agent, 'data'
+ ].cur_job_location
+ iOngoing = self.agents._agents.loc[r_agent, 'data'].cur_job_ongoing # noqa: SLF001, N806
+ element_type = self.entity[entity]
+
+ effects_list = self.jobs.getEffectsList(
+ effect_definition_name, method_name
+ )
+
+ if iOngoing == False: # noqa: E712
+ # This must be before apply effect because if not, bypass pipe will not be removed/Sina
+ _damage_data = self._registry.getDamageData(
+ element_type, iCopy=False
+ )
+ if (
+ self.entity[entity] == 'PIPE'
+ or self.entity[entity] == 'DISTNODE'
+ ):
+ orginal_name = _damage_data.loc[
+ damaged_node_name, 'Orginal_element'
+ ]
+
+ collective_damage_data = _damage_data[
+ _damage_data['Orginal_element'] == orginal_name
+ ]
+ collective_damage_data = collective_damage_data[
+ collective_damage_data[action] == 'Collective'
+ ]
+ collective_damage_data_name_list = (
+ collective_damage_data.index.to_list()
+ )
+ if len(collective_damage_data_name_list) > 0:
+ next_action = self.getNextSequence(element_type, action)
+ if next_action != None: # noqa: E711
+ self._registry.setDamageDataByRowAndColumn(
+ element_type,
+ collective_damage_data_name_list,
+ next_action,
+ False, # noqa: FBT003
+ )
+ self._registry.setDamageDataByRowAndColumn(
+ element_type,
+ collective_damage_data_name_list,
+ 'discovered',
+ True, # noqa: FBT003
+ )
+
+ self._registry.updateElementDamageTable(
+ element_type,
+ action,
+ damaged_node_name,
+ True, # noqa: FBT003
+ )
for single_effect in effects_list:
- self.applyEffect(damaged_node_name, single_effect, element_type, wn, action, stop_time)
-
- next_action=self.getNextSequence(element_type, action)
-
- if next_action!=None:
-
- if type(_damage_data.loc[damaged_node_name, next_action])==str:
+ self.applyEffect(
+ damaged_node_name,
+ single_effect,
+ element_type,
+ wn,
+ action,
+ stop_time,
+ )
+
+ next_action = self.getNextSequence(element_type, action)
+
+ if next_action != None: # noqa: E711
+ if (
+ type(_damage_data.loc[damaged_node_name, next_action]) # noqa: E721
+ == str
+ ):
pass
- elif np.isnan(_damage_data.loc[damaged_node_name, next_action]):
- self._registry.updateElementDamageTable(element_type, next_action, damaged_node_name, False, icheck=True)
+ elif np.isnan(
+ _damage_data.loc[damaged_node_name, next_action]
+ ):
+ self._registry.updateElementDamageTable(
+ element_type,
+ next_action,
+ damaged_node_name,
+ False, # noqa: FBT003
+ icheck=True,
+ )
else:
- self._registry.assignAgenttoLongJob(damaged_node_name, action, entity, None) #potential bug... When there is a long job availible but not a suitable agent to take crae of teh jiob, teh job will be forgotten
-
-
-
+ self._registry.assignAgenttoLongJob(
+ damaged_node_name, action, entity, None
+ ) # potential bug... When there is a long job available but not a suitable agent to take care of the job, the job will be forgotten
+
self.agents.releaseAgent(r_agent)
-
- #checks for shift change and if the stop time is a shift change, changes the shift and update agent data accordingly
+
+ # checks for shift change and if the stop time is a shift change, changes the shift and update agent data accordingly
self.updateShifiting(stop_time)
self.updateAvailability(stop_time)
-
- #gets list of ready agents, (on shift and idle)
+ # gets list of ready agents, (on shift and idle)
ready_agent = self.agents.getReadyAgents()
-
+
ready_agent_types = ready_agent['type'].unique()
- #for each agent type, we get the priority data (entity and action), refine damage data from entity that are waiting for action (action = False)
+ # for each agent type, we get the priority data (entity and action), refine damage data from entity that are waiting for action (action = False)
for agent_type in ready_agent_types:
- typed_ready_agent = ready_agent[ready_agent['type']==agent_type]
- typed_ready_agent._is_copy = None
+ typed_ready_agent = ready_agent[ready_agent['type'] == agent_type]
+ typed_ready_agent._is_copy = None # noqa: SLF001
- if not len(typed_ready_agent)>0:
+ if not len(typed_ready_agent) > 0:
continue
-
- agent_prime_priority_list = self.priority.getPriority(agent_type, 1)
- agent_group_tag_list, agent_group_name = self.agents.getAgentGroupTagList(typed_ready_agent)
+
+ agent_prime_priority_list = self.priority.getPriority(agent_type, 1)
+ agent_group_tag_list, agent_group_name = (
+ self.agents.getAgentGroupTagList(typed_ready_agent)
+ )
non_tagged_typed_ready_agent = typed_ready_agent.copy()
- non_tagged_typed_ready_agent._is_copy=None
+ non_tagged_typed_ready_agent._is_copy = None # noqa: SLF001
for agent_group_tag in agent_group_tag_list:
- typed_ready_agent = non_tagged_typed_ready_agent[non_tagged_typed_ready_agent['group']==agent_group_tag]
- typed_ready_agent._is_copy=None
+ typed_ready_agent = non_tagged_typed_ready_agent[
+ non_tagged_typed_ready_agent['group'] == agent_group_tag
+ ]
+ typed_ready_agent._is_copy = None # noqa: SLF001
order_counter = -1
for prime_priority in agent_prime_priority_list:
order_counter += 1
- action = list(prime_priority)[0]
+ action = list(prime_priority)[0] # noqa: RUF015
entity = list(prime_priority)[1]
damage_data = self._registry.getDamageData(self.entity[entity])
- entity_data = self.refineEntityDamageTable(damage_data, agent_group_name, agent_group_tag, self.entity[entity])
+ entity_data = self.refineEntityDamageTable(
+ damage_data,
+ agent_group_name,
+ agent_group_tag,
+ self.entity[entity],
+ )
if len(entity_data) == 0:
continue
- entity_data = entity_data[(entity_data['discovered']==True)]
- entity_data = entity_data[(entity_data[entity]==True)]
- entity_data = entity_data[(entity_data[action]==False)]
-
- logger.warning('action='+action+', entity='+entity+', len(entity_data)='+repr(len(entity_data))+', OC= '+repr(order_counter))
+ entity_data = entity_data[(entity_data['discovered'] == True)] # noqa: E712
+ entity_data = entity_data[(entity_data[entity] == True)] # noqa: E712
+ entity_data = entity_data[(entity_data[action] == False)] # noqa: E712
+
+ logger.warning(
+ 'action=' # noqa: G003
+ + action
+ + ', entity='
+ + entity
+ + ', len(entity_data)='
+ + repr(len(entity_data))
+ + ', OC= '
+ + repr(order_counter)
+ )
for previous_action in self.sequence[self.entity[entity]]:
if previous_action == action:
break
- entity_data = entity_data[(entity_data[previous_action]!=False)]
-
- vacant_job_list = self._registry.getVacantOnGoingJobs(action, entity)
-
- if len(vacant_job_list)>0 and len(typed_ready_agent)>0:
- self.assignVacantJob(vacant_job_list, typed_ready_agent, entity_data, agent_type, action, entity, stop_time, order_counter,wn)
-
-
- res = self.perform_action_helper(typed_ready_agent, entity_data, agent_type, action, entity, stop_time, order_counter, wn)
-
- if res=='break':
+ entity_data = entity_data[
+ (entity_data[previous_action] != False) # noqa: E712
+ ]
+
+ vacant_job_list = self._registry.getVacantOnGoingJobs(
+ action, entity
+ )
+
+ if len(vacant_job_list) > 0 and len(typed_ready_agent) > 0:
+ self.assignVacantJob(
+ vacant_job_list,
+ typed_ready_agent,
+ entity_data,
+ agent_type,
+ action,
+ entity,
+ stop_time,
+ order_counter,
+ wn,
+ )
+
+ res = self.perform_action_helper(
+ typed_ready_agent,
+ entity_data,
+ agent_type,
+ action,
+ entity,
+ stop_time,
+ order_counter,
+ wn,
+ )
+
+ if res == 'break':
break
- elif res=='continue':
+ elif res == 'continue': # noqa: RET508
continue
-
new_events = self.getNewEventsTime(reset=True)
- self._registry.restoration_log_book.updateAgentLogBook(self.agents._agents, stop_time)
- self._registry.restoration_log_book.updateAgentHistory(self.agents._agents, stop_time)
-
+ self._registry.restoration_log_book.updateAgentLogBook(
+ self.agents._agents, # noqa: SLF001
+ stop_time,
+ )
+ self._registry.restoration_log_book.updateAgentHistory(
+ self.agents._agents, # noqa: SLF001
+ stop_time,
+ )
+
return new_events
-
-
- def perform_action_helper(self, typed_ready_agent, entity_data, agent_type, action, entity, stop_time, order_counter,wn, flag=False):
- ignore_list=[]
- if len(entity_data)==0:
- if flag==True:
- raise RuntimeError('Ongoing and zero-length emtity data does must never appen together.')
+
+ def perform_action_helper( # noqa: C901, D102
+ self,
+ typed_ready_agent,
+ entity_data,
+ agent_type,
+ action,
+ entity,
+ stop_time,
+ order_counter,
+ wn,
+ flag=False, # noqa: FBT002
+ ):
+ ignore_list = []
+ if len(entity_data) == 0:
+ if flag == True: # noqa: E712
+ raise RuntimeError( # noqa: TRY003
+                    'Ongoing and zero-length entity data must never happen together.'  # noqa: EM101
+ )
return 'continue'
- entity_data = self.priority.sortDamageTable(wn, entity_data, entity, agent_type, 2, order_counter) # sort according to the possible secondary priority
-
- for node_name, damage_data in entity_data.iterrows():
-
+ entity_data = self.priority.sortDamageTable(
+ wn, entity_data, entity, agent_type, 2, order_counter
+ ) # sort according to the possible secondary priority
+
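+        # Walk the damaged locations in priority order and dispatch the
+        # nearest ready agent to each one.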
+ for node_name, damage_data in entity_data.iterrows(): # noqa: RET503
if not len(typed_ready_agent) > 0:
break
-
- #if damage_data[action]!=False or node_name in ignore_list:
- if node_name in ignore_list: #if this condition is not here there will be a problem regarding same pipe damages/Sina
+
+ # if damage_data[action]!=False or node_name in ignore_list:
+ if (
+ node_name in ignore_list
+            ):  # without this check, repeated damages on the same pipe cause problems /Sina
continue
-
+
number_of_damages = damage_data['Number_of_damages']
-
- #mean_time_of_job = self.jobs.getMeanJobTime(agent_type, entity, action)
- #if not typed_ready_agent['data'].iloc[0].isOnShift(stop_time + mean_time_of_job + 900):
- #logger.debug('BREAK due to TIME at '+str(stop_time))
- # break
-
- distnace_agent_entity = pd.Series(index=typed_ready_agent.index.tolist(), data=typed_ready_agent.index.tolist())
+
+ # mean_time_of_job = self.jobs.getMeanJobTime(agent_type, entity, action)
+ # if not typed_ready_agent['data'].iloc[0].isOnShift(stop_time + mean_time_of_job + 900):
+ # logger.debug('BREAK due to TIME at '+str(stop_time))
+ # break
+
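+            # Rank the ready agents by distance to this damage location;
+            # the closest agent is tried first.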
+ distnace_agent_entity = pd.Series(
+ index=typed_ready_agent.index.tolist(),
+ data=typed_ready_agent.index.tolist(),
+ )
node_name_vir = get_node_name(node_name, entity_data)
coord = wn.get_node(node_name_vir).coordinates
-
- distnace_agent_entity.apply(lambda x: typed_ready_agent.loc[x, 'data'].getDistanceFromCoordinate(coord))
-
- ##---------------------------------
- ##for agent_name, d_agent in typed_ready_agent.iterrows():
- ##distnace_agent_entity.loc[agent_name] = d_agent['data'].getDistanceFromCoordinate(coord)
- ##---------------------------------
-
- distnace_agent_entity.sort_values(ascending=True, inplace=True)
+
+            distnace_agent_entity = distnace_agent_entity.apply(
+                lambda x: typed_ready_agent.loc[x, 'data'].getDistanceFromCoordinate(
+                    coord  # noqa: B023
+                )
+            )
+
+ # ---------------------------------
+ # for agent_name, d_agent in typed_ready_agent.iterrows():
+ # distnace_agent_entity.loc[agent_name] = d_agent['data'].getDistanceFromCoordinate(coord)
+ # ---------------------------------
+
+ distnace_agent_entity.sort_values(ascending=True, inplace=True) # noqa: PD002
if self.entity[entity] == 'PIPE':
- orginal_element = entity_data.loc[node_name, 'Orginal_element']
+ orginal_element = entity_data.loc[node_name, 'Orginal_element']
else:
- orginal_element = node_name
- #-----------------------------------------------------------
+ orginal_element = node_name
+ # -----------------------------------------------------------
while len(distnace_agent_entity) > 0:
-
choosed_agent_name = distnace_agent_entity.index[0]
-
- if flag==False:
- i_assigned, description, job_gross_time, collective = self.agents.assignsJobToAgent(choosed_agent_name, node_name, entity, action, stop_time, wn, None, number_of_damages, orginal_element)
+
+ if flag == False: # noqa: E712
+ i_assigned, description, job_gross_time, collective = (
+ self.agents.assignsJobToAgent(
+ choosed_agent_name,
+ node_name,
+ entity,
+ action,
+ stop_time,
+ wn,
+ None,
+ number_of_damages,
+ orginal_element,
+ )
+ )
else:
- reminded_time = self._registry.getLongJobRemindedTime(node_name, action, entity)
- i_assigned, description, job_gross_time, collective = self.agents.assignsJobToAgent(choosed_agent_name, node_name, entity, action, stop_time, wn, reminded_time, None, None)
- collective=None #Collective already assigned/Sina
- if i_assigned == False and description == 'ShortOfTime':
+ reminded_time = self._registry.getLongJobRemindedTime(
+ node_name, action, entity
+ )
+ i_assigned, description, job_gross_time, collective = (
+ self.agents.assignsJobToAgent(
+ choosed_agent_name,
+ node_name,
+ entity,
+ action,
+ stop_time,
+ wn,
+ reminded_time,
+ None,
+ None,
+ )
+ )
+ collective = None # Collective already assigned/Sina
+ if i_assigned == False and description == 'ShortOfTime': # noqa: E712
distnace_agent_entity.pop(distnace_agent_entity.index[0])
break
-
- elif i_assigned == False and description == "FASTCHECK":
- self._registry.updateElementDamageTable(self.entity[entity], action, node_name, 'NA', icheck=True)
- next_action=self.getNextSequence(self.entity[entity], action)
- if next_action!=None:
- self._registry.updateElementDamageTable(self.entity[entity], next_action, node_name, False, icheck=True)
+
+ elif i_assigned == False and description == 'FASTCHECK': # noqa: RET508, E712
+ self._registry.updateElementDamageTable(
+ self.entity[entity], action, node_name, 'NA', icheck=True
+ )
+ next_action = self.getNextSequence(self.entity[entity], action)
+ if next_action != None: # noqa: E711
+ self._registry.updateElementDamageTable(
+ self.entity[entity],
+ next_action,
+ node_name,
+ False, # noqa: FBT003
+ icheck=True,
+ )
break
-
- elif i_assigned == False and description == "SKIP":
+
+ elif i_assigned == False and description == 'SKIP': # noqa: E712
break
-
- elif i_assigned == True:
- if collective !=None:
- orginal_element = entity_data.loc[node_name, 'Orginal_element']
- entity_same_element_damage_index = (entity_data[entity_data['Orginal_element']==orginal_element]).index.to_list()
-
- same_element_damage_data = self._registry.getDamageData(self.entity[entity], iCopy=False)
- same_element_damage_data = same_element_damage_data[same_element_damage_data['Orginal_element']==orginal_element]
-
-
-
- same_element_damage_index = same_element_damage_data.index.to_list()
-
-
+
+ elif i_assigned == True: # noqa: E712
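+                    # A 'Collective' assignment groups all damages on the same
+                    # original element into one job; sibling rows are marked and skipped.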
+ if collective != None: # noqa: E711
+ orginal_element = entity_data.loc[
+ node_name, 'Orginal_element'
+ ]
+ entity_same_element_damage_index = (
+ entity_data[
+ entity_data['Orginal_element'] == orginal_element
+ ]
+ ).index.to_list()
+
+ same_element_damage_data = self._registry.getDamageData(
+ self.entity[entity], iCopy=False
+ )
+ same_element_damage_data = same_element_damage_data[
+ same_element_damage_data['Orginal_element']
+ == orginal_element
+ ]
+
+ same_element_damage_index = (
+ same_element_damage_data.index.to_list()
+ )
+
same_element_damage_index.remove(node_name)
entity_same_element_damage_index.remove(node_name)
-
+
ignore_list.extend(same_element_damage_index)
-
- _damage_data = self._registry.getDamageData(self.entity[entity], iCopy=False)
-
- if (_damage_data.loc[same_element_damage_index, action]=='Collective').any():
+
+ _damage_data = self._registry.getDamageData(
+ self.entity[entity], iCopy=False
+ )
+
+ if (
+ _damage_data.loc[same_element_damage_index, action]
+ == 'Collective'
+ ).any():
same_element_damage_index.append(node_name)
- raise ValueError('Hell to the naw'+repr(node_name))
-
- _damage_data.loc[same_element_damage_index, action]=collective #For times later
-
- entity_data.loc[entity_same_element_damage_index, action]='Collective'
-
- self._registry.setDamageDataByRowAndColumn(self.entity[entity], same_element_damage_index, action, 'Collective')
- #tt=self._registry.getDamageData(self.entity[entity], iCopy=False)
-
-
- self._registry.updateElementDamageTable( self.entity[entity], action, node_name, 'On_Going', icheck=not flag)
- typed_ready_agent.drop(choosed_agent_name, inplace=True)
+ raise ValueError('Hell to the naw' + repr(node_name))
+
+ _damage_data.loc[same_element_damage_index, action] = (
+ collective # For times later
+ )
+
+ entity_data.loc[entity_same_element_damage_index, action] = (
+ 'Collective'
+ )
+
+ self._registry.setDamageDataByRowAndColumn(
+ self.entity[entity],
+ same_element_damage_index,
+ action,
+ 'Collective',
+ )
+ # tt=self._registry.getDamageData(self.entity[entity], iCopy=False)
+
+ self._registry.updateElementDamageTable(
+ self.entity[entity],
+ action,
+ node_name,
+ 'On_Going',
+ icheck=not flag,
+ )
+ typed_ready_agent.drop(choosed_agent_name, inplace=True) # noqa: PD002
job_end_time = self.agents.getJobEndTime(choosed_agent_name)
-
- if job_end_time != None and description=='INSIDE_SHIFT':
- modfied_end_time = self._addHardEvent(job_end_time, 'agent', choosed_agent_name, stop_time)
- self._registry.restoration_log_book.addEndTimegentActionToLogBook(choosed_agent_name, stop_time, modfied_end_time)
-
- if self._registry.isThereSuchOngoingLongJob(node_name, action, entity)==True:
- arival_time = self.agents.getJobArivalTime(choosed_agent_name)
- self._registry.deductLongJobTime(node_name, action, entity,job_end_time-arival_time)
+
+ if job_end_time != None and description == 'INSIDE_SHIFT': # noqa: E711
+ modfied_end_time = self._addHardEvent(
+ job_end_time, 'agent', choosed_agent_name, stop_time
+ )
+ self._registry.restoration_log_book.addEndTimegentActionToLogBook(
+ choosed_agent_name, stop_time, modfied_end_time
+ )
+
+ if (
+ self._registry.isThereSuchOngoingLongJob( # noqa: E712
+ node_name, action, entity
+ )
+ == True
+ ):
+ arival_time = self.agents.getJobArivalTime(
+ choosed_agent_name
+ )
+ self._registry.deductLongJobTime(
+ node_name, action, entity, job_end_time - arival_time
+ )
self._registry.removeLongJob(node_name, action, entity)
-
+
break
-
- elif description=='OUTSIDE_SHIFT':
- #logger.warning('cur_time= '+repr(stop_time)+', end_time= '+repr(stop_time+job_gross_time))
- if not self._registry.isThereSuchOngoingLongJob(node_name, action, entity):
- self._registry.addLongJob(node_name, action, entity, job_gross_time, choosed_agent_name)
+
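+                    # The job cannot finish within this shift: keep it as a long job
+                    # and schedule a hard event at the agent's shift end.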
+ elif description == 'OUTSIDE_SHIFT': # noqa: RET508
+ # logger.warning('cur_time= '+repr(stop_time)+', end_time= '+repr(stop_time+job_gross_time))
+ if not self._registry.isThereSuchOngoingLongJob(
+ node_name, action, entity
+ ):
+ self._registry.addLongJob(
+ node_name,
+ action,
+ entity,
+ job_gross_time,
+ choosed_agent_name,
+ )
else:
- self._registry.assignAgenttoLongJob(node_name, action, entity, choosed_agent_name)
-
- end_shift_time =self.agents._agents.loc[choosed_agent_name,'data'].getAgentShiftEndTime(stop_time)
-
-
-
- arival_time = self.agents.getJobArivalTime(choosed_agent_name)
- self._registry.deductLongJobTime(node_name, action, entity,end_shift_time-arival_time)
- modfied_end_time = self._addHardEvent(end_shift_time, 'agent', choosed_agent_name, stop_time)
- self._registry.restoration_log_book.addEndTimegentActionToLogBook(choosed_agent_name, stop_time, modfied_end_time)
-
-
+ self._registry.assignAgenttoLongJob(
+ node_name, action, entity, choosed_agent_name
+ )
+
+ end_shift_time = self.agents._agents.loc[ # noqa: SLF001
+ choosed_agent_name, 'data'
+ ].getAgentShiftEndTime(stop_time)
+
+ arival_time = self.agents.getJobArivalTime(
+ choosed_agent_name
+ )
+ self._registry.deductLongJobTime(
+ node_name, action, entity, end_shift_time - arival_time
+ )
+ modfied_end_time = self._addHardEvent(
+ end_shift_time, 'agent', choosed_agent_name, stop_time
+ )
+ self._registry.restoration_log_book.addEndTimegentActionToLogBook(
+ choosed_agent_name, stop_time, modfied_end_time
+ )
+
break
+ elif job_end_time == None: # noqa: E711
+ raise ValueError('Job is not assigned to the agent') # noqa: EM101, TRY003
else:
- if job_end_time == None:
- raise ValueError('Job is not assigned to the agent')
- else:
- raise ValueError('Unknown description: ' + description)
+ raise ValueError('Unknown description: ' + description)
else:
- raise RuntimeError('i_assigned not boolean')
-
-
- #-----------------------------------------------------------
- #self._registry.updatePipeDamageTableTimeSeries(stop_time)
-
- def assignVacantJob(self, vacant_job_list, typed_ready_agent, entity_data, agent_type, action, entity, stop_time,order_counter,wn):
- if not len(typed_ready_agent)>0:
- raise RuntimeError('This should not happen. We have a condition before in performe action')
- if not len(vacant_job_list)>0:
+ raise RuntimeError('i_assigned not boolean') # noqa: EM101, TRY003
+
+ # -----------------------------------------------------------
+ # self._registry.updatePipeDamageTableTimeSeries(stop_time)
+
+ def assignVacantJob( # noqa: N802, D102
+ self,
+ vacant_job_list,
+ typed_ready_agent,
+ entity_data,
+ agent_type,
+ action,
+ entity,
+ stop_time,
+ order_counter,
+ wn,
+ ):
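+        # Re-assign ready agents to ongoing jobs that currently have no agent
+        # (long jobs left over from a previous shift).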
+ if not len(typed_ready_agent) > 0:
+            raise RuntimeError(  # noqa: TRY003
+                'assignVacantJob called with an empty typed_ready_agent.'  # noqa: EM101
+            )
+ if not len(vacant_job_list) > 0:
return
damage_data = self._registry.getDamageData(self.entity[entity])
- entity_data = pd.DataFrame(columns=damage_data.columns, index=vacant_job_list)
-
- entity_data=entity_data.apply(lambda x: damage_data.loc[x.name], axis=1)
- self.perform_action_helper(typed_ready_agent, entity_data, agent_type, action, entity, stop_time, order_counter, wn, flag=True)
-
-
- def applyEffect(self, damage_node_name, single_effect_data, element_type, wn, action, stop_time):
- effect_type = single_effect_data['EFFECT']
- damage_data = self._registry.getDamageData(element_type, iCopy=False)
+ entity_data = pd.DataFrame(
+ columns=damage_data.columns, index=vacant_job_list
+ )
+
+ entity_data = entity_data.apply(lambda x: damage_data.loc[x.name], axis=1)
+ self.perform_action_helper(
+ typed_ready_agent,
+ entity_data,
+ agent_type,
+ action,
+ entity,
+ stop_time,
+ order_counter,
+ wn,
+ flag=True,
+ )
+
+ def applyEffect( # noqa: C901, N802, D102, PLR0912, PLR0915
+ self,
+ damage_node_name,
+ single_effect_data,
+ element_type,
+ wn,
+ action,
+ stop_time,
+ ):
+ effect_type = single_effect_data['EFFECT']
+ damage_data = self._registry.getDamageData(element_type, iCopy=False)
node_damage_data = damage_data.loc[damage_node_name]
- damage_type = None
-
- if element_type=='PIPE':
- damage_type = damage_data.loc[damage_node_name, 'damage_type']
-
- if effect_type=='CHECK':
- if element_type=='DISTNODE':
+ damage_type = None
+
+ if element_type == 'PIPE':
+ damage_type = damage_data.loc[damage_node_name, 'damage_type']
+
+ if effect_type == 'CHECK': # noqa: PLR1702
+ if element_type == 'DISTNODE':
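+                # Compare the node's pattern-adjusted required demand with what the
+                # solver delivered (demand plus leak) and record both values.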
result = self._registry.result.node
- #damage_table = self._registry.getDamageData('DISTNODE', iCopy=False)
+ # damage_table = self._registry.getDamageData('DISTNODE', iCopy=False)
real_node_name = get_node_name(damage_node_name, damage_data)
if real_node_name in result['leak'].columns:
- leak_demand = result['leak'].loc[stop_time, real_node_name]
+ leak_demand = result['leak'].loc[stop_time, real_node_name]
else:
- leak_demand = 0
- real_demand = result['demand'].loc[stop_time, real_node_name]
+ leak_demand = 0
+ real_demand = result['demand'].loc[stop_time, real_node_name]
total_demand = leak_demand + real_demand
-
-
+
node = wn.get_node(real_node_name)
- pattern_list = node.demand_timeseries_list.pattern_list()
- default_pattern = wn.options.hydraulic.pattern
+ pattern_list = node.demand_timeseries_list.pattern_list()
+ default_pattern = wn.options.hydraulic.pattern
node_pattern_name = None
- if pattern_list[0] != None:
+ if pattern_list[0] != None: # noqa: E711
node_pattern_name = pattern_list[0].name
- elif pattern_list[0] == None and default_pattern != None:
+ elif pattern_list[0] == None and default_pattern != None: # noqa: E711
node_pattern_name = str(default_pattern)
-
- if node_pattern_name==None:
+
+ if node_pattern_name == None: # noqa: E711
multiplier = 1
else:
cur_pattern = wn.get_pattern(node_pattern_name)
multiplier = cur_pattern.at(stop_time)
-
- base_demand = node.base_demand
+
+ base_demand = node.base_demand
required_demand = multiplier * base_demand
if 'virtual_of' in damage_data.columns:
- vir_nodal_damage_list = damage_data[damage_data['virtual_of']==real_node_name]
+ vir_nodal_damage_list = damage_data[
+ damage_data['virtual_of'] == real_node_name
+ ]
vir_nodal_damage_list = vir_nodal_damage_list.index
- if damage_data.loc[vir_nodal_damage_list, 'Demand1'].isna().any():
- self._registry.addNodalDemandChange(vir_nodal_damage_list, required_demand, total_demand) #Sina: maybe make it optional
- else:
- self._registry.addNodalDemandChange(damage_node_name, required_demand, total_demand)
-
- elif element_type == "PIPE":
-
+ if (
+ damage_data.loc[vir_nodal_damage_list, 'Demand1']
+ .isna()
+ .any()
+ ):
+ self._registry.addNodalDemandChange(
+ vir_nodal_damage_list, required_demand, total_demand
+ ) # Sina: maybe make it optional
+ else:
+ self._registry.addNodalDemandChange(
+ damage_node_name, required_demand, total_demand
+ )
+
+ elif element_type == 'PIPE':
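+                # Sum the leak flow at the damage node (plus the paired break node
+                # for a 'break') and store it as LeakAtCheck.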
leak_sum = 0
-
- pipe_damage_table = self._registry._pipe_damage_table
- pipe_break_history = self._registry._pipe_break_history
- damage_type = pipe_damage_table.loc[damage_node_name, 'damage_type']
- available_node_results = self._registry.result.node['demand'].loc[stop_time].dropna()
+
+ pipe_damage_table = self._registry._pipe_damage_table # noqa: SLF001
+ pipe_break_history = self._registry._pipe_break_history # noqa: SLF001
+ damage_type = pipe_damage_table.loc[damage_node_name, 'damage_type']
+ available_node_results = (
+ self._registry.result.node['demand'].loc[stop_time].dropna()
+ )
available_node_results = available_node_results.index
- if damage_type == "break":
+ if damage_type == 'break':
if damage_node_name in pipe_damage_table.index:
- break_node_B = pipe_break_history.loc[damage_node_name, 'Node_B']
+ break_node_B = pipe_break_history.loc[ # noqa: N806
+ damage_node_name, 'Node_B'
+ ]
if break_node_B in available_node_results:
- leak_beark_node_B = self._registry.result.node['demand'].loc[stop_time, break_node_B]
+ leak_beark_node_B = self._registry.result.node[ # noqa: N806
+ 'demand'
+ ].loc[stop_time, break_node_B]
else:
- leak_beark_node_B = 0
+ leak_beark_node_B = 0 # noqa: N806
leak_sum += leak_beark_node_B
else:
- break_node_A = (pipe_break_history[pipe_break_history['Node_B'] == damage_node_name]).iloc[0]["Node_A"]
+ break_node_A = ( # noqa: N806
+ pipe_break_history[
+ pipe_break_history['Node_B'] == damage_node_name
+ ]
+ ).iloc[0]['Node_A']
if break_node_A in available_node_results:
- leak_beark_node_A = self._registry.result.node['demand'].loc[stop_time, break_node_A]
+ leak_beark_node_A = self._registry.result.node[ # noqa: N806
+ 'demand'
+ ].loc[stop_time, break_node_A]
else:
- leak_beark_node_A = 0
- leak_sum += leak_beark_node_A
-
+ leak_beark_node_A = 0 # noqa: N806
+ leak_sum += leak_beark_node_A
+
if damage_node_name in available_node_results:
- leak_damaged_node = self._registry.result.node['demand'].loc[stop_time, damage_node_name]
- leak_sum += leak_damaged_node
-
- self._registry._pipe_damage_table.loc[damage_node_name, 'LeakAtCheck'] = leak_sum
-
-
- elif effect_type=='RECONNECT':
- self._registry.addRestorationDataOnPipe(damage_node_name, stop_time, 'RECONNECT')
+ leak_damaged_node = self._registry.result.node['demand'].loc[
+ stop_time, damage_node_name
+ ]
+ leak_sum += leak_damaged_node
+
+ self._registry._pipe_damage_table.loc[ # noqa: SLF001
+ damage_node_name, 'LeakAtCheck'
+ ] = leak_sum
+
+ elif effect_type == 'RECONNECT':
+ self._registry.addRestorationDataOnPipe(
+ damage_node_name, stop_time, 'RECONNECT'
+ )
middle_pipe_size = None
cv = False
- _length = None
+ _length = None
_friction = None
if 'PIPESIZE' in single_effect_data:
middle_pipe_size = single_effect_data['PIPESIZE']
elif 'PIPESIZEFACTOR' in single_effect_data:
- attached_pipe_name = node_damage_data.attached_element
- attached_pipe = wn.get_link(attached_pipe_name)
+ attached_pipe_name = node_damage_data.attached_element
+ attached_pipe = wn.get_link(attached_pipe_name)
attached_pipe_diameter = attached_pipe.diameter
-
- middle_pipe_size = attached_pipe_diameter*(single_effect_data['PIPESIZEFACTOR']**0.5)
-
- elif 'CV' in single_effect_data: # this has a problem /Sina
+
+ middle_pipe_size = attached_pipe_diameter * (
+ single_effect_data['PIPESIZEFACTOR'] ** 0.5
+ )
+
+ elif 'CV' in single_effect_data: # this has a problem /Sina
cv = single_effect_data['CV']
elif 'PIPELENGTH' in single_effect_data:
_length = single_effect_data['PIPELENGTH']
elif 'PIPEFRICTION' in single_effect_data:
_friction = single_effect_data['PIPEFRICTION']
-
- self.repair.bypassPipe(damage_node_name, middle_pipe_size, damage_type, wn, length=_length, friction=_friction)
-
+
+ self.repair.bypassPipe(
+ damage_node_name,
+ middle_pipe_size,
+ damage_type,
+ wn,
+ length=_length,
+ friction=_friction,
+ )
+
elif effect_type == 'ADD_RESERVOIR':
- pump=None
+ pump = None
middle_pipe_size = None
- cv=False
+ cv = False
if 'PIPESIZE' in single_effect_data:
middle_pipe_size = single_effect_data['PIPESIZE']
elif 'CV' in single_effect_data:
- cv = single_effect_data['CV']
+ cv = single_effect_data['CV'] # noqa: F841
elif 'PUMP' in single_effect_data:
- pump={}
- pump['POWER']=single_effect_data['PUMP']
-
- self.repair.addReservoir(damage_node_name, damage_type, 'PUMP', pump, wn)
+ pump = {}
+ pump['POWER'] = single_effect_data['PUMP']
+
+ self.repair.addReservoir(
+ damage_node_name, damage_type, 'PUMP', pump, wn
+ )
elif 'ADDEDELEVATION' in single_effect_data:
- reservoir={}
+ reservoir = {}
reservoir['ADDEDELEVATION'] = single_effect_data['ADDEDELEVATION']
-
- self.repair.addReservoir(damage_node_name, damage_type, 'ADDEDELEVATION', reservoir, wn)
+
+ self.repair.addReservoir(
+ damage_node_name, damage_type, 'ADDEDELEVATION', reservoir, wn
+ )
else:
- raise ValueError('Unknown parameter. Damaged Node: '+repr(damage_node_name))
-
+ raise ValueError(
+ 'Unknown parameter. Damaged Node: ' + repr(damage_node_name)
+ )
+
elif effect_type == 'REMOVE_LEAK':
factor = None
if 'LEAKFACTOR' in single_effect_data:
@@ -502,9 +803,9 @@ def applyEffect(self, damage_node_name, single_effect_data, element_type, wn, ac
self.repair.removeLeak(damage_node_name, damage_type, wn, factor)
else:
self.repair.removeLeak(damage_node_name, damage_type, wn)
-
+
elif effect_type == 'ISOLATE_DN':
- if 'FACTOR' in single_effect_data:
+ if 'FACTOR' in single_effect_data: # noqa: SIM401
factor = single_effect_data['FACTOR']
else:
factor = 1
@@ -512,53 +813,91 @@ def applyEffect(self, damage_node_name, single_effect_data, element_type, wn, ac
damage_table = self._registry.getDamageData('DISTNODE', iCopy=True)
if 'virtual_of' in damage_table.columns:
real_node_name = get_node_name(damage_node_name, damage_table)
-
+
if self._registry.settings['damage_node_model'] == 'Predefined_demand':
self.repair.removeDemand(real_node_name, factor, wn)
- elif self._registry.settings['damage_node_model'] == 'equal_diameter_emitter' or self._registry.settings['damage_node_model'] == 'equal_diameter_reservoir':
+ elif (
+ self._registry.settings['damage_node_model']
+ == 'equal_diameter_emitter'
+ or self._registry.settings['damage_node_model']
+ == 'equal_diameter_reservoir'
+ ):
self.repair.removeDemand(real_node_name, factor, wn)
self.repair.removeExplicitNodalLeak(real_node_name, factor, wn)
else:
- raise ValueError('Unknown nodal damage method')
-
+ raise ValueError('Unknown nodal damage method') # noqa: EM101, TRY003
+
elif effect_type == 'REPAIR':
- if element_type=='PIPE':
- self._registry.addRestorationDataOnPipe(damage_node_name, stop_time, 'REPAIR')
+ if element_type == 'PIPE':
+ self._registry.addRestorationDataOnPipe(
+ damage_node_name, stop_time, 'REPAIR'
+ )
self.repair.removePipeRepair(damage_node_name, wn, action)
self.repair.repairPipe(damage_node_name, damage_type, wn)
- elif element_type=='DISTNODE':
- if self._registry.settings['Virtual_node'] == True:
- real_node_name = get_node_name(damage_node_name, self._registry._node_damage_table)
- virtual_node_table = self._registry._node_damage_table[self._registry._node_damage_table['Orginal_element']==real_node_name]
- temp = (virtual_node_table[action]==True)
+ elif element_type == 'DISTNODE':
+ if self._registry.settings['Virtual_node'] == True: # noqa: E712
+ real_node_name = get_node_name(
+ damage_node_name,
+ self._registry._node_damage_table, # noqa: SLF001
+ )
+ virtual_node_table = self._registry._node_damage_table[ # noqa: SLF001
+ self._registry._node_damage_table['Orginal_element'] # noqa: SLF001
+ == real_node_name
+ ]
+ temp = virtual_node_table[action] == True # noqa: E712
if temp.all():
self.repairDistNode(real_node_name, wn)
-
+
else:
repaired_number = temp.sum()
- total_number = virtual_node_table['Number_of_damages'].sum()
+ total_number = virtual_node_table['Number_of_damages'].sum()
if self._registry.isVirtualNodeDamaged(damage_node_name):
self._registry.setVirtualNodeRepaired(damage_node_name)
- if self._registry.settings['damage_node_model'] == 'Predefined_demand':
- self.repair.modifyDISTNodeDemandLinearMode(damage_node_name, real_node_name, wn, repaired_number, total_number)
- elif self._registry.settings['damage_node_model'] == 'equal_diameter_emitter':
- self.repair.modifyDISTNodeExplicitLeakEmitter(damage_node_name, real_node_name, wn, repaired_number, total_number)
- elif self._registry.settings['damage_node_model'] == 'equal_diameter_reservoir':
- self.repair.modifyDISTNodeExplicitLeakReservoir(damage_node_name, real_node_name, wn, repaired_number, total_number)
- else:
+ if (
+ self._registry.settings['damage_node_model']
+ == 'Predefined_demand'
+ ):
+ self.repair.modifyDISTNodeDemandLinearMode(
+ damage_node_name,
+ real_node_name,
+ wn,
+ repaired_number,
+ total_number,
+ )
+ elif (
+ self._registry.settings['damage_node_model']
+ == 'equal_diameter_emitter'
+ ):
+ self.repair.modifyDISTNodeExplicitLeakEmitter(
+ damage_node_name,
+ real_node_name,
+ wn,
+ repaired_number,
+ total_number,
+ )
+ elif (
+ self._registry.settings['damage_node_model']
+ == 'equal_diameter_reservoir'
+ ):
+ self.repair.modifyDISTNodeExplicitLeakReservoir(
+ damage_node_name,
+ real_node_name,
+ wn,
+ repaired_number,
+ total_number,
+ )
+ else:
self.repairDistNode(real_node_name, wn)
-
-
+
else:
- raise ValueError('Unknown effect_type: '+repr(effect_type))
+ raise ValueError('Unknown effect_type: ' + repr(effect_type))
- def repairDistNode(self, damage_node_name, wn):
+ def repairDistNode(self, damage_node_name, wn): # noqa: N802, D102
self.repair.removeNodeTemporaryRepair(damage_node_name, wn)
-
- def updateShifiting(self, time):
- """
- Updates shifting with the new time given
-
+
+ def updateShifiting(self, time): # noqa: N802
+ """Updates shifting with the new time given
+
Parameters
----------
time : int
@@ -568,434 +907,571 @@ def updateShifiting(self, time):
-------
None.
- """
- if type(time) != int and type(time) !=float:
- raise ValueError('Time must be integer not ' + str(type(time)))
+ """ # noqa: D400, D401
+ if type(time) != int and type(time) != float: # noqa: E721
+            raise ValueError('Time must be int or float, not ' + str(type(time)))  # noqa: DOC501
time = int(time)
- if time < 0 :
- raise ValueError('Time must be bigger than zero')
+ if time < 0:
+            raise ValueError('Time cannot be negative')  # noqa: DOC501, EM101, TRY003
next_shift_time = self.shifting.getNextShiftTime(time)
- #logger.debug('next shitt time = ' + str(next_shift_time))
+        # logger.debug('next shift time = ' + str(next_shift_time))
self._addHardEvent(int(next_shift_time), 'shift')
-
+
if 'shift' in self._hard_event_table['Requester'].loc[time]:
self.agents.setChangeShift(time, working_check=True)
-
- def updateAvailability(self, time):
- #SINA DELETET IT [URGENT]
-# =============================================================================
-# import pickle
-#
-# with open("av_data.pkl","rb") as f:
-# av_data = pickle.load(f)
-# try:
-# av_data_time = av_data[time]
-# except:
-# av_last_time = 0
-# time_list = list(av_data.keys())
-# time_list.append(time)
-# time_list.sort()
-#
-# time_list = pd.Series(data = time_list)
-# time_index = time_list.searchsorted(time)
-# av_last_time = time_list.iloc[time_index-1]
-#
-# av_data_time = av_data[av_last_time]
-#
-# self.agents._agents.loc[av_data_time.index, 'available'] = av_data_time.to_list()
-# #for agent_type in agent_type_list:
-# return
-# =============================================================================
- agent_type_list = self.agents._agents['type'].unique()
- availible_agent_table = self.agents._agents[self.agents._agents['available'].eq(True)]
+
+ def updateAvailability(self, time): # noqa: N802, D102
+        # SINA DELETE IT [URGENT]
+ # =============================================================================
+ # import pickle
+ #
+ # with open("av_data.pkl","rb") as f:
+ # av_data = pickle.load(f)
+ # try:
+ # av_data_time = av_data[time]
+ # except:
+ # av_last_time = 0
+ # time_list = list(av_data.keys())
+ # time_list.append(time)
+ # time_list.sort()
+ #
+ # time_list = pd.Series(data = time_list)
+ # time_index = time_list.searchsorted(time)
+ # av_last_time = time_list.iloc[time_index-1]
+ #
+ # av_data_time = av_data[av_last_time]
+ #
+ # self.agents._agents.loc[av_data_time.index, 'available'] = av_data_time.to_list()
+ # #for agent_type in agent_type_list:
+ # return
+ # =============================================================================
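+        # Randomly toggle agents of each type so the number available tracks
+        # that type's availability ratio at the elapsed time since the event.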
+ agent_type_list = self.agents._agents['type'].unique() # noqa: SLF001
+ availible_agent_table = self.agents._agents[ # noqa: SLF001
+ self.agents._agents['available'].eq(True) # noqa: FBT003, SLF001
+ ]
for agent_type in agent_type_list:
if time == self.eq_time:
av_r = self.agents.getDefaultAvailabilityRatio(agent_type)
elif time > self.eq_time:
- av_r = self.agents.getAvailabilityRatio(agent_type, time - self.eq_time)
- if av_r > 1:
- av_r = 1
-
- available_typed_table = availible_agent_table[availible_agent_table['type'].eq(agent_type)]
+ av_r = self.agents.getAvailabilityRatio(
+ agent_type, time - self.eq_time
+ )
+ av_r = min(av_r, 1)
+
+ available_typed_table = availible_agent_table[
+ availible_agent_table['type'].eq(agent_type)
+ ]
availible_number = len(available_typed_table)
- all_number = len(self.agents._agents[self.agents._agents['type'].eq(agent_type)] )
+ all_number = len(
+ self.agents._agents[self.agents._agents['type'].eq(agent_type)] # noqa: SLF001
+ )
new_availible_number = np.round(av_r * all_number) - availible_number
-
+
if new_availible_number < 0:
- new_index_list = random.sample(available_typed_table.index.to_list(), int(abs(new_availible_number)) )
- self.agents._agents.loc[new_index_list, 'available'] = False
+ new_index_list = random.sample(
+ available_typed_table.index.to_list(),
+ int(abs(new_availible_number)),
+ )
+ self.agents._agents.loc[new_index_list, 'available'] = False # noqa: SLF001
elif new_availible_number > 0:
- not_available_typed_table = self.agents._agents[(self.agents._agents['type'] == agent_type) & (self.agents._agents['available'] == False)]
- new_index_list = random.sample(not_available_typed_table.index.to_list(), int(new_availible_number))
- self.agents._agents.loc[new_index_list, 'available'] = True
-
- def initializeActiveAgents(self, time):
- for name, data in self.agents._agents.iterrows():
+ not_available_typed_table = self.agents._agents[ # noqa: SLF001
+ (self.agents._agents['type'] == agent_type) # noqa: SLF001
+ & (self.agents._agents['available'] == False) # noqa: SLF001, E712
+ ]
+ new_index_list = random.sample(
+ not_available_typed_table.index.to_list(),
+ int(new_availible_number),
+ )
+ self.agents._agents.loc[new_index_list, 'available'] = True # noqa: SLF001
+
+ def initializeActiveAgents(self, time): # noqa: N802, D102
+ for name, data in self.agents._agents.iterrows(): # noqa: B007, SLF001
agent = data['data']
if agent.isOnShift(time):
data['active'] = True
- #data['ready'] = True
-
- def initializeReadyAgents(self):
- active_agents_name_list = self.agents._agents[self.agents._agents['active'].eq(True)].index
- self.agents._agents.loc[active_agents_name_list, 'ready'] = True
- #for name, data in self.agents._agents.iterrows():
- #f data['active'] == True:
- #data['ready'] = True
-
- #def initializeAvailableAgents(self):
- #ready_agents_name_list = self.agents._agents['ready'].eq(True).index
- #self.agents._agents.loc[ready_agents_name_list, 'available'] = True
-
- def initializeEntities(self, WaterNetwork):
+ # data['ready'] = True
+
+ def initializeReadyAgents(self): # noqa: N802, D102
+ active_agents_name_list = self.agents._agents[ # noqa: SLF001
+ self.agents._agents['active'].eq(True) # noqa: FBT003, SLF001
+ ].index
+ self.agents._agents.loc[active_agents_name_list, 'ready'] = True # noqa: SLF001
+ # for name, data in self.agents._agents.iterrows():
+ # f data['active'] == True:
+ # data['ready'] = True
+
+ # def initializeAvailableAgents(self):
+ # ready_agents_name_list = self.agents._agents['ready'].eq(True).index
+ # self.agents._agents.loc[ready_agents_name_list, 'available'] = True
+
+ def initializeEntities(self, WaterNetwork): # noqa: N802, N803, D102
for entity, val in self.entity_rule.items():
element_type = self.entity[entity]
- if not element_type in self.ELEMENTS:
- raise ValueError('Unknown Element type')
+ if element_type not in self.ELEMENTS:
+ raise ValueError('Unknown Element type') # noqa: EM101, TRY003
if val[0][0] == 'ALL':
- self._registry.setDamageData(element_type, entity, True)
+ self._registry.setDamageData(element_type, entity, True) # noqa: FBT003
else:
-
- res = []
+ res = []
node_res = []
-
+
for line in val:
- attribute = line[0]
- condition = line[1]
+ attribute = line[0]
+ condition = line[1]
condition_value = line[2]
-
- temp, temp_node = self._getRefinedElementList(WaterNetwork, attribute, condition, condition_value, element_type, WaterNetwork)
-
+
+ temp, temp_node = self._getRefinedElementList(
+ WaterNetwork,
+ attribute,
+ condition,
+ condition_value,
+ element_type,
+ WaterNetwork,
+ )
+
res.append(temp)
node_res.append(temp_node)
-
+
union_list = self._unionOfAll(res)
node_res = self._unionOfAll(node_res)
- self._registry.setDamageDataByList(element_type, node_res, entity, True, iCheck=True)
+ self._registry.setDamageDataByList(
+ element_type,
+ node_res,
+ entity,
+ True, # noqa: FBT003
+ iCheck=True,
+ )
+
+ self._registry.setDamageDataByList(
+ element_type,
+ union_list,
+ entity,
+ True, # noqa: FBT003
+ )
- self._registry.setDamageDataByList(element_type, union_list, entity, True)
-
- def removeRecordsWithoutEntities(self, element_type):
-
+ def removeRecordsWithoutEntities(self, element_type): # noqa: N802, D102
entity_list = []
for entity in self.entity:
- if self.entity[entity]==element_type:
- entity_list.append(entity)
-
+ if self.entity[entity] == element_type:
+ entity_list.append(entity) # noqa: PERF401
+
damage_table = self._registry.getDamageData(element_type, iCopy=False)
if len(entity_list) > 0:
-
entities_damaged_table = damage_table[entity_list]
-
- not_asigned_damaged_table = entities_damaged_table[entities_damaged_table.isna().any(1)].index.tolist()
+
+ not_asigned_damaged_table = entities_damaged_table[
+ entities_damaged_table.isna().any(1)
+ ].index.tolist()
else:
not_asigned_damaged_table = damage_table.index.to_list()
- damage_table.drop(not_asigned_damaged_table, inplace=True)
+ damage_table.drop(not_asigned_damaged_table, inplace=True) # noqa: PD002
- def initializeGroups(self):
+ def initializeGroups(self): # noqa: N802, D102
for el in self.ELEMENTS:
- group_name_list=[]
-
- if el in self.group:
+ group_name_list = []
+ if el in self.group:
element_groups_data = self.group[el]
- if len(element_groups_data)<1:
+ if len(element_groups_data) < 1:
temp = self._registry.getListAllElementOrginalName(el).unique()
- element_groups_data['default']=pd.Series(index=temp, data='Default')
-
+ element_groups_data['default'] = pd.Series(
+ index=temp, data='Default'
+ )
+
for group_name in element_groups_data:
- self._registry.addAttrToElementDamageTable(el, group_name, np.nan)
+ self._registry.addAttrToElementDamageTable(
+ el, group_name, np.nan
+ )
group_name_list.append(group_name)
- group_data = element_groups_data[group_name]
+ group_data = element_groups_data[group_name]
- group_location_name_list = self._registry.getDamagedLocationListByOriginalElementList_2(el, group_data)
+ group_location_name_list = (
+ self._registry.getDamagedLocationListByOriginalElementList_2(
+ el, group_data
+ )
+ )
group_cat_list = group_data.reindex(group_location_name_list)
- self._registry.setDamageDataByRowAndColumn(el, group_location_name_list.index.tolist(), group_name, group_cat_list.tolist())
-
+ self._registry.setDamageDataByRowAndColumn(
+ el,
+ group_location_name_list.index.tolist(),
+ group_name,
+ group_cat_list.tolist(),
+ )
+
temp = self._registry.getDamageData(el)
- temp=temp[group_name_list]
+ temp = temp[group_name_list]
- temp_list=[]
- for col_name, col in temp.iteritems():
+ temp_list = []
+ for col_name, col in temp.iteritems(): # noqa: B007
not_na = col.notna()
- not_na = not_na[not_na==False]
-
+ not_na = not_na[not_na == False] # noqa: E712
+
temp_list.append(not_na.index.tolist())
temp_list = self._unionOfAll(temp_list)
- if len(temp_list)>0:
- print("In element: "+repr(el)+', the following damaged locations does not have a assigned group and will not be affected in the course of restoration:\n'+repr(temp_list))
- logger.warning("In element: "+repr(el)+', the following damaged locations does not have a assigned group and will not be affected in the course of restoration:\n'+repr(temp_list))
-
-
+ if len(temp_list) > 0:
+ print( # noqa: T201
+ 'In element: '
+ + repr(el)
+                    + ', the following damaged locations do not have an assigned group and will not be affected in the course of restoration:\n'
+ + repr(temp_list)
+ )
+ logger.warning(
+ 'In element: ' # noqa: G003
+ + repr(el)
+                    + ', the following damaged locations do not have an assigned group and will not be affected in the course of restoration:\n'
+ + repr(temp_list)
+ )
- def initializeGroups_old(self):
+ def initializeGroups_old(self): # noqa: N802, D102
for el in self.ELEMENTS:
- group_name_list=[]
-
- if el in self.group:
+ group_name_list = []
+ if el in self.group:
element_groups_data = self.group[el]
- if len(element_groups_data)<1:
+ if len(element_groups_data) < 1:
temp = self._registry.getListAllElementOrginalName(el).unique()
- element_groups_data['default']=pd.Series(index=temp, data='Default')
-
+ element_groups_data['default'] = pd.Series(
+ index=temp, data='Default'
+ )
+
for group_name in element_groups_data:
- self._registry.addAttrToElementDamageTable(el, group_name, np.nan)
+ self._registry.addAttrToElementDamageTable(
+ el, group_name, np.nan
+ )
group_name_list.append(group_name)
- group_data = element_groups_data[group_name]
+ group_data = element_groups_data[group_name]
- group_location_name_list = self._registry.getDamagedLocationListByOriginalElementList(el, group_data)
- for damage_location, element_name in group_location_name_list.iteritems():
+ group_location_name_list = (
+ self._registry.getDamagedLocationListByOriginalElementList(
+ el, group_data
+ )
+ )
+ for (
+ damage_location,
+ element_name,
+ ) in group_location_name_list.iteritems():
group_cat = group_data.loc[element_name]
- self._registry.setDamageDataByRowAndColumn(el, damage_location, group_name, group_cat)
-
+ self._registry.setDamageDataByRowAndColumn(
+ el, damage_location, group_name, group_cat
+ )
+
temp = self._registry.getDamageData(el)
- temp=temp[group_name_list]
+ temp = temp[group_name_list]
- temp_list=[]
- for col_name, col in temp.iteritems():
+ temp_list = []
+ for col_name, col in temp.iteritems(): # noqa: B007
not_na = col.notna()
- not_na = not_na[not_na==False]
-
+ not_na = not_na[not_na == False] # noqa: E712
+
temp_list.append(not_na.index.tolist())
temp_list = self._unionOfAll(temp_list)
- if len(temp_list)>0:
- print("In element: "+repr(el)+', the following damaged locations does not have a assigned group and will not be affected in the course of restoration:\n'+repr(temp_list))
- logger.warning("In element: "+repr(el)+', the following damaged locations does not have a assigned group and will not be affected in the course of restoration:\n'+repr(temp_list))
-
- def initializeNumberOfDamages(self):
+ if len(temp_list) > 0:
+ print( # noqa: T201
+ 'In element: '
+ + repr(el)
+                    + ', the following damaged locations do not have an assigned group and will not be affected in the course of restoration:\n'
+ + repr(temp_list)
+ )
+ logger.warning(
+ 'In element: ' # noqa: G003
+ + repr(el)
+                    + ', the following damaged locations do not have an assigned group and will not be affected in the course of restoration:\n'
+ + repr(temp_list)
+ )
+
+ def initializeNumberOfDamages(self): # noqa: N802, D102
for element_type in self.ELEMENTS:
- if 'Number_of_damages' not in (self._registry.getDamageData(element_type, iCopy=False)).columns:
- self._registry.addAttrToElementDamageTable(element_type, 'Number_of_damages', 1)
-
- def _unionOfAll(self, in_list):
+ if (
+ 'Number_of_damages'
+ not in (
+ self._registry.getDamageData(element_type, iCopy=False)
+ ).columns
+ ):
+ self._registry.addAttrToElementDamageTable(
+ element_type, 'Number_of_damages', 1
+ )
+
+ def _unionOfAll(self, in_list): # noqa: N802
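+        # Despite its name, this recursively intersects the lists: only items
+        # present in every input list are kept.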
num_of_lists = len(in_list)
-
- if len(in_list)==0:
+
+ if len(in_list) == 0:
return in_list
-
- if len(in_list)==1:
-
- if type(in_list[0])==list:
+
+ if len(in_list) == 1:
+ if type(in_list[0]) == list: # noqa: E721
return in_list[0]
- else:
- raise ValueError('Something is wrong here')
-
- first_list = in_list[0]
+ else: # noqa: RET505
+            raise ValueError('_unionOfAll expects a list of lists')  # noqa: EM101, TRY003
+
+ first_list = in_list[0]
second_list = in_list[1]
- union_list=[]
-
+ union_list = []
+
for item in first_list:
if item in second_list:
- union_list.append(item)
-
- if num_of_lists == 2:
+ union_list.append(item) # noqa: PERF401
+
+ if num_of_lists == 2: # noqa: PLR2004
return union_list
- else:
+ else: # noqa: RET505
in_list.pop(0)
- in_list[0]=union_list
+ in_list[0] = union_list
return self._unionOfAll(in_list)
+ def _getRefinedElementList( # noqa: N802
+ self,
+ WaterNetwork, # noqa: N803
+ attribute,
+ condition,
+ condition_value,
+ element_type,
+ wn,
+ ):
+ res = []
+ node_res = []
- def _getRefinedElementList(self, WaterNetwork, attribute, condition, condition_value, element_type, wn):
+ if element_type == 'PIPE':
+ res = self._getRefinedPipeList(
+ WaterNetwork, attribute, condition, condition_value
+ )
- res = []
- node_res = []
-
- if element_type=="PIPE":
- res = self._getRefinedPipeList(WaterNetwork, attribute, condition, condition_value)
-
- elif element_type=="PUMP":
- res = self._getRefinedPumpList(WaterNetwork, attribute, condition, condition_value)
-
- elif element_type in ['DISTNODE', 'GNODE', 'TANK', 'PUMP', 'RESERVOIR']:
- res, node_res = self._getRefinedNodeElementList(WaterNetwork, attribute, condition, condition_value, element_type, wn)
+ elif element_type == 'PUMP':
+ res = self._getRefinedPumpList(
+ WaterNetwork, attribute, condition, condition_value
+ )
+
+ elif element_type in ['DISTNODE', 'GNODE', 'TANK', 'PUMP', 'RESERVOIR']: # noqa: PLR6201
+ res, node_res = self._getRefinedNodeElementList(
+ WaterNetwork, attribute, condition, condition_value, element_type, wn
+ )
else:
- raise ValueError('Unknown Element Type:'+str(element_type))
-
- return res, node_res
-
- def refineEntityDamageTable(self, damaged_table, group_name, agent_group_tag, element_type):
- ret=[]
- #logger.warning('Sina')
- if group_name == None:
+ raise ValueError('Unknown Element Type:' + str(element_type))
+
+ return res, node_res
+
+ def refineEntityDamageTable( # noqa: D102, N802, PLR6301
+ self,
+ damaged_table,
+ group_name,
+ agent_group_tag,
+ element_type,
+ ):
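+        # Keep only the damage rows whose group column matches this agent's group
+        # tag; if no group name is given, the whole table is returned.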
+ ret = []
+ # logger.warning('Sina')
+ if group_name == None: # noqa: E711
ret = damaged_table
- #logger.warning('1')
-
+ # logger.warning('1')
+
elif group_name in damaged_table.columns:
- #logger.warning('2')
- #logger.warning(group_name)
- #logger.warning(agent_group_tag)
- #logger.warning(damaged_table[damaged_table[group_name]==agent_group_tag])
- #agent_type = damaged_table['type'].iloc[0]
- ret = damaged_table[damaged_table[group_name]==agent_group_tag]
- if len(ret)==0:
- logger.warning('Empty damage table in element type='+repr(element_type)+'group name='+repr(group_name)+', group_tag='+repr(agent_group_tag))
+ # logger.warning('2')
+ # logger.warning(group_name)
+ # logger.warning(agent_group_tag)
+ # logger.warning(damaged_table[damaged_table[group_name]==agent_group_tag])
+ # agent_type = damaged_table['type'].iloc[0]
+ ret = damaged_table[damaged_table[group_name] == agent_group_tag]
+ if len(ret) == 0:
+ logger.warning(
+ 'Empty damage table in element type=' # noqa: G003
+ + repr(element_type)
+ + 'group name='
+ + repr(group_name)
+ + ', group_tag='
+ + repr(agent_group_tag)
+ )
else:
ret = pd.DataFrame(columns=damaged_table.columns)
-
+
return ret
-
- def _refine_table(self, table, attribute, condition, condition_value):
-
+
+ def _refine_table(self, table, attribute, condition, condition_value): # noqa: C901, PLR6301
refined_table = None
-
- if type(condition_value)==str:
- if condition=='EQ':
- refined_table = table[table[attribute]==condition_value]
- elif condition=='NOTEQ':
- refined_table = table[table[attribute]!=condition_value]
+
+ if type(condition_value) == str: # noqa: E721
+ if condition == 'EQ':
+ refined_table = table[table[attribute] == condition_value]
+ elif condition == 'NOTEQ':
+ refined_table = table[table[attribute] != condition_value]
else:
- raise ValueError('Undefined condition: '+repr(condition))
- elif type(condition_value)==int or type(condition_value)==float:
- if condition=='EQ':
- refined_table = table[table[attribute]==condition_value]
- elif condition=='BG-EQ':
- refined_table = table[table[attribute]>=condition_value]
- elif condition=='LT-EQ':
- refined_table = table[table[attribute]<=condition_value]
- elif condition=='BG':
- refined_table = table[table[attribute]>condition_value]
- elif condition=='LT':
-                refined_table = table[table[attribute]<condition_value]
-            elif condition=='NOTEQ':
-                refined_table = table[table[attribute]!=condition_value]
+        elif type(condition_value) == int or type(condition_value) == float:  # noqa: E721
+            if condition == 'EQ':
+                refined_table = table[table[attribute] == condition_value]
+            elif condition == 'BG-EQ':
+                refined_table = table[table[attribute] >= condition_value]
+            elif condition == 'LT-EQ':
+                refined_table = table[table[attribute] <= condition_value]
+            elif condition == 'BG':
+                refined_table = table[table[attribute] > condition_value]
+            elif condition == 'LT':
+                refined_table = table[table[attribute] < condition_value]
+            elif condition == 'NOTEQ':
+                refined_table = table[table[attribute] != condition_value]
else:
- raise ValueError('Undefined condition: '+repr(condition))
+ raise ValueError('Undefined condition: ' + repr(condition))
else:
- raise ValueError('Undefined data type: '+repr(type(condition_value)))
-
+ raise ValueError('Undefined data type: ' + repr(type(condition_value)))
+
return refined_table
-
- def _getRefinedNodeElementList(self, WaterNetwork, attribute, condition, condition_value, element_type, wn):
- res = []
+
+ def _getRefinedNodeElementList( # noqa: C901, N802
+ self,
+ WaterNetwork, # noqa: ARG002, N803
+ attribute,
+ condition,
+ condition_value,
+ element_type,
+ wn,
+ ):
+ res = []
node_res = []
-
- if attribute =='FILE' or attribute =='NOT_IN_FILE':
- node_damage_list=self._registry.getDamageData(element_type)
-
+ if attribute == 'FILE' or attribute == 'NOT_IN_FILE': # noqa: PLR1714
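+            # The entity is defined by an explicit list of element names: map each
+            # listed name to its damage-table rows (NOT_IN_FILE keeps the complement).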
+ node_damage_list = self._registry.getDamageData(element_type)
+
for org_file_name in condition_value:
- #not_included = set(org_file_name) - set(wn.node_name_list)
+ # not_included = set(org_file_name) - set(wn.node_name_list)
if org_file_name not in node_damage_list.index:
- #Sina out it back. Suppressed for runing in cluster
+                    # Sina: put it back. Suppressed for running in cluster
continue
-
-
-
+
if 'virtual_of' in node_damage_list.columns:
- node_name_list = node_damage_list.index
- temp_damage_table = node_damage_list.set_index('Virtual_node', drop=False)
+ node_name_list = node_damage_list.index
+ temp_damage_table = node_damage_list.set_index(
+ 'Virtual_node', drop=False
+ )
temp_damage_table['random_sina_index'] = node_name_list.tolist()
temp = temp_damage_table.loc[org_file_name]
temp.index = temp['random_sina_index']
temp = temp.drop('random_sina_index', axis=1)
else:
- if type(org_file_name) == str:
- org_file_name = [org_file_name]
+ if type(org_file_name) == str: # noqa: E721
+ org_file_name = [org_file_name] # noqa: PLW2901
temp = node_damage_list.loc[org_file_name]
-
+
ichosen = False
-
- if len(temp)>=1:
+
+ if len(temp) >= 1:
res.extend(temp.index.tolist())
ichosen = True
-
- if ichosen==False:
+
+ if ichosen == False: # noqa: E712
if org_file_name in wn.node_name_list:
- ichosen =True
+ ichosen = True
node_res.append(org_file_name)
- if ichosen==False:
- raise ValueError('Element with ID: '+repr(org_file_name) +' is not either a element: '+repr(element_type)+' or a node.')
-
- if attribute=='NOT_IN_FILE':
+ if ichosen == False: # noqa: E712
+ raise ValueError(
+ 'Element with ID: '
+ + repr(org_file_name)
+                            + ' is neither an element of type '
+                            + repr(element_type)
+                            + ' nor a node.'
+ )
+
+ if attribute == 'NOT_IN_FILE':
index_list = node_damage_list.index.tolist()
for in_file in res:
index_list.remove(in_file)
-
+
res = index_list
-
- #res.extend(node_res)
-
+
+ # res.extend(node_res)
+
elif attribute in self._registry.getDamageData(element_type).columns:
- temp = self._registry.getDamageData(element_type)
-
- refined_table = self._refine_table(temp, attribute, condition, condition_value)
+ temp = self._registry.getDamageData(element_type)
+
+ refined_table = self._refine_table(
+ temp, attribute, condition, condition_value
+ )
refined_table = refined_table.index
- res = refined_table.to_list()
+ res = refined_table.to_list()
else:
raise ValueError('Unknown Entity Condition: ' + condition)
-
+
return res, node_res
-
- def _getRefinedPumpList(self, WaterNetwork, attribute, condition, condition_value):
+ def _getRefinedPumpList( # noqa: N802
+ self,
+ WaterNetwork, # noqa: ARG002, N803
+ attribute,
+ condition,
+ condition_value,
+ ):
element_res = []
-
- if attribute =='FILE' or condition =='NOT_IN_FILE':
- pump_damage_list=self._registry.getDamageData('PUMP')
-
+
+ if attribute == 'FILE' or condition == 'NOT_IN_FILE':
+ pump_damage_list = self._registry.getDamageData('PUMP')
+
for org_file_name in condition_value:
- temp = pump_damage_list[pump_damage_list['element_name']==org_file_name]
+ temp = pump_damage_list[
+ pump_damage_list['element_name'] == org_file_name
+ ]
- if len(temp)==1:
+ if len(temp) == 1:
element_res.append(temp.element_name[0])
- elif len(temp)>1:
- raise ValueError('Something wrong here')
-
- if attribute=='NOT_IN_FILE':
+ elif len(temp) > 1:
+                    raise ValueError('Multiple pump damage rows for one element name')  # noqa: EM101, TRY003
+
+ if attribute == 'NOT_IN_FILE':
index_list = pump_damage_list.element_name.tolist()
for in_file in element_res:
index_list.remove(in_file)
-
+
element_res = index_list
-
+
elif attribute in self._registry.getDamageData('PUMP').columns:
- temp = self._registry._pump_damage_table
-
- refined_table = self._refine_table(temp, attribute, condition, condition_value)
-
+ temp = self._registry._pump_damage_table # noqa: SLF001
+
+ refined_table = self._refine_table(
+ temp, attribute, condition, condition_value
+ )
+
refined_table = refined_table.index
- element_res = refined_table.to_list()
+ element_res = refined_table.to_list()
else:
raise ValueError('Unknown Entity Condition: ' + attribute)
-
- res=[]
+
+ res = []
pump_name_list = pump_damage_list['element_name'].tolist()
-
+
for element_name in element_res:
if element_name in pump_name_list:
- temp = pump_damage_list[pump_damage_list['element_name']==element_name].index[0]
+ temp = pump_damage_list[
+ pump_damage_list['element_name'] == element_name
+ ].index[0]
res.append(temp)
return res
-
- def _getRefinedPipeList(self, WaterNetwork, attribute, condition, condition_value):
-
+ def _getRefinedPipeList( # noqa: C901, N802
+ self,
+ WaterNetwork, # noqa: N803
+ attribute,
+ condition,
+ condition_value,
+ ):
res = []
- #if condition in self._CONDITIONS:
- if attribute.upper() in ['DIAMETER']:
- #for pipe_name in WaterNetwork.pipe_name_list:
+ # if condition in self._CONDITIONS:
+ if attribute.upper() == 'DIAMETER':
+ # for pipe_name in WaterNetwork.pipe_name_list:
for damage_name, line in self._registry.getDamageData('PIPE').iterrows():
if attribute.upper() == 'DIAMETER':
- #orginal_element = line['Orginal_element']
+ # orginal_element = line['Orginal_element']
attached_elements = line['attached_element']
-
+
pipe = WaterNetwork.get_link(attached_elements)
pipe_value = pipe.diameter
else:
raise ValueError('Undefined attribute ' + attribute)
-
+
if condition == 'EQ':
if pipe_value == condition_value:
res.append(damage_name)
@@ -1011,56 +1487,56 @@ def _getRefinedPipeList(self, WaterNetwork, attribute, condition, condition_val
elif condition == 'LT-EQ':
if pipe_value <= condition_value:
res.append(damage_name)
-
- elif attribute =='FILE' or attribute =='NOT_IN_FILE':
- pipe_damage_list=self._registry.getDamageData('PIPE')
+
+ elif attribute == 'FILE' or attribute == 'NOT_IN_FILE': # noqa: PLR1714
+ pipe_damage_list = self._registry.getDamageData('PIPE')
for org_file_name in condition_value:
- temp = pipe_damage_list[pipe_damage_list['Orginal_element']==org_file_name]
+ temp = pipe_damage_list[
+ pipe_damage_list['Orginal_element'] == org_file_name
+ ]
- if len(temp)==1:
+ if len(temp) == 1:
res.append(temp.index[0])
- elif len(temp)>1:
+ elif len(temp) > 1:
res.extend(temp.index.to_list())
-
- if attribute =='NOT_IN_FILE':
+
+ if attribute == 'NOT_IN_FILE':
index_list = pipe_damage_list.index.tolist()
for in_file in res:
- i=0
-
- while i in range(0, len(index_list)):
- if index_list[i]==in_file:
+ i = 0
+
+ while i in range(len(index_list)):
+ if index_list[i] == in_file:
index_list.pop(i)
- i+=1
-
+ i += 1
+
res = index_list
-
-
-
+
elif attribute in self._registry.getDamageData('PIPE').columns:
- temp = self._registry.getDamageData('PIPE')
-
- refined_table = self._refine_table(temp, attribute, condition, condition_value)
-
+ temp = self._registry.getDamageData('PIPE')
+
+ refined_table = self._refine_table(
+ temp, attribute, condition, condition_value
+ )
+
refined_table = refined_table.index
- res = refined_table.to_list()
+ res = refined_table.to_list()
else:
raise ValueError('Unknown Entity Condition: ' + condition)
-
-
+
return res
-
- def _getReminderTime(self, name):
+
+ def _getReminderTime(self, name): # noqa: N802
return self._reminder_time_hard_event[name]
-
- def _saveReminderTime(self, time, name):
+
+ def _saveReminderTime(self, time, name): # noqa: N802
if name not in self._reminder_time_hard_event:
self._reminder_time_hard_event[name] = int(time)
else:
self._reminder_time_hard_event[name] += int(time)
- def _addHardEvent(self, next_time, requester, detail = None, current_time=None):
- """
- Adds a hard event
+ def _addHardEvent(self, next_time, requester, detail=None, current_time=None): # noqa: N802
+ """Adds a hard event
Parameters
----------
@@ -1073,305 +1549,344 @@ def _addHardEvent(self, next_time, requester, detail = None, current_time=None):
-------
None.
- """
+ """ # noqa: D400, D401
time = int(next_time)
- next_time=int(next_time)
- if type(next_time) != int and type(next_time) != float:
- raise ValueError("time must be int, not " +str(type(next_time)))
- if detail != None and current_time == None:
- raise ValueError('When detail is provided, current time cannot be None')
-
- minimum_time_devision = int(self._registry.settings["simulation_time_step"])
- if current_time != None:
+ next_time = int(next_time)
+ if type(next_time) != int and type(next_time) != float: # noqa: E721
+            raise ValueError('time must be int or float, not ' + str(type(next_time)))  # noqa: DOC501
+ if detail != None and current_time == None: # noqa: E711
+ raise ValueError('When detail is provided, current time cannot be None') # noqa: DOC501, EM101, TRY003
+
+ minimum_time_devision = int(self._registry.settings['simulation_time_step'])
+ if current_time != None: # noqa: E711
if next_time < current_time:
- raise ValueError('Time is smaller than current time')
- if detail == None:
- raise ValueError('When current time is provided, detail cannot be None')
+ raise ValueError('Time is smaller than current time') # noqa: DOC501, EM101, TRY003
+ if detail == None: # noqa: E711
+ raise ValueError( # noqa: DOC501, TRY003
+ 'When current time is provided, detail cannot be None' # noqa: EM101
+ )
if minimum_time_devision < 0:
- raise ValueError('Minimum time devision cannot be negative')
-
- name = requester + '-' + detail
-
- time = next_time-current_time
-
-
- _b = np.round(time/minimum_time_devision)
-
- if abs(_b)<0.01:
- _b=1
-
+ raise ValueError('Minimum time division cannot be negative') # noqa: DOC501, EM101, TRY003
+
+ name = requester + '-' + detail
+
+ time = next_time - current_time
+
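+ # Snap the interval to a whole number of simulation time steps; the rounding remainder is banked per requester via _saveReminderTime below.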
+ _b = np.round(time / minimum_time_devision)
+
+ if abs(_b) < 0.01: # noqa: PLR2004
+ _b = 1
+
new_time = _b * minimum_time_devision
reminder = time - new_time
self._saveReminderTime(reminder, name)
- next_time = current_time+new_time
-
-
+ next_time = current_time + new_time
+
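+ # _hard_event_table is indexed by event time; each row stores a list of requesters, a 'New' flag, and a parallel list of details.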
if next_time not in self._hard_event_table.index:
- self._hard_event_table.loc[next_time] = [[requester,], True, [detail,]]
- elif requester in self._hard_event_table.loc[next_time, 'Requester'] and detail == None:
+ self._hard_event_table.loc[next_time] = [
+ [
+ requester,
+ ],
+ True,
+ [
+ detail,
+ ],
+ ]
+ elif (
+ requester in self._hard_event_table.loc[next_time, 'Requester']
+ and detail == None # noqa: E711
+ ):
pass
else:
self._hard_event_table.loc[next_time, 'Requester'].append(requester)
self._hard_event_table.loc[next_time, 'New'] = True
- self._hard_event_table.loc[next_time,'Detail'].append(detail)
-
+ self._hard_event_table.loc[next_time, 'Detail'].append(detail)
+
return next_time
-
- def _isHardEvent(self, time, requester = None):
- if requester == None:
+
+ def _isHardEvent(self, time, requester=None): # noqa: N802
+ if requester == None: # noqa: E711
return time in self._hard_event_table.index
- else:
+ else: # noqa: RET505
if time in self._hard_event_table.index:
req = self._hard_event_table.loc[time, 'Requester']
if requester in req:
return True
return False
-
- def getHardEventDetails(self, time, by=None):
-
- if by == None:
+
+ def getHardEventDetails(self, time, by=None): # noqa: N802, D102
+ if by == None: # noqa: E711
return self._hard_event_table.loc[time, 'Detail']
- elif by not in self._hard_event_table.loc[time, 'Requester']:
+ elif by not in self._hard_event_table.loc[time, 'Requester']: # noqa: RET505
return []
else:
res = []
requester_list = self._hard_event_table.loc[time, 'Requester']
- detail_list = self._hard_event_table.loc[time, 'Detail']
- i=0
+ detail_list = self._hard_event_table.loc[time, 'Detail']
+ i = 0
for requester in requester_list:
if requester == by:
res.append(detail_list[i])
- i += 1
+ i += 1 # noqa: SIM113
return res
-
- def getNewEventsTime(self, reset=False):
- new_event_table = self._hard_event_table[self._hard_event_table["New"] == True]
+ def getNewEventsTime(self, reset=False): # noqa: FBT002, N802, D102
+ new_event_table = self._hard_event_table[
+ self._hard_event_table['New'] == True # noqa: E712
+ ]
new_event_table = new_event_table.sort_index()
-
- if reset == True:
- for ind, val in new_event_table.iterrows():
- self._hard_event_table.loc[ind,'New']=False
-
+
+ if reset == True: # noqa: E712
+ for ind, val in new_event_table.iterrows(): # noqa: B007
+ self._hard_event_table.loc[ind, 'New'] = False
+
return list(new_event_table.index)
-
- def unmarkNewEvents(self):
- self._hard_event_table['new'][self._hard_event_table['New']== True] = False
-
- def getAllSequences(self, element_type):
+
+ def unmarkNewEvents(self): # noqa: N802, D102
+ self._hard_event_table['New'][self._hard_event_table['New'] == True] = False # noqa: E712
+
+ def getAllSequences(self, element_type): # noqa: N802, D102
return self.sequence[element_type]
-
-
-
- def getNextSequence(self, element_type, cur_seq):
+
+ def getNextSequence(self, element_type, cur_seq): # noqa: N802, D102
seq_list = self.sequence[element_type]
if cur_seq not in seq_list:
- raise ValueError('Seqence was not in sequnce list: '+str(cur_seq))
- i=0
+ raise ValueError('Sequence was not in sequence list: ' + str(cur_seq))
+ i = 0
for seq in seq_list:
- if cur_seq==seq:
+ if cur_seq == seq:
break
- i+=1
- if not i+1=stop_time]
+ raise ValueError('Stop time is less than 0') # noqa: EM101, TRY003
+
+ # refined_pump = self.pump_restoration[self.pump_restoration['Restore_time']>=stop_time]
if not self.pump_restoration.empty:
- self.pump_restoration['Restore_time'] = self.pump_restoration['Restore_time'] + stop_time
-
+ self.pump_restoration['Restore_time'] = ( # noqa: PLR6104
+ self.pump_restoration['Restore_time'] + stop_time
+ )
+
if not self.tank_restoration.empty:
- self.tank_restoration['Restore_time'] = self.tank_restoration['Restore_time'] + stop_time
-
- for ind, row, in self.pump_restoration.items():
+ self.tank_restoration['Restore_time'] = ( # noqa: PLR6104
+ self.tank_restoration['Restore_time'] + stop_time
+ )
+
+ for (
+ ind, # noqa: B007
+ row,
+ ) in self.pump_restoration.items(): # noqa: PERF102
self._addHardEvent(row['Restore_time'], 'pump')
-
- if type(self.tank_restoration) != pd.core.series.Series:
- raise
- for ind, row, in self.tank_restoration.items():
+
+ if type(self.tank_restoration) != pd.core.series.Series: # noqa: E721
+ raise # noqa: PLE0704
+ for (
+ ind, # noqa: B007
+ row,
+ ) in self.tank_restoration.items(): # noqa: PERF102
self._addHardEvent(row['Restore_time'], 'tank')
-
+
self.restoration_start_time = stop_time + delay
-
- self._addHardEvent(self.restoration_start_time,'start')
+
+ self._addHardEvent(self.restoration_start_time, 'start')
self.initializeActiveAgents(stop_time)
self.initializeReadyAgents()
-
+
for node_name in wn.node_name_list:
self._registry.addGeneralNodeDamageToRegistry(node_name)
-
+
for tank_name in wn.tank_name_list:
self._registry.addTankDamageToRegistry(tank_name)
-
+
for pump_name in wn.pump_name_list:
self._registry.addPumpDamageToRegistry(pump_name, wn.get_link(pump_name))
-
+
for reservoir_name in wn.reservoir_name_list:
self._registry.addReservoirDamageToRegistry(reservoir_name)
-
+
self.initializeEntities(wn)
self.removeRecordsWithoutEntities('TANK')
self.removeRecordsWithoutEntities('RESERVOIR')
self.removeRecordsWithoutEntities('PUMP')
self.removeRecordsWithoutEntities('GNODE')
-
+
for el in self.ELEMENTS:
- self._registry.setDamageData(el,'discovered', False)
+ self._registry.setDamageData(el, 'discovered', False) # noqa: FBT003
self.initializeGroups()
self.initializeNumberOfDamages()
-
-
for seq_key, seq_list in self.sequence.items():
- self._registry.setDamageData(seq_key, seq_list[0], False)
-
+ self._registry.setDamageData(seq_key, seq_list[0], False) # noqa: FBT003
+
if self.delay == 0:
event_time_list = self.perform_action(wn, stop_time)
else:
event_time_list = self.getNewEventsTime(reset=True)
-
- if earthquake != None:
+
+ if earthquake != None: # noqa: E711
self.earthquake = earthquake
-
+
event_time_list = event_time_list[1:]
- return event_time_list
-
- def iRestorationStopTime(self):
- if self.if_initiated == False:
+ return event_time_list # noqa: RET504
+
+ def iRestorationStopTime(self): # noqa: N802, D102
+ if self.if_initiated == False: # noqa: E712
return False
- logger.debug("Func: node functionality")
- pipe_damage_end = self.iAllPipeLastActionDone()
- node_damage_end = self.iAllNodeLastActionDone()
- pump_damage_end = self.iAllPumpLastActionDone()
- GNODE_damage_end = self.iAllGNodeLastActionDone()
- tank_damage_end = self.iAllTankLastActionDone()
+ logger.debug('Func: node functionality')
+ pipe_damage_end = self.iAllPipeLastActionDone()
+ node_damage_end = self.iAllNodeLastActionDone()
+ pump_damage_end = self.iAllPumpLastActionDone()
+ GNODE_damage_end = self.iAllGNodeLastActionDone() # noqa: N806
+ tank_damage_end = self.iAllTankLastActionDone()
reservoir_damage_end = self.iAllReservoirLastActionDone()
-
- logger.debug("pipe: " + repr(pipe_damage_end) )
- logger.debug("node: " + repr(node_damage_end) )
- logger.debug("pump: " + repr(pump_damage_end) )
- logger.debug("GNODE: " + repr(GNODE_damage_end) )
- logger.debug("tank: " + repr(tank_damage_end) )
- logger.debug("reservoir: " + repr(reservoir_damage_end) )
-
- if pipe_damage_end and node_damage_end and pump_damage_end and GNODE_damage_end and tank_damage_end and reservoir_damage_end:
+
+ logger.debug('pipe: ' + repr(pipe_damage_end)) # noqa: G003
+ logger.debug('node: ' + repr(node_damage_end)) # noqa: G003
+ logger.debug('pump: ' + repr(pump_damage_end)) # noqa: G003
+ logger.debug('GNODE: ' + repr(GNODE_damage_end)) # noqa: G003
+ logger.debug('tank: ' + repr(tank_damage_end)) # noqa: G003
+ logger.debug('reservoir: ' + repr(reservoir_damage_end)) # noqa: G003
+
+ if ( # noqa: SIM103
+ pipe_damage_end # noqa: PLR0916
+ and node_damage_end
+ and pump_damage_end
+ and GNODE_damage_end
+ and tank_damage_end
+ and reservoir_damage_end
+ ):
return True
- else:
+ else: # noqa: RET505
return False
-
- def iAllPipeLastActionDone(self):
- print()
- if "PIPE" in self.sequence:
- if len(self._registry._pipe_damage_table) == 0:
+
+ def iAllPipeLastActionDone(self): # noqa: N802, D102
+ print() # noqa: T201
+ if 'PIPE' in self.sequence:
+ if len(self._registry._pipe_damage_table) == 0: # noqa: SLF001
return True
-
- pipe_action = self.sequence["PIPE"][-1]
- pipe_last_action_values = self._registry._pipe_damage_table[pipe_action]
- if_pipe_last_action_true = (pipe_last_action_values==True | (pipe_last_action_values=="Collective")).all()
- if if_pipe_last_action_true:
+
+ pipe_action = self.sequence['PIPE'][-1]
+ pipe_last_action_values = self._registry._pipe_damage_table[pipe_action] # noqa: SLF001
+ if_pipe_last_action_true = (
+ pipe_last_action_values
+ == True | (pipe_last_action_values == 'Collective')
+ ).all()
+ if if_pipe_last_action_true: # noqa: SIM103
return True
- else:
+ else: # noqa: RET505
return False
else:
return True
-
- def iAllNodeLastActionDone(self):
- if "DISTNODE" in self.sequence:
- if len(self._registry._node_damage_table) == 0:
+
+ def iAllNodeLastActionDone(self): # noqa: N802, D102
+ if 'DISTNODE' in self.sequence:
+ if len(self._registry._node_damage_table) == 0: # noqa: SLF001
return True
-
- node_action = self.sequence["DISTNODE"][-1]
- node_last_action_values = self._registry._node_damage_table[node_action]
- if_node_last_action_true = (node_last_action_values==True | (node_last_action_values=="Collective")).all()
-
- if if_node_last_action_true == True:
+
+ node_action = self.sequence['DISTNODE'][-1]
+ node_last_action_values = self._registry._node_damage_table[node_action] # noqa: SLF001
+ if_node_last_action_true = (
+ node_last_action_values
+ == True | (node_last_action_values == 'Collective')
+ ).all()
+
+ if if_node_last_action_true == True: # noqa: SIM103, E712
return True
- else:
+ else: # noqa: RET505
return False
else:
return True
-
- def iAllPumpLastActionDone(self):
- if "PUMP" in self.sequence:
- if len(self._registry._pump_damage_table) == 0:
+
+ def iAllPumpLastActionDone(self): # noqa: N802, D102
+ if 'PUMP' in self.sequence:
+ if len(self._registry._pump_damage_table) == 0: # noqa: SLF001
return True
-
- pump_action = self.sequence["PUMP"][-1]
- pump_last_action_values = self._registry._pump_damage_table[pump_action]
-
- if len(self._registry._pump_damage_table) == 0:
+
+ pump_action = self.sequence['PUMP'][-1]
+ pump_last_action_values = self._registry._pump_damage_table[pump_action] # noqa: SLF001
+
+ if len(self._registry._pump_damage_table) == 0: # noqa: SLF001
return True
-
- if_pump_last_action_true = (pump_last_action_values==True).all()
-
- if if_pump_last_action_true == True:
+
+ if_pump_last_action_true = (pump_last_action_values == True).all() # noqa: E712
+
+ if if_pump_last_action_true == True: # noqa: SIM103, E712
return True
- else:
+ else: # noqa: RET505
return False
else:
return True
-
- def iAllGNodeLastActionDone(self):
- if "GNODE" in self.sequence:
- if len(self._registry._gnode_damage_table) == 0:
+
+ def iAllGNodeLastActionDone(self): # noqa: N802, D102
+ if 'GNODE' in self.sequence:
+ if len(self._registry._gnode_damage_table) == 0: # noqa: SLF001
return True
-
- gnode_action = self.sequence["GNODE"][-1]
- gnode_last_action_values = self._registry._gnode_damage_table[gnode_action]
- if_gnode_last_action_true = (gnode_last_action_values==True).all()
-
- if if_gnode_last_action_true == True:
+
+ gnode_action = self.sequence['GNODE'][-1]
+ gnode_last_action_values = self._registry._gnode_damage_table[ # noqa: SLF001
+ gnode_action
+ ]
+ if_gnode_last_action_true = (gnode_last_action_values == True).all() # noqa: E712
+
+ if if_gnode_last_action_true == True: # noqa: SIM103, E712
return True
- else:
+ else: # noqa: RET505
return False
else:
return True
-
- def iAllTankLastActionDone(self):
- if "TANK" in self.sequence:
- if len(self._registry._tank_damage_table) == 0:
+
+ def iAllTankLastActionDone(self): # noqa: N802, D102
+ if 'TANK' in self.sequence:
+ if len(self._registry._tank_damage_table) == 0: # noqa: SLF001
return True
-
- tank_action = self.sequence["TANK"][-1]
- tank_last_action_values = self._registry._tank_damage_table[tank_action]
- if_tank_last_action_true = (tank_last_action_values==True).all()
-
- if if_tank_last_action_true == True:
+
+ tank_action = self.sequence['TANK'][-1]
+ tank_last_action_values = self._registry._tank_damage_table[tank_action] # noqa: SLF001
+ if_tank_last_action_true = (tank_last_action_values == True).all() # noqa: E712
+
+ if if_tank_last_action_true == True: # noqa: SIM103, E712
return True
- else:
+ else: # noqa: RET505
return False
else:
return True
-
- def iAllReservoirLastActionDone(self):
- if "RESERVOIR" in self.sequence:
- if len(self._registry._reservoir_damage_table) == 0:
+
+ def iAllReservoirLastActionDone(self): # noqa: N802, D102
+ if 'RESERVOIR' in self.sequence:
+ if len(self._registry._reservoir_damage_table) == 0: # noqa: SLF001
return True
-
- reservoir_action = self.sequence["RESERVOIR"][-1]
- reservoir_last_action_values = self._registry._reservoir_damage_table[reservoir_action]
- if_reservoir_last_action_true = (reservoir_last_action_values==True).all()
-
- if if_reservoir_last_action_true == True:
+
+ reservoir_action = self.sequence['RESERVOIR'][-1]
+ reservoir_last_action_values = self._registry._reservoir_damage_table[ # noqa: SLF001
+ reservoir_action
+ ]
+ if_reservoir_last_action_true = (
+ reservoir_last_action_values == True # noqa: E712
+ ).all()
+
+ if if_reservoir_last_action_true == True: # noqa: SIM103, E712
return True
- else:
+ else: # noqa: RET505
return False
else:
return True
-
- def getHydSigPipeList(self):
- damage_group_list = self.priority.getHydSigDamageGroups()
- pipe_damage_group_list = [cur_damage_group for cur_damage_group in damage_group_list if self.entity[cur_damage_group]=="PIPE"]
- return pipe_damage_group_list
\ No newline at end of file
+
+ def getHydSigPipeList(self): # noqa: N802, D102
+ damage_group_list = self.priority.getHydSigDamageGroups()
+ pipe_damage_group_list = [
+ cur_damage_group
+ for cur_damage_group in damage_group_list
+ if self.entity[cur_damage_group] == 'PIPE'
+ ]
+ return pipe_damage_group_list # noqa: RET504
diff --git a/modules/systemPerformance/REWET/REWET/restoration/registry.py b/modules/systemPerformance/REWET/REWET/restoration/registry.py
index 380c2d919..1ce8da5dc 100644
--- a/modules/systemPerformance/REWET/REWET/restoration/registry.py
+++ b/modules/systemPerformance/REWET/REWET/restoration/registry.py
@@ -1,321 +1,508 @@
-# -*- coding: utf-8 -*-
-"""
-Created on Sat Dec 26 03:22:21 2020
+"""Created on Sat Dec 26 03:22:21 2020
@author: snaeimi
-"""
+""" # noqa: CPY001, D400, INP001
-import pandas as pd
-import numpy as np
import logging
from collections import OrderedDict
-from restoration.restorationlog import RestorationLog
-from restoration.base import get_node_name
+
+import numpy as np
+import pandas as pd
+from restoration.restorationlog import RestorationLog
logger = logging.getLogger(__name__)
-class Registry():
- def __init__(self, WaterNetwork, settings, demand_node_name_list, scenario_name):
-
- self._registry_version = 0.15
- self.wn = WaterNetwork
- self.settings = settings
+
+class Registry: # noqa: D101, PLR0904
+ def __init__(self, WaterNetwork, settings, demand_node_name_list, scenario_name): # noqa: N803
+ self._registry_version = 0.15
+ self.wn = WaterNetwork
+ self.settings = settings
self.demand_node_name_list = demand_node_name_list
- self.scenario_name = scenario_name
- #self.EQCoordinates = (6398403.298, 1899243.660)
- #self.proximity_points = {'WaterSource':[(6435903.606431,1893248.592426),(6441950.711447,1897369.022871),
- #(6424377.955317,1929513.408731),(6467146.075381,1816296.452238),
- #(6483259.266246,1803209.907606),(6436359.6420960,1905761.7390040),
- #(6492204.110122,1758379.158018),(6464169.549436,1738989.098520),
- #(6504097.778564,1875687.031985),(6414434.124,1929805.346),
- #(6412947.370,1936851.950)]}
- self._pipe_break_node_coupling = {} # for broken points that each has two nodes
- self._break_point_attached_to_mainPipe = [] # for broken points to show which node is attached to the main point. For easier and faster coding in removals of damage
- #self._occupancy = pd.Series() # for agent occupency
- #self._pipe_RepairAgentNameRegistry=[] # MAYBE NOT NEEDED for agent occupency
- self._tank_damage_table = pd.DataFrame(columns=['damage_type'])
- self._reservoir_damage_table = pd.DataFrame(columns=['damage_type'])
- self._pump_damage_table = pd.DataFrame(columns=['damage_type', 'element_name', 'start_node', 'end_node'])
- self._gnode_damage_table = pd.DataFrame(columns=['damage_type'])
- self._pipe_damage_table = pd.DataFrame(columns=['damage_type', 'damage_sub_type', 'Orginal_element', 'attached_element','number', 'LeakAtCheck'])
- self._pipe_data = pd.DataFrame(columns=['diameter'])
- self._node_damage_table = pd.DataFrame(columns=['Demand1','Demand2','Number_of_damages'])
- self._pipe_break_history = pd.DataFrame(columns=['Pipe_A','Pipe_B','Orginal_pipe', 'Node_A','Node_B'])
- self._pipe_leak_history = pd.DataFrame(columns=['Pipe_A','Pipe_B','Orginal_pipe','Node_name'])
- self._long_task_data = pd.DataFrame(columns=['Node_name', 'Action', 'Entity', 'Time', 'cur_agent_name'])
- self.all_node_table = pd.DataFrame(columns=["X_COORD", "Y_COORD"], dtype=float)
- self.pre_event_demand_met = pd.DataFrame(dtype=float)
- self.hydraulic_significance = pd.Series(dtype=float)
- self.if_first_event_occured = 0
- self.restoration_log_book = RestorationLog(settings)
- self.explicit_leak_node = {}
- self.demand_node_name_list = []
- self.all_node_name_list = WaterNetwork.node_name_list.copy()
- #self.demand_node_users = pd.Series()
- #self.minimum_time_devision = 60*60
+ self.scenario_name = scenario_name
+ # self.EQCoordinates = (6398403.298, 1899243.660)
+ # self.proximity_points = {'WaterSource':[(6435903.606431,1893248.592426),(6441950.711447,1897369.022871),
+ # (6424377.955317,1929513.408731),(6467146.075381,1816296.452238),
+ # (6483259.266246,1803209.907606),(6436359.6420960,1905761.7390040),
+ # (6492204.110122,1758379.158018),(6464169.549436,1738989.098520),
+ # (6504097.778564,1875687.031985),(6414434.124,1929805.346),
+ # (6412947.370,1936851.950)]}
+ self._pipe_break_node_coupling = {} # for break points, each of which has two nodes
+ self._break_point_attached_to_mainPipe = [] # records which node of a break point is attached to the main pipe, to simplify and speed up damage removal
+ # self._occupancy = pd.Series() # for agent occupency
+ # self._pipe_RepairAgentNameRegistry=[] # MAYBE NOT NEEDED for agent occupency
+ self._tank_damage_table = pd.DataFrame(columns=['damage_type'])
+ self._reservoir_damage_table = pd.DataFrame(columns=['damage_type'])
+ self._pump_damage_table = pd.DataFrame(
+ columns=['damage_type', 'element_name', 'start_node', 'end_node']
+ )
+ self._gnode_damage_table = pd.DataFrame(columns=['damage_type'])
+ self._pipe_damage_table = pd.DataFrame(
+ columns=[
+ 'damage_type',
+ 'damage_sub_type',
+ 'Orginal_element',
+ 'attached_element',
+ 'number',
+ 'LeakAtCheck',
+ ]
+ )
+ self._pipe_data = pd.DataFrame(columns=['diameter'])
+ self._node_damage_table = pd.DataFrame(
+ columns=['Demand1', 'Demand2', 'Number_of_damages']
+ )
+ self._pipe_break_history = pd.DataFrame(
+ columns=['Pipe_A', 'Pipe_B', 'Orginal_pipe', 'Node_A', 'Node_B']
+ )
+ self._pipe_leak_history = pd.DataFrame(
+ columns=['Pipe_A', 'Pipe_B', 'Orginal_pipe', 'Node_name']
+ )
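+ # _long_task_data tracks multi-step (long) repair jobs: the damaged node, action, entity, remaining work time, and the agent currently assigned.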
+ self._long_task_data = pd.DataFrame(
+ columns=['Node_name', 'Action', 'Entity', 'Time', 'cur_agent_name']
+ )
+ self.all_node_table = pd.DataFrame(
+ columns=['X_COORD', 'Y_COORD'], dtype=float
+ )
+ self.pre_event_demand_met = pd.DataFrame(dtype=float)
+ self.hydraulic_significance = pd.Series(dtype=float)
+ self.if_first_event_occured = 0
+ self.restoration_log_book = RestorationLog(settings)
+ self.explicit_leak_node = {}
+ self.demand_node_name_list = []
+ self.all_node_name_list = WaterNetwork.node_name_list.copy()
+ # self.demand_node_users = pd.Series()
+ # self.minimum_time_devision = 60*60
self.nodal_equavalant_diameter = None
- self.original_pipe_data = {}
- self.result = None
- self.active_pipe_damages = OrderedDict()
- self.active_nodal_damages = OrderedDict()
- self.active_collectives = pd.DataFrame(columns=['action','Orginal_pipe'])
- self.virtual_node_data = OrderedDict()
- self._nodal_data = OrderedDict()
- self.result = None
- self.result_dump_file_list = []
+ self.original_pipe_data = {}
+ self.result = None
+ self.active_pipe_damages = OrderedDict()
+ self.active_nodal_damages = OrderedDict()
+ self.active_collectives = pd.DataFrame(columns=['action', 'Orginal_pipe'])
+ self.virtual_node_data = OrderedDict()
+ self._nodal_data = OrderedDict()
+ self.result = None
+ self.result_dump_file_list = []
self.Pipe_Damage_restoration_report = []
- self.undamaged_link_node_list = {}
+ self.undamaged_link_node_list = {}
-
for name, pipe in WaterNetwork.pipes():
self._pipe_data.loc[name] = [pipe.diameter]
-
-
+
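+ # Junctions whose base demand exceeds the small 8e-8 threshold are registered as demand nodes.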
for node_name, node in WaterNetwork.junctions():
- if node.demand_timeseries_list[0].base_value>0.00000008:
+ if node.demand_timeseries_list[0].base_value > 0.00000008: # noqa: PLR2004
self.demand_node_name_list.append(node_name)
-
- #for demand_node_name in self.demand_node_name_list:
- #self.demand_node_users.loc[demand_node_name]=1
-
+
+ # for demand_node_name in self.demand_node_name_list:
+ # self.demand_node_users.loc[demand_node_name]=1
+
for node_name, node in WaterNetwork.nodes():
- self.all_node_table.loc[node_name, "X_COORD"] = node.coordinates[0]
- self.all_node_table.loc[node_name, "Y_COORD"] = node.coordinates[1]
-
+ self.all_node_table.loc[node_name, 'X_COORD'] = node.coordinates[0]
+ self.all_node_table.loc[node_name, 'Y_COORD'] = node.coordinates[1]
+
for link_name, link in WaterNetwork.links():
- self.undamaged_link_node_list[link_name] = (link.start_node_name, link.end_node_name)
-
- #self._restoration_table = pd.DataFrame(columns = ['node_name','function', 'element_name', 'element_type', 'in_function_index'])
- self._restoration_table = pd.DataFrame(columns = ['node_name','function', 'record_index'])
- self._record_registry = []
-
- self._pipe_damage_table_time_series = OrderedDict()
- self._node_damage_table_time_series = OrderedDict()
- self._tank_level_time_series = OrderedDict()
- self._restoration_reservoir_name_time_series = OrderedDict()
- self.ED_history = pd.Series(dtype="O") #Equavalant Damage Diameter
-
-
+ self.undamaged_link_node_list[link_name] = (
+ link.start_node_name,
+ link.end_node_name,
+ )
+
+ # self._restoration_table = pd.DataFrame(columns = ['node_name','function', 'element_name', 'element_type', 'in_function_index'])
+ self._restoration_table = pd.DataFrame(
+ columns=['node_name', 'function', 'record_index']
+ )
+ self._record_registry = []
+
+ self._pipe_damage_table_time_series = OrderedDict()
+ self._node_damage_table_time_series = OrderedDict()
+ self._tank_level_time_series = OrderedDict()
+ self._restoration_reservoir_name_time_series = OrderedDict()
+ self.ED_history = pd.Series(dtype='O') # Equivalent Damage Diameter
+
for pipe_name, pipe in WaterNetwork.pipes():
- self.original_pipe_data[pipe_name]={'diameter':pipe.diameter, 'length':pipe.length, 'start_node_name':pipe.start_node_name, 'end_node_name':pipe.end_node_name, 'roughness':pipe.roughness }
-# =============================================================================
-# def addElementToRestorationRegistry(self, damaged_node_name, function_name, element_name, elemenet_type, in_function_index):
-# data = self.__restoration_table
-# selected_data = data[(data[['node_name', 'element_name', 'element_type']]==[damaged_node_name,element_name,elemenet_type]).all(1))]
-#
-# if len(selected_data)>1:
-# raise ValueError('There are data in restroation regustry. Damaged node name: '+damaged_node_name)
-#
-#
-# temp = pd.Series(data=[damaged_node_name, function_name, element_name, elemenet_type, in_function_index], index=['node_name','function', 'element_name', 'element_type', 'in_function_index'])
-# self._restoration_table = self._restoration_table.append(temp, ignore_index=True)
-# =============================================================================
-
- def addRestorationDataOnPipe(self, damage_node_name, time, state):
- if self.settings['dmg_rst_data_save'] == True:
- orginal_pipe_name = self._pipe_damage_table.loc[damage_node_name, 'Orginal_element']
- time = time /3600
- temp_row = {'time':time, 'pipe_name': orginal_pipe_name, 'last_state': state}
+ self.original_pipe_data[pipe_name] = {
+ 'diameter': pipe.diameter,
+ 'length': pipe.length,
+ 'start_node_name': pipe.start_node_name,
+ 'end_node_name': pipe.end_node_name,
+ 'roughness': pipe.roughness,
+ }
+
+ # =============================================================================
+ # def addElementToRestorationRegistry(self, damaged_node_name, function_name, element_name, elemenet_type, in_function_index):
+ # data = self.__restoration_table
+ # selected_data = data[(data[['node_name', 'element_name', 'element_type']]==[damaged_node_name,element_name,elemenet_type]).all(1))]
+ #
+ # if len(selected_data)>1:
+ # raise ValueError('There are data in restroation regustry. Damaged node name: '+damaged_node_name)
+ #
+ #
+ # temp = pd.Series(data=[damaged_node_name, function_name, element_name, elemenet_type, in_function_index], index=['node_name','function', 'element_name', 'element_type', 'in_function_index'])
+ # self._restoration_table = self._restoration_table.append(temp, ignore_index=True)
+ # =============================================================================
+
+ def addRestorationDataOnPipe(self, damage_node_name, time, state): # noqa: N802, D102
+ if self.settings['dmg_rst_data_save'] == True: # noqa: E712
+ orginal_pipe_name = self._pipe_damage_table.loc[
+ damage_node_name, 'Orginal_element'
+ ]
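+ # Convert the time from seconds to hours for the restoration report.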
+ time = time / 3600 # noqa: PLR6104
+ temp_row = {
+ 'time': time,
+ 'pipe_name': orginal_pipe_name,
+ 'last_state': state,
+ }
self.Pipe_Damage_restoration_report.append(temp_row)
-
- def addEquavalantDamageHistory(self, node_name, new_node_name, new_pipe_name, equavalant_pipe_diameter, number_of_damages):
+
+ def addEquavalantDamageHistory( # noqa: N802, D102
+ self,
+ node_name,
+ new_node_name,
+ new_pipe_name,
+ equavalant_pipe_diameter,
+ number_of_damages,
+ ):
if node_name in self.ED_history:
- raise ValueError('Node_damage already in history')
-
- self.ED_history.loc[node_name] = {'new_node_name':new_node_name, 'new_pipe_name':new_pipe_name, 'equavalant_pipe_diameter':equavalant_pipe_diameter, 'initial_number_of_damage':number_of_damages, 'current_number_of_damage':number_of_damages}
-
- def getEquavalantDamageHistory(self, node_name):
+ raise ValueError('Node_damage already in history') # noqa: EM101, TRY003
+
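+ # ED_history keeps, per damaged node, the new node/pipe names, the equivalent pipe diameter, and the initial and current damage counts.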
+ self.ED_history.loc[node_name] = {
+ 'new_node_name': new_node_name,
+ 'new_pipe_name': new_pipe_name,
+ 'equavalant_pipe_diameter': equavalant_pipe_diameter,
+ 'initial_number_of_damage': number_of_damages,
+ 'current_number_of_damage': number_of_damages,
+ }
+
+ def getEquavalantDamageHistory(self, node_name): # noqa: N802, D102
temp = self.ED_history[node_name]
-
- if type(temp)!=dict:
- raise ValueError('probably two damages with the same name: '+node_name)
-
+
+ if type(temp) != dict: # noqa: E721
+ raise ValueError('probably two damages with the same name: ' + node_name)
+
return temp
-
- def removeEquavalantDamageHistory(self, node_name):
- self.ED_history.drop(node_name, inplace=True)
-
- def isThereSuchOngoingLongJob(self, damaged_node_name, action, entity):
+
+ def removeEquavalantDamageHistory(self, node_name): # noqa: N802, D102
+ self.ED_history.drop(node_name, inplace=True) # noqa: PD002
+
+ def isThereSuchOngoingLongJob(self, damaged_node_name, action, entity): # noqa: N802, D102
data = self._long_task_data
- temp = data[['Node_name','Action','Entity']]==[damaged_node_name, action, entity]
+ temp = data[['Node_name', 'Action', 'Entity']] == [
+ damaged_node_name,
+ action,
+ entity,
+ ]
temp = data[temp.all(1)]
-
- if len(temp)>1:
- raise ValueError('More job than 1 in long jobs')
- elif len(temp)==1:
- if abs(temp['Time'].iloc[0])<0.01:
- raise ValueError('Something Wrong')
- else:
+
+ if len(temp) > 1:
+ raise ValueError('More than one job in long jobs') # noqa: EM101, TRY003
+ elif len(temp) == 1: # noqa: RET506
+ if abs(temp['Time'].iloc[0]) < 0.01: # noqa: PLR2004
+ raise ValueError('Something Wrong') # noqa: EM101, TRY003
+ else: # noqa: RET506
return True
else:
return False
-
- def addLongJob(self, damaged_node_name, action, entity, job_gross_time, agent_name):
-
+
+ def addLongJob( # noqa: N802, D102
+ self,
+ damaged_node_name,
+ action,
+ entity,
+ job_gross_time,
+ agent_name,
+ ):
data = self._long_task_data
- temp = data[['Node_name','Action','Entity','Time','cur_agent_name']]==[damaged_node_name, action, entity, job_gross_time, agent_name]
-
+ temp = data[['Node_name', 'Action', 'Entity', 'Time', 'cur_agent_name']] == [
+ damaged_node_name,
+ action,
+ entity,
+ job_gross_time,
+ agent_name,
+ ]
+
if temp.all(1).any():
- raise ValueError('There are currently data on: '+damaged_node_name+','+action+','+entity)
- #elif temp['cur_agent_name'].iloc[0]!=None:
- #raise ValueError('There is one agent: '+temp['cur_agent_name'].iloc[0]+' assigned to long job: '+damaged_node_name+','+action+','+entity)
-
- temp=pd.Series(index=['Node_name','Action','Entity','Time', 'cur_agent_name'], data=[damaged_node_name,action, entity, job_gross_time, agent_name])
- self._long_task_data=data.append(temp, ignore_index=True)
-
-
-
- def assignAgenttoLongJob(self, damaged_node_name, action, entity, choosed_agent_name):
+ raise ValueError(
+ 'There are currently data on: '
+ + damaged_node_name
+ + ','
+ + action
+ + ','
+ + entity
+ )
+ # elif temp['cur_agent_name'].iloc[0]!=None:
+ # raise ValueError('There is one agent: '+temp['cur_agent_name'].iloc[0]+' assigned to long job: '+damaged_node_name+','+action+','+entity)
+
+ temp = pd.Series(
+ index=['Node_name', 'Action', 'Entity', 'Time', 'cur_agent_name'],
+ data=[damaged_node_name, action, entity, job_gross_time, agent_name],
+ )
+ self._long_task_data = data.append(temp, ignore_index=True)
+
+ def assignAgenttoLongJob( # noqa: N802, D102
+ self,
+ damaged_node_name,
+ action,
+ entity,
+ choosed_agent_name,
+ ):
data = self._long_task_data
- temp = data[['Node_name','Action','Entity']]==[damaged_node_name, action, entity]
+ temp = data[['Node_name', 'Action', 'Entity']] == [
+ damaged_node_name,
+ action,
+ entity,
+ ]
temp = data[temp.all(1)]
-
- if len(temp)!=1:
- raise ValueError('There must be one record: '+damaged_node_name+','+action+','+entity)
-
- ind=temp.index[0]
- if self._long_task_data.loc[ind, 'cur_agent_name']!=None and choosed_agent_name!=None:
- raise ValueError('Already someone is here '+repr(self._long_task_data.loc[ind, 'cur_agent_name']))
-
- self._long_task_data.loc[ind, 'cur_agent_name']=choosed_agent_name
-
-
- def deductLongJobTime(self, damaged_node_name, action, entity, deduced_time):
-
+
+ if len(temp) != 1:
+ raise ValueError(
+ 'There must be one record: '
+ + damaged_node_name
+ + ','
+ + action
+ + ','
+ + entity
+ )
+
+ ind = temp.index[0]
+ if (
+ self._long_task_data.loc[ind, 'cur_agent_name'] != None # noqa: E711
+ and choosed_agent_name != None # noqa: E711
+ ):
+ raise ValueError(
+ 'Someone is already here: '
+ + repr(self._long_task_data.loc[ind, 'cur_agent_name'])
+ )
+
+ self._long_task_data.loc[ind, 'cur_agent_name'] = choosed_agent_name
+
+ def deductLongJobTime(self, damaged_node_name, action, entity, deduced_time): # noqa: N802, D102
deduced_time = int(deduced_time)
-
- if deduced_time<0:
- raise ValueError('deductig time must not be less than zero: '+repr(deduced_time))
-
+
+ if deduced_time < 0:
+ raise ValueError(
+ 'deducted time must not be less than zero: ' + repr(deduced_time)
+ )
+
data = self._long_task_data
- temp = data[['Node_name','Action','Entity']]==[damaged_node_name, action, entity]
-
+ temp = data[['Node_name', 'Action', 'Entity']] == [
+ damaged_node_name,
+ action,
+ entity,
+ ]
+
temp = data[temp.all(1)]
-
- if len(temp)==0:
- raise ValueError('There is no long task defined for: '+damaged_node_name+', '+action+', '+entity)
- elif len(temp)>1:
- raise ValueError('There are MORE THAN ONE long task defined for: '+damaged_node_name+', '+action+', '+entity)
-
+
+ if len(temp) == 0:
+ raise ValueError(
+ 'There is no long task defined for: '
+ + damaged_node_name
+ + ', '
+ + action
+ + ', '
+ + entity
+ )
+ elif len(temp) > 1: # noqa: RET506
+ raise ValueError(
+ 'There are MORE THAN ONE long task defined for: '
+ + damaged_node_name
+ + ', '
+ + action
+ + ', '
+ + entity
+ )
+
ind = temp.index[0]
-
- if (self._long_task_data.loc[ind, 'Time'] - deduced_time)<0:
- logger.warning(damaged_node_name+', '+action+', '+entity+', '+str(self._long_task_data.loc[ind, 'Time'])+', '+str(deduced_time)+', '+str(self._long_task_data.loc[ind, 'Time'] - deduced_time))
- raise ValueError('Zero reminded time for long task')
-
-
+
+ if (self._long_task_data.loc[ind, 'Time'] - deduced_time) < 0:
+ logger.warning(
+ damaged_node_name # noqa: G003
+ + ', '
+ + action
+ + ', '
+ + entity
+ + ', '
+ + str(self._long_task_data.loc[ind, 'Time'])
+ + ', '
+ + str(deduced_time)
+ + ', '
+ + str(self._long_task_data.loc[ind, 'Time'] - deduced_time)
+ )
+ raise ValueError('Zero reminded time for long task') # noqa: EM101, TRY003
+
self._long_task_data.loc[ind, 'Time'] -= deduced_time
-
- def getLongJobRemindedTime(self, damaged_node_name,action, entity):
+
+ def getLongJobRemindedTime(self, damaged_node_name, action, entity): # noqa: N802, D102
data = self._long_task_data
- temp = data[['Node_name','Action','Entity']]==[damaged_node_name, action, entity]
-
+ temp = data[['Node_name', 'Action', 'Entity']] == [
+ damaged_node_name,
+ action,
+ entity,
+ ]
+
temp = data[temp.all(1)]
-
- if len(temp)==0:
- raise ValueError('There is no long task defined for: '+damaged_node_name+','+action+','+entity)
- elif len(temp)>1:
- raise ValueError('There are MORE THAN ONE long task defined for: '+damaged_node_name+','+action+','+entity)
-
+
+ if len(temp) == 0:
+ raise ValueError(
+ 'There is no long task defined for: '
+ + damaged_node_name
+ + ','
+ + action
+ + ','
+ + entity
+ )
+ elif len(temp) > 1: # noqa: RET506
+ raise ValueError(
+ 'There are MORE THAN ONE long task defined for: '
+ + damaged_node_name
+ + ','
+ + action
+ + ','
+ + entity
+ )
+
return temp['Time'].iloc[0]
-
- def getVacantOnGoingJobs(self, action, entity):
- res=[]
+
+ def getVacantOnGoingJobs(self, action, entity): # noqa: N802, D102
+ res = []
data = self._long_task_data
- temp = data[['Action','Entity']]==[action, entity]
-
+ temp = data[['Action', 'Entity']] == [action, entity]
+
temp = data[temp.all(1)]
-
- for ind, data in temp.iterrows():
- if data['cur_agent_name']==None:
+
+ for ind, data in temp.iterrows(): # noqa: B007
+ if data['cur_agent_name'] == None: # noqa: E711
res.append(data['Node_name'])
-
+
return res
-
- def getdamagedNodesOfPipes(self, damage_type):
-
- if damage_type !='break' and damage_type !='leak':
- raise ValueError('The damage for pipe is either break or leak.')
-
- if damage_type == 'break':
- return self._pipe_break_history[['Node_A','Node_B']]
-
- elif damage_type=='leak':
+
+ def getdamagedNodesOfPipes(self, damage_type): # noqa: N802, D102
+ if damage_type != 'break' and damage_type != 'leak': # noqa: PLR1714
+ raise ValueError('Pipe damage type must be either break or leak.') # noqa: EM101, TRY003
+
+ if damage_type == 'break': # noqa: RET503
+ return self._pipe_break_history[['Node_A', 'Node_B']]
+
+ elif damage_type == 'leak': # noqa: RET505
return self._pipe_leak_history['Node_name']
-
-
- def removeLongJob(self, damaged_node_name, action, entity):
+ def removeLongJob(self, damaged_node_name, action, entity): # noqa: N802, D102
data = self._long_task_data
- temp = data[['Node_name','Action','Entity']]==[damaged_node_name, action, entity]
-
+ temp = data[['Node_name', 'Action', 'Entity']] == [
+ damaged_node_name,
+ action,
+ entity,
+ ]
+
temp = data[temp.all(1)]
-
- if len(temp)==0:
- raise ValueError('There is no long task defined for: '+damaged_node_name+','+action+','+entity)
- elif len(temp)>1:
- raise ValueError('There are MORE THAN ONE long task defined for: '+damaged_node_name+','+action+','+entity)
-
+
+ if len(temp) == 0:
+ raise ValueError(
+ 'There is no long task defined for: '
+ + damaged_node_name
+ + ','
+ + action
+ + ','
+ + entity
+ )
+ elif len(temp) > 1: # noqa: RET506
+ raise ValueError(
+ 'There are MORE THAN ONE long task defined for: '
+ + damaged_node_name
+ + ','
+ + action
+ + ','
+ + entity
+ )
+
ind = temp.index[0]
-
- self._long_task_data.drop(ind, inplace=True)
+ self._long_task_data.drop(ind, inplace=True) # noqa: PD002
- def addFunctionDataToRestorationRegistry(self, damaged_node_name, history, function_name):
+ def addFunctionDataToRestorationRegistry( # noqa: N802, D102
+ self,
+ damaged_node_name,
+ history,
+ function_name,
+ ):
data = self._restoration_table
- selected_data = data[(data[['node_name', 'function']]==[damaged_node_name, function_name]).all(1)]
- if len(selected_data)>0:
- raise ValueError('There are data in restroation registry. Damaged node name: '+damaged_node_name+' '+' '+function_name)
-
+ selected_data = data[
+ (
+ data[['node_name', 'function']] == [damaged_node_name, function_name]
+ ).all(1)
+ ]
+ if len(selected_data) > 0:
+ raise ValueError(
+ 'There are data in restoration registry. Damaged node name: '
+ + damaged_node_name
+ + ' '
+ + ' '
+ + function_name
+ )
+
self._record_registry.append(history)
- latest_index = len(self._record_registry)-1
-
- temp = pd.Series(data=[damaged_node_name, function_name, latest_index], index=['node_name', 'function','record_index'])
- self._restoration_table = self._restoration_table.append(temp, ignore_index=True)
-
- def addNodalDamage(self, nodal_damage, new_pipe_name_list):
- if self.settings['Virtual_node'] ==True:
+ latest_index = len(self._record_registry) - 1
+
+ temp = pd.Series(
+ data=[damaged_node_name, function_name, latest_index],
+ index=['node_name', 'function', 'record_index'],
+ )
+ self._restoration_table = self._restoration_table.append(
+ temp, ignore_index=True
+ )
+
+ def addNodalDamage(self, nodal_damage, new_pipe_name_list): # noqa: N802, D102
+ if self.settings['Virtual_node'] == True: # noqa: E712
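+ # With virtual nodes enabled, each damaged node is expanded into one '<node>_vir_<i>' entry per damage so repairs can be tracked individually.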
for ind, val in nodal_damage.items():
- val = int(val)
+ val = int(val) # noqa: PLW2901
virtual_node_name_list = []
- for i in range(val ):
- new_virtual_node_name = ind+'_vir_'+str(i)
- self._node_damage_table.loc[new_virtual_node_name, 'Number_of_damages'] = 1
- self._node_damage_table.loc[new_virtual_node_name, 'virtual_of'] = ind
- self._node_damage_table.loc[new_virtual_node_name, 'Orginal_element'] = ind
- self.virtual_node_data[new_virtual_node_name]={"is_damaged":True}
- self.virtual_node_data[new_virtual_node_name]['new_pipe_name'] = new_pipe_name_list[ind]
+ for i in range(val):
+ new_virtual_node_name = ind + '_vir_' + str(i)
+ self._node_damage_table.loc[
+ new_virtual_node_name, 'Number_of_damages'
+ ] = 1
+ self._node_damage_table.loc[
+ new_virtual_node_name, 'virtual_of'
+ ] = ind
+ self._node_damage_table.loc[
+ new_virtual_node_name, 'Orginal_element'
+ ] = ind
+ self.virtual_node_data[new_virtual_node_name] = {
+ 'is_damaged': True
+ }
+ self.virtual_node_data[new_virtual_node_name][
+ 'new_pipe_name'
+ ] = new_pipe_name_list[ind]
virtual_node_name_list.append(new_virtual_node_name)
self._nodal_data[ind] = new_pipe_name_list[ind]
else:
for ind, val in nodal_damage.items():
self._node_damage_table.loc[ind, 'Number_of_damages'] = val
- self._node_damage_table.loc[new_virtual_node_name, 'Orginal_element'] = ind
- self._nodal_data[ind] = {'real_node_name': ind, "number_of_damages":val}
-
-
-
- def isVirtualNodeDamaged(self, virtual_node_name):
- return self.virtual_node_data[virtual_node_name]["is_damaged"]
-
- def setVirtualNodeRepaired(self, virtual_node_name):
- self.virtual_node_data[virtual_node_name]["is_damaged"]=False
-
- def addNodalDemandChange(self, node_name, demand1, demand2):
- #if self.settings['Virtual_node'] == False:
- if type(node_name) == str:
- if not node_name in self._node_damage_table.index:
- raise ValueError(repr(node_name)+" is not in the node table")
+ self._node_damage_table.loc[ind, 'Orginal_element'] = ind
+ self._nodal_data[ind] = {
+ 'real_node_name': ind,
+ 'number_of_damages': val,
+ }
+
+ def isVirtualNodeDamaged(self, virtual_node_name): # noqa: N802, D102
+ return self.virtual_node_data[virtual_node_name]['is_damaged']
+
+ def setVirtualNodeRepaired(self, virtual_node_name): # noqa: N802, D102
+ self.virtual_node_data[virtual_node_name]['is_damaged'] = False
+
+ def addNodalDemandChange(self, node_name, demand1, demand2): # noqa: N802, D102
+ # if self.settings['Virtual_node'] == False:
+ if type(node_name) == str: # noqa: E721
+ if node_name not in self._node_damage_table.index:
+ raise ValueError(repr(node_name) + ' is not in the node table')
self._node_damage_table.loc[node_name, 'Demand1'] = demand1
self._node_damage_table.loc[node_name, 'Demand2'] = demand2
- #else:
- #node_name_vir = get_node_name(node_name, self._node_damage_table)
- #self._node_damage_table.loc[node_name_vir, 'Demand1'] = demand1
- #self._node_damage_table.loc[node_name_vir, 'Demand2'] = demand2
-
- def addPipeDamageToRegistry(self, node_name, data):
- """
- Adds damage to pipe registry
+ # else:
+ # node_name_vir = get_node_name(node_name, self._node_damage_table)
+ # self._node_damage_table.loc[node_name_vir, 'Demand1'] = demand1
+ # self._node_damage_table.loc[node_name_vir, 'Demand2'] = demand2
+
+ def addPipeDamageToRegistry(self, node_name, data): # noqa: N802
+ """Adds damage to pipe registry
Parameters
----------
@@ -323,283 +510,346 @@ def addPipeDamageToRegistry(self, node_name, data):
Damaged node Name.
data : Dict
Data about Damage.
-
+
Returns
-------
None.
- """
- #self._pipe_node_damage_status[name] = data
-
- leaking_pipe_with_pipeA_orginal_pipe = self._pipe_leak_history[self._pipe_leak_history.loc[:,'Pipe_A']==data['orginal_pipe']]
- breaking_pipe_with_pipeA_orginal_pipe = self._pipe_break_history[self._pipe_break_history.loc[:,'Pipe_A']==data['orginal_pipe']]
-
- i_leak_not_zero_length = len(leaking_pipe_with_pipeA_orginal_pipe)>0
- i_break_not_zero_length = len(breaking_pipe_with_pipeA_orginal_pipe)>0
-
+ """ # noqa: D400, D401
+ # self._pipe_node_damage_status[name] = data
+
+ leaking_pipe_with_pipeA_orginal_pipe = self._pipe_leak_history[ # noqa: N806
+ self._pipe_leak_history.loc[:, 'Pipe_A'] == data['orginal_pipe']
+ ]
+ breaking_pipe_with_pipeA_orginal_pipe = self._pipe_break_history[ # noqa: N806
+ self._pipe_break_history.loc[:, 'Pipe_A'] == data['orginal_pipe']
+ ]
+
+ i_leak_not_zero_length = len(leaking_pipe_with_pipeA_orginal_pipe) > 0
+ i_break_not_zero_length = len(breaking_pipe_with_pipeA_orginal_pipe) > 0
+
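+ # If an earlier leak/break record lists this damage's original pipe as its Pipe_A, re-point that record to the new pipe_B segment created here.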
if i_leak_not_zero_length and i_break_not_zero_length:
- raise ValueError('There are more than 1 damage with orginal pipe name in pipe A. it does not make sense')
+ raise ValueError( # noqa: DOC501, TRY003
+ 'There is more than one damage with the original pipe name as Pipe_A; this does not make sense' # noqa: EM101
+ )
if i_leak_not_zero_length:
temp_node_name = leaking_pipe_with_pipeA_orginal_pipe.index[0]
- self._pipe_leak_history.loc[temp_node_name, 'Pipe_A'] = data['pipe_B']
+ self._pipe_leak_history.loc[temp_node_name, 'Pipe_A'] = data['pipe_B']
elif i_break_not_zero_length:
temp_node_name = breaking_pipe_with_pipeA_orginal_pipe.index[0]
self._pipe_break_history.loc[temp_node_name, 'Pipe_A'] = data['pipe_B']
-
+
if data['damage_type'] == 'leak':
- self._pipe_damage_table.loc[node_name, 'damage_type'] = data['damage_type']
- self._pipe_damage_table.loc[node_name, 'damage_sub_type'] = data['damage_subtype']
- self._pipe_damage_table.loc[node_name, 'Orginal_element'] = data['orginal_pipe']
- self._pipe_damage_table.loc[node_name, 'attached_element'] = data['pipe_A']
- self._pipe_damage_table.loc[node_name, 'number'] = data['number']
-
- self._pipe_leak_history.loc[node_name, 'Pipe_A'] = data['pipe_A']
- self._pipe_leak_history.loc[node_name, 'Pipe_B'] = data['pipe_B']
- self._pipe_leak_history.loc[node_name, 'Orginal_pipe'] = data['orginal_pipe']
- self._pipe_leak_history.loc[node_name, 'Node_name'] = node_name
-
+ self._pipe_damage_table.loc[node_name, 'damage_type'] = data[
+ 'damage_type'
+ ]
+ self._pipe_damage_table.loc[node_name, 'damage_sub_type'] = data[
+ 'damage_subtype'
+ ]
+ self._pipe_damage_table.loc[node_name, 'Orginal_element'] = data[
+ 'orginal_pipe'
+ ]
+ self._pipe_damage_table.loc[node_name, 'attached_element'] = data[
+ 'pipe_A'
+ ]
+ self._pipe_damage_table.loc[node_name, 'number'] = data['number']
+
+ self._pipe_leak_history.loc[node_name, 'Pipe_A'] = data['pipe_A']
+ self._pipe_leak_history.loc[node_name, 'Pipe_B'] = data['pipe_B']
+ self._pipe_leak_history.loc[node_name, 'Orginal_pipe'] = data[
+ 'orginal_pipe'
+ ]
+ self._pipe_leak_history.loc[node_name, 'Node_name'] = node_name
+
elif data['damage_type'] == 'break':
- self._pipe_damage_table.loc[node_name, 'damage_type'] = data['damage_type']
- self._pipe_damage_table.loc[node_name, 'Orginal_element'] = data['orginal_pipe']
- self._pipe_damage_table.loc[node_name, 'attached_element'] = data['pipe_A']
- self._pipe_damage_table.loc[node_name, 'number'] = data['number']
-
- self._pipe_break_history.loc[node_name, 'Pipe_A'] = data['pipe_A']
- self._pipe_break_history.loc[node_name, 'Pipe_B'] = data['pipe_B']
- self._pipe_break_history.loc[node_name, 'Orginal_pipe'] = data['orginal_pipe']
- self._pipe_break_history.loc[node_name, 'Node_A'] = data['node_A']
- self._pipe_break_history.loc[node_name, 'Node_B'] = data['node_B']
-
+ self._pipe_damage_table.loc[node_name, 'damage_type'] = data[
+ 'damage_type'
+ ]
+ self._pipe_damage_table.loc[node_name, 'Orginal_element'] = data[
+ 'orginal_pipe'
+ ]
+ self._pipe_damage_table.loc[node_name, 'attached_element'] = data[
+ 'pipe_A'
+ ]
+ self._pipe_damage_table.loc[node_name, 'number'] = data['number']
+
+ self._pipe_break_history.loc[node_name, 'Pipe_A'] = data['pipe_A']
+ self._pipe_break_history.loc[node_name, 'Pipe_B'] = data['pipe_B']
+ self._pipe_break_history.loc[node_name, 'Orginal_pipe'] = data[
+ 'orginal_pipe'
+ ]
+ self._pipe_break_history.loc[node_name, 'Node_A'] = data['node_A']
+ self._pipe_break_history.loc[node_name, 'Node_B'] = data['node_B']
+
else:
- raise ValueError('Undefined damage type')
-
-
- def addGeneralNodeDamageToRegistry(self, node_name, data=None):
- self._gnode_damage_table.loc[node_name, 'damage_type']=None
-
- def addTankDamageToRegistry(self, node_name, data=None):
- self._tank_damage_table.loc[node_name, 'damage_type']=None
-
- def addPumpDamageToRegistry(self, pump_name, data):
+ raise ValueError('Undefined damage type') # noqa: DOC501, EM101, TRY003
+
+ def addGeneralNodeDamageToRegistry(self, node_name, data=None): # noqa: ARG002, N802, D102
+ self._gnode_damage_table.loc[node_name, 'damage_type'] = None
+
+ def addTankDamageToRegistry(self, node_name, data=None): # noqa: ARG002, N802, D102
+ self._tank_damage_table.loc[node_name, 'damage_type'] = None
+
+ def addPumpDamageToRegistry(self, pump_name, data): # noqa: N802, D102
node_name = data.start_node.name
- self._pump_damage_table.loc[node_name, 'damage_type'] = None
+ self._pump_damage_table.loc[node_name, 'damage_type'] = None
self._pump_damage_table.loc[node_name, 'element_name'] = pump_name
- self._pump_damage_table.loc[node_name, 'start_node'] = data.start_node.name
- self._pump_damage_table.loc[node_name, 'end_node'] = data.end_node.name
-
- def addReservoirDamageToRegistry(self, node_name, data=None):
- self._reservoir_damage_table.loc[node_name, 'damage_type']=None
-
- #def assignAgentToDamage(self, element, node_name, choosed_agent_name):
-
- def getListAllElementOrginalName(self, element_type):
- original_element_list=None
- if element_type=='PIPE':
+ self._pump_damage_table.loc[node_name, 'start_node'] = data.start_node.name
+ self._pump_damage_table.loc[node_name, 'end_node'] = data.end_node.name
+
+ def addReservoirDamageToRegistry(self, node_name, data=None): # noqa: ARG002, N802, D102
+ self._reservoir_damage_table.loc[node_name, 'damage_type'] = None
+
+ # def assignAgentToDamage(self, element, node_name, choosed_agent_name):
+
+ def getListAllElementOrginalName(self, element_type): # noqa: N802, D102
+ original_element_list = None
+ if element_type == 'PIPE':
original_element_list = self._pipe_damage_table['Orginal_element']
-
- elif element_type=='PUMP':
+
+ elif element_type == 'PUMP':
original_element_list = self._pump_damage_table['element_name']
- elif element_type=='DISTNODE' or element_type=='GNODE' or element_type=='TANK' or element_type=='RESERVOIR':
+ elif (
+ element_type == 'DISTNODE' # noqa: PLR1714
+ or element_type == 'GNODE'
+ or element_type == 'TANK'
+ or element_type == 'RESERVOIR'
+ ):
temp = self.getDamageData(element_type, iCopy=False)
if 'virtual_of' in temp.columns:
- original_element_list = pd.Series(temp['virtual_of'],index=temp.index)
+ original_element_list = pd.Series(
+ temp['virtual_of'], index=temp.index
+ )
else:
- original_element_list = pd.Series(temp.index,index=temp.index)
-
+ original_element_list = pd.Series(temp.index, index=temp.index)
+
else:
- raise ValueError('Unkown recognized element type: '+repr(element_type))
-
-
+ raise ValueError('Unknown element type: ' + repr(element_type))
+
return original_element_list
-
- def getDamagedLocationListByOriginalElementList(self, element_type, orginal_element_list, iCheck=False):
- res=pd.Series()
-
- if element_type=='PIPE':
+
+ def getDamagedLocationListByOriginalElementList( # noqa: N802, D102
+ self,
+ element_type,
+ orginal_element_list,
+ iCheck=False, # noqa: FBT002, N803
+ ):
+ res = pd.Series()
+
+ if element_type == 'PIPE':
original_element_list = self._pipe_damage_table['Orginal_element']
-
- elif element_type=='PUMP':
+
+ elif element_type == 'PUMP':
original_element_list = self._pump_damage_table['element_name']
- elif element_type=='DISTNODE' or element_type=='GNODE' or element_type=='TANK' or element_type=='RESERVOIR':
+ elif (
+ element_type == 'DISTNODE' # noqa: PLR1714
+ or element_type == 'GNODE'
+ or element_type == 'TANK'
+ or element_type == 'RESERVOIR'
+ ):
temp = self.getDamageData(element_type)
- original_element_list = pd.Series(temp.index,index=temp.index)
-
+ original_element_list = pd.Series(temp.index, index=temp.index)
+
else:
- raise ValueError('Unkown recognized element type: '+repr(element_type))
-
- for element_name, group_tag in orginal_element_list.iteritems():
- temp = original_element_list[original_element_list==element_name]
-
- #if len(temp)!=1:
- if len(temp)!=0:
- res=res.append(temp)
- #elif len(temp)>1:
- #raise ValueError('Something wrong here')
- else:
- if iCheck:
- raise ValueError('The element: '+repr(element_name)+' does not exist in elemet type: '+repr(element_type))
-
+ raise ValueError('Unknown element type: ' + repr(element_type))
+
+ for element_name, group_tag in orginal_element_list.iteritems(): # noqa: B007
+ temp = original_element_list[original_element_list == element_name]
+
+ # if len(temp)!=1:
+ if len(temp) != 0:
+ res = res.append(temp)
+ # elif len(temp)>1:
+ # raise ValueError('Something wrong here')
+ elif iCheck:
+ raise ValueError(
+ 'The element: '
+ + repr(element_name)
+ + ' does not exist in element type: '
+ + repr(element_type)
+ )
+
return res
-
- def getDamagedLocationListByOriginalElementList_2(self, element_type, orginal_element_list, iCheck=False):
-
-
- if element_type=='PIPE':
+
+ def getDamagedLocationListByOriginalElementList_2( # noqa: N802, D102
+ self,
+ element_type,
+ orginal_element_list,
+ iCheck=False, # noqa: FBT002, N803
+ ):
+ if element_type == 'PIPE':
all_original_element_list = self._pipe_damage_table['Orginal_element']
-
- elif element_type=='PUMP':
+
+ elif element_type == 'PUMP':
all_original_element_list = self._pump_damage_table['element_name']
- elif element_type=='DISTNODE' or element_type=='GNODE' or element_type=='TANK' or element_type=='RESERVOIR':
+ elif (
+ element_type == 'DISTNODE' # noqa: PLR1714
+ or element_type == 'GNODE'
+ or element_type == 'TANK'
+ or element_type == 'RESERVOIR'
+ ):
temp = self.getDamageData(element_type, iCopy=False)
- if "virtual_of" in temp:
+ if 'virtual_of' in temp:
all_original_element_list = temp['virtual_of']
else:
- all_original_element_list = pd.Series(temp.index,index=temp.index)
-
+ all_original_element_list = pd.Series(temp.index, index=temp.index)
+
else:
- raise ValueError('Unkown recognized element type: '+repr(element_type))
+ raise ValueError('Unknown element type: ' + repr(element_type))
temp_bool = all_original_element_list.isin(orginal_element_list.index)
res = all_original_element_list[temp_bool]
- if iCheck==True:
+ if iCheck == True: # noqa: E712
if len(res.index) < len(orginal_element_list):
not_available_list = set(orginal_element_list) - set(res.index)
- raise ValueError('The element: '+repr(not_available_list)+' does not exist in elemet type: '+repr(element_type))
-
+ raise ValueError(
+ 'The element: '
+ + repr(not_available_list)
+ + ' does not exist in element type: '
+ + repr(element_type)
+ )
+
return res
-
-
- def getOriginalPipenodes(self, orginal_pipe_name):
+
+ def getOriginalPipenodes(self, orginal_pipe_name): # noqa: N802, D102
return self.original_pipe_data[orginal_pipe_name]
-
- def getLeakData(self, leaking_node_name):
- pipe_A = self._pipe_leak_history.loc[leaking_node_name, 'Pipe_A']
- pipe_B = self._pipe_leak_history.loc[leaking_node_name, 'Pipe_B']
+
+ def getLeakData(self, leaking_node_name): # noqa: N802, D102
+ pipe_A = self._pipe_leak_history.loc[leaking_node_name, 'Pipe_A'] # noqa: N806
+ pipe_B = self._pipe_leak_history.loc[leaking_node_name, 'Pipe_B'] # noqa: N806
orginal_pipe = self._pipe_leak_history.loc[leaking_node_name, 'Orginal_pipe']
-
+
return pipe_A, pipe_B, orginal_pipe
-
- def getCertainLeakData(self, damage_node_name, wn):
+
+ def getCertainLeakData(self, damage_node_name, wn): # noqa: C901, N802, D102
pipe_name_list = []
-
- result_pipe_A=None
- result_pipe_B=None
-
+
+ result_pipe_A = None # noqa: N806
+ result_pipe_B = None # noqa: N806
+
orginal_pipe = self._pipe_leak_history.loc[damage_node_name, 'Orginal_pipe']
- refined_data = self._pipe_leak_history[self._pipe_leak_history['Orginal_pipe']==orginal_pipe]
-
- for damage_point_name, data in refined_data.iterrows():
- pipe_A = data['Pipe_A']
- pipe_B = data['Pipe_B']
-
+ refined_data = self._pipe_leak_history[
+ self._pipe_leak_history['Orginal_pipe'] == orginal_pipe
+ ]
+
+ for damage_point_name, data in refined_data.iterrows(): # noqa: B007
+ pipe_A = data['Pipe_A'] # noqa: N806
+ pipe_B = data['Pipe_B'] # noqa: N806
+
if pipe_A not in pipe_name_list:
pipe_name_list.append(pipe_A)
if pipe_B not in pipe_name_list:
pipe_name_list.append(pipe_B)
-
- #orginal_pipe = self._pipe_break_history.loc[damage_node_name, 'Orginal_pipe']
- refined_data = self._pipe_break_history[self._pipe_break_history['Orginal_pipe']==orginal_pipe]
-
- for damage_point_name, data in refined_data.iterrows():
- pipe_A = data['Pipe_A']
- pipe_B = data['Pipe_B']
-
+
+ # orginal_pipe = self._pipe_break_history.loc[damage_node_name, 'Orginal_pipe']
+ refined_data = self._pipe_break_history[
+ self._pipe_break_history['Orginal_pipe'] == orginal_pipe
+ ]
+
+ for damage_point_name, data in refined_data.iterrows(): # noqa: B007
+ pipe_A = data['Pipe_A'] # noqa: N806
+ pipe_B = data['Pipe_B'] # noqa: N806
+
if pipe_A not in pipe_name_list:
pipe_name_list.append(pipe_A)
if pipe_B not in pipe_name_list:
pipe_name_list.append(pipe_B)
-
- for pipe_name in pipe_name_list:
+ for pipe_name in pipe_name_list:
try:
pipe = wn.get_link(pipe_name)
- except:
+ except: # noqa: S112, E722
continue
-
+
if damage_node_name == pipe.start_node_name:
- result_pipe_B=pipe_name
+ result_pipe_B = pipe_name # noqa: N806
elif damage_node_name == pipe.end_node_name:
- result_pipe_A=pipe_name
-
- if result_pipe_A !=None and result_pipe_B !=None:
+ result_pipe_A = pipe_name # noqa: N806
+
+ if result_pipe_A != None and result_pipe_B != None: # noqa: E711
return result_pipe_A, result_pipe_B
- raise RuntimeError('There must be a pair of pipes for '+repr(damage_node_name))
-
-
-
- def getBreakData(self, breaking_node_name):
- pipe_A = self._pipe_break_history.loc[breaking_node_name, 'Pipe_A']
- pipe_B = self._pipe_break_history.loc[breaking_node_name, 'Pipe_B']
- orginal_pipe = self._pipe_break_history.loc[breaking_node_name, 'Orginal_pipe']
- node_A = self._pipe_break_history.loc[breaking_node_name, 'Node_A']
- node_B = self._pipe_break_history.loc[breaking_node_name, 'Node_B']
-
+ raise RuntimeError(
+ 'There must be a pair of pipes for ' + repr(damage_node_name)
+ )
+
+ def getBreakData(self, breaking_node_name): # noqa: N802, D102
+ pipe_A = self._pipe_break_history.loc[breaking_node_name, 'Pipe_A'] # noqa: N806
+ pipe_B = self._pipe_break_history.loc[breaking_node_name, 'Pipe_B'] # noqa: N806
+ orginal_pipe = self._pipe_break_history.loc[
+ breaking_node_name, 'Orginal_pipe'
+ ]
+ node_A = self._pipe_break_history.loc[breaking_node_name, 'Node_A'] # noqa: N806
+ node_B = self._pipe_break_history.loc[breaking_node_name, 'Node_B'] # noqa: N806
+
return pipe_A, pipe_B, orginal_pipe, node_A, node_B
-
- def getCertainBreakData(self, damage_node_name, wn):
+
+ def getCertainBreakData(self, damage_node_name, wn): # noqa: C901, N802, D102
pipe_name_list = []
-
- result_pipe_A=None
- result_pipe_B=None
-
- node_A = self._pipe_break_history.loc[damage_node_name, 'Node_A']
- node_B = self._pipe_break_history.loc[damage_node_name, 'Node_B']
-
+
+ result_pipe_A = None # noqa: N806
+ result_pipe_B = None # noqa: N806
+
+ node_A = self._pipe_break_history.loc[damage_node_name, 'Node_A'] # noqa: N806
+ node_B = self._pipe_break_history.loc[damage_node_name, 'Node_B'] # noqa: N806
+
orginal_pipe = self._pipe_break_history.loc[damage_node_name, 'Orginal_pipe']
-
- refined_data = self._pipe_leak_history[self._pipe_leak_history['Orginal_pipe']==orginal_pipe]
-
- for damage_point_name, data in refined_data.iterrows():
- pipe_A = data['Pipe_A']
- pipe_B = data['Pipe_B']
-
+
+ refined_data = self._pipe_leak_history[
+ self._pipe_leak_history['Orginal_pipe'] == orginal_pipe
+ ]
+
+ for damage_point_name, data in refined_data.iterrows(): # noqa: B007
+ pipe_A = data['Pipe_A'] # noqa: N806
+ pipe_B = data['Pipe_B'] # noqa: N806
+
if pipe_A not in pipe_name_list:
pipe_name_list.append(pipe_A)
if pipe_B not in pipe_name_list:
pipe_name_list.append(pipe_B)
-
- #orginal_pipe = self._pipe_break_history.loc[damage_node_name, 'Orginal_pipe']
- refined_data = self._pipe_break_history[self._pipe_break_history['Orginal_pipe']==orginal_pipe]
-
- for damage_point_name, data in refined_data.iterrows():
- pipe_A = data['Pipe_A']
- pipe_B = data['Pipe_B']
-
+
+ # orginal_pipe = self._pipe_break_history.loc[damage_node_name, 'Orginal_pipe']
+ refined_data = self._pipe_break_history[
+ self._pipe_break_history['Orginal_pipe'] == orginal_pipe
+ ]
+
+ for damage_point_name, data in refined_data.iterrows(): # noqa: B007
+ pipe_A = data['Pipe_A'] # noqa: N806
+ pipe_B = data['Pipe_B'] # noqa: N806
+
if pipe_A not in pipe_name_list:
pipe_name_list.append(pipe_A)
if pipe_B not in pipe_name_list:
pipe_name_list.append(pipe_B)
-
- for pipe_name in pipe_name_list:
+ for pipe_name in pipe_name_list:
try:
pipe = wn.get_link(pipe_name)
- except:
+ except: # noqa: S112, E722
continue
-
if node_B == pipe.start_node_name:
- result_pipe_B=pipe_name
+ result_pipe_B = pipe_name # noqa: N806
elif node_A == pipe.end_node_name:
- result_pipe_A=pipe_name
-
- if result_pipe_A !=None and result_pipe_B !=None:
+ result_pipe_A = pipe_name # noqa: N806
+
+ if result_pipe_A != None and result_pipe_B != None: # noqa: E711
return result_pipe_A, result_pipe_B, node_A, node_B
- raise RuntimeError('There must be a pair of pipes for '+repr(damage_node_name))
+ raise RuntimeError(
+ 'There must be a pair of pipes for ' + repr(damage_node_name)
+ )
- def getPipeDamageAttribute(self, attribute_name, damage_node_name=None):
-
+ def getPipeDamageAttribute(self, attribute_name, damage_node_name=None): # noqa: N802, D102
if attribute_name not in self._pipe_damage_table.columns:
- raise ValueError('Attribute not in damage table: '+str(attribute_name))
-
- if damage_node_name==None:
+ raise ValueError('Attribute not in damage table: ' + str(attribute_name))
+
+ if damage_node_name == None: # noqa: E711
return self._pipe_damage_table[attribute_name]
- else:
+ else: # noqa: RET505
return self._pipe_damage_table.loc[damage_node_name, attribute_name]
- def getDamageData(self, element_type, iCopy=True):
+ def getDamageData(self, element_type, iCopy=True): # noqa: FBT002, C901, N802, N803, D102
if element_type.upper() == 'PIPE':
if iCopy:
res = self._pipe_damage_table.copy()
@@ -610,53 +860,53 @@ def getDamageData(self, element_type, iCopy=True):
res = self._node_damage_table.copy()
else:
res = self._node_damage_table
-
+
elif element_type.upper() == 'GNODE':
if iCopy:
res = self._gnode_damage_table.copy()
else:
res = self._gnode_damage_table
-
+
elif element_type.upper() == 'TANK':
if iCopy:
res = self._tank_damage_table.copy()
else:
res = self._tank_damage_table
-
+
elif element_type.upper() == 'PUMP':
if iCopy:
res = self._pump_damage_table.copy()
else:
res = self._pump_damage_table
-
+
elif element_type.upper() == 'RESERVOIR':
if iCopy:
res = self._reservoir_damage_table.copy()
else:
res = self._reservoir_damage_table
-
+
else:
raise ValueError('Unknown element type: ' + element_type)
return res
-
- def getOrginalElement(self, damaged_node_name, element_type):
+
+ def getOrginalElement(self, damaged_node_name, element_type): # noqa: N802, D102
element_damage_data = self.getDamageData(element_type, iCopy=False)
return element_damage_data.loc[damaged_node_name, 'Orginal_element']
-
- def getPipeData(self, attr, name=None):
- if name != None:
+
+ def getPipeData(self, attr, name=None): # noqa: N802, D102
+ if name != None: # noqa: E711
return self._pipe_data[attr].loc[name]
- else:
+ else: # noqa: RET505
return self._pipe_data[attr]
-
- def setDamageData(self, element,col, value):
+
+ def setDamageData(self, element, col, value): # noqa: N802, D102
if element.upper() == 'PIPE':
if col not in self._pipe_damage_table.columns:
- raise ValueError('Columns is not in damage table: '+ col)
+ raise ValueError('Column is not in damage table: ' + col)
self._pipe_damage_table[col] = value
elif element.upper() == 'DISTNODE':
if col not in self._node_damage_table.columns:
- raise ValueError('Columns is not in damage table: '+ col)
+ raise ValueError('Column is not in damage table: ' + col)
self._node_damage_table[col] = value
elif element.upper() == 'GNODE':
self._gnode_damage_table[col] = value
@@ -667,189 +917,243 @@ def setDamageData(self, element,col, value):
elif element.upper() == 'RESERVOIR':
self._reservoir_damage_table[col] = value
else:
- raise ValueError('Element is not defined: '+ element)
-
- def setDamageDataByRowAndColumn(self, element, index, col, value, iCheck=False):
- #if element.upper() == 'PIPE':
+ raise ValueError('Element is not defined: ' + element)
+
+ def setDamageDataByRowAndColumn(self, element, index, col, value, iCheck=False): # noqa: FBT002, N802, N803, D102
+ # if element.upper() == 'PIPE':
damage_table = self.getDamageData(element, iCopy=False)
if col not in damage_table.columns:
- raise ValueError('Columns is not in damage table: '+ col)
- if type(index)==list:
+ raise ValueError('Column is not in damage table: ' + col)
+ if type(index) == list or ( # noqa: E721
+ (index in damage_table.index and col in damage_table.columns)
+ or iCheck == True # noqa: E712
+ ):
damage_table.loc[index, col] = value
else:
- if (index in damage_table.index and col in damage_table.columns) or iCheck==True:
- damage_table.loc[index, col] = value
- else:
- raise ValueError(index)
-
-
- def setDamageDataByList(self, element, index_list, col, value, iCheck=False):
-
- if type(index_list)!= list:
- raise ValueError('index_list is not data type list')
-
+ raise ValueError(index)
+
+ def setDamageDataByList(self, element, index_list, col, value, iCheck=False): # noqa: FBT002, C901, N802, N803, D102
+ if type(index_list) != list: # noqa: E721
+ raise ValueError('index_list is not of type list') # noqa: EM101, TRY003
+
if element.upper() == 'PIPE':
if col not in self._pipe_damage_table.columns:
- raise ValueError('Columns is not in damage table: '+ col)
-
+ raise ValueError('Column is not in damage table: ' + col)
+
for damage_node_name in index_list:
- if damage_node_name in self._pipe_damage_table.index or iCheck==True:
+ if (
+ damage_node_name in self._pipe_damage_table.index
+ or iCheck == True # noqa: E712
+ ):
self._pipe_damage_table.loc[damage_node_name, col] = value
else:
raise ValueError(damage_node_name)
-
+
elif element.upper() == 'DISTNODE':
if col not in self._node_damage_table.columns:
- raise ValueError('Columns is not in damage table: '+ col)
+ raise ValueError('Column is not in damage table: ' + col)
-
for damage_node_name in index_list:
- if damage_node_name in self._node_damage_table.index or iCheck==True:
+ if (
+ damage_node_name in self._node_damage_table.index
+ or iCheck == True # noqa: E712
+ ):
self._node_damage_table.loc[damage_node_name, col] = value
else:
raise ValueError(damage_node_name)
-
+
elif element.upper() == 'GNODE':
if col not in self._gnode_damage_table.columns:
- raise ValueError('Columns is not in damage table: '+ col)
-
+ raise ValueError('Column is not in damage table: ' + col)
+
for gnode_name in index_list:
- if gnode_name in self._gnode_damage_table.index or iCheck==True:
+ if gnode_name in self._gnode_damage_table.index or iCheck == True: # noqa: E712
self._gnode_damage_table.loc[gnode_name, col] = value
else:
raise ValueError(gnode_name)
-
+
elif element.upper() == 'TANK':
if col not in self._tank_damage_table.columns:
- raise ValueError('Columns is not in damage table: '+ col)
-
+ raise ValueError('Column is not in damage table: ' + col)
+
for _tank_damage_table in index_list:
- if _tank_damage_table in self._tank_damage_table.index or iCheck==True:
+ if (
+ _tank_damage_table in self._tank_damage_table.index
+ or iCheck == True # noqa: E712
+ ):
self._tank_damage_table.loc[_tank_damage_table, col] = value
else:
raise ValueError(_tank_damage_table)
-
+
elif element.upper() == 'PUMP':
if col not in self._pump_damage_table.columns:
- raise ValueError('Columns is not in damage table: '+ col)
-
+ raise ValueError('Column is not in damage table: ' + col)
+
for _pump_damage_table in index_list:
- if _pump_damage_table in self._pump_damage_table.index or iCheck==True:
+ if (
+ _pump_damage_table in self._pump_damage_table.index
+ or iCheck == True # noqa: E712
+ ):
self._pump_damage_table.loc[_pump_damage_table, col] = value
else:
raise ValueError(_pump_damage_table)
-
+
elif element.upper() == 'RESERVOIR':
if col not in self._reservoir_damage_table.columns:
- raise ValueError('Columns is not in damage table: '+ col)
-
+ raise ValueError('Column is not in damage table: ' + col)
+
for _reservoir_damage_table in index_list:
- if _reservoir_damage_table in self._reservoir_damage_table.index or iCheck==True:
- self._reservoir_damage_table.loc[_reservoir_damage_table, col] = value
+ if (
+ _reservoir_damage_table in self._reservoir_damage_table.index
+ or iCheck == True # noqa: E712
+ ):
+ self._reservoir_damage_table.loc[
+ _reservoir_damage_table, col
+ ] = value
else:
raise ValueError(_reservoir_damage_table)
else:
- raise ValueError('Element is not defined: '+ element)
-
-
+ raise ValueError('Element is not defined: ' + element)
-
- def updatePipeDamageTableTimeSeries(self, time):
+ def updatePipeDamageTableTimeSeries(self, time): # noqa: N802, D102
if time in self._pipe_damage_table_time_series:
- raise ValueError('Time exist in pipe damage table time history')
-
- self._pipe_damage_table_time_series[time]=self._pipe_damage_table.copy()
-
- def updateNodeDamageTableTimeSeries(self, time):
+ raise ValueError('Time already exists in pipe damage table time history') # noqa: EM101, TRY003
+
+ self._pipe_damage_table_time_series[time] = self._pipe_damage_table.copy()
+
+ def updateNodeDamageTableTimeSeries(self, time): # noqa: N802, D102
if time in self._node_damage_table_time_series:
- raise ValueError('Time exist in node damage table time history')
-
+ raise ValueError('Time already exists in node damage table time history') # noqa: EM101, TRY003
+
self._node_damage_table_time_series[time] = self._node_damage_table.copy()
-
-
- def updateTankTimeSeries(self, wn, time):
-
+
+ def updateTankTimeSeries(self, wn, time): # noqa: N802, D102
if time in self._tank_level_time_series:
- raise ValueError('Time exist in tank damage table time history')
-
+ raise ValueError('Time already exists in tank damage table time history') # noqa: EM101, TRY003
+
tank_name_list = wn.tank_name_list
tank_level_res = pd.Series(index=tank_name_list)
-
+
for tank_name in wn.tank_name_list:
node = wn.get_node(tank_name)
- net_water_level = node.level-node.min_level
- if net_water_level<0.001:
- raise ValueError('Net Water Level in tank cannot be less than zero:'+repr(tank_name)+' '+repr(net_water_level))
- tank_level_res.loc[tank_name]=net_water_level
-
- self._tank_level_time_series[time]=tank_level_res
-
- def updateRestorationIncomeWaterTimeSeries(self, wn, time):
-
+ net_water_level = node.level - node.min_level
+ if net_water_level < 0.001: # noqa: PLR2004
+ raise ValueError(
+ 'Net Water Level in tank cannot be less than zero:'
+ + repr(tank_name)
+ + ' '
+ + repr(net_water_level)
+ )
+ tank_level_res.loc[tank_name] = net_water_level
+
+ self._tank_level_time_series[time] = tank_level_res
+
+ def updateRestorationIncomeWaterTimeSeries(self, wn, time): # noqa: ARG002, N802, D102
if time in self._restoration_reservoir_name_time_series:
- raise ValueError('Time exist in restoration reservoir damage table time history')
+ raise ValueError( # noqa: TRY003
+ 'Time already exists in restoration reservoir damage table time history' # noqa: EM101
+ )
res = []
for list_of_restoration in self._record_registry:
for key, value in list_of_restoration.items():
- if key=='ADDED_RESERVOIR':
+ if key == 'ADDED_RESERVOIR':
res.append(value)
-
- self._restoration_reservoir_name_time_series[time]=res
-
-
- def updateElementDamageTable(self, element, attr, index, value, icheck=False):
-
+
+ self._restoration_reservoir_name_time_series[time] = res
+
+ def updateElementDamageTable(self, element, attr, index, value, icheck=False): # noqa: FBT002, C901, N802, D102
if element == 'PIPE':
- if icheck == True:
+ if icheck == True: # noqa: E712
if self._pipe_damage_table[attr].loc[index] == value:
- raise ValueError("the value is already set")
-
+ raise ValueError('the value is already set') # noqa: EM101, TRY003
+
self._pipe_damage_table.loc[index, attr] = value
-
+
elif element == 'DISTNODE':
- if icheck == True:
+ if icheck == True: # noqa: E712
if self._node_damage_table[attr].loc[index] == value:
- raise ValueError("the value is already set in element: "+element+', attr: '+attr+', index: '+index+', value: '+value)
-
+ raise ValueError(
+ 'the value is already set in element: '
+ + element
+ + ', attr: '
+ + attr
+ + ', index: '
+ + index
+ + ', value: '
+ + value
+ )
+
self._node_damage_table.loc[index, attr] = value
-
-
+
elif element == 'GNODE':
- if icheck == True:
+ if icheck == True: # noqa: E712
if self._gnode_damage_table[attr].loc[index] == value:
- raise ValueError("the value is already set in element: "+element+', attr: '+attr+', index: '+index+', value: '+value)
-
+ raise ValueError(
+ 'the value is already set in element: '
+ + element
+ + ', attr: '
+ + attr
+ + ', index: '
+ + index
+ + ', value: '
+ + value
+ )
+
self._gnode_damage_table.loc[index, attr] = value
-
+
elif element == 'TANK':
- if icheck == True:
+ if icheck == True: # noqa: E712
if self._tank_damage_table[attr].loc[index] == value:
- raise ValueError("the value is already set in element: "+element+', attr: '+attr+', index: '+index+', value: '+value)
-
+ raise ValueError(
+ 'the value is already set in element: '
+ + element
+ + ', attr: '
+ + attr
+ + ', index: '
+ + index
+ + ', value: '
+ + value
+ )
+
self._tank_damage_table.loc[index, attr] = value
-
+
elif element == 'PUMP':
- if icheck == True:
+ if icheck == True: # noqa: E712
if self._pump_damage_table[attr].loc[index] == value:
- raise ValueError("the value is already set in element: "+element+', attr: '+attr+', index: '+index+', value: '+value)
-
+ raise ValueError(
+ 'the value is already set in element: '
+ + element
+ + ', attr: '
+ + attr
+ + ', index: '
+ + index
+ + ', value: '
+ + value
+ )
+
self._pump_damage_table.loc[index, attr] = value
-
+
elif element == 'RESERVOIR':
- if icheck == True:
+ if icheck == True: # noqa: E712
if self._reservoir_damage_table[attr].loc[index] == value:
- raise ValueError("the value is already set in element: "+element+', attr: '+attr+', index: '+index+', value: '+value)
-
+ raise ValueError(
+ 'the value is already set in element: '
+ + element
+ + ', attr: '
+ + attr
+ + ', index: '
+ + index
+ + ', value: '
+ + value
+ )
+
self._reservoir_damage_table.loc[index, attr] = value
-
-
+
else:
- raise ValueError('Unknown element: ' + element)
-
-
-
- def addAttrToElementDamageTable(self, element, attr, def_data):
- if element == 'PIPE':
+ raise ValueError('Unknown element: ' + element)
+
+ def addAttrToElementDamageTable(self, element, attr, def_data): # noqa: N802, D102
+ if element == 'PIPE':
self.addAttrToPipeDamageTable(attr, def_data)
elif element == 'DISTNODE':
self.addAttrToDistNodeDamageTable(attr, def_data)
@@ -861,67 +1165,66 @@ def addAttrToElementDamageTable(self, element, attr, def_data):
self.addAttrToPumpDamageTable(attr, def_data)
elif element == 'RESERVOIR':
self.addAttrToReservoirDamageTable(attr, def_data)
-
+
else:
raise ValueError('Undefined element: ' + element)
-
- def addAttrToPipeDamageTable(self, attr, def_data):
+
+ def addAttrToPipeDamageTable(self, attr, def_data): # noqa: N802, D102
if attr in self._pipe_damage_table.columns:
- raise ValueError("attribute already in the damage table")
-
- if def_data == None:
+ raise ValueError('attribute already in the damage table') # noqa: EM101, TRY003
+
+ if def_data == None: # noqa: E711
self._pipe_damage_table[attr] = np.nan
else:
self._pipe_damage_table[attr] = def_data
-
- def addAttrToDistNodeDamageTable(self, attr, def_data):
+
+ def addAttrToDistNodeDamageTable(self, attr, def_data): # noqa: N802, D102
if attr in self._node_damage_table.columns:
- raise ValueError("attribute already in the damage table")
-
- if def_data == None:
+ raise ValueError('attribute already in the damage table') # noqa: EM101, TRY003
+
+ if def_data == None: # noqa: E711
self._node_damage_table[attr] = np.nan
else:
self._node_damage_table[attr] = def_data
-
- def addAttrToGeneralNodeDamageTable(self, attr, def_data):
+
+ def addAttrToGeneralNodeDamageTable(self, attr, def_data): # noqa: N802, D102
if attr in self._gnode_damage_table.columns:
- raise ValueError("attribute already in the damage table")
-
- if def_data == None:
+ raise ValueError('attribute already in the damage table') # noqa: EM101, TRY003
+
+ if def_data == None: # noqa: E711
self._gnode_damage_table[attr] = np.nan
else:
self._gnode_damage_table[attr] = def_data
-
- def addAttrToTankDamageTable(self, attr, def_data):
+
+ def addAttrToTankDamageTable(self, attr, def_data): # noqa: N802, D102
if attr in self._tank_damage_table.columns:
- raise ValueError("attribute already in the damage table")
-
- if def_data == None:
+ raise ValueError('attribute already in the damage table') # noqa: EM101, TRY003
+
+ if def_data == None: # noqa: E711
self._tank_damage_table[attr] = np.nan
else:
self._tank_damage_table[attr] = def_data
-
- def addAttrToPumpDamageTable(self, attr, def_data):
+
+ def addAttrToPumpDamageTable(self, attr, def_data): # noqa: N802, D102
if attr in self._pump_damage_table.columns:
- raise ValueError("attribute already in the damage table")
-
- if def_data == None:
+ raise ValueError('attribute already in the damage table') # noqa: EM101, TRY003
+
+ if def_data == None: # noqa: E711
self._pump_damage_table[attr] = np.nan
else:
self._pump_damage_table[attr] = def_data
-
- def addAttrToReservoirDamageTable(self, attr, def_data):
+
+ def addAttrToReservoirDamageTable(self, attr, def_data): # noqa: N802, D102
if attr in self._reservoir_damage_table.columns:
- raise ValueError("attribute already in the damage table")
-
- if def_data == None:
+ raise ValueError('attribute already in the damage table') # noqa: EM101, TRY003
+
+ if def_data == None: # noqa: E711
self._reservoir_damage_table[attr] = np.nan
else:
self._reservoir_damage_table[attr] = def_data
-
- def iOccupied(self, node_name):
- """
- checks if the node is occuoied
+
+ def iOccupied(self, node_name): # noqa: N802
+ """Checks if the node is occuoied
Parameters
----------
@@ -933,36 +1236,33 @@ def iOccupied(self, node_name):
bool
result.
- """
- return (node_name in self._occupancy.index)
-
- def _getDamagedPipesRegistry(self):
- """
- Gets the whole damage registry. Not safe to be used outside the class.
+ """ # noqa: D400, D401
+ return node_name in self._occupancy.index
+
+ def _getDamagedPipesRegistry(self): # noqa: N802
+ """Gets the whole damage registry. Not safe to be used outside the class.
Returns
-------
Pandas.Series
damage locations by node name.
- """
+ """ # noqa: D401
return self._pipe_node_damage_status
-
- def getNumberofDamagedNodes(self):
- """
- Gets numbers of Damaged locations. Counts two for broken pipes
+
+ def getNumberofDamagedNodes(self): # noqa: N802
+ """Gets numbers of Damaged locations. Counts two for broken pipes
Returns
-------
Int
Number of damaged locations by node name.
- """
+ """ # noqa: D400, D401
return len(self._pipe_node_damage_status)
-
- def occupyNode(self, node_name, occupier_name):
- """
- Put adds node and its occupier in occupency list
+
+ def occupyNode(self, node_name, occupier_name): # noqa: N802
+ """Put adds node and its occupier in occupency list
Parameters
----------
@@ -980,15 +1280,18 @@ def occupyNode(self, node_name, occupier_name):
-------
None.
- """
+ """ # noqa: D400
if occupier_name in self._occupancy:
- #if not iNodeCoupled(node_name):
- raise ValueError('Occupier name already in the list. Forget to remove another occupancy or double adding?')
- self._occupancy = self._occupancy.append(pd.Series(data=occupier_name, index=[node_name]))
-
- def removeOccupancy(self, occupier_name):
- """
- Removes occupency in the node by occupier's name.
+ # if not iNodeCoupled(node_name):
+ raise ValueError( # noqa: TRY003
+ 'Occupier name already in the list. Did you forget to remove another occupancy, or was it added twice?' # noqa: EM101
+ )
+ self._occupancy = self._occupancy.append(
+ pd.Series(data=occupier_name, index=[node_name])
+ )
+
+ def removeOccupancy(self, occupier_name): # noqa: N802
+ """Removes occupency in the node by occupier's name.
Parameters
----------
@@ -1004,18 +1307,17 @@ def removeOccupancy(self, occupier_name):
-------
None.
- """
- temp = self._occupancy[self._occupancy==occupier_name]
-
- if len(temp)==0:
- raise ValueError("there is no node occupied with thsi occupier name")
-
+ """ # noqa: D401
+ temp = self._occupancy[self._occupancy == occupier_name]
+
+ if len(temp) == 0:
+ raise ValueError('there is no node occupied with this occupier name') # noqa: EM101, TRY003
+
ind = temp.index.tolist()
self._occupancy = self._occupancy.drop(ind)
-
- def whoOccupiesIn(self, node_name):
- """
- Gets name of the occupier
+
+ def whoOccupiesIn(self, node_name): # noqa: N802
+ """Gets name of the occupier
Parameters
----------
@@ -1027,12 +1329,11 @@ def whoOccupiesIn(self, node_name):
string
Occupier's name.
- """
+ """ # noqa: D400, D401
return self._occupancy[node_name]
-
- def whereIsOccupiedByName(self, occupier_name):
- """
- Get's node(s) occupied by occupier
+
+ def whereIsOccupiedByName(self, occupier_name): # noqa: N802
+ """Gets node(s) occupied by occupier
Parameters
----------
@@ -1049,30 +1350,30 @@ def whereIsOccupiedByName(self, occupier_name):
str or series
node(s) ID.
- """
- temp = self._occupancy[self._occupancy == occupier_name]
- if len(temp)==0:
- raise ValueError('there is no occupancy with this name')
+ """ # noqa: D400, D401
+ temp = self._occupancy[self._occupancy == occupier_name]
+ if len(temp) == 0:
+ raise ValueError('there is no occupancy with this name') # noqa: EM101, TRY003
+ def getListofFreeRepairAgents(self): # noqa: N802
+ """MAYBE NOT NEEDED Gets a list of free agents. Not needed anymore.
- def getListofFreeRepairAgents(self):
- """
- MAYBE NOT NEEDED Gets a list of free agents. Not needed anymore.
Returns
-------
Free_RepairAgents : TYPE
DESCRIPTION.
"""
- working_RepairAgents = set(self._occupancy.tolist())
- RepairAgentsNameList = self._pipe_RepairAgentNameRegistry
- Free_RepairAgents = [name for name in RepairAgentsNameList if name not in working_RepairAgents]
- return Free_RepairAgents
-
- def coupleTwoBreakNodes(self, break_point_1_name, break_point_2_name):
- """
- Couples two nodes in registry for the time which we have a break.
- PLEASE NOTE THAT THE FIRST NODE MUST BE TEH ONE CONNECTED TO THE
+ working_RepairAgents = set(self._occupancy.tolist()) # noqa: N806
+ RepairAgentsNameList = self._pipe_RepairAgentNameRegistry # noqa: N806
+ Free_RepairAgents = [ # noqa: N806
+ name for name in RepairAgentsNameList if name not in working_RepairAgents
+ ]
+ return Free_RepairAgents # noqa: RET504
+
+ def coupleTwoBreakNodes(self, break_point_1_name, break_point_2_name): # noqa: N802
+ """Couples two nodes in registry for the time which we have a break.
+ PLEASE NOTE THAT THE FIRST NODE MUST BE THE ONE CONNECTED TO THE
MAIN(ORIGINAL) PIPE THAT IS BROKEN NOW.
Parameters
@@ -1086,17 +1387,15 @@ def coupleTwoBreakNodes(self, break_point_1_name, break_point_2_name):
-------
None.
- """
-
+ """ # noqa: D205
self._pipe_break_node_coupling[break_point_1_name] = break_point_2_name
self._pipe_break_node_coupling[break_point_2_name] = break_point_1_name
self._break_point_attached_to_mainPipe.append(break_point_1_name)
-
- def getCoupledBreakNode(self, break_point_name):
- """
- Gets the coupled node given the first coupled node, and checks if the
- given coupled node is connected to the main pipe.
-
+
+ def getCoupledBreakNode(self, break_point_name): # noqa: N802
+ """Gets the coupled node given the first coupled node, and checks if the
+ given coupled node is connected to the main pipe.
+
Parameters
----------
break_point_name : str
@@ -1107,36 +1406,36 @@ def getCoupledBreakNode(self, break_point_name):
out1 : str
the other coupled node name
is_breakPoint_1_attacjedToMainPipe : bool
- If teh given (first node) is the one coonected to the main(orginal)
+ If the given (first node) is the one connected to the main(original)
pipe
- """
-
+ """ # noqa: D205, D401
out1 = self._pipe_break_node_coupling[break_point_name]
- is_breakPoint_1_attacjedToMainPipe = break_point_name in self._break_point_attached_to_mainPipe
+ is_breakPoint_1_attacjedToMainPipe = ( # noqa: N806
+ break_point_name in self._break_point_attached_to_mainPipe
+ )
return out1, is_breakPoint_1_attacjedToMainPipe
-
- def iNodeCoupled(self, node_name):
+
+ def iNodeCoupled(self, node_name): # noqa: N802, D102
return node_name in self._pipe_break_node_coupling
-
- def iDamagedPipeReminded(self):
+
+ def iDamagedPipeReminded(self): # noqa: N802, D102
damaged_nodes = self._pipe_node_damage_status.index
- if len(damaged_nodes)==0:
+ if len(damaged_nodes) == 0:
return False
is_reminded = False
for node_name in iter(damaged_nodes):
- if not node_name in self._occupancy.index:
+ if node_name not in self._occupancy.index:
is_reminded = True
- return is_reminded
+ return is_reminded # noqa: RET504
return is_reminded
-
- def getOtherCoupledBreakPoint(self, node_name):
+
+ def getOtherCoupledBreakPoint(self, node_name): # noqa: N802, D102
return self._pipe_break_node_coupling[node_name]
-
- def removeCoupledBreakNodes(self, break_point_name):
- """
- Removes tghe coupled
-
+
+ def removeCoupledBreakNodes(self, break_point_name): # noqa: N802
+ """Removes the coupled
+
Parameters
----------
break_point_name : str
@@ -1149,37 +1448,42 @@ def removeCoupledBreakNodes(self, break_point_name):
second : str
Name of second node(connected to the pipe created after break)
- """
- other_coupled_break_point = self._pipe_break_node_coupling.pop(break_point_name)
+ """ # noqa: D400, D401
+ other_coupled_break_point = self._pipe_break_node_coupling.pop(
+ break_point_name
+ )
self._pipe_break_node_coupling.pop(other_coupled_break_point)
- #self._break_node_coupling.pop(break_point_name)
-
+ # self._break_node_coupling.pop(break_point_name)
+
i_in_list = break_point_name in self._break_point_attached_to_mainPipe
if i_in_list:
self._break_point_attached_to_mainPipe.remove(break_point_name)
- first= break_point_name
+ first = break_point_name
second = other_coupled_break_point
else:
- first= other_coupled_break_point
+ first = other_coupled_break_point
second = break_point_name
return first, second
-
- def recordPipeDamageTable(self, stop_time):
- if self.settings['result_details']== 'minimal':
- return
+
+ def recordPipeDamageTable(self, stop_time): # noqa: N802, D102
+ if self.settings['result_details'] == 'minimal':
+ return None
if stop_time in self._pipe_damage_table_history:
- return ValueError("Time exists in pipe damage hostry: " + str(stop_time))
- self._pipe_damage_table_history['stop_time'] = self._pipe_damage_table_history
-
- def getMostLeakAtCheck(self, real_node_name_list, element_type):
- if element_type == "DISTNODE":
- total_demand = self._node_damage_table.loc[real_node_name_list, 'Demand2']
- total_demand.loc[total_demand[total_demand.isna()].index ] = 0
+ return ValueError('Time exists in pipe damage history: ' + str(stop_time))
+ self._pipe_damage_table_history[stop_time] = ( # noqa: RET503
+ self._pipe_damage_table.copy()
+ )
+
+ def getMostLeakAtCheck(self, real_node_name_list, element_type): # noqa: N802, D102
+ if element_type == 'DISTNODE':
+ total_demand = self._node_damage_table.loc[
+ real_node_name_list, 'Demand2'
+ ]
+ total_demand.loc[total_demand[total_demand.isna()].index] = 0
return total_demand
- elif element_type == "PIPE":
+ elif element_type == 'PIPE': # noqa: RET505
leak = self._pipe_damage_table.loc[real_node_name_list, 'LeakAtCheck']
- leak.loc[leak[leak.isna()].index ] = 0
+ leak.loc[leak[leak.isna()].index] = 0
return leak
else:
return None
-
\ No newline at end of file
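
A note on the recurring "== True" / "!= None" comparisons in the registry hunks above: the patch keeps them and silences ruff with "# noqa: E712" / "# noqa: E711" rather than rewriting them, presumably to avoid any behavioral change while reformatting. As a minimal sketch of what those suppressed rules point at (the flag and value names below are hypothetical, not from the patch):

    # Hypothetical illustration of the comparisons the patch suppresses with noqa.
    flag = True
    value = None

    if flag == True:    # flagged by ruff E712, kept as-is in the patch
        pass
    if value != None:   # flagged by ruff E711, kept as-is in the patch
        pass

    # Idiomatic equivalents; behavior is the same for plain bool/None checks.
    if flag:
        pass
    if value is not None:
        pass
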
diff --git a/modules/systemPerformance/REWET/REWET/restoration/restorationlog.py b/modules/systemPerformance/REWET/REWET/restoration/restorationlog.py
index ab1a69197..d25eb7a00 100644
--- a/modules/systemPerformance/REWET/REWET/restoration/restorationlog.py
+++ b/modules/systemPerformance/REWET/REWET/restoration/restorationlog.py
@@ -1,81 +1,178 @@
-# -*- coding: utf-8 -*-
-"""
-Created on Sun Jan 31 21:54:19 2021
+"""Created on Sun Jan 31 21:54:19 2021
@author: snaeimi
-"""
+""" # noqa: CPY001, D400, INP001
from collections import OrderedDict
+
import pandas as pd
-class RestorationLog():
+
+class RestorationLog: # noqa: D101
def __init__(self, settings):
self.settings = settings
- self._agent_state_log_book = pd.DataFrame(columns=['Time','Name', 'Type','Lable','Action','EFN','MN','Location','X','Y'])
- self._agent_action_log_book = pd.DataFrame(columns=['Agent', 'Node', 'Entity', 'Action', 'Time', 'End_time', 'Travel_time', 'effect_definition_name', 'method_name', 'iFinished'])
+ self._agent_state_log_book = pd.DataFrame(
+ columns=[
+ 'Time',
+ 'Name',
+ 'Type',
+ 'Lable',
+ 'Action',
+ 'EFN',
+ 'MN',
+ 'Location',
+ 'X',
+ 'Y',
+ ]
+ )
+ self._agent_action_log_book = pd.DataFrame(
+ columns=[
+ 'Agent',
+ 'Node',
+ 'Entity',
+ 'Action',
+ 'Time',
+ 'End_time',
+ 'Travel_time',
+ 'effect_definition_name',
+ 'method_name',
+ 'iFinished',
+ ]
+ )
self.crew_history = OrderedDict()
-
- def updateAgentHistory(self, agent_table, time):
- if self.settings["record_restoration_agent_logs"] == False:
+
+ def updateAgentHistory(self, agent_table, time): # noqa: N802, D102
+ if self.settings['record_restoration_agent_logs'] == False: # noqa: E712
return
-
+
self.crew_history[time] = agent_table.copy()
-
- def updateAgentLogBook(self, agent_table, time):
- if self.settings["record_restoration_agent_logs"] == False:
+
+ def updateAgentLogBook(self, agent_table, time): # noqa: N802, D102
+ if self.settings['record_restoration_agent_logs'] == False: # noqa: E712
return
-
- for agent_name, line in agent_table.iterrows():
+
+ for agent_name, line in agent_table.iterrows(): # noqa: B007
temp = None
- if line['active']==True and line['ready']==False:
- data = line['data']
- _x = data.current_location.coord.x
- _y = data.current_location.coord.y
- _name = data.name
- _type = data.agent_type
- _lable = data.cur_job_entity
+ if line['active'] == True and line['ready'] == False: # noqa: E712
+ data = line['data']
+ _x = data.current_location.coord.x
+ _y = data.current_location.coord.y
+ _name = data.name
+ _type = data.agent_type
+ _lable = data.cur_job_entity
_action = data.cur_job_action
- _EFN = data.cur_job_effect_definition_name
- _MN = data.cur_job_method_name
- _loc = data.cur_job_location
-
- temp = pd.Series(data=[int(time), _name, _type, _lable, _action, _EFN, _MN, _loc, _x, _y], index=['Time','Name', 'Type','Lable','Action','EFN','MN', 'Location','X','Y'] )
-
- #if temp != None:
- self._agent_state_log_book=self._agent_state_log_book.append(temp, ignore_index=True)
-
- def addAgentActionToLogBook(self, agent_name, node_name, entity, action, time, end_time, travel_time, effect_definition_name ,method_name, iFinished=True):
- if self.settings["record_restoration_agent_logs"] == False:
+ _EFN = data.cur_job_effect_definition_name # noqa: N806
+ _MN = data.cur_job_method_name # noqa: N806
+ _loc = data.cur_job_location
+
+ temp = pd.Series(
+ data=[
+ int(time),
+ _name,
+ _type,
+ _lable,
+ _action,
+ _EFN,
+ _MN,
+ _loc,
+ _x,
+ _y,
+ ],
+ index=[
+ 'Time',
+ 'Name',
+ 'Type',
+ 'Lable',
+ 'Action',
+ 'EFN',
+ 'MN',
+ 'Location',
+ 'X',
+ 'Y',
+ ],
+ )
+
+ # if temp != None:
+ self._agent_state_log_book = self._agent_state_log_book.append(
+ temp, ignore_index=True
+ )
+
+ def addAgentActionToLogBook( # noqa: N802, D102
+ self,
+ agent_name,
+ node_name,
+ entity,
+ action,
+ time,
+ end_time,
+ travel_time,
+ effect_definition_name,
+ method_name,
+ iFinished=True, # noqa: FBT002, N803
+ ):
+ if self.settings['record_restoration_agent_logs'] == False: # noqa: E712
return
-
- temp = pd.Series(data=[agent_name, node_name, entity, action, time, end_time, travel_time, effect_definition_name, method_name, iFinished], index=['Agent', 'Node', 'Entity', 'Action', 'Time', 'End_time', 'Travel_time', 'effect_definition_name', 'method_name', 'iFinished'])
- self._agent_action_log_book = self._agent_action_log_book.append(temp, ignore_index=True)
-
- def addEndTimegentActionToLogBook(self, agent_name, time, modified_end_time):
- if self.settings["record_restoration_agent_logs"] == False:
+
+ temp = pd.Series(
+ data=[
+ agent_name,
+ node_name,
+ entity,
+ action,
+ time,
+ end_time,
+ travel_time,
+ effect_definition_name,
+ method_name,
+ iFinished,
+ ],
+ index=[
+ 'Agent',
+ 'Node',
+ 'Entity',
+ 'Action',
+ 'Time',
+ 'End_time',
+ 'Travel_time',
+ 'effect_definition_name',
+ 'method_name',
+ 'iFinished',
+ ],
+ )
+ self._agent_action_log_book = self._agent_action_log_book.append(
+ temp, ignore_index=True
+ )
+
+ def addEndTimegentActionToLogBook(self, agent_name, time, modified_end_time): # noqa: N802, D102
+ if self.settings['record_restoration_agent_logs'] == False: # noqa: E712
return
-
- temp = self._agent_action_log_book[['Agent','Time']]==[agent_name, time]
+
+ temp = self._agent_action_log_book[['Agent', 'Time']] == [agent_name, time]
temp = self._agent_action_log_book[temp.all(1)]
-
- if len(temp)>1:
- raise ValueError('There are too many agents record with the same time and name')
-
- elif len(temp)==0:
- raise ValueError('There is not agent agent record with this time and name')
-
- ind=temp.index
-
+
+ if len(temp) > 1:
+ raise ValueError( # noqa: TRY003
+ 'There are too many agent records with the same time and name' # noqa: EM101
+ )
+
+ elif len(temp) == 0: # noqa: RET506
+ raise ValueError( # noqa: TRY003
+ 'There is no agent record with this time and name' # noqa: EM101
+ )
+
+ ind = temp.index
+
self._agent_action_log_book.loc[ind, 'Modified_end_time'] = modified_end_time
-
+
+
# =============================================================================
# def getAgentActioLogBookat(self, time, end_time=True):
# res=None
-#
+#
# if end_time==True:
# res=self._agent_action_log_book[self._agent_action_log_book['Modified_end_time']==time]
# else:
# res=self._agent_action_log_book[self._agent_action_log_book['Time']==time]
-#
+#
# return res
# =============================================================================
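
The log-book updates above still go through DataFrame.append, which pandas deprecated in 1.4 and removed in 2.0; the reformatted hunks only re-wrap those calls. A minimal sketch of the pd.concat-based replacement, using hypothetical column and variable names rather than the actual log-book schema:

    import pandas as pd

    log_book = pd.DataFrame(columns=['Agent', 'Node', 'Time'])
    new_row = pd.Series({'Agent': 'A1', 'Node': 'N3', 'Time': 3600})

    # DataFrame.append(new_row, ignore_index=True) is unavailable in pandas >= 2.0;
    # concatenating a one-row frame is the supported equivalent.
    log_book = pd.concat([log_book, new_row.to_frame().T], ignore_index=True)
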
diff --git a/modules/systemPerformance/REWET/REWET/timeline.py b/modules/systemPerformance/REWET/REWET/timeline.py
index 623f280fc..964977fa1 100644
--- a/modules/systemPerformance/REWET/REWET/timeline.py
+++ b/modules/systemPerformance/REWET/REWET/timeline.py
@@ -1,105 +1,133 @@
-# -*- coding: utf-8 -*-
-"""
-Created on Sat Dec 26 02:00:40 2020
+"""Created on Sat Dec 26 02:00:40 2020
@author: snaeimi
-"""
+""" # noqa: CPY001, D400
-import pandas as pd
-import numpy
import logging
+import numpy # noqa: ICN001
+import pandas as pd
+
logger = logging.getLogger(__name__)
-EVENT_TYPE=['dmg','rpr','rst'] #event types are defined here
-class Timeline():
-
-# =============================================================================
-# This classs has many functions that can make a lot of exceptions.
-# We need to modify their codes, so their usage be safe and bug-free.
-# =============================================================================
+EVENT_TYPE = ['dmg', 'rpr', 'rst'] # event types are defined here
+
+
+class Timeline: # noqa: D101
+ # =============================================================================
+ # This class has many functions that can raise a lot of exceptions.
+ # We need to modify their code so that their usage is safe and bug-free.
+ # =============================================================================
def __init__(self, simulation_end_time, restoration, registry):
- if simulation_end_time<0:
- raise ValueError('simulation end time must be zero or bigger than zero')
+ if simulation_end_time < 0:
+ raise ValueError('simulation end time must be zero or greater') # noqa: EM101, TRY003
self._current_time = 0
- self._event_time_register = pd.DataFrame(dtype = 'bool') #craete event at time 0 with No event marked as True
- #print(type(self._event_time_register))
- self._event_time_register.loc[0, EVENT_TYPE] = [False for i in range(len(EVENT_TYPE))] #create event at time 0 with No event marked as True
- self._event_time_register.loc[simulation_end_time, EVENT_TYPE] = [False for i in range(len(EVENT_TYPE))] #create event at time simulation end time with No event marked as True
- self.restoration = restoration
+ self._event_time_register = pd.DataFrame(
+ dtype='bool'
+ ) # create event at time 0 with No event marked as True
+ # print(type(self._event_time_register))
+ self._event_time_register.loc[0, EVENT_TYPE] = [
+ False for i in range(len(EVENT_TYPE))
+ ] # create event at time 0 with No event marked as True
+ self._event_time_register.loc[simulation_end_time, EVENT_TYPE] = [
+ False for i in range(len(EVENT_TYPE))
+ ] # create event at time simulation end time with No event marked as True
+ self.restoration = restoration
self._simulation_end_time = simulation_end_time
- self._ending_Event_ignore_time = 0 # in seconds - events in less than this value is ignored
+ self._ending_Event_ignore_time = (
+ 0 # in seconds - events closer to the end time than this value are ignored
+ )
self._iFirst_time_zero = True
self._current_time_indexOfIndex = 0
self.registry = registry
-
- def iContinue(self):
- if self._current_time==0 and self._iFirst_time_zero == True: #So that the other condition happens
+
+ def iContinue(self): # noqa: N802, D102
+ if (
+ self._current_time == 0 and self._iFirst_time_zero == True # noqa: E712
+ ): # So that the other condition happens
self._iFirst_time_zero = False
-
+
else:
- self._current_time = self.getNextTime()
- self._current_time_indexOfIndex += 1
- if abs(self._simulation_end_time - self._current_time) <= abs(self._ending_Event_ignore_time):
- print("End_Time_Reached")
+ self._current_time = self.getNextTime()
+ self._current_time_indexOfIndex += 1
+ if abs(self._simulation_end_time - self._current_time) <= abs(
+ self._ending_Event_ignore_time
+ ):
+ print('End_Time_Reached') # noqa: T201
return False
-
- simulation_minimum_time = self.restoration._registry.settings["minimum_simulation_time"]
- minimum_simulation_time_satisfied = self._current_time >= simulation_minimum_time
- consider_last_sequence_termination = self.registry.settings.process['last_sequence_termination']
- consider_node_demand_temination = self.registry.settings.process['node_demand_temination']
-
- if minimum_simulation_time_satisfied == True:
- if consider_last_sequence_termination == True:
+
+ simulation_minimum_time = self.restoration._registry.settings[ # noqa: SLF001
+ 'minimum_simulation_time'
+ ]
+ minimum_simulation_time_satisfied = (
+ self._current_time >= simulation_minimum_time
+ )
+ consider_last_sequence_termination = self.registry.settings.process[
+ 'last_sequence_termination'
+ ]
+ consider_node_demand_temination = self.registry.settings.process[
+ 'node_demand_temination'
+ ]
+
+ if minimum_simulation_time_satisfied == True: # noqa: E712
+ if consider_last_sequence_termination == True: # noqa: E712
if self.restoration.iRestorationStopTime():
- print("Last_sequence_termination")
+ print('Last_sequence_termination') # noqa: T201
return False
-
- if consider_node_demand_temination == True:
+
+ if consider_node_demand_temination == True: # noqa: E712
if self.iFunctionalityRequirementReached():
- print("FunctionalityRequirementReached")
+ print('FunctionalityRequirementReached') # noqa: T201
return False
-
+
return True
-
- def getNextTime(self):
- if not self._event_time_register.index.is_monotonic_increasing: # for just in case if the index of event time register is not sorted
+
+ def getNextTime(self): # noqa: N802, D102
+ if (
+ not self._event_time_register.index.is_monotonic_increasing
+ ): # just in case the index of the event time register is not sorted
self._event_time_register.sort_index()
-
- if self._event_time_register.index[self._current_time_indexOfIndex]\
- !=self._current_time:
- raise RuntimeError('A possible violation of time in timeline event variables and/or event time registry')
- next_time = self._event_time_register.index[self._current_time_indexOfIndex+1]
- return next_time
-
- def getCurrentStopTime(self):
+
+ if (
+ self._event_time_register.index[self._current_time_indexOfIndex]
+ != self._current_time
+ ):
+ raise RuntimeError( # noqa: TRY003
+ 'A possible violation of time in timeline event variables and/or event time registry' # noqa: EM101
+ )
+ next_time = self._event_time_register.index[
+ self._current_time_indexOfIndex + 1
+ ]
+ return next_time # noqa: RET504
+
+ def getCurrentStopTime(self): # noqa: N802, D102
return int(self._current_time)
-
- def iCurrentTimeRepairEvent(self):
+
+ def iCurrentTimeRepairEvent(self): # noqa: N802, D102
return self._event_time_register['rpr'].loc[self._current_time]
-
- def iCurenttimeRestorationEvent(self):
- print("current_time is= "+str(self._current_time) )
- print(self._event_time_register['rst'].loc[self._current_time])
+
+ def iCurenttimeRestorationEvent(self): # noqa: N802, D102
+ print('current_time is= ' + str(self._current_time)) # noqa: T201
+ print(self._event_time_register['rst'].loc[self._current_time]) # noqa: T201
return self._event_time_register['rst'].loc[self._current_time]
-
- def iCurrentTimeDamageEvent(self):
+
+ def iCurrentTimeDamageEvent(self): # noqa: N802, D102
return self._event_time_register['dmg'].loc[self._current_time]
-
- def addEventTime(self, event_distinct_time, event_type='dmg'):
- """
- This function is a low-level function to add event type in an already-
- existing event_time in event_time_register. FOR NOW TEH DISTICT TIMES
- CAN BE A LIST OR A LIST. MAYBE IN THE FUTURE WE CAN DECIDE WETHER IT
+
+ def addEventTime(self, event_distinct_time, event_type='dmg'): # noqa: N802
+ """This function is a low-level function to add event type in an already-
+ existing event_time in event_time_register. FOR NOW THE DISTINCT TIMES
+ CAN BE A LIST OR A LIST. MAYBE IN THE FUTURE WE CAN DECIDE WEATHER IT
SHOULD BE LEFT THE WAY IT IS OR IT SHOULD BE MODIFIED IN A SINGLE
VARIABLE OR LIST VARIABLE.
+
Parameters
----------
event_distinct_time : numpy.float64 or int or float or list
This variable is either a list or a seriest of data to represent
time of an specified event
-
+
event_type : str, optional
Evenet type. FOR CURRENT VERSSION AN EVENT COULD BE EIOTHER
dmg(damage) or rpr(repair). The default is 'dmg'.
@@ -115,45 +143,53 @@ def addEventTime(self, event_distinct_time, event_type='dmg'):
-------
None.
- """
- if type(event_distinct_time)!=pd.core.series.Series:
- if type(event_distinct_time) == numpy.float64 or type(event_distinct_time) == int or type(event_distinct_time) == float or type(event_distinct_time) == list:
- event_distinct_time = pd.Series(data=event_distinct_time, dtype="int64")
+ """ # noqa: D205, D401, D404
+ if type(event_distinct_time) != pd.core.series.Series: # noqa: E721
+ if (
+ type(event_distinct_time) == numpy.float64 # noqa: E721
+ or type(event_distinct_time) == int # noqa: E721
+ or type(event_distinct_time) == float # noqa: E721
+ or type(event_distinct_time) == list # noqa: E721
+ ):
+ event_distinct_time = pd.Series(
+ data=event_distinct_time, dtype='int64'
+ )
else:
- print(type(event_distinct_time))
- raise ValueError('event_distinct_time must be pandas.Series type')
-
+ print(type(event_distinct_time)) # noqa: T201
+ raise ValueError('event_distinct_time must be pandas.Series type') # noqa: EM101, TRY003
+
if event_type not in EVENT_TYPE:
- raise ValueError('unrecognized value for event_type')
-
- #check for duplicate in time index. if there is duplicate, we will only change the true and false value in the DataFrame
+ raise ValueError('unrecognized value for event_type') # noqa: EM101, TRY003
+
+ # check for duplicate in time index. if there is duplicate, we will only change the true and false value in the DataFrame
temp_to_pop = []
- logger.debug("event distinct time "+ repr(event_distinct_time))
-
- for i, i_time in event_distinct_time.items():
+ logger.debug('event distinct time ' + repr(event_distinct_time)) # noqa: G003
+
+ for i, i_time in event_distinct_time.items(): # noqa: B007, PERF102
if i_time in self._event_time_register.index:
- self._event_time_register.loc[i_time, event_type]=True
+ self._event_time_register.loc[i_time, event_type] = True
self.checkAndAmendTime()
temp_to_pop.append(i_time)
- logger.debug('temp_to_pop'+repr(temp_to_pop))
-
+ logger.debug('temp_to_pop' + repr(temp_to_pop)) # noqa: G003
+
for i_time in temp_to_pop:
- ind = event_distinct_time[event_distinct_time==i_time].index[0]
+ ind = event_distinct_time[event_distinct_time == i_time].index[0]
event_distinct_time.pop(ind)
-
+
if len(event_distinct_time) != 0:
- for i, i_time in event_distinct_time.items():
- self._event_time_register.loc[i_time, EVENT_TYPE] = [False for i in range(len(EVENT_TYPE))]
+ for i, i_time in event_distinct_time.items(): # noqa: PERF102
+ self._event_time_register.loc[i_time, EVENT_TYPE] = [
+ False for i in range(len(EVENT_TYPE))
+ ]
self._event_time_register.loc[i_time, event_type] = True
self._event_time_register = self._event_time_register.sort_index()
self.checkAndAmendTime()
-
- def iEventTypeAt(self, begin_time, event_type):
- """
- Checks if an event type is in event registry at the time of begin_time
+
+ def iEventTypeAt(self, begin_time, event_type): # noqa: N802
+ """Checks if an event type is in event registry at the time of begin_time
----------
begin_time : int
- begining time
+ beginning time
event_type : str
damage type
@@ -162,20 +198,18 @@ def iEventTypeAt(self, begin_time, event_type):
bool
rResult if such data exist or not
- """
-
- if not begin_time in self._event_time_register.index:
+ """ # noqa: D205, D400, D401
+ if begin_time not in self._event_time_register.index:
return False
- if self._event_time_register[event_type].loc[begin_time]:
+ if self._event_time_register[event_type].loc[begin_time]: # noqa: SIM103
return True
- else:
+ else: # noqa: RET505
return False
-
- def checkAndAmendTime(self):
- """
- Checks if the time of event is higher than the sim time.Also checks
- if the the ending event has any thing event(nothings must be true).
-
+
+ def checkAndAmendTime(self): # noqa: N802
+ """Checks if the time of event is higher than the sim time.Also checks
+ if the the ending event has any thing event(nothings must be true).
+
Parameters
----------
None.
@@ -184,108 +218,157 @@ def checkAndAmendTime(self):
-------
None.
- """
-
+ """ # noqa: D205, D401
first_length = len(self._event_time_register.index)
- self._event_time_register = self._event_time_register[self._event_time_register.index <= self._simulation_end_time]
+ self._event_time_register = self._event_time_register[
+ self._event_time_register.index <= self._simulation_end_time
+ ]
if first_length > len(self._event_time_register):
- print("here was " + repr(first_length - len(self._event_time_register)) + "amended")
-
- # Sina: I removed teh following code at the tiem for immegration to
- #Pandas 1.5.2. Not only it is not efficient piece of code, but also
- #this nto required. The end time event is already made when teh event
- #table is created.
- #if self._event_time_register[self._event_time_register.index==self._simulation_end_time].empty==True:
- #self._event_time_register=self._event_time_register.append(pd.DataFrame(data = False , index = [self._simulation_end_time], columns = EVENT_TYPE))
-
- def iFunctionalityRequirementReached(self):
-
- logger.debug("Func: node functionality")
- ratio_criteria = self.registry.settings.process['node_demand_termination_ratio']
- time_window = self.registry.settings.process.settings['node_demand_termination_time' ]
- stop_time = self.getCurrentStopTime()
- if self.registry.if_first_event_occured == False:
+ print( # noqa: T201
+ 'here were '
+ + repr(first_length - len(self._event_time_register))
+ + ' events amended'
+ )
+
+ # Sina: I removed the following code at the time of migration to
+ # Pandas 1.5.2. Not only is it an inefficient piece of code, but it is
+ # also not required. The end time event is already made when the event
+ # table is created.
+ # if self._event_time_register[self._event_time_register.index==self._simulation_end_time].empty==True:
+ # self._event_time_register=self._event_time_register.append(pd.DataFrame(data = False , index = [self._simulation_end_time], columns = EVENT_TYPE))
+
+ def iFunctionalityRequirementReached(self): # noqa: C901, N802, D102
+ logger.debug('Func: node functionality')
+ ratio_criteria = self.registry.settings.process[
+ 'node_demand_termination_ratio'
+ ]
+ time_window = self.registry.settings.process.settings[
+ 'node_demand_termination_time'
+ ]
+ stop_time = self.getCurrentStopTime()
+ if self.registry.if_first_event_occured == False: # noqa: RET503, E712
return False
- elif self.registry.if_first_event_occured == True:
- if self.registry.result == None:
+ elif self.registry.if_first_event_occured == True: # noqa: RET505, E712
+ if self.registry.result == None: # noqa: E711
return False
-
- #for checking if we have still any leak in the system, since we
- #cannot measure the effect of exessive leak in the LDN
+
+ # check whether there is still any leak in the system, since we
+ # cannot measure the effect of excessive leaks in the LDN
if stop_time in self.registry.result.node['leak'].index:
return False
-
- last_pre_event_time = self.registry.pre_event_demand_met.index.max()
- pre_event_demand = self.registry.pre_event_demand_met[self.registry.pre_event_demand_met.index <= (last_pre_event_time - time_window)]
- demand_met = self.registry.result.node['demand']
- begining_time_window = stop_time - time_window
- demand_met = demand_met.loc[demand_met.index > begining_time_window]
-
+
+ last_pre_event_time = self.registry.pre_event_demand_met.index.max()
+ pre_event_demand = self.registry.pre_event_demand_met[
+ self.registry.pre_event_demand_met.index
+ <= (last_pre_event_time - time_window)
+ ]
+ demand_met = self.registry.result.node['demand']
+ begining_time_window = stop_time - time_window
+ demand_met = demand_met.loc[demand_met.index > begining_time_window]
+
"""
- calcualting requried demand for each dmeand node
+ calculating required demand for each demand node
"""
-
- #demand_ratio = self.registry.settings['demand_ratio']
- time_index = demand_met.index
- req_node_demand = {}
- default_pattern = self.registry.wn.options.hydraulic.pattern
- #node_pattern_list = pd.Series(index=self.registry.demand_node_name_list, dtype=str)
-
- #req_node_demand = req_node_demand.transpose()
-
- demand_nodes_list = [self.registry.wn.get_node(node_name) for node_name in self.registry.demand_node_name_list]
-
- if type(default_pattern) != type(None):
- node_pattern_list = [(node.name, node.demand_timeseries_list.pattern_list()[0]) if node.demand_timeseries_list.pattern_list()[0] != None else (node.name, default_pattern) for node in demand_nodes_list]
+
+ # demand_ratio = self.registry.settings['demand_ratio']
+ time_index = demand_met.index
+ req_node_demand = {}
+ default_pattern = self.registry.wn.options.hydraulic.pattern
+ # node_pattern_list = pd.Series(index=self.registry.demand_node_name_list, dtype=str)
+
+ # req_node_demand = req_node_demand.transpose()
+
+ demand_nodes_list = [
+ self.registry.wn.get_node(node_name)
+ for node_name in self.registry.demand_node_name_list
+ ]
+
+ if default_pattern is not None:
+ node_pattern_list = [
+ (node.name, node.demand_timeseries_list.pattern_list()[0])
+ if node.demand_timeseries_list.pattern_list()[0] != None # noqa: E711
+ else (node.name, default_pattern)
+ for node in demand_nodes_list
+ ]
else:
- node_pattern_list = [(node.name, node.demand_timeseries_list.pattern_list()[0]) for node in demand_nodes_list if node.demand_timeseries_list.pattern_list()[0] != None]
-
- base_demand_list = [ node.base_demand for node in demand_nodes_list]
- one_time_base_demand = dict(zip(self.registry.demand_node_name_list, base_demand_list))
- req_node_demand = pd.DataFrame.from_dict([one_time_base_demand] * len(time_index))
+ node_pattern_list = [
+ (node.name, node.demand_timeseries_list.pattern_list()[0])
+ for node in demand_nodes_list
+ if node.demand_timeseries_list.pattern_list()[0] != None # noqa: E711
+ ]
+
+ base_demand_list = [node.base_demand for node in demand_nodes_list]
+ one_time_base_demand = dict(
+ zip(self.registry.demand_node_name_list, base_demand_list)
+ )
+ req_node_demand = pd.DataFrame.from_dict(
+ [one_time_base_demand] * len(time_index)
+ )
req_node_demand.index = time_index
-
req_node_demand = pd.DataFrame.from_dict(req_node_demand)
- #node_pattern_list = node_pattern_list.dropna()
+ # node_pattern_list = node_pattern_list.dropna()
if len(node_pattern_list) > 0:
- node_pattern_list = pd.Series(index=[node_pattern_iter[0] for node_pattern_iter in node_pattern_list], data=[node_pattern_iter[1] for node_pattern_iter in node_pattern_list])
- patterns_list = node_pattern_list.unique()
- multiplier = pd.DataFrame(index=time_index, columns = list(patterns_list) )
-
+ node_pattern_list = pd.Series(
+ index=[
+ node_pattern_iter[0]
+ for node_pattern_iter in node_pattern_list
+ ],
+ data=[
+ node_pattern_iter[1]
+ for node_pattern_iter in node_pattern_list
+ ],
+ )
+ patterns_list = node_pattern_list.unique()
+ multiplier = pd.DataFrame(
+ index=time_index, columns=list(patterns_list)
+ )
+
for pattern_name in patterns_list:
cur_pattern = self.registry.wn.get_pattern(pattern_name)
time_index = time_index.unique()
- cur_patern_time = [cur_pattern.at(time) for time in iter(time_index)]
+ cur_patern_time = [
+ cur_pattern.at(time) for time in iter(time_index)
+ ]
multiplier.loc[:, pattern_name] = cur_patern_time
-
+
for node_name, pattern_name in node_pattern_list.items():
- cur_node_req_demand = multiplier[pattern_name] * self.registry.wn.get_node(node_name).demand_timeseries_list[0].base_value
+ cur_node_req_demand = (
+ multiplier[pattern_name]
+ * self.registry.wn.get_node(node_name)
+ .demand_timeseries_list[0]
+ .base_value
+ )
cur_node_req_demand.name = node_name
- cur_node_req_demand=pd.DataFrame(cur_node_req_demand).transpose()
- req_node_demand = pd.concat([req_node_demand, cur_node_req_demand])
-
- #print(req_node_demand)
- #raise
- #req_node_demand = req_node_demand.transpose()
- req_node_demand = req_node_demand.filter(self.registry.demand_node_name_list)
- req_node_demand = req_node_demand.filter(pre_event_demand.columns)
- demand_met = demand_met.filter(self.registry.demand_node_name_list)
- demand_met = demand_met.filter(pre_event_demand.columns)
- demand_met = demand_met.dropna(axis=1)
-
+ cur_node_req_demand = pd.DataFrame(
+ cur_node_req_demand
+ ).transpose()
+ req_node_demand = pd.concat(
+ [req_node_demand, cur_node_req_demand]
+ )
+
+ # print(req_node_demand)
+ # raise
+ # req_node_demand = req_node_demand.transpose()
+ req_node_demand = req_node_demand.filter(
+ self.registry.demand_node_name_list
+ )
+ req_node_demand = req_node_demand.filter(pre_event_demand.columns)
+ demand_met = demand_met.filter(self.registry.demand_node_name_list)
+ demand_met = demand_met.filter(pre_event_demand.columns)
+ demand_met = demand_met.dropna(axis=1)
+
pre_event_demand = demand_met.filter(self.registry.demand_node_name_list)
-
+
if len(demand_met.columns) < len(pre_event_demand.columns):
return False
-
+
ratio = demand_met.mean() / pre_event_demand.mean()
mean_of_ratio_satisfied = (ratio >= ratio_criteria).sum() / len(ratio)
- logger.debug("ratio that is= " + repr(mean_of_ratio_satisfied))
- if (ratio >= ratio_criteria).all():
+        logger.debug('fraction of nodes meeting the ratio criteria = ' + repr(mean_of_ratio_satisfied))  # noqa: G003
+ if (ratio >= ratio_criteria).all(): # noqa: SIM103
return True
- else:
+ else: # noqa: RET505
return False
-
\ No newline at end of file
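
The hunk above rebuilds each node's required demand as its base demand times the demand-pattern multiplier and then checks whether the delivered-to-pre-event demand ratio meets the criterion at every node. Below is a minimal, self-contained sketch of that ratio check on toy pandas data; the helper name `demand_ratio_satisfied`, the junction names, and the numbers are hypothetical and are not part of REWET.

import pandas as pd

def demand_ratio_satisfied(demand_met, pre_event_demand, ratio_criteria=0.95):
    """Return True when every common node meets the demand-ratio criterion."""
    common = demand_met.columns.intersection(pre_event_demand.columns)
    ratio = demand_met[common].mean() / pre_event_demand[common].mean()
    return bool((ratio >= ratio_criteria).all())

time_index = [0, 3600, 7200]
pre_event = pd.DataFrame({'J1': [1.0, 1.2, 0.8], 'J2': [2.0, 2.1, 1.9]}, index=time_index)
delivered = pd.DataFrame({'J1': [1.0, 1.1, 0.8], 'J2': [1.9, 2.0, 1.8]}, index=time_index)
print(demand_ratio_satisfied(delivered, pre_event))  # True for this toy data
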
diff --git a/modules/systemPerformance/REWET/REWET_Wrapper.py b/modules/systemPerformance/REWET/REWET_Wrapper.py
index 82d0d4f1f..67e5b1d99 100644
--- a/modules/systemPerformance/REWET/REWET_Wrapper.py
+++ b/modules/systemPerformance/REWET/REWET_Wrapper.py
@@ -1,5 +1,4 @@
-# -*- coding: utf-8 -*-
-#
+# # noqa: INP001, D100
# Copyright (c) 2024 The Regents of the University of California
# Copyright (c) 2024 Leland Stanford Junior University
#
@@ -38,75 +37,71 @@
# Sina Naeimi
# Jinyan Zhao
-import os
import argparse
-import random
-import string
import importlib
import json
-from pathlib import Path
-import pandas as pd
-import subprocess
-from shapely import geometry
+import os
+import random
+import string
import sys
-
+from pathlib import Path
import damage_convertor
+import pandas as pd
import preprocessorIO
+from shapely import geometry
-#try:
- #import REWET
- #print("Imported")
-#except:
- # This is only for now
-#print("HERE")
-this_dir = Path(os.path.dirname(os.path.abspath(__file__))).resolve()
-#main_dir = this_dir.parent
+# try:
+# import REWET
+# print("Imported")
+# except:
+# This is only for now
+# print("HERE")
+this_dir = Path(os.path.dirname(os.path.abspath(__file__))).resolve() # noqa: PTH100, PTH120
+# main_dir = this_dir.parent
-import wntrfr
sys.path.insert(0, str(this_dir / 'REWET'))
-import Input.Settings as Settings
-from Result_Project import Project_Result
-
-from initial import Starter
-
-def createScnearioList(run_directory, scn_number):
-
- damage_input_dir = os.path.join(run_directory, "Results", "WaterDistributionNetwork",
- "damage_input")
-
- if not os.path.exists(damage_input_dir):
- os.makedirs(damage_input_dir)
-
+from initial import Starter # noqa: E402
+from Result_Project import Project_Result # noqa: E402
+
+
+def createScnearioList(run_directory, scn_number): # noqa: N802, D103
+ damage_input_dir = os.path.join( # noqa: PTH118
+ run_directory, 'Results', 'WaterDistributionNetwork', 'damage_input'
+ )
+
+ if not os.path.exists(damage_input_dir): # noqa: PTH110
+ os.makedirs(damage_input_dir) # noqa: PTH103
+
# REWET_input_data["damage_input_dir"] = damage_input_dir
-
+
prefix = chooseARandomPreefix(damage_input_dir)
-
- scenario_name = f"{prefix}_scn_{scn_number}"
- pipe_file_name = f"{prefix}_pipe_{scn_number}"
- node_file_name = f"{prefix}_node_{scn_number}"
- pump_file_name = f"{prefix}_pump_{scn_number}"
- tank_file_name = f"{prefix}_tank_{scn_number}"
-
-
- scenario = {"Scenario Name": scenario_name,
- "Pipe Damage": pipe_file_name,
- "Nodal Damage": node_file_name,
- "Pump Damage": pump_file_name,
- "Tank Damage": tank_file_name,
- "Probability": 1
- }
-
- scenario_list = scenario #pd.DataFrame([scenario]).set_index("Scenario Name")
-
- #REWET_input_data["scenario_list"] = scenario_list
-
+
+ scenario_name = f'{prefix}_scn_{scn_number}'
+ pipe_file_name = f'{prefix}_pipe_{scn_number}'
+ node_file_name = f'{prefix}_node_{scn_number}'
+ pump_file_name = f'{prefix}_pump_{scn_number}'
+ tank_file_name = f'{prefix}_tank_{scn_number}'
+
+ scenario = {
+ 'Scenario Name': scenario_name,
+ 'Pipe Damage': pipe_file_name,
+ 'Nodal Damage': node_file_name,
+ 'Pump Damage': pump_file_name,
+ 'Tank Damage': tank_file_name,
+ 'Probability': 1,
+ }
+
+ scenario_list = scenario # pd.DataFrame([scenario]).set_index("Scenario Name")
+
+ # REWET_input_data["scenario_list"] = scenario_list
+
return scenario_list, prefix
-
-def chooseARandomPreefix(damage_input_dir):
- """
- Choses a random prefix for sceranio and pipe, node, pump and tank damage
+
+
+def chooseARandomPreefix(damage_input_dir): # noqa: N802
+    """Chooses a random prefix for the scenario and the pipe, node, pump, and tank damage
     file. This is important to find an unused prefix so that if this script is being
     run in parallel, files are not overwritten.
@@ -120,30 +115,29 @@ def chooseARandomPreefix(damage_input_dir):
random_prefix : str
The Chosen random prefix string.
- """
-
+ """ # noqa: D205
number_of_prefix = 4
dir_list = os.listdir(damage_input_dir)
-
- prefix_dir_list = [dir_name for dir_name in dir_list if dir_name.find("_") > 0]
- prefix_list = [dir_name.split("_")[0] for dir_name in prefix_dir_list]
-
+
+ prefix_dir_list = [dir_name for dir_name in dir_list if dir_name.find('_') > 0]
+ prefix_list = [dir_name.split('_')[0] for dir_name in prefix_dir_list]
+
random_prefix = random.choices(string.ascii_letters, k=number_of_prefix)
- s = ""
+ s = ''
for letter in random_prefix:
- s = s + letter
+ s = s + letter # noqa: PLR6104
random_prefix = s
-
+
while random_prefix in prefix_list:
random_prefix = random.choices(string.ascii_letters, k=number_of_prefix)
- s = ""
+ s = ''
for letter in random_prefix:
- s = s + letter
+ s = s + letter # noqa: PLR6104
random_prefix = s
return random_prefix
-#def setSettingsData(rwhale_data, REWET_input_data):
+ # def setSettingsData(rwhale_data, REWET_input_data):
"""
Sets the settings (future project file) for REWET. REWET input data
     dictionary is both used as a source and destination for settings data. The
@@ -161,12 +155,11 @@ def chooseARandomPreefix(damage_input_dir):
-------
None.
- """
+ """ # noqa: RET503, W291
-def getDLFileName(run_dir, dl_file_path, scn_number):
- """
- If dl_file_path is not given, the path is acquired from rwhale input data.
+def getDLFileName(run_dir, dl_file_path, scn_number): # noqa: N802
+ """If dl_file_path is not given, the path is acquired from rwhale input data.
Parameters
----------
@@ -182,109 +175,224 @@ def getDLFileName(run_dir, dl_file_path, scn_number):
None.
"""
- if dl_file_path == None:
-
- file_name = f"WaterDistributionNetwork_{scn_number}.json"
- run_dir = run_dir
- file_dir = os.path.join(run_dir, "Results", "WaterDistributionNetwork")
- file_path = os.path.join(file_dir, file_name)
+ if dl_file_path == None: # noqa: E711
+ file_name = f'WaterDistributionNetwork_{scn_number}.json'
+ run_dir = run_dir # noqa: PLW0127
+ file_dir = os.path.join(run_dir, 'Results', 'WaterDistributionNetwork') # noqa: PTH118
+ file_path = os.path.join(file_dir, file_name) # noqa: PTH118
else:
file_path = dl_file_path
file_dir = Path(dl_file_path).parent
-
+
return file_path, file_dir
-def setSettingsData(input_json, REWET_input_data):
- policy_file_name = rwhale_input_Data["SystemPerformance"]["WaterDistributionNetwork"]["Policy Definition"]
- policy_file_path = rwhale_input_Data["SystemPerformance"]["WaterDistributionNetwork"]["Policy DefinitionPath"]
-
- policy_config_file = os.path.join(Path(policy_file_path), Path(policy_file_name) )
-
- REWET_input_data["settings"]["RUN_TIME" ] = rwhale_input_Data["SystemPerformance"]["WaterDistributionNetwork"]["simulationTime"]
- REWET_input_data["settings"]["simulation_time_step" ] = rwhale_input_Data["SystemPerformance"]["WaterDistributionNetwork"]["simulationTimeStep"]
-
- REWET_input_data["settings"]['last_sequence_termination' ] = rwhale_input_Data["SystemPerformance"]["WaterDistributionNetwork"]["last_sequence_termination"]
- REWET_input_data["settings"]['node_demand_temination' ] = rwhale_input_Data["SystemPerformance"]["WaterDistributionNetwork"]["node_demand_temination"]
- REWET_input_data["settings"]['node_demand_termination_time' ] = rwhale_input_Data["SystemPerformance"]["WaterDistributionNetwork"]["node_demand_termination_time"]
- REWET_input_data["settings"]['node_demand_termination_ratio'] = rwhale_input_Data["SystemPerformance"]["WaterDistributionNetwork"]["node_demand_termination_ratio"]
- REWET_input_data["settings"]['solver' ] = rwhale_input_Data["SystemPerformance"]["WaterDistributionNetwork"]["Solver"]
- REWET_input_data["settings"]['Restoration_on' ] = rwhale_input_Data["SystemPerformance"]["WaterDistributionNetwork"]["Restoration_on"]
- REWET_input_data["settings"]['minimum_job_time' ] = rwhale_input_Data["SystemPerformance"]["WaterDistributionNetwork"]["minimum_job_time"]
- REWET_input_data["settings"]['Restortion_config_file' ] = policy_config_file # TODO: SINA unmark it
-
- p = rwhale_input_Data["SystemPerformance"]["WaterDistributionNetwork"]["pipe_damage_model"]
- REWET_input_data["settings"]['pipe_damage_model'] = {}
+
+def setSettingsData(input_json, REWET_input_data): # noqa: ARG001, N802, N803, D103
+ policy_file_name = rwhale_input_Data['SystemPerformance'][
+ 'WaterDistributionNetwork'
+ ]['Policy Definition']
+ policy_file_path = rwhale_input_Data['SystemPerformance'][
+ 'WaterDistributionNetwork'
+ ]['Policy DefinitionPath']
+
+ policy_config_file = os.path.join(Path(policy_file_path), Path(policy_file_name)) # noqa: PTH118
+
+ REWET_input_data['settings']['RUN_TIME'] = rwhale_input_Data[
+ 'SystemPerformance'
+ ]['WaterDistributionNetwork']['simulationTime']
+ REWET_input_data['settings']['simulation_time_step'] = rwhale_input_Data[
+ 'SystemPerformance'
+ ]['WaterDistributionNetwork']['simulationTimeStep']
+
+ REWET_input_data['settings']['last_sequence_termination'] = rwhale_input_Data[
+ 'SystemPerformance'
+ ]['WaterDistributionNetwork']['last_sequence_termination']
+ REWET_input_data['settings']['node_demand_temination'] = rwhale_input_Data[
+ 'SystemPerformance'
+ ]['WaterDistributionNetwork']['node_demand_temination']
+ REWET_input_data['settings']['node_demand_termination_time'] = rwhale_input_Data[
+ 'SystemPerformance'
+ ]['WaterDistributionNetwork']['node_demand_termination_time']
+ REWET_input_data['settings']['node_demand_termination_ratio'] = (
+ rwhale_input_Data[
+ 'SystemPerformance'
+ ]['WaterDistributionNetwork']['node_demand_termination_ratio']
+ )
+ REWET_input_data['settings']['solver'] = rwhale_input_Data['SystemPerformance'][
+ 'WaterDistributionNetwork'
+ ]['Solver']
+ REWET_input_data['settings']['Restoration_on'] = rwhale_input_Data[
+ 'SystemPerformance'
+ ]['WaterDistributionNetwork']['Restoration_on']
+ REWET_input_data['settings']['minimum_job_time'] = rwhale_input_Data[
+ 'SystemPerformance'
+ ]['WaterDistributionNetwork']['minimum_job_time']
+ REWET_input_data['settings']['Restortion_config_file'] = (
+ policy_config_file # TODO: SINA unmark it # noqa: TD002
+ )
+
+ p = rwhale_input_Data['SystemPerformance']['WaterDistributionNetwork'][
+ 'pipe_damage_model'
+ ]
+ REWET_input_data['settings']['pipe_damage_model'] = {}
for mat_data in p:
- REWET_input_data["settings"]['pipe_damage_model'][mat_data[0]] = \
- {"alpha":mat_data[1], "beta":mat_data[2], "gamma":mat_data[3],\
- "a":mat_data[4], "b":mat_data[5] }
-
- n = rwhale_input_Data["SystemPerformance"]["WaterDistributionNetwork"]["node_damage_model"]
+ REWET_input_data['settings']['pipe_damage_model'][mat_data[0]] = {
+ 'alpha': mat_data[1],
+ 'beta': mat_data[2],
+ 'gamma': mat_data[3],
+ 'a': mat_data[4],
+ 'b': mat_data[5],
+ }
+
+ n = rwhale_input_Data['SystemPerformance']['WaterDistributionNetwork'][
+ 'node_damage_model'
+ ]
n = n[0]
- REWET_input_data["settings"]['node_damage_model'] = {'x':0.9012, 'a':n[0],\
- 'aa':n[1], 'b':n[2], 'bb':n[3], 'c':n[4], 'cc':n[5], 'd':n[6],\
- 'dd':n[7], 'e':n[8], 'ee1':n[9], 'ee2':n[10], 'f':n[11], 'ff1':n[12]\
- , 'ff2':n[13], "damage_node_model": "equal_diameter_emitter"}
-
- if rwhale_input_Data["SystemPerformance"]["WaterDistributionNetwork"]["Pipe_Leak_Based"]:
- pipe_leak_amount = rwhale_input_Data["SystemPerformance"]["WaterDistributionNetwork"]["pipe_leak_amount"]
- pipe_leak_time = rwhale_input_Data["SystemPerformance"]["WaterDistributionNetwork"]["pipe_leak_time"]
- pipe_damage_discovery_mode = {'method': 'leak_based', 'leak_amount': pipe_leak_amount, 'leak_time': pipe_leak_time}
+ REWET_input_data['settings']['node_damage_model'] = {
+ 'x': 0.9012,
+ 'a': n[0],
+ 'aa': n[1],
+ 'b': n[2],
+ 'bb': n[3],
+ 'c': n[4],
+ 'cc': n[5],
+ 'd': n[6],
+ 'dd': n[7],
+ 'e': n[8],
+ 'ee1': n[9],
+ 'ee2': n[10],
+ 'f': n[11],
+ 'ff1': n[12],
+ 'ff2': n[13],
+ 'damage_node_model': 'equal_diameter_emitter',
+ }
+
+ if rwhale_input_Data['SystemPerformance']['WaterDistributionNetwork'][
+ 'Pipe_Leak_Based'
+ ]:
+ pipe_leak_amount = rwhale_input_Data['SystemPerformance'][
+ 'WaterDistributionNetwork'
+ ]['pipe_leak_amount']
+ pipe_leak_time = rwhale_input_Data['SystemPerformance'][
+ 'WaterDistributionNetwork'
+ ]['pipe_leak_time']
+ pipe_damage_discovery_mode = {
+ 'method': 'leak_based',
+ 'leak_amount': pipe_leak_amount,
+ 'leak_time': pipe_leak_time,
+ }
else:
- pipe_time_discovery_ratio = rwhale_input_Data["SystemPerformance"]["WaterDistributionNetwork"]["pipe_time_discovery_ratio"]
- pipe_damage_discovery_mode = {'method': 'time_based', 'time_discovery_ratio': pipe_time_discovery_ratio}#pd.Series([line[0] for line in pipe_time_discovery_ratio], index = [line[1] for line in pipe_time_discovery_ratio])}
-
- if rwhale_input_Data["SystemPerformance"]["WaterDistributionNetwork"]["Node_Leak_Based"]:
- node_leak_amount = rwhale_input_Data["SystemPerformance"]["WaterDistributionNetwork"]["node_leak_amount"]
- node_leak_time = rwhale_input_Data["SystemPerformance"]["WaterDistributionNetwork"]["node_leak_time"]
- node_damage_discovery_mode = {'method': 'leak_based', 'leak_amount': node_leak_amount, 'leak_time': node_leak_time}
+ pipe_time_discovery_ratio = rwhale_input_Data['SystemPerformance'][
+ 'WaterDistributionNetwork'
+ ]['pipe_time_discovery_ratio']
+ pipe_damage_discovery_mode = {
+ 'method': 'time_based',
+ 'time_discovery_ratio': pipe_time_discovery_ratio,
+ } # pd.Series([line[0] for line in pipe_time_discovery_ratio], index = [line[1] for line in pipe_time_discovery_ratio])}
+
+ if rwhale_input_Data['SystemPerformance']['WaterDistributionNetwork'][
+ 'Node_Leak_Based'
+ ]:
+ node_leak_amount = rwhale_input_Data['SystemPerformance'][
+ 'WaterDistributionNetwork'
+ ]['node_leak_amount']
+ node_leak_time = rwhale_input_Data['SystemPerformance'][
+ 'WaterDistributionNetwork'
+ ]['node_leak_time']
+ node_damage_discovery_mode = {
+ 'method': 'leak_based',
+ 'leak_amount': node_leak_amount,
+ 'leak_time': node_leak_time,
+ }
else:
- node_time_discovery_ratio = rwhale_input_Data["SystemPerformance"]["WaterDistributionNetwork"]["node_time_discovery_ratio"]
- node_damage_discovery_mode = {'method': 'time_based', 'time_discovery_ratio': node_time_discovery_ratio } # pd.Series([line[0] for line in node_time_discovery_ratio], index = [line[1] for line in node_time_discovery_ratio])}
-
- pump_time_discovery_ratio = rwhale_input_Data["SystemPerformance"]["WaterDistributionNetwork"]["pump_time_discovery_ratio"]
- tank_time_discovery_ratio = rwhale_input_Data["SystemPerformance"]["WaterDistributionNetwork"]["tank_time_discovery_ratio"]
- pump_damage_discovery_model = {'method': 'time_based', 'time_discovery_ratio': pump_time_discovery_ratio } # pd.Series([line[0] for line in pump_time_discovery_ratio], index = [line[1] for line in pump_time_discovery_ratio])}
- tank_damage_discovery_model = {'method': 'time_based', 'time_discovery_ratio': tank_time_discovery_ratio } # pd.Series([line[0] for line in tank_time_discovery_ratio], index = [line[1] for line in tank_time_discovery_ratio])}
-
- REWET_input_data["settings"]['pipe_damage_discovery_model'] = pipe_damage_discovery_mode
- REWET_input_data["settings"]['node_damage_discovery_model'] = node_damage_discovery_mode
- REWET_input_data["settings"]['pump_damage_discovery_model'] = pump_damage_discovery_model
- REWET_input_data["settings"]['tank_damage_discovery_model'] = tank_damage_discovery_model
- REWET_input_data["settings"]['minimum_pressure' ] = rwhale_input_Data["SystemPerformance"]["WaterDistributionNetwork"]["minimum_pressure"]
- REWET_input_data["settings"]['required_pressure'] = rwhale_input_Data["SystemPerformance"]["WaterDistributionNetwork"]["required_pressure"]
-
- ############ Not Supposed to be in R2DTool GUI ############
- REWET_input_data["settings"]["minimum_simulation_time"] = 0 # TODO : HERE #REWET_input_data["event_time"] + REWET_input_data["settings"]["simulation_time_step"]
- REWET_input_data["settings"]["save_time_step" ] = True
- REWET_input_data["settings"]['record_restoration_agent_logs'] = True
- REWET_input_data["settings"]['record_damage_table_logs' ] = True
- REWET_input_data["settings"]["simulation_time_step"] = 3600
- REWET_input_data["settings"]["number_of_proccessor"] = 1
- REWET_input_data["settings"]['demand_ratio' ] = 1
- REWET_input_data["settings"]['dmg_rst_data_save' ] = True
- REWET_input_data["settings"]['Parameter_override' ] = True
- REWET_input_data["settings"]['mpi_resume' ] = True #ignores the scenarios that are done
- REWET_input_data["settings"]['ignore_empty_damage' ] = False
- REWET_input_data["settings"]['result_details' ] = 'extended'
- REWET_input_data["settings"]['negative_node_elmination' ] = True
- REWET_input_data["settings"]['nne_flow_limit' ] = 0.5
- REWET_input_data["settings"]['nne_pressure_limit' ] = -5
- REWET_input_data["settings"]['Virtual_node' ] = True
- REWET_input_data["settings"]['damage_node_model' ] = 'equal_diameter_emitter' #"equal_diameter_reservoir"
- REWET_input_data["settings"]['default_pipe_damage_model' ] = {"alpha":-0.0038, "beta":0.1096, "gamma":0.0196, "a":2, "b":1 }
-
- REWET_input_data["settings"]['limit_result_file_size' ] = -1 #in Mb. 0 means no limit
- REWET_input_data["settings"]['Pipe_damage_input_method' ] = 'pickle'
-
-def create_path(path):
+ node_time_discovery_ratio = rwhale_input_Data['SystemPerformance'][
+ 'WaterDistributionNetwork'
+ ]['node_time_discovery_ratio']
+ node_damage_discovery_mode = {
+ 'method': 'time_based',
+ 'time_discovery_ratio': node_time_discovery_ratio,
+ } # pd.Series([line[0] for line in node_time_discovery_ratio], index = [line[1] for line in node_time_discovery_ratio])}
+
+ pump_time_discovery_ratio = rwhale_input_Data['SystemPerformance'][
+ 'WaterDistributionNetwork'
+ ]['pump_time_discovery_ratio']
+ tank_time_discovery_ratio = rwhale_input_Data['SystemPerformance'][
+ 'WaterDistributionNetwork'
+ ]['tank_time_discovery_ratio']
+ pump_damage_discovery_model = {
+ 'method': 'time_based',
+ 'time_discovery_ratio': pump_time_discovery_ratio,
+ } # pd.Series([line[0] for line in pump_time_discovery_ratio], index = [line[1] for line in pump_time_discovery_ratio])}
+ tank_damage_discovery_model = {
+ 'method': 'time_based',
+ 'time_discovery_ratio': tank_time_discovery_ratio,
+ } # pd.Series([line[0] for line in tank_time_discovery_ratio], index = [line[1] for line in tank_time_discovery_ratio])}
+
+ REWET_input_data['settings']['pipe_damage_discovery_model'] = (
+ pipe_damage_discovery_mode
+ )
+ REWET_input_data['settings']['node_damage_discovery_model'] = (
+ node_damage_discovery_mode
+ )
+ REWET_input_data['settings']['pump_damage_discovery_model'] = (
+ pump_damage_discovery_model
+ )
+ REWET_input_data['settings']['tank_damage_discovery_model'] = (
+ tank_damage_discovery_model
+ )
+ REWET_input_data['settings']['minimum_pressure'] = rwhale_input_Data[
+ 'SystemPerformance'
+ ]['WaterDistributionNetwork']['minimum_pressure']
+ REWET_input_data['settings']['required_pressure'] = rwhale_input_Data[
+ 'SystemPerformance'
+ ]['WaterDistributionNetwork']['required_pressure']
+
+ # Not Supposed to be in R2DTool GUI ############
+ REWET_input_data['settings']['minimum_simulation_time'] = (
+ 0 # TODO : HERE #REWET_input_data["event_time"] + REWET_input_data["settings"]["simulation_time_step"] # noqa: TD002
+ )
+ REWET_input_data['settings']['save_time_step'] = True
+ REWET_input_data['settings']['record_restoration_agent_logs'] = True
+ REWET_input_data['settings']['record_damage_table_logs'] = True
+ REWET_input_data['settings']['simulation_time_step'] = 3600
+ REWET_input_data['settings']['number_of_proccessor'] = 1
+ REWET_input_data['settings']['demand_ratio'] = 1
+ REWET_input_data['settings']['dmg_rst_data_save'] = True
+ REWET_input_data['settings']['Parameter_override'] = True
+ REWET_input_data['settings']['mpi_resume'] = (
+ True # ignores the scenarios that are done
+ )
+ REWET_input_data['settings']['ignore_empty_damage'] = False
+ REWET_input_data['settings']['result_details'] = 'extended'
+ REWET_input_data['settings']['negative_node_elmination'] = True
+ REWET_input_data['settings']['nne_flow_limit'] = 0.5
+ REWET_input_data['settings']['nne_pressure_limit'] = -5
+ REWET_input_data['settings']['Virtual_node'] = True
+ REWET_input_data['settings']['damage_node_model'] = (
+ 'equal_diameter_emitter' # "equal_diameter_reservoir"
+ )
+ REWET_input_data['settings']['default_pipe_damage_model'] = {
+ 'alpha': -0.0038,
+ 'beta': 0.1096,
+ 'gamma': 0.0196,
+ 'a': 2,
+ 'b': 1,
+ }
+
+ REWET_input_data['settings'][
+ 'limit_result_file_size'
+ ] = -1 # in Mb. 0 means no limit
+ REWET_input_data['settings']['Pipe_damage_input_method'] = 'pickle'
+
+
+def create_path(path): # noqa: D103
if isinstance(path, str):
path = Path(path)
not_existing_hir = []
- while os.path.exists(path) == False:
+ while os.path.exists(path) == False: # noqa: PTH110, E712
not_existing_hir.append(path.name)
path = path.parent
-
+
while len(not_existing_hir):
new_path = path / not_existing_hir[-1]
new_path.mkdir()
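
`create_path` above walks up the tree collecting the missing directory names and then creates them one level at a time. For comparison only, a sketch of the same effect with pathlib in a single call (the helper name is hypothetical):

from pathlib import Path

def create_path_simple(path):
    """Create `path` together with any missing parent directories."""
    Path(path).mkdir(parents=True, exist_ok=True)

create_path_simple('Results/WaterDistributionNetwork/REWET_RunFiles')

Both variants tolerate a directory tree that already exists.
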
@@ -294,324 +402,418 @@ def create_path(path):
if __name__ == '__main__':
# Setting arg parser
- argParser = argparse.ArgumentParser(
- "Preprocess rwhale workflow to REWET input.")
-
- argParser.add_argument("--input", "-i", default="inputRWHALE.json",
- help="rwhale input file json file")
-
- #argParser.add_argument("--damage", "-d",
- #default="water_damage_input_structure.json",
- #help="water damage input json file. If provided, number of realization is ignored if prvided and number of realization is set to 1.")
-
- argParser.add_argument("--dir", "-d",
- help="WDN damage result directory")
-
- argParser.add_argument("--number", "-n",
- default=None,
- help="If specified, indicates realization number, otherwise, all scnearios are run on all CPUS.")
-
- argParser.add_argument("--par", "-p",
- default=False,
- action="store_true",
- help="if speciied, uses all CPUS. 2 or more CPUs are not available, it will revert back to serial run.")
-
+ argParser = argparse.ArgumentParser('Preprocess rwhale workflow to REWET input.') # noqa: N816
+
+ argParser.add_argument(
+ '--input',
+ '-i',
+ default='inputRWHALE.json',
+ help='rwhale input file json file',
+ )
+
+ # argParser.add_argument("--damage", "-d",
+ # default="water_damage_input_structure.json",
+ # help="water damage input json file. If provided, number of realization is ignored if prvided and number of realization is set to 1.")
+
+ argParser.add_argument('--dir', '-d', help='WDN damage result directory')
+
+ argParser.add_argument(
+ '--number',
+ '-n',
+ default=None,
+        help='If specified, indicates the realization number; otherwise, all scenarios are run on all CPUs.',
+ )
+
+ argParser.add_argument(
+ '--par',
+ '-p',
+ default=False,
+ action='store_true',
+        help='If specified, uses all CPUs. If 2 or more CPUs are not available, it will revert to a serial run.',
+ )
+
parser_data = argParser.parse_args()
-
-
- # learnign about paralell or serial settings
-
- numP = 1
- procID = 0
- doParallel = False
-
- mpi_spec = importlib.util.find_spec("mpi4py")
+ # learning about parallel or serial settings
+
+ numP = 1 # noqa: N816
+ procID = 0 # noqa: N816
+ doParallel = False # noqa: N816
+
+ mpi_spec = importlib.util.find_spec('mpi4py')
found = mpi_spec is not None
if found and argParser.par:
-
- import mpi4py
from mpi4py import MPI
+
comm = MPI.COMM_WORLD
- numP = comm.Get_size()
- procID = comm.Get_rank();
- if numP < 2:
- doParallel = False
- numP = 1
- procID = 0
- print(f"Parallel running is not possible. Numebr of CPUS are are not enough.")
+ numP = comm.Get_size() # noqa: N816
+ procID = comm.Get_rank() # noqa: N816
+ if numP < 2: # noqa: PLR2004
+ doParallel = False # noqa: N816
+ numP = 1 # noqa: N816
+ procID = 0 # noqa: N816
+ print( # noqa: T201
+                'Parallel running is not possible. The number of CPUs is not enough.'
+ )
else:
- doParallel = True;
-
+ doParallel = True # noqa: N816
+
# Setting up run settings
-
+
REWET_input_data = {}
- REWET_input_data["settings"] = {}
-
- #print(parser_data.input)
- rwhale_input_Data = preprocessorIO.readJSONFile(parser_data.input)
+ REWET_input_data['settings'] = {}
+
+ # print(parser_data.input)
+ rwhale_input_Data = preprocessorIO.readJSONFile(parser_data.input) # noqa: N816
setSettingsData(rwhale_input_Data, REWET_input_data)
- event_time = rwhale_input_Data["SystemPerformance"]["WaterDistributionNetwork"]["eventTime"]
-
-
- # Set R2D enviroment and parameters
- water_asset_data = rwhale_input_Data["Applications"]\
- ["Assets"]["WaterDistributionNetwork"]
- inp_file_addr = water_asset_data["ApplicationData"]["inpFile"]
+ event_time = rwhale_input_Data['SystemPerformance']['WaterDistributionNetwork'][
+ 'eventTime'
+ ]
+
+ # Set R2D environment and parameters
+ water_asset_data = rwhale_input_Data['Applications']['Assets'][
+ 'WaterDistributionNetwork'
+ ]
+ inp_file_addr = water_asset_data['ApplicationData']['inpFile']
if '{Current_Dir}' in inp_file_addr:
- inp_file_addr = inp_file_addr.replace("{Current_Dir}", ".")
- sc_geojson = rwhale_input_Data["Applications"]["Assets"]\
- ["WaterDistributionNetwork"]["ApplicationData"]["assetSourceFile"]
-
- run_directory = rwhale_input_Data["runDir"]
- number_of_realization = rwhale_input_Data["Applications"]\
- ["DL"]["WaterDistributionNetwork"]["ApplicationData"]["Realizations"]
-
- REWET_input_data["settings"]["result_directory"] = os.path.join(\
- run_directory,"Results","WaterDistributionNetwork", "REWET_Result")
-
- REWET_input_data["settings"]["temp_directory"] = os.path.join(\
- run_directory,"Results", "WaterDistributionNetwork", "REWET_RunFiles")
-
- REWET_input_data["settings"]['WN_INP'] = inp_file_addr
-
- damage_save_path = Path(run_directory) / "Results" / "WaterDistributionNetwork" / "damage_input"
+ inp_file_addr = inp_file_addr.replace('{Current_Dir}', '.')
+ sc_geojson = rwhale_input_Data['Applications']['Assets'][
+ 'WaterDistributionNetwork'
+ ]['ApplicationData']['assetSourceFile']
+
+ run_directory = rwhale_input_Data['runDir']
+ number_of_realization = rwhale_input_Data['Applications']['DL'][
+ 'WaterDistributionNetwork'
+ ]['ApplicationData']['Realizations']
+
+ REWET_input_data['settings']['result_directory'] = os.path.join( # noqa: PTH118
+ run_directory, 'Results', 'WaterDistributionNetwork', 'REWET_Result'
+ )
+
+ REWET_input_data['settings']['temp_directory'] = os.path.join( # noqa: PTH118
+ run_directory, 'Results', 'WaterDistributionNetwork', 'REWET_RunFiles'
+ )
+
+ REWET_input_data['settings']['WN_INP'] = inp_file_addr
+
+ damage_save_path = (
+ Path(run_directory) / 'Results' / 'WaterDistributionNetwork' / 'damage_input'
+ )
damage_save_path_hir = damage_save_path
create_path(damage_save_path_hir)
-
-
-
- if parser_data.number == None:
- scneario_list_path = damage_save_path / f"scenario_table.xlsx"
+
+ if parser_data.number == None: # noqa: E711
+ scneario_list_path = damage_save_path / 'scenario_table.xlsx'
else:
- scneario_list_path = damage_save_path / f"scenario_table_{parser_data.number}.xlsx"
-
- REWET_input_data["settings"]["pipe_damage_file_list"] = str(scneario_list_path)
- REWET_input_data["settings"]["pipe_damage_file_directory"] = str(damage_save_path)
-
- # Add Single Scneario or mutiple scenario
+ scneario_list_path = (
+ damage_save_path / f'scenario_table_{parser_data.number}.xlsx'
+ )
+
+ REWET_input_data['settings']['pipe_damage_file_list'] = str(scneario_list_path)
+ REWET_input_data['settings']['pipe_damage_file_directory'] = str(
+ damage_save_path
+ )
+
+    # Add a single scenario or multiple scenarios
Damage_file_name = []
-
+
if doParallel and procID > 0:
pass
else:
settings_json_file_path = preprocessorIO.saveSettingsFile(
- REWET_input_data, damage_save_path, parser_data.number)
-
+ REWET_input_data, damage_save_path, parser_data.number
+ )
+
scenario_table = preprocessorIO.create_scneario_table()
-
- if parser_data.number == None:
- Damage_file_name = list(range(0, number_of_realization))
-
+
+ if parser_data.number == None: # noqa: E711
+ Damage_file_name = list(range(number_of_realization))
+
else:
Damage_file_name.append(parser_data.number)
-
+
damage_save_path = scneario_list_path.parent
for scn_number in Damage_file_name:
- dl_file_path, dl_file_dir = getDLFileName(run_directory, parser_data.dir,
- scn_number)
-
+ dl_file_path, dl_file_dir = getDLFileName(
+ run_directory, parser_data.dir, scn_number
+ )
+
damage_data = damage_convertor.readDamagefile(
- dl_file_path, run_directory, event_time , sc_geojson)
- #damage_save_path = Path(run_directory) / "Results" / "WaterDistributionNetwork" / "damage_input"
-
+ dl_file_path, run_directory, event_time, sc_geojson
+ )
+ # damage_save_path = Path(run_directory) / "Results" / "WaterDistributionNetwork" / "damage_input"
+
cur_damage_file_name_list = preprocessorIO.save_damage_data(
- damage_save_path, damage_data, scn_number)
-
+ damage_save_path, damage_data, scn_number
+ )
+
scenario_table = preprocessorIO.update_scenario_table(
- scenario_table, cur_damage_file_name_list, scn_number)
-
-
- preprocessorIO.save_scenario_table(scenario_table,
- REWET_input_data["settings"]["pipe_damage_file_list"])
-
- command = "python " + "C:\\Users\\naeim\\Desktop\\REWET\\main.py -j " + str(settings_json_file_path)
- #try:
- #result = subprocess.check_output(command, shell=True, text=True)
- #returncode = 0
- #except subprocess.CalledProcessError as e:
- #result = e.output
- #returncode = e.returncode
-
- #if returncode != 0:
- #print('return code: {}'.format(returncode))
- #if returncode == 0:
- #print("REWET ran Successfully")
-
- create_path(REWET_input_data["settings"]["result_directory"])
- create_path(REWET_input_data["settings"]["temp_directory"])
-
- rewet_log_path = \
- Path(run_directory) /"Results" / "WaterDistributionNetwork" / "rewet_log.txt"
-
+ scenario_table, cur_damage_file_name_list, scn_number
+ )
+
+ preprocessorIO.save_scenario_table(
+ scenario_table, REWET_input_data['settings']['pipe_damage_file_list']
+ )
+
+ command = (
+ 'python ' # noqa: ISC003
+ + 'C:\\Users\\naeim\\Desktop\\REWET\\main.py -j '
+ + str(settings_json_file_path)
+ )
+ # try:
+ # result = subprocess.check_output(command, shell=True, text=True)
+ # returncode = 0
+ # except subprocess.CalledProcessError as e:
+ # result = e.output
+ # returncode = e.returncode
+
+ # if returncode != 0:
+ # print('return code: {}'.format(returncode))
+ # if returncode == 0:
+ # print("REWET ran Successfully")
+
+ create_path(REWET_input_data['settings']['result_directory'])
+ create_path(REWET_input_data['settings']['temp_directory'])
+
+ rewet_log_path = (
+ Path(run_directory)
+ / 'Results'
+ / 'WaterDistributionNetwork'
+ / 'rewet_log.txt'
+ )
+
system_std_out = sys.stdout
- with open(rewet_log_path, "wt") as log_file:
+ with open(rewet_log_path, 'w') as log_file: # noqa: PLW1514, PTH123
sys.stdout = log_file
REWET_starter = Starter()
REWET_starter.run(settings_json_file_path)
-
- p = Project_Result(Path(REWET_input_data["settings"]["result_directory"]) / "project.prj")
-
- # these are the input for result section. They are not include in the
- requested_result = ["DL", "QN"]
- substitute_ft = {"DL":"Delivery", "QN":"Quantity"}
- consistency_time_window = 0 # 7200
- iConsider_leak = False # True
+
+ p = Project_Result(
+ Path(REWET_input_data['settings']['result_directory']) / 'project.prj'
+ )
+
+        # These are the inputs for the result section; they are not included in the R2DTool GUI.
+ requested_result = ['DL', 'QN']
+ substitute_ft = {'DL': 'Delivery', 'QN': 'Quantity'}
+ consistency_time_window = 0 # 7200
+ iConsider_leak = False # True # noqa: N816
# the following does not matter if iConsider_leak is false
- leak_ratio = {"DL":0.75, "QN":0}
-
- sub_asset_list = ["Junction", "Pipe", "Reservoir"]
- sub_asset_name_to_id = dict()
- sub_asset_id_to_name = dict()
+ leak_ratio = {'DL': 0.75, 'QN': 0}
+
+ sub_asset_list = ['Junction', 'Pipe', 'Reservoir']
+ sub_asset_name_to_id = dict() # noqa: C408
+ sub_asset_id_to_name = dict() # noqa: C408
for sub_asset in sub_asset_list:
sc_geojson_file = preprocessorIO.readJSONFile(sc_geojson)
- sub_asset_data = [ss for ss in sc_geojson_file["features"] if ss["properties"]["type"]==sub_asset]
- sub_asset_id = [str(ss["id"]) for ss in sub_asset_data]
- sub_asset_name = [ss["properties"]["InpID"] for ss in sub_asset_data]
- sub_asset_name_to_id.update({sub_asset : dict(zip(sub_asset_name, sub_asset_id))})
- sub_asset_id_to_name.update({sub_asset : dict(zip(sub_asset_id, sub_asset_name))})
-
+ sub_asset_data = [
+ ss
+ for ss in sc_geojson_file['features']
+ if ss['properties']['type'] == sub_asset
+ ]
+ sub_asset_id = [str(ss['id']) for ss in sub_asset_data]
+ sub_asset_name = [ss['properties']['InpID'] for ss in sub_asset_data]
+ sub_asset_name_to_id.update(
+ {sub_asset: dict(zip(sub_asset_name, sub_asset_id))}
+ )
+ sub_asset_id_to_name.update(
+ {sub_asset: dict(zip(sub_asset_id, sub_asset_name))}
+ )
+
res = {}
res_agg = {}
scneario_size = len(p.project.scenario_list.index)
# create a dictionary with keys for each scenario number (in int) and keys for a BSC metric (None)
- temp_realization_in_each_time_series = dict(zip(range(0, scneario_size), [None] * scneario_size))
- # create a dictionary for stroing tiems series, each BSC metric (requested_result) is a key and each key has the dictioanry created in line before
- time_series_result = dict(zip(requested_result, [temp_realization_in_each_time_series]*len(requested_result)))
-
- for scn_name, row in p.project.scenario_list.iterrows():
- realization_number = int(scn_name.strip("SCN_") )
+ temp_realization_in_each_time_series = dict(
+ zip(range(scneario_size), [None] * scneario_size)
+ )
+        # create a dictionary for storing time series; each BSC metric (requested_result) is a key, and each key holds the dictionary created in the line before
+ time_series_result = dict(
+ zip(
+ requested_result,
+ [temp_realization_in_each_time_series] * len(requested_result),
+ )
+ )
+
+ for scn_name, row in p.project.scenario_list.iterrows(): # noqa: B007
+ realization_number = int(scn_name.strip('SCN_'))
for single_requested_result in requested_result:
- if single_requested_result == "DL" or single_requested_result == "QN":
-
- # Running Output module's method to get DL tiem series status
- time_series_result[single_requested_result][realization_number] = p.getBSCIndexPopulation_4(scn_name,
- bsc=single_requested_result,
- iPopulation=False,
- ratio= True,
- consider_leak=False,
- leak_ratio=1)
- time_series_result[single_requested_result][realization_number].index = \
- time_series_result[single_requested_result][realization_number].index / 3600
-
+ if (
+ single_requested_result == 'DL' # noqa: PLR1714
+ or single_requested_result == 'QN'
+ ):
+ # Running Output module's method to get DL time series status
+ time_series_result[single_requested_result][
+ realization_number
+ ] = p.getBSCIndexPopulation_4(
+ scn_name,
+ bsc=single_requested_result,
+ iPopulation=False,
+ ratio=True,
+ consider_leak=False,
+ leak_ratio=1,
+ )
+ time_series_result[single_requested_result][ # noqa: PLR6104
+ realization_number
+ ].index = (
+ time_series_result[single_requested_result][
+ realization_number
+ ].index
+ / 3600
+ )
+
# Running Output module's method to get BSC data for each junction (sum of outage)
res[single_requested_result] = p.getOutageTimeGeoPandas_5(
scn_name,
- bsc=single_requested_result ,
+ bsc=single_requested_result,
iConsider_leak=False,
leak_ratio=leak_ratio,
- consistency_time_window=consistency_time_window, sum_time=True)
- if res_agg.get(single_requested_result, None) is None:
- res_agg[single_requested_result] = res[single_requested_result].to_dict()
- for key in res_agg[single_requested_result].keys():
- res_agg[single_requested_result][key] = \
- [res_agg[single_requested_result][key]]
+ consistency_time_window=consistency_time_window,
+ sum_time=True,
+ )
+ if res_agg.get(single_requested_result) is None:
+ res_agg[single_requested_result] = res[
+ single_requested_result
+ ].to_dict()
+ for key in res_agg[single_requested_result].keys(): # noqa: SIM118
+ res_agg[single_requested_result][key] = [
+ res_agg[single_requested_result][key]
+ ]
else:
- for key in res_agg[single_requested_result].keys():
+ for key in res_agg[single_requested_result].keys(): # noqa: SIM118
res_agg[single_requested_result][key].append(
res[single_requested_result][key]
)
-
- cur_json_file_name = f"WaterDistributionNetwork_{realization_number}.json"
- cur_json_file_path = Path(run_directory) / "Results" / "WaterDistributionNetwork" / cur_json_file_name
-
- with open(cur_json_file_path, "rt") as f:
+
+ cur_json_file_name = (
+ f'WaterDistributionNetwork_{realization_number}.json'
+ )
+ cur_json_file_path = (
+ Path(run_directory)
+ / 'Results'
+ / 'WaterDistributionNetwork'
+ / cur_json_file_name
+ )
+
+ with open(cur_json_file_path) as f: # noqa: PLW1514, PTH123
json_data = json.load(f)
-
+
for single_requested_result in requested_result:
req_result = res[single_requested_result]
- result_key = f"{substitute_ft[single_requested_result]}Outage"
-
+ result_key = f'{substitute_ft[single_requested_result]}Outage'
+
# Only Junction needs to be added to rlz json
- junction_json_data = json_data["WaterDistributionNetwork"].get("Junction", {})
-
- for junction_name in req_result.keys():
+ junction_json_data = json_data['WaterDistributionNetwork'].get(
+ 'Junction', {}
+ )
+
+ for junction_name in req_result.keys(): # noqa: SIM118
junction_id = sub_asset_name_to_id['Junction'][junction_name]
cur_junction = junction_json_data.get(junction_id, {})
- cur_junction_SP = cur_junction.get("SystemPerformance", {})
- cur_junction_SP[result_key] = float(req_result[junction_name] )
-
- cur_junction["SystemPerformance"] = cur_junction_SP
+ cur_junction_SP = cur_junction.get('SystemPerformance', {}) # noqa: N816
+ cur_junction_SP[result_key] = float(req_result[junction_name])
+
+ cur_junction['SystemPerformance'] = cur_junction_SP
junction_json_data[junction_id] = cur_junction
-
- json_data["WaterDistributionNetwork"]["Junction"] = junction_json_data
-
-
- with open(cur_json_file_path, "wt") as f:
- json_data = json.dump(json_data, f, indent = 2)
-
- res_agg_mean = dict()
- res_agg_std = dict()
+
+ json_data['WaterDistributionNetwork']['Junction'] = (
+ junction_json_data
+ )
+
+ with open(cur_json_file_path, 'w') as f: # noqa: PLW1514, PTH123
+ json_data = json.dump(json_data, f, indent=2)
+
+ res_agg_mean = dict() # noqa: C408
+ res_agg_std = dict() # noqa: C408
for single_requested_result in requested_result:
- res_agg[single_requested_result] = pd.DataFrame(res_agg[single_requested_result])
- res_agg_mean[single_requested_result] = res_agg[single_requested_result].mean()
- res_agg_std[single_requested_result] = res_agg[single_requested_result].std()
+ res_agg[single_requested_result] = pd.DataFrame(
+ res_agg[single_requested_result]
+ )
+ res_agg_mean[single_requested_result] = res_agg[
+ single_requested_result
+ ].mean()
+ res_agg_std[single_requested_result] = res_agg[
+ single_requested_result
+ ].std()
sys.stdout = system_std_out
     # Append junction and reservoir general information to WaterDistributionNetwork_det
- det_json_path = cur_json_file_path = Path(run_directory) / "Results" / "WaterDistributionNetwork" / "WaterDistributionNetwork_det.json"
+ det_json_path = cur_json_file_path = (
+ Path(run_directory)
+ / 'Results'
+ / 'WaterDistributionNetwork'
+ / 'WaterDistributionNetwork_det.json'
+ )
det_json = preprocessorIO.readJSONFile(det_json_path)
inp_json = preprocessorIO.readJSONFile(sc_geojson)
inp_json = inp_json['features']
- for WDNtype in ['Reservoir', 'Junction']:
- json_to_attach = dict()
+ for WDNtype in ['Reservoir', 'Junction']:
+ json_to_attach = dict() # noqa: C408
for ft in inp_json:
prop = ft['properties']
if prop['type'] == WDNtype:
- id = str(ft['id'])
- generalInfo = dict()
+ id = str(ft['id']) # noqa: A001
+ generalInfo = dict() # noqa: C408, N816
json_geometry = ft['geometry']
shapely_geometry = geometry.shape(json_geometry)
wkt_geometry = shapely_geometry.wkt
- generalInfo.update({'geometry':wkt_geometry})
+ generalInfo.update({'geometry': wkt_geometry})
asset_name = sub_asset_id_to_name[WDNtype][id]
- generalInfo.update({'REWET_id':asset_name})
+ generalInfo.update({'REWET_id': asset_name})
generalInfo.update({'AIM_id': id})
for key, item in prop.items():
if key == 'id':
continue
- generalInfo.update({key:item})
- R2Dres = dict()
+ generalInfo.update({key: item})
+ R2Dres = dict() # noqa: C408
asset_name = sub_asset_id_to_name[WDNtype][id]
for single_requested_result in requested_result:
- if not asset_name in res_agg_mean[single_requested_result].index:
+ if asset_name not in res_agg_mean[single_requested_result].index:
continue
- R2Dres_key_mean = f"R2Dres_mean_{single_requested_result}"
- R2Dres_key_std = f"R2Dres_std_{single_requested_result}"
- R2Dres.update({R2Dres_key_mean:res_agg_mean[single_requested_result][asset_name],
- R2Dres_key_std:res_agg_std[single_requested_result][asset_name]})
+ R2Dres_key_mean = f'R2Dres_mean_{single_requested_result}'
+ R2Dres_key_std = f'R2Dres_std_{single_requested_result}'
+ R2Dres.update(
+ {
+ R2Dres_key_mean: res_agg_mean[single_requested_result][
+ asset_name
+ ],
+ R2Dres_key_std: res_agg_std[single_requested_result][
+ asset_name
+ ],
+ }
+ )
# location = dict()
# location.update({'latitude':ft['geometry']['coordinates'][1],\
# 'longitude':ft['geometry']['coordinates'][0]})
# generalInfo.update({'location':location})
- json_to_attach.update({id:{'GeneralInformation': generalInfo,
- 'R2Dres':R2Dres}})
- det_json['WaterDistributionNetwork'].update({WDNtype:json_to_attach})
- with open(det_json_path ,'w') as f:
+ json_to_attach.update(
+ {id: {'GeneralInformation': generalInfo, 'R2Dres': R2Dres}}
+ )
+ det_json['WaterDistributionNetwork'].update({WDNtype: json_to_attach})
+ with open(det_json_path, 'w') as f: # noqa: PLW1514, PTH123
json.dump(det_json, f, indent=2)
-
- ts_result_json_path = cur_json_file_path = Path(run_directory) / "Results" / "WaterDistributionNetwork" / "WaterDistributionNetwork_timeseries.json"
- time_series_result_struc = {"Type":"TimeSeries",
- "Asset":"WaterDistributionNetwork",
- "Result":{
- "QN":{"Name":"Water Quantity", "Data":{}},
- "DL":{"Name":"Water Delivery", "Data":{}}
- }
- }
-
- for single_requested_result in requested_result:
- for i in range(0, scneario_size):
- time_series_result_struc["Result"]\
- [single_requested_result]["Data"][i] =\
- time_series_result[single_requested_result][i].to_dict()
-
- with open(ts_result_json_path ,'w') as f:
- json.dump(time_series_result_struc, f, indent=2)
- print("here")
-
+ ts_result_json_path = cur_json_file_path = (
+ Path(run_directory)
+ / 'Results'
+ / 'WaterDistributionNetwork'
+ / 'WaterDistributionNetwork_timeseries.json'
+ )
+ time_series_result_struc = {
+ 'Type': 'TimeSeries',
+ 'Asset': 'WaterDistributionNetwork',
+ 'Result': {
+ 'QN': {'Name': 'Water Quantity', 'Data': {}},
+ 'DL': {'Name': 'Water Delivery', 'Data': {}},
+ },
+ }
-
-
-
\ No newline at end of file
+ for single_requested_result in requested_result:
+ for i in range(scneario_size):
+ time_series_result_struc['Result'][single_requested_result]['Data'][
+ i
+ ] = time_series_result[single_requested_result][i].to_dict()
+
+ with open(ts_result_json_path, 'w') as f: # noqa: PLW1514, PTH123
+ json.dump(time_series_result_struc, f, indent=2)
+ print('here') # noqa: T201
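
The tail of REWET_Wrapper.py collects each junction's outage value across realizations and reports the mean and standard deviation as `R2Dres_mean_*` / `R2Dres_std_*` entries. A minimal sketch of that aggregation with pandas, using made-up junction names and outage values:

import pandas as pd

# rows = realizations, columns = junctions; values are seconds of outage (hypothetical)
outage = pd.DataFrame({'J1': [3600.0, 7200.0, 5400.0], 'J2': [0.0, 1800.0, 900.0]})

r2d_res = {
    name: {
        'R2Dres_mean_QN': float(outage[name].mean()),
        'R2Dres_std_QN': float(outage[name].std()),
    }
    for name in outage.columns
}
print(r2d_res)
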
diff --git a/modules/systemPerformance/REWET/damage_convertor.py b/modules/systemPerformance/REWET/damage_convertor.py
index 9a58c3380..9b70d66e6 100644
--- a/modules/systemPerformance/REWET/damage_convertor.py
+++ b/modules/systemPerformance/REWET/damage_convertor.py
@@ -1,5 +1,4 @@
-# -*- coding: utf-8 -*-
-#
+# # noqa: INP001, D100
# Copyright (c) 2024 The Regents of the University of California
# Copyright (c) 2024 Leland Stanford Junior University
#
@@ -39,17 +38,15 @@
import os
from pathlib import Path
+
import pandas as pd
import preprocessorIO
CBIG_int = int(1e9)
-
-
-def createPipeDamageInputForREWET(pipe_damage_data, run_dir, event_time, sc_geojson):
- """
- Creates REWET-style piep damage file.
+def createPipeDamageInputForREWET(pipe_damage_data, run_dir, event_time, sc_geojson): # noqa: N802
+    """Creates REWET-style pipe damage file.
Parameters
----------
@@ -68,69 +65,82 @@ def createPipeDamageInputForREWET(pipe_damage_data, run_dir, event_time, sc_geoj
pipe_damage_list : Pandas Series
REWET-style pipe damage file.
- """
- pipe_id_list = [key for key in pipe_damage_data]
-
+ """ # noqa: D401
+ pipe_id_list = [key for key in pipe_damage_data] # noqa: C416
+
damage_list = []
damage_time = event_time
sc_geojson_file = preprocessorIO.readJSONFile(sc_geojson)
- pipe_data = [ss for ss in sc_geojson_file["features"] if ss["properties"]["type"]=="Pipe"]
- pipe_index = [str(ss["id"]) for ss in pipe_data]
- pipe_id = [ss["properties"]["InpID"] for ss in pipe_data]
+ pipe_data = [
+ ss
+ for ss in sc_geojson_file['features']
+ if ss['properties']['type'] == 'Pipe'
+ ]
+ pipe_index = [str(ss['id']) for ss in pipe_data]
+ pipe_id = [ss['properties']['InpID'] for ss in pipe_data]
pipe_index_to_id = dict(zip(pipe_index, pipe_id))
-
+
for pipe_id in pipe_id_list:
cur_data = pipe_damage_data[pipe_id]
- cur_damage = cur_data["Damage"]
- cur_demand = cur_data["Demand"]
-
- aim_data = findAndReadAIMFile(pipe_id,os.path.join(
- "Results", "WaterDistributionNetwork", "Pipe"), run_dir)
-
- material = aim_data["GeneralInformation"].get("Material", None)
-
- if material == None:
- #raise ValueError("Material is none")
- material = "CI"
-
- aggregates_list = [cur_agg for cur_agg in list( cur_damage.keys() ) if "aggregate" in cur_agg]
- segment_sizes = len(aggregates_list )
+ cur_damage = cur_data['Damage']
+ cur_demand = cur_data['Demand'] # noqa: F841
+
+ aim_data = findAndReadAIMFile(
+ pipe_id,
+ os.path.join('Results', 'WaterDistributionNetwork', 'Pipe'), # noqa: PTH118
+ run_dir,
+ )
+
+ material = aim_data['GeneralInformation'].get('Material', None)
+
+ if material == None: # noqa: E711
+ # raise ValueError("Material is none")
+ material = 'CI'
+
+ aggregates_list = [
+ cur_agg for cur_agg in list(cur_damage.keys()) if 'aggregate' in cur_agg
+ ]
+ segment_sizes = len(aggregates_list)
segment_step = 1 / segment_sizes
c = 0
-
- for cur_agg in aggregates_list: #cur_damage["aggregate"]:
+
+ for cur_agg in aggregates_list: # cur_damage["aggregate"]:
damage_val = cur_damage[cur_agg]
if damage_val > 0:
if damage_val == 1:
- damage_type = "leak"
- elif damage_val == 2:
- damage_type = "break"
+ damage_type = 'leak'
+ elif damage_val == 2: # noqa: PLR2004
+ damage_type = 'break'
else:
- raise ValueError("The damage type must be eother 1 or 2")
+                    raise ValueError('The damage type must be either 1 or 2')  # noqa: EM101, TRY003
else:
continue
-
cur_loc = c * segment_step + segment_step / 2
- #print(cur_loc)
+ # print(cur_loc)
c += 1
- damage_list.append( {"pipe_id": pipe_index_to_id[pipe_id], "damage_loc": cur_loc,
- "type": damage_type, "Material": material}
- )
+ damage_list.append(
+ {
+ 'pipe_id': pipe_index_to_id[pipe_id],
+ 'damage_loc': cur_loc,
+ 'type': damage_type,
+ 'Material': material,
+ }
+ )
damage_list.reverse()
- pipe_damage_list = pd.Series(data=damage_list,
- index=[damage_time for val in damage_list], dtype="O")
-
- #REWET_input_data["Pipe_damage_list"] = pipe_damage_list
- #REWET_input_data["AIM"] = aim_data
-
-
- return pipe_damage_list
-
-def createNodeDamageInputForREWET(node_damage_data, run_dir, event_time):
- """
- Creates REWET-style node damage file.
+ pipe_damage_list = pd.Series(
+ data=damage_list, index=[damage_time for val in damage_list], dtype='O'
+ )
+
+ # REWET_input_data["Pipe_damage_list"] = pipe_damage_list
+ # REWET_input_data["AIM"] = aim_data
+
+ return pipe_damage_list # noqa: RET504
+
+
+def createNodeDamageInputForREWET(node_damage_data, run_dir, event_time): # noqa: N802
+ """Creates REWET-style node damage file.
Parameters
----------
@@ -144,44 +154,52 @@ def createNodeDamageInputForREWET(node_damage_data, run_dir, event_time):
node_damage_list : Pandas Series
REWET-style node damage file.
- """
- node_id_list = [key for key in node_damage_data]
-
+ """ # noqa: D401
+ node_id_list = [key for key in node_damage_data] # noqa: C416
+
damage_list = []
damage_time = event_time
-
+
for node_id in node_id_list:
cur_damage = node_damage_data[node_id]
- aggregates_list = [cur_agg for cur_agg in list( cur_damage.keys() ) if "aggregate" in cur_agg]
-
+ aggregates_list = [
+ cur_agg for cur_agg in list(cur_damage.keys()) if 'aggregate' in cur_agg
+ ]
+
if len(aggregates_list) == 0:
continue
-
+
cur_data = node_damage_data[node_id]
- cur_damage = cur_data["Damage"]
- cur_demand = cur_data["Demand"]
-
- aim_data = findAndReadAIMFile(node_id,os.path.join(
- "Results", "WaterDistributionNetwork", "Node"),
- run_dir)
-
- total_length = aim_data["GeneralInformation"].get("Total_length", None)
- total_number_of_damages = cur_damage["aggregate"]
-
- damage_list.append( {"node_name": node_id,
- "number_of_damages": total_number_of_damages,
- "node_Pipe_Length": total_length}
- )
-
- node_damage_list = pd.Series(data=damage_list,
- index=[damage_time for val in damage_list], dtype="O")
-
- return node_damage_list
-
-def createPumpDamageInputForREWET(pump_damage_data, REWET_input_data):
- """
- Creates REWET-style pump damage file.
+ cur_damage = cur_data['Damage']
+ cur_demand = cur_data['Demand'] # noqa: F841
+
+ aim_data = findAndReadAIMFile(
+ node_id,
+ os.path.join('Results', 'WaterDistributionNetwork', 'Node'), # noqa: PTH118
+ run_dir,
+ )
+
+ total_length = aim_data['GeneralInformation'].get('Total_length', None)
+ total_number_of_damages = cur_damage['aggregate']
+
+ damage_list.append(
+ {
+ 'node_name': node_id,
+ 'number_of_damages': total_number_of_damages,
+ 'node_Pipe_Length': total_length,
+ }
+ )
+
+ node_damage_list = pd.Series(
+ data=damage_list, index=[damage_time for val in damage_list], dtype='O'
+ )
+
+ return node_damage_list # noqa: RET504
+
+
+def createPumpDamageInputForREWET(pump_damage_data, REWET_input_data): # noqa: N802, N803
+ """Creates REWET-style pump damage file.
Parameters
----------
@@ -195,41 +213,45 @@ def createPumpDamageInputForREWET(pump_damage_data, REWET_input_data):
pump_damage_list : Pandas Series
REWET-style pump damage file.
- """
- pump_id_list = [key for key in pump_damage_data]
-
+ """ # noqa: D401
+ pump_id_list = [key for key in pump_damage_data] # noqa: C416
+
damage_list = []
- damage_time = REWET_input_data["event_time"]
-
+ damage_time = REWET_input_data['event_time']
+
for pump_id in pump_id_list:
cur_data = pump_damage_data[pump_id]
- cur_damage = cur_data["Damage"]
- cur_repair_time = cur_data["Repair"]
-
+ cur_damage = cur_data['Damage']
+ cur_repair_time = cur_data['Repair']
+
if cur_damage == 0:
- continue # cur_damage_state = 0 means undamaged pump
-
+ continue # cur_damage_state = 0 means undamaged pump
+
# I'm not sure if we need any data about the pump at this point
-
- #aim_data = findAndReadAIMFile(tank_id, os.path.join(
- #"Results", "WaterDistributionNetwork", "Pump"),
- #REWET_input_data["run_dir"])
-
- #We are getting this data from PELICUN
- #restore_time = getPumpRetsoreTime(cur_damage)
- damage_list.append( {"pump_id": pump_id,
- "time": damage_time, "Restore_time": cur_repair_time}
- )
- pump_damage_list = pd.Series(index=[damage_time for val in damage_list], data=damage_list)
-
- return pump_damage_list
-
-
-
-def createTankDamageInputForREWET(tank_damage_data, REWET_input_data):
- """
- Creates REWET-style Tank damage file.
+
+ # aim_data = findAndReadAIMFile(tank_id, os.path.join(
+ # "Results", "WaterDistributionNetwork", "Pump"),
+ # REWET_input_data["run_dir"])
+
+ # We are getting this data from PELICUN
+ # restore_time = getPumpRetsoreTime(cur_damage)
+ damage_list.append(
+ {
+ 'pump_id': pump_id,
+ 'time': damage_time,
+ 'Restore_time': cur_repair_time,
+ }
+ )
+ pump_damage_list = pd.Series(
+ index=[damage_time for val in damage_list], data=damage_list
+ )
+
+ return pump_damage_list # noqa: RET504
+
+
+def createTankDamageInputForREWET(tank_damage_data, REWET_input_data): # noqa: N802, N803
+ """Creates REWET-style Tank damage file.
Parameters
----------
@@ -242,43 +264,49 @@ def createTankDamageInputForREWET(tank_damage_data, REWET_input_data):
-------
tank_damage_list : Pandas Series
REWET-style tank damage file.
- """
- tank_id_list = [key for key in tank_damage_data]
-
+
+ """ # noqa: D401
+ tank_id_list = [key for key in tank_damage_data] # noqa: C416
+
damage_list = []
- damage_time = REWET_input_data["event_time"]
-
+ damage_time = REWET_input_data['event_time']
+
for tank_id in tank_id_list:
cur_data = tank_damage_data[tank_id]
- cur_damage = cur_data["Damage"]
- cur_repair_time = cur_data["Repair"]
-
+ cur_damage = cur_data['Damage']
+ cur_repair_time = cur_data['Repair']
+
if cur_damage == 0:
- continue # cur_damage_state = 0 meeans undamged tank
-
-# =============================================================================
-# # We are getting his data from REWET
-#
-# aim_data = findAndReadAIMFile(tank_id, os.path.join(
-# "Results", "WaterDistributionNetwork", "Tank"),
-# REWET_input_data["run_dir"])
-# tank_type = aim_data["GeneralInformation"].get("Type", None)
-# restore_time = getTankRetsoreTime(tank_type, cur_damage)
-# =============================================================================
-
- damage_list.append( {"tank_id": tank_id,
- "time": damage_time, "Restore_time": cur_repair_time}
- )
-
- tank_damage_list = pd.Series(index=[damage_time for val in damage_list], data=damage_list)
-
- return tank_damage_list
-
-
-def findAndReadAIMFile(asset_id, asset_type, run_dir):
- """
- Finds and read the AIM file for an asset.
+            continue  # cur_damage_state = 0 means undamaged tank
+
+ # =============================================================================
+        # # We are getting this data from REWET
+ #
+ # aim_data = findAndReadAIMFile(tank_id, os.path.join(
+ # "Results", "WaterDistributionNetwork", "Tank"),
+ # REWET_input_data["run_dir"])
+ # tank_type = aim_data["GeneralInformation"].get("Type", None)
+ # restore_time = getTankRetsoreTime(tank_type, cur_damage)
+ # =============================================================================
+
+ damage_list.append(
+ {
+ 'tank_id': tank_id,
+ 'time': damage_time,
+ 'Restore_time': cur_repair_time,
+ }
+ )
+
+ tank_damage_list = pd.Series(
+ index=[damage_time for val in damage_list], data=damage_list
+ )
+
+ return tank_damage_list # noqa: RET504
+
+
+def findAndReadAIMFile(asset_id, asset_type, run_dir): # noqa: N802
+    """Finds and reads the AIM file for an asset.
Parameters
----------
@@ -294,22 +322,23 @@ def findAndReadAIMFile(asset_id, asset_type, run_dir):
aim_file_data : dict
AIM file data as a dict.
- """
+ """ # noqa: D401
+ file_path = Path(
+ run_dir, asset_type, str(asset_id), 'templatedir', f'{asset_id}-AIM.json'
+ )
+ aim_file_data = preprocessorIO.readJSONFile(str(file_path))
+ return aim_file_data # noqa: RET504
+
+
+def getPumpRetsoreTime(damage_state): # noqa: N802
+ """NOT USED! WE WILL GET IT FROM PELICUN
- file_path = Path(run_dir, asset_type, str(asset_id), "templatedir", f"{asset_id}-AIM.json")
- aim_file_data = preprocessorIO.readJSONFile(str(file_path) )
- return aim_file_data
-
-def getPumpRetsoreTime(damage_state):
- """
- NOT USED! WE WILL GET IT FROM PELICUN
-
Provides the restore time based on HAZUS repair time or any other
approach available in the future. If damage state is slight, the restore
time is 3 days (in seconds). If damage state is 2, the restore time is 7
days (in seconds). If damage state is 3 or 4, the restore time is
indefinite (a big number).
-
+
Parameters
----------
damage_state : Int
@@ -319,33 +348,32 @@ def getPumpRetsoreTime(damage_state):
Returns
-------
Retstor time : int
-
- """
-
+
+ """ # noqa: D400
if damage_state == 1:
restore_time = int(3 * 24 * 3600)
- elif damage_state == 2:
+ elif damage_state == 2: # noqa: PLR2004
restore_time = int(7 * 24 * 3600)
else:
restore_time = CBIG_int
-
+
return restore_time
-def getTankRetsoreTime(tank_type, damage_state):
- """
- NOT USED! WE WILL GET IT FROM PELICUN
-
+
+def getTankRetsoreTime(tank_type, damage_state): # noqa: ARG001, N802
+ """NOT USED! WE WILL GET IT FROM PELICUN
+
Provides the restore time based on HAZUS repair time or any other
approach available in the future. if damage state is slight, the restore
time is 3 days (in seconds). If damage state is 2, the restore time is 7
days (in seconds). If damage state is 3 or 4, the restore time is
indefinite (a big number).
-
+
Parameters
----------
tank_type : STR
- Tank type based on the data schema. The parametr is not used for now.
+ Tank type based on the data schema. The parameter is not used for now.
damage_state : Int
Specifies the damage state (1 for slightly damages, 2 for moderate,
3 etensive, and 4 complete.
@@ -353,22 +381,21 @@ def getTankRetsoreTime(tank_type, damage_state):
Returns
-------
Retstor time : int
-
- """
-
+
+ """ # noqa: D400
if damage_state == 1:
restore_time = int(3 * 24 * 3600)
- elif damage_state == 2:
+ elif damage_state == 2: # noqa: PLR2004
restore_time = int(7 * 24 * 3600)
else:
restore_time = CBIG_int
-
+
return restore_time
-def readDamagefile(file_addr, run_dir, event_time, sc_geojson):
- """
- Reads PELICUN damage files and create REWET-Style damage for all
+
+def readDamagefile(file_addr, run_dir, event_time, sc_geojson): # noqa: N802
+ """Reads PELICUN damage files and creates REWET-style damage for all
WaterDistributionNetwork elements
Parameters
@@ -376,7 +403,7 @@ def readDamagefile(file_addr, run_dir, event_time, sc_geojson):
file_addr : path
PELICUN damage file in JSON format.
REWET_input_data : dict
- REWET input data, whcih is updated in the function.
+ REWET input data, which is updated in the function.
scn_number : dict
JSON FILE.
@@ -385,43 +412,47 @@ def readDamagefile(file_addr, run_dir, event_time, sc_geojson):
damage_data : dict
Damage data in PELICUN dict format.
- """
- # TODO: Make reading once for each scneario
-
- #wn = wntrfr.network.WaterNetworkModel(REWET_input_data["inp_file"] )
-
+ """ # noqa: D205, D400, D401
+ # TODO: Read the damage file only once per scenario # noqa: TD002
+
+ # wn = wntrfr.network.WaterNetworkModel(REWET_input_data["inp_file"] )
+
damage_data = preprocessorIO.readJSONFile(file_addr)
-
- wn_damage_data = damage_data["WaterDistributionNetwork"]
- if "Pipe" in wn_damage_data:
+ wn_damage_data = damage_data['WaterDistributionNetwork']
+
+ if 'Pipe' in wn_damage_data:
pipe_damage_data = createPipeDamageInputForREWET(
- wn_damage_data["Pipe"], run_dir, event_time, sc_geojson)
+ wn_damage_data['Pipe'], run_dir, event_time, sc_geojson
+ )
else:
- pipe_damage_data = pd.Series(dtype="O")
-
- if "Tank" in wn_damage_data:
+ pipe_damage_data = pd.Series(dtype='O')
+
+ if 'Tank' in wn_damage_data:
tank_damage_data = createTankDamageInputForREWET(
- wn_damage_data["Tank"], run_dir, event_time)
+ wn_damage_data['Tank'], run_dir, event_time
+ )
else:
- tank_damage_data = pd.Series(dtype="O")
-
- if "Pump" in wn_damage_data:
+ tank_damage_data = pd.Series(dtype='O')
+
+ if 'Pump' in wn_damage_data:
pump_damage_data = createPumpDamageInputForREWET(
- wn_damage_data["Pump"], run_dir, event_time)
+ wn_damage_data['Pump'], run_dir, event_time
+ )
else:
- pump_damage_data = pd.Series(dtype="O")
-
- if "Junction" in wn_damage_data:
+ pump_damage_data = pd.Series(dtype='O')
+
+ if 'Junction' in wn_damage_data:
node_damage_data = createNodeDamageInputForREWET(
- wn_damage_data["Junction"], run_dir, event_time)
+ wn_damage_data['Junction'], run_dir, event_time
+ )
else:
- node_damage_data = pd.Series(dtype="O")
-
+ node_damage_data = pd.Series(dtype='O')
+
damage_data = {}
- damage_data["Pipe"] = pipe_damage_data
- damage_data["Tank"] = tank_damage_data
- damage_data["Pump"] = pump_damage_data
- damage_data["Node"] = node_damage_data
-
- return damage_data
\ No newline at end of file
+ damage_data['Pipe'] = pipe_damage_data
+ damage_data['Tank'] = tank_damage_data
+ damage_data['Pump'] = pump_damage_data
+ damage_data['Node'] = node_damage_data
+
+ return damage_data
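Illustrative usage sketch (not part of the diff): one way the damage-conversion helpers above might be driven end to end. The import name `damage_convertor`, the run directory, and the file names are placeholders/assumptions; a PELICUN damage JSON and the network GeoJSON are assumed to already exist under the run directory.

    from pathlib import Path

    import damage_convertor  # assumed import name for the module shown above

    run_dir = Path('Results')  # assumed rwhale run directory
    damage_file = run_dir / 'WaterDistributionNetwork_1.json'  # placeholder PELICUN output
    sc_geojson = run_dir / 'WaterDistributionNetwork.geojson'  # placeholder network GeoJSON

    # readDamagefile returns a dict of REWET-style pandas Series keyed by
    # 'Pipe', 'Tank', 'Pump', and 'Node'; event_time is in seconds.
    damage_data = damage_convertor.readDamagefile(
        str(damage_file), str(run_dir), event_time=3600, sc_geojson=str(sc_geojson)
    )
    print(damage_data['Pump'])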
diff --git a/modules/systemPerformance/REWET/preprocessorIO.py b/modules/systemPerformance/REWET/preprocessorIO.py
index 4f2fe59d7..bc3d730fc 100644
--- a/modules/systemPerformance/REWET/preprocessorIO.py
+++ b/modules/systemPerformance/REWET/preprocessorIO.py
@@ -1,5 +1,4 @@
-# -*- coding: utf-8 -*-
-#
+# # noqa: INP001, D100
# Copyright (c) 2024 The Regents of the University of California
# Copyright (c) 2024 Leland Stanford Junior University
#
@@ -39,13 +38,12 @@
import json
import os
-from pathlib import Path
+
import pandas as pd
-
-def readJSONFile(file_addr):
- """
- Reads a json file.
+
+def readJSONFile(file_addr): # noqa: N802
+ """Reads a JSON file.
Parameters
----------
@@ -62,112 +60,123 @@ def readJSONFile(file_addr):
data : dict
JSON File data as a dict.
- """
-
- if not os.path.exists(file_addr):
- raise ValueError("INPUT WHALE FILE is not found.", repr(file_addr) )
-
- with open(file_addr, "rt") as f:
+ """ # noqa: D401
+ if not os.path.exists(file_addr): # noqa: PTH110
+ raise ValueError('INPUT WHALE FILE is not found.', repr(file_addr)) # noqa: EM101, TRY003
+
+ with open(file_addr) as f: # noqa: PLW1514, PTH123
data = json.load(f)
-
- return data
+
+ return data # noqa: RET504
+
# =============================================================================
# def readRWHALEFileForREWET(file_addr, REWET_input_data):
# """
# Reads rwhile input file and returns the data as a dict and updates REWET
# input file.
-#
+#
# Parameters
# ----------
# file_addr : Path
# rwhale input file path.
# REWET_input_data : dict
# REWET input data.
-#
+#
# Returns
# -------
# rwhale_data : dict
# rwhale inoput data as a dict.
-#
+#
# """
-#
-#
+#
+#
# water_asset_data = rwhale_data["Applications"]\
# ["Assets"]["WaterDistributionNetwork"]
# inp_file_addr = water_asset_data["ApplicationData"]["inpFile"]
# run_directory = rwhale_data["runDir"]
# number_of_realization = rwhale_data["Applications"]\
# ["DL"]["WaterDistributionNetwork"]["ApplicationData"]["Realizations"]
-#
+#
# REWET_input_data["inp_file" ] = inp_file_addr
# REWET_input_data["run_dir"] = run_directory
# REWET_input_data["number_of_realizations"] = number_of_realization
-#
+#
# return rwhale_data
# =============================================================================
-def save_damage_data(damage_save_path, damage_data, scn_number):
- pipe_damage_data = damage_data["Pipe"]
- node_damage_data = damage_data["Node"]
- pump_damage_data = damage_data["Pump"]
- tank_damage_data = damage_data["Tank"]
-
- pipe_damage_file_name = f"pipe_damage_{scn_number}"
- node_damage_file_name = f"node_damage_{scn_number}"
- pump_damage_file_name = f"pump_damage_{scn_number}"
- tank_damage_file_name = f"tank_damage_{scn_number}"
-
- pipe_damage_file_path = os.path.join(damage_save_path, pipe_damage_file_name)
- node_damage_file_path = os.path.join(damage_save_path, node_damage_file_name)
- pump_damage_file_path = os.path.join(damage_save_path, pump_damage_file_name)
- tank_damage_file_path = os.path.join(damage_save_path, tank_damage_file_name)
-
+
+def save_damage_data(damage_save_path, damage_data, scn_number): # noqa: D103
+ pipe_damage_data = damage_data['Pipe']
+ node_damage_data = damage_data['Node']
+ pump_damage_data = damage_data['Pump']
+ tank_damage_data = damage_data['Tank']
+
+ pipe_damage_file_name = f'pipe_damage_{scn_number}'
+ node_damage_file_name = f'node_damage_{scn_number}'
+ pump_damage_file_name = f'pump_damage_{scn_number}'
+ tank_damage_file_name = f'tank_damage_{scn_number}'
+
+ pipe_damage_file_path = os.path.join(damage_save_path, pipe_damage_file_name) # noqa: PTH118
+ node_damage_file_path = os.path.join(damage_save_path, node_damage_file_name) # noqa: PTH118
+ pump_damage_file_path = os.path.join(damage_save_path, pump_damage_file_name) # noqa: PTH118
+ tank_damage_file_path = os.path.join(damage_save_path, tank_damage_file_name) # noqa: PTH118
+
pipe_damage_data.to_pickle(pipe_damage_file_path)
node_damage_data.to_pickle(node_damage_file_path)
pump_damage_data.to_pickle(pump_damage_file_path)
tank_damage_data.to_pickle(tank_damage_file_path)
-
- damage_file_name_list = {"Pipe": pipe_damage_file_name,
- "Node": node_damage_file_name,
- "Pump": pump_damage_file_name,
- "Tank": tank_damage_file_name}
-
- return damage_file_name_list
-
-def create_scneario_table():
- scenario_table = pd.DataFrame(dtype="O", columns=["Scenario Name",
- "Pipe Damage",
- "Nodal Damage",
- "Pump Damage",
- "Tank Damage",
- "Probability"])
- return scenario_table
-def update_scenario_table(scenario_table, cur_damage_file_name_list, scn_number):
-
+ damage_file_name_list = {
+ 'Pipe': pipe_damage_file_name,
+ 'Node': node_damage_file_name,
+ 'Pump': pump_damage_file_name,
+ 'Tank': tank_damage_file_name,
+ }
+
+ return damage_file_name_list # noqa: RET504
+
+
+def create_scneario_table(): # noqa: D103
+ scenario_table = pd.DataFrame(
+ dtype='O',
+ columns=[
+ 'Scenario Name',
+ 'Pipe Damage',
+ 'Nodal Damage',
+ 'Pump Damage',
+ 'Tank Damage',
+ 'Probability',
+ ],
+ )
+ return scenario_table # noqa: RET504
+
+
+def update_scenario_table(scenario_table, cur_damage_file_name_list, scn_number): # noqa: D103
if isinstance(scenario_table, pd.core.frame.DataFrame):
- scenario_table = scenario_table.to_dict("records")
+ scenario_table = scenario_table.to_dict('records')
elif isinstance(scenario_table, list):
pass
else:
- raise ValueError("This is an unknown behavior.")
-
- new_row = {"Scenario Name":f"SCN_{scn_number}",
- "Pipe Damage":cur_damage_file_name_list["Pipe"],
- "Nodal Damage":cur_damage_file_name_list["Node"],
- "Pump Damage":cur_damage_file_name_list["Pump"],
- "Tank Damage":cur_damage_file_name_list["Tank"],
- "Probability":1}
-
+ raise ValueError('This is an unknown behavior.') # noqa: EM101, TRY003, TRY004
+
+ new_row = {
+ 'Scenario Name': f'SCN_{scn_number}',
+ 'Pipe Damage': cur_damage_file_name_list['Pipe'],
+ 'Nodal Damage': cur_damage_file_name_list['Node'],
+ 'Pump Damage': cur_damage_file_name_list['Pump'],
+ 'Tank Damage': cur_damage_file_name_list['Tank'],
+ 'Probability': 1,
+ }
+
scenario_table.append(new_row)
-
+
return scenario_table
+
def save_scenario_table(scenario_table, scenario_table_file_path):
- """
- Saves the scneario data including scneario table and damaghe data acording
- to the table data/
+ """Saves the scenario data, including the scenario table and damage data,
+ according to the table data.
Parameters
----------
@@ -178,23 +187,23 @@ def save_scenario_table(scenario_table, scenario_table_file_path):
-------
None.
- """
+ """ # noqa: D205, D400, D401
if isinstance(scenario_table, pd.core.frame.DataFrame):
pass
elif isinstance(scenario_table, list):
- scenario_table = pd.DataFrame(scenario_table)
+ scenario_table = pd.DataFrame(scenario_table)
else:
- raise ValueError("This is an unknown behavior.")
-
- scenario_table = scenario_table.set_index("Scenario Name")
-
- #scenario_list_file_path = os.path.join(damage_save_path, scenario_list_file_name)
-
+ raise ValueError('This is an unknown behavior.') # noqa: DOC501, EM101, TRY003, TRY004
+
+ scenario_table = scenario_table.set_index('Scenario Name')
+
+ # scenario_list_file_path = os.path.join(damage_save_path, scenario_list_file_name)
+
scenario_table.to_excel(scenario_table_file_path)
-
-def saveSettingsFile(REWET_input_data, save_directory, prefix):
- """
- Saves seetings data that REWET NEEDs.
+
+
+def saveSettingsFile(REWET_input_data, save_directory, prefix): # noqa: N802, N803
+ """Saves settings data that REWET needs.
Parameters
----------
@@ -205,20 +214,14 @@ def saveSettingsFile(REWET_input_data, save_directory, prefix):
-------
None.
- """
-
- settings = REWET_input_data["settings"]
- if prefix == None:
- settings_file_name = "settings.json"
+ """ # noqa: D401
+ settings = REWET_input_data['settings']
+ if prefix == None: # noqa: E711
+ settings_file_name = 'settings.json'
else:
- settings_file_name = prefix + "_" + "settings.json"
+ settings_file_name = prefix + '_' + 'settings.json'
damage_save_path = save_directory / settings_file_name
- with open(damage_save_path, "w") as f:
- json.dump(settings , f, indent=4)
-
+ with open(damage_save_path, 'w') as f: # noqa: PLW1514, PTH123
+ json.dump(settings, f, indent=4)
+
return damage_save_path
-
-
-
-
-
\ No newline at end of file
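Illustrative sketch (not part of the diff) of how the scenario helpers in preprocessorIO.py fit together. The import name, paths, and scenario number are placeholders, the damage dict stands in for the output of readDamagefile, and writing the Excel table requires an installed writer such as openpyxl.

    from pathlib import Path

    import pandas as pd

    import preprocessorIO  # assumed import name for the module shown above

    # Placeholder damage dict in the shape returned by readDamagefile.
    damage_data = {
        key: pd.Series(dtype='O') for key in ('Pipe', 'Node', 'Pump', 'Tank')
    }

    damage_save_path = Path('Results/damage')  # placeholder output directory
    damage_save_path.mkdir(parents=True, exist_ok=True)
    scn_number = 0  # placeholder scenario index

    # Pickle the per-asset Series, record the generated file names in the
    # scenario table, then write the table (one row per scenario, probability 1).
    scenario_table = preprocessorIO.create_scneario_table()
    file_names = preprocessorIO.save_damage_data(damage_save_path, damage_data, scn_number)
    scenario_table = preprocessorIO.update_scenario_table(scenario_table, file_names, scn_number)
    preprocessorIO.save_scenario_table(scenario_table, damage_save_path / 'scenario_table.xlsx')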
diff --git a/modules/tools/BRAILS/getBRAILSAttributes.py b/modules/tools/BRAILS/getBRAILSAttributes.py
index 9737dd3c8..9311e46de 100644
--- a/modules/tools/BRAILS/getBRAILSAttributes.py
+++ b/modules/tools/BRAILS/getBRAILSAttributes.py
@@ -1,5 +1,4 @@
-# -*- coding: utf-8 -*-
-#
+# # noqa: INP001, D100
# Copyright (c) 2024 The Regents of the University of California
#
# This file is a part of SimCenter backend applications.
@@ -37,14 +36,14 @@
# Barbaros Cetiner
#
# Last updated:
-# 03-27-2024
+# 03-27-2024
# Import packages needed for setting up required packages:
+import subprocess # noqa: S404
import sys
-import subprocess
from importlib import metadata as importlib_metadata
-print('Initializing BRAILS...')
+print('Initializing BRAILS...') # noqa: T201
# If not installed, install BRAILS, argparse, and requests:
required = {'BRAILS', 'argparse', 'requests'}
@@ -52,75 +51,90 @@
# Detect installed packages using Python-provided importlib.metadata:
for x in importlib_metadata.distributions():
- try:
+ try: # noqa: SIM105
installed.add(x.name)
- except:
+ except: # noqa: S110, PERF203, E722
pass
# If installed packages could not be detected, use importlib_metadata backport:
if not installed:
import importlib_metadata
+
for x in importlib_metadata.distributions():
- try:
+ try: # noqa: SIM105
installed.add(x.name)
- except:
+ except: # noqa: S110, PERF203, E722
pass
missing = required - installed
# Install missing packages:
python = sys.executable
if missing:
- print('\nInstalling packages required for running this widget...')
- subprocess.check_call([python, '-m', 'pip', 'install', *missing],
- stdout=subprocess.DEVNULL)
- print('Successfully installed the required packages')
+ print('\nInstalling packages required for running this widget...') # noqa: T201
+ subprocess.check_call( # noqa: S603
+ [python, '-m', 'pip', 'install', *missing], stdout=subprocess.DEVNULL
+ )
+ print('Successfully installed the required packages') # noqa: T201
# If requests and BRAILS were previously installed ensure they are at their latest versions:
-subprocess.check_call([python, '-m', 'pip', 'install', 'requests','-U'],
- stdout=subprocess.DEVNULL)
-
-import requests
-latestBrailsVersion = requests.get('https://pypi.org/pypi/BRAILS/json').json()['info']['version']
-if importlib_metadata.version('BRAILS')!=latestBrailsVersion:
- print('\nAn older version of BRAILS was detected. Updating to the latest BRAILS version..')
- subprocess.check_call([python, '-m', 'pip', 'install', 'BRAILS','-U'],
- stdout=subprocess.DEVNULL)
- print('Successfully installed the latest version of BRAILS')
-
+subprocess.check_call( # noqa: S603
+ [python, '-m', 'pip', 'install', 'requests', '-U'], stdout=subprocess.DEVNULL
+)
+
+import requests # noqa: E402
+
+latestBrailsVersion = requests.get('https://pypi.org/pypi/BRAILS/json').json()[ # noqa: S113, N816
+ 'info'
+]['version']
+if importlib_metadata.version('BRAILS') != latestBrailsVersion:
+ print( # noqa: T201
+ '\nAn older version of BRAILS was detected. Updating to the latest BRAILS version...'
+ )
+ subprocess.check_call( # noqa: S603
+ [python, '-m', 'pip', 'install', 'BRAILS', '-U'], stdout=subprocess.DEVNULL
+ )
+ print('Successfully installed the latest version of BRAILS') # noqa: T201
+
# Import packages required for running the latest version of BRAILS:
-import argparse
-import os
-from time import gmtime, strftime
-from brails.EnabledAttributes import BldgAttributes
+import argparse # noqa: E402
+import os # noqa: E402
+from time import gmtime, strftime # noqa: E402
+
+from brails.EnabledAttributes import BldgAttributes # noqa: E402
+
# Define a standard way of printing program outputs:
-def log_msg(msg):
+def log_msg(msg): # noqa: D103
formatted_msg = '{} {}'.format(strftime('%Y-%m-%dT%H:%M:%SZ', gmtime()), msg)
- print(formatted_msg)
+ print(formatted_msg) # noqa: T201
-# Define a way to call BRAILS BldgAttributes and write them in a file:
-def runBrails(outputfile):
+# Define a way to call BRAILS BldgAttributes and write them in a file:
+def runBrails(outputfile): # noqa: N802, D103
attributes = BldgAttributes()
- with open(outputfile,'w') as f:
+ with open(outputfile, 'w') as f: # noqa: FURB103, PLW1514, PTH123
f.write('\n'.join(attributes))
+
# Define a way to collect GUI input:
-def main(args):
- parser = argparse.ArgumentParser()
+def main(args): # noqa: D103
+ parser = argparse.ArgumentParser()
parser.add_argument('--outputFile', default=None)
args = parser.parse_args(args)
# Create the folder for the output file, if it does not exist:
- outdir = os.path.abspath(args.outputFile).replace(os.path.split(args.outputFile)[-1],'')
- os.makedirs(outdir, exist_ok=True)
+ outdir = os.path.abspath(args.outputFile).replace( # noqa: PTH100
+ os.path.split(args.outputFile)[-1], ''
+ )
+ os.makedirs(outdir, exist_ok=True) # noqa: PTH103
# Run BRAILS with the user-defined arguments:
runBrails(args.outputFile)
log_msg('BRAILS was successfully initialized')
-
+
+
# Run main:
if __name__ == '__main__':
- main(sys.argv[1:])
\ No newline at end of file
+ main(sys.argv[1:])
diff --git a/modules/tools/BRAILS/getBRAILSBaselineInv.py b/modules/tools/BRAILS/getBRAILSBaselineInv.py
index 98883f091..799275df5 100644
--- a/modules/tools/BRAILS/getBRAILSBaselineInv.py
+++ b/modules/tools/BRAILS/getBRAILSBaselineInv.py
@@ -1,5 +1,4 @@
-# -*- coding: utf-8 -*-
-#
+# # noqa: INP001, D100
# Copyright (c) 2024 The Regents of the University of California
#
# This file is a part of SimCenter backend applications.
@@ -37,72 +36,100 @@
# Barbaros Cetiner
#
# Last updated:
-# 03-27-2024
+# 03-27-2024
# Import packages required for running the latest version of BRAILS:
-import sys
import argparse
import os
+import sys
from time import gmtime, strftime
-from brails.workflow.FootprintHandler import FootprintHandler
+
+from brails.workflow.FootprintHandler import FootprintHandler
from brails.workflow.NSIParser import NSIParser
+
# Define a standard way of printing program outputs:
-def log_msg(msg):
+def log_msg(msg): # noqa: D103
formatted_msg = '{} {}'.format(strftime('%Y-%m-%dT%H:%M:%SZ', gmtime()), msg)
- print(formatted_msg)
+ print(formatted_msg) # noqa: T201
-# Define a way to call BRAILS FootprintHandler and NSIParser:
-def runBrails(latMin, latMax, longMin, longMax, locationStr, fpSrc,
- invInp, invAttrMap, outputDataType, outputfile, lengthunit):
+# Define a way to call BRAILS FootprintHandler and NSIParser:
+def runBrails( # noqa: N802, D103
+ latMin, # noqa: N803
+ latMax, # noqa: N803
+ longMin, # noqa: N803
+ longMax, # noqa: N803
+ locationStr, # noqa: N803
+ fpSrc, # noqa: N803
+ invInp, # noqa: N803
+ invAttrMap, # noqa: N803
+ outputDataType, # noqa: N803
+ outputfile,
+ lengthunit,
+):
# Initialize FootprintHandler:
- fpHandler = FootprintHandler()
- if locationStr == "\"\"":
- locationStr = ""
+ fpHandler = FootprintHandler() # noqa: N806
+ if locationStr == '""':
+ locationStr = '' # noqa: N806
- if invInp=="NSI":
- nsiParser = NSIParser()
+ if invInp == 'NSI':
+ nsiParser = NSIParser() # noqa: N806
# Format location input based on the GUI input:
if 'geojson' in fpSrc.lower() or 'csv' in fpSrc.lower():
location = fpSrc
- fpSrc = 'osm'
- fpUserSpecified = True
- elif locationStr=="":
- location = (longMin,latMin,longMax,latMax)
- fpUserSpecified = False
+ fpSrc = 'osm' # noqa: N806
+ fpUserSpecified = True # noqa: N806
+ elif locationStr == '': # noqa: PLC1901
+ location = (longMin, latMin, longMax, latMax)
+ fpUserSpecified = False # noqa: N806
else:
location = locationStr
- fpUserSpecified = False
+ fpUserSpecified = False # noqa: N806
# Get raw NSI data:
- if outputDataType=='raw':
+ if outputDataType == 'raw':
if not fpUserSpecified:
# Run FootprintHandler to generate the boundary polygon for the entered location:
- if locationStr=="":
- bpoly, _, = fpHandler._FootprintHandler__bbox2poly(location)
+ if locationStr == '': # noqa: PLC1901
+ (
+ bpoly,
+ _,
+ ) = fpHandler._FootprintHandler__bbox2poly(location) # noqa: SLF001
else:
- bpoly, _, _ = fpHandler._FootprintHandler__fetch_roi(location)
- nsiParser.GetRawDataROI(bpoly,outputfile)
+ bpoly, _, _ = fpHandler._FootprintHandler__fetch_roi(location) # noqa: SLF001
+ nsiParser.GetRawDataROI(bpoly, outputfile)
else:
- fpHandler.fetch_footprint_data(location, fpSource=fpSrc, lengthUnit=lengthunit)
+ fpHandler.fetch_footprint_data(
+ location, fpSource=fpSrc, lengthUnit=lengthunit
+ )
footprints = fpHandler.footprints.copy()
- nsiParser.GetRawDataROI(footprints,outputfile)
+ nsiParser.GetRawDataROI(footprints, outputfile)
# Get footprint-merged NSI data:
- elif outputDataType=='processed':
+ elif outputDataType == 'processed':
# Run FootprintHandler to get the footprints for the entered location:
- fpHandler.fetch_footprint_data(location, fpSource=fpSrc, lengthUnit=lengthunit)
+ fpHandler.fetch_footprint_data(
+ location, fpSource=fpSrc, lengthUnit=lengthunit
+ )
footprints = fpHandler.footprints.copy()
- nsiParser.GetNSIData(footprints, outfile=outputfile, lengthUnit=lengthunit)
+ nsiParser.GetNSIData(
+ footprints, outfile=outputfile, lengthUnit=lengthunit
+ )
else:
# This option is for processed inventory data only. Raw data gets handled
# in the widget:
- fpHandler.fetch_footprint_data(invInp, attrmap=invAttrMap, fpSource=fpSrc,
- lengthUnit=lengthunit, outputFile=outputfile)
+ fpHandler.fetch_footprint_data(
+ invInp,
+ attrmap=invAttrMap,
+ fpSource=fpSrc,
+ lengthUnit=lengthunit,
+ outputFile=outputfile,
+ )
+
# Define a way to collect GUI input:
-def main(args):
+def main(args): # noqa: D103
parser = argparse.ArgumentParser()
parser.add_argument('--latMin', default=None, type=float)
parser.add_argument('--latMax', default=None, type=float)
@@ -110,26 +137,38 @@ def main(args):
parser.add_argument('--longMax', default=None, type=float)
parser.add_argument('--location', default=None, type=str)
parser.add_argument('--fpSource', default=None, type=str)
- parser.add_argument('--invInput', default=None, type=str)
- parser.add_argument('--invAttributeMap', default=None, type=str)
- parser.add_argument('--outputDataType', default=None, type=str)
+ parser.add_argument('--invInput', default=None, type=str)
+ parser.add_argument('--invAttributeMap', default=None, type=str)
+ parser.add_argument('--outputDataType', default=None, type=str)
parser.add_argument('--outputFile', default=None)
- parser.add_argument('--lengthUnit', default="m", type=str)
+ parser.add_argument('--lengthUnit', default='m', type=str)
args = parser.parse_args(args)
# Create the folder for the user-defined output directory, if it does not exist:
- outdir = os.path.abspath(args.outputFile).replace(os.path.split(args.outputFile)[-1],'')
- os.makedirs(outdir, exist_ok=True)
+ outdir = os.path.abspath(args.outputFile).replace( # noqa: PTH100
+ os.path.split(args.outputFile)[-1], ''
+ )
+ os.makedirs(outdir, exist_ok=True) # noqa: PTH103
# Run BRAILS with the user-defined arguments:
runBrails(
- args.latMin, args.latMax, args.longMin, args.longMax, args.location,
- args.fpSource, args.invInput, args.invAttributeMap, args.outputDataType,
- args.outputFile, args.lengthUnit)
+ args.latMin,
+ args.latMax,
+ args.longMin,
+ args.longMax,
+ args.location,
+ args.fpSource,
+ args.invInput,
+ args.invAttributeMap,
+ args.outputDataType,
+ args.outputFile,
+ args.lengthUnit,
+ )
log_msg('BRAILS successfully generated the requested inventory')
-
+
+
# Run main:
if __name__ == '__main__':
- main(sys.argv[1:])
\ No newline at end of file
+ main(sys.argv[1:])
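A minimal invocation sketch (not part of the diff) for getBRAILSBaselineInv.py, assuming BRAILS and its dependencies are installed; the bounding box and output path are placeholders. Passing --location '""' mirrors the GUI convention handled by runBrails, and --outputDataType processed selects footprint-merged NSI data.

    from getBRAILSBaselineInv import main  # assumed import of the script above

    main([
        '--latMin', '37.86', '--latMax', '37.88',    # placeholder bounding box
        '--longMin', '-122.30', '--longMax', '-122.28',
        '--location', '""',                          # empty location string (GUI convention)
        '--fpSource', 'osm',                         # OpenStreetMap footprints
        '--invInput', 'NSI',                         # use the National Structure Inventory
        '--invAttributeMap', '',                     # no custom attribute map
        '--outputDataType', 'processed',             # footprint-merged NSI data
        '--outputFile', 'nsi_inventory.geojson',     # placeholder output path
        '--lengthUnit', 'm',
    ])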
diff --git a/modules/tools/BRAILS/getBRAILSFootprints.py b/modules/tools/BRAILS/getBRAILSFootprints.py
index 8cedb09ff..d02f8f134 100644
--- a/modules/tools/BRAILS/getBRAILSFootprints.py
+++ b/modules/tools/BRAILS/getBRAILSFootprints.py
@@ -1,5 +1,4 @@
-# -*- coding: utf-8 -*-
-#
+# # noqa: INP001, D100
# Copyright (c) 2024 The Regents of the University of California
#
# This file is a part of SimCenter backend applications.
@@ -37,46 +36,66 @@
# Barbaros Cetiner
#
# Last updated:
-# 03-27-2024
-
+# 03-27-2024
+
# Import packages required for running the latest version of BRAILS:
-import sys
import argparse
import os
+import sys
from time import gmtime, strftime
-from brails.workflow.FootprintHandler import FootprintHandler
+
+from brails.workflow.FootprintHandler import FootprintHandler
+
# Define a standard way of printing program outputs:
-def log_msg(msg):
+def log_msg(msg): # noqa: D103
formatted_msg = '{} {}'.format(strftime('%Y-%m-%dT%H:%M:%SZ', gmtime()), msg)
- print(formatted_msg)
+ print(formatted_msg) # noqa: T201
+
# Define a way to call BRAILS FootprintHandler:
-def runBrails(latMin, latMax, longMin, longMax, locationStr, fpSrc, fpSourceAttrMap,
- outputfile, lengthunit):
+def runBrails( # noqa: N802, D103
+ latMin, # noqa: N803
+ latMax, # noqa: N803
+ longMin, # noqa: N803
+ longMax, # noqa: N803
+ locationStr, # noqa: N803
+ fpSrc, # noqa: N803
+ fpSourceAttrMap, # noqa: N803
+ outputfile,
+ lengthunit,
+):
# Initialize FootprintHandler:
- fpHandler = FootprintHandler()
- if locationStr == "\"\"":
- locationStr = ""
+ fpHandler = FootprintHandler() # noqa: N806
+ if locationStr == '""':
+ locationStr = '' # noqa: N806
# Format location input based on the GUI input:
if 'geojson' in fpSrc.lower() or 'csv' in fpSrc.lower():
location = fpSrc
- fpSrc = 'osm'
- elif locationStr=="":
- location = (longMin,latMin,longMax,latMax)
+ fpSrc = 'osm' # noqa: N806
+ elif locationStr == '': # noqa: PLC1901
+ location = (longMin, latMin, longMax, latMax)
else:
location = locationStr
# Run FootprintHandler to get GeoJSON file for the footprints of the entered location:
- if fpSourceAttrMap=='':
- fpHandler.fetch_footprint_data(location, fpSource=fpSrc, lengthUnit=lengthunit, outputFile=outputfile)
- else:
- fpHandler.fetch_footprint_data(location, fpSource=fpSrc, attrmap = fpSourceAttrMap,
- lengthUnit=lengthunit, outputFile = outputfile)
+ if fpSourceAttrMap == '': # noqa: PLC1901
+ fpHandler.fetch_footprint_data(
+ location, fpSource=fpSrc, lengthUnit=lengthunit, outputFile=outputfile
+ )
+ else:
+ fpHandler.fetch_footprint_data(
+ location,
+ fpSource=fpSrc,
+ attrmap=fpSourceAttrMap,
+ lengthUnit=lengthunit,
+ outputFile=outputfile,
+ )
+
# Define a way to collect GUI input:
-def main(args):
+def main(args): # noqa: D103
parser = argparse.ArgumentParser()
parser.add_argument('--latMin', default=None, type=float)
parser.add_argument('--latMax', default=None, type=float)
@@ -84,23 +103,34 @@ def main(args):
parser.add_argument('--longMax', default=None, type=float)
parser.add_argument('--location', default=None, type=str)
parser.add_argument('--fpSource', default=None, type=str)
- parser.add_argument('--fpSourceAttrMap', default=None, type=str)
+ parser.add_argument('--fpSourceAttrMap', default=None, type=str)
parser.add_argument('--outputFile', default=None)
- parser.add_argument('--lengthUnit', default="m", type=str)
-
+ parser.add_argument('--lengthUnit', default='m', type=str)
+
args = parser.parse_args(args)
# Create the folder for the user-defined output directory, if it does not exist:
- outdir = os.path.abspath(args.outputFile).replace(os.path.split(args.outputFile)[-1],'')
- os.makedirs(outdir, exist_ok=True)
+ outdir = os.path.abspath(args.outputFile).replace( # noqa: PTH100
+ os.path.split(args.outputFile)[-1], ''
+ )
+ os.makedirs(outdir, exist_ok=True) # noqa: PTH103
# Run BRAILS FootprintHandler with the user-defined arguments:
runBrails(
- args.latMin, args.latMax, args.longMin, args.longMax, args.location,
- args.fpSource, args.fpSourceAttrMap, args.outputFile, args.lengthUnit)
+ args.latMin,
+ args.latMax,
+ args.longMin,
+ args.longMax,
+ args.location,
+ args.fpSource,
+ args.fpSourceAttrMap,
+ args.outputFile,
+ args.lengthUnit,
+ )
log_msg('BRAILS successfully obtained the footprints for the entered location')
-
+
+
# Run main:
if __name__ == '__main__':
- main(sys.argv[1:])
\ No newline at end of file
+ main(sys.argv[1:])
diff --git a/modules/tools/BRAILS/getBRAILSLocationBoundary.py b/modules/tools/BRAILS/getBRAILSLocationBoundary.py
index 31ff9ffa3..2c235d6b5 100644
--- a/modules/tools/BRAILS/getBRAILSLocationBoundary.py
+++ b/modules/tools/BRAILS/getBRAILSLocationBoundary.py
@@ -1,5 +1,4 @@
-# -*- coding: utf-8 -*-
-#
+# # noqa: INP001, D100
# Copyright (c) 2024 The Regents of the University of California
#
# This file is a part of SimCenter backend applications.
@@ -37,35 +36,40 @@
# Barbaros Cetiner
#
# Last updated:
-# 03-27-2024
-
+# 03-27-2024
+
# Import packages required for running the latest version of BRAILS:
-import sys
import argparse
import os
+import sys
from time import gmtime, strftime
-from brails.workflow.FootprintHandler import FootprintHandler
+
+from brails.workflow.FootprintHandler import FootprintHandler
+
# Define a standard way of printing program outputs:
-def log_msg(msg):
+def log_msg(msg): # noqa: D103
formatted_msg = '{} {}'.format(strftime('%Y-%m-%dT%H:%M:%SZ', gmtime()), msg)
- print(formatted_msg)
+ print(formatted_msg) # noqa: T201
+
# Define a way to call BRAILS FootprintHandler:
-def runBrails(latMin, latMax, longMin, longMax, locationStr, outputfile):
+def runBrails(latMin, latMax, longMin, longMax, locationStr, outputfile): # noqa: N802, N803, D103
# Initialize FootprintHandler:
- fpHandler = FootprintHandler()
- if locationStr == "\"\"":
- locationStr = ""
+ fpHandler = FootprintHandler() # noqa: N806
+ if locationStr == '""':
+ locationStr = '' # noqa: N806
# Run FootprintHandler to generate the boundary GeoJSON file for the entered location:
- if locationStr=="":
- fpHandler._FootprintHandler__bbox2poly((longMin,latMin,longMax,latMax),
- outfile = outputfile)
+ if locationStr == '': # noqa: PLC1901
+ fpHandler._FootprintHandler__bbox2poly( # noqa: SLF001
+ (longMin, latMin, longMax, latMax), outfile=outputfile
+ )
else:
- fpHandler._FootprintHandler__fetch_roi(locationStr, outfile = outputfile)
+ fpHandler._FootprintHandler__fetch_roi(locationStr, outfile=outputfile) # noqa: SLF001
+
# Define a way to collect GUI input:
-def main(args):
+def main(args): # noqa: D103
parser = argparse.ArgumentParser()
parser.add_argument('--latMin', default=None, type=float)
parser.add_argument('--latMax', default=None, type=float)
@@ -73,19 +77,28 @@ def main(args):
parser.add_argument('--longMax', default=None, type=float)
parser.add_argument('--location', default=None, type=str)
parser.add_argument('--outputFile', default=None)
-
+
args = parser.parse_args(args)
# Create the folder for the user-defined output directory, if it does not exist:
- outdir = os.path.abspath(args.outputFile).replace(os.path.split(args.outputFile)[-1],'')
- os.makedirs(outdir, exist_ok=True)
+ outdir = os.path.abspath(args.outputFile).replace( # noqa: PTH100
+ os.path.split(args.outputFile)[-1], ''
+ )
+ os.makedirs(outdir, exist_ok=True) # noqa: PTH103
# Run BRAILS FootprintHandler with the user-defined arguments:
runBrails(
- args.latMin, args.latMax, args.longMin, args.longMax, args.location, args.outputFile)
+ args.latMin,
+ args.latMax,
+ args.longMin,
+ args.longMax,
+ args.location,
+ args.outputFile,
+ )
log_msg('BRAILS successfully generated the requested boundary polygon')
-
+
+
# Run main:
if __name__ == '__main__':
- main(sys.argv[1:])
\ No newline at end of file
+ main(sys.argv[1:])
diff --git a/modules/tools/BRAILS/runBrails.py b/modules/tools/BRAILS/runBrails.py
index ff27ab478..c1f5e1206 100644
--- a/modules/tools/BRAILS/runBrails.py
+++ b/modules/tools/BRAILS/runBrails.py
@@ -1,5 +1,4 @@
-# -*- coding: utf-8 -*-
-#
+# # noqa: INP001, D100
# Copyright (c) 2023 The Regents of the University of California
#
# This file is a part of SimCenter backend applications.
@@ -37,110 +36,147 @@
# Barbaros Cetiner
#
# Last updated:
-# 04-01-2024
+# 04-01-2024
# Import packages required for running the latest version of BRAILS:
import argparse
import os
-from time import gmtime, strftime
import sys
-from brails.InventoryGenerator import InventoryGenerator
+from time import gmtime, strftime
+
+from brails.InventoryGenerator import InventoryGenerator
+
# Define a standard way of printing program outputs:
-def log_msg(msg):
+def log_msg(msg): # noqa: D103
formatted_msg = '{} {}'.format(strftime('%Y-%m-%dT%H:%M:%SZ', gmtime()), msg)
- print(formatted_msg)
+ print(formatted_msg) # noqa: T201
+
# Define a way to call BRAILS InventoryGenerator:
-def runBrails(latMin, latMax, longMin, longMax, locationStr, lengthUnit,
- fpSource, fpAttrMap, invInput, invAttributeMap, attrRequested,
- outputFile, seed, numBuildings, getAllBuildings, gKey):
-
+def runBrails( # noqa: D103, N802, PLR0913, PLR0917
+ latMin, # noqa: N803
+ latMax, # noqa: N803
+ longMin, # noqa: N803
+ longMax, # noqa: N803
+ locationStr, # noqa: N803
+ lengthUnit, # noqa: N803
+ fpSource, # noqa: N803
+ fpAttrMap, # noqa: N803
+ invInput, # noqa: N803
+ invAttributeMap, # noqa: N803
+ attrRequested, # noqa: N803
+ outputFile, # noqa: N803
+ seed,
+ numBuildings, # noqa: N803
+ getAllBuildings, # noqa: N803
+ gKey, # noqa: N803
+):
# Format location input based on the GUI input:
- if locationStr == "\"\"":
- locationStr = ""
+ if locationStr == '""':
+ locationStr = '' # noqa: N806
if 'geojson' in fpSource.lower():
- locationInp = fpSource
- fpSource = 'osm'
- elif locationStr=="":
- locationInp = (longMin,latMin,longMax,latMax)
+ locationInp = fpSource # noqa: N806
+ fpSource = 'osm' # noqa: N806
+ elif locationStr == '': # noqa: PLC1901
+ locationInp = (longMin, latMin, longMax, latMax) # noqa: N806
else:
- locationInp = locationStr
-
+ locationInp = locationStr # noqa: N806
+
# Parse baseline inventory input from GUI collected values:
- if invInput=='None':
- baselineInvInp = ''
- elif invInput=='NSI':
- baselineInvInp = 'nsi'
+ if invInput == 'None':
+ baselineInvInp = '' # noqa: N806
+ elif invInput == 'NSI':
+ baselineInvInp = 'nsi' # noqa: N806
else:
- baselineInvInp = invInput
-
- # Get attribute map input by processing the GUI input:
+ baselineInvInp = invInput # noqa: N806
+
+ # Get attribute map input by processing the GUI input:
if baselineInvInp and invAttributeMap:
- attrmapInp = invAttributeMap
+ attrmapInp = invAttributeMap # noqa: N806
+ elif fpAttrMap:
+ attrmapInp = fpAttrMap # noqa: N806
else:
- if fpAttrMap:
- attrmapInp = fpAttrMap
- else:
- attrmapInp = ""
-
+ attrmapInp = '' # noqa: N806
+
# Format number of buildings and requested attributes inputs by parsing the
# GUI input:
if getAllBuildings:
- numBuildings = 'all'
+ numBuildings = 'all' # noqa: N806
- if attrRequested not in ['all','hazuseq']:
- attrRequested = attrRequested.split(',')
+ if attrRequested not in ['all', 'hazuseq']: # noqa: PLR6201
+ attrRequested = attrRequested.split(',') # noqa: N806
# Initialize InventoryGenerator:
- invGenerator = InventoryGenerator(location=locationInp,
- fpSource=fpSource,
- baselineInv=baselineInvInp,
- attrmap=attrmapInp,
- lengthUnit=lengthUnit)
+ invGenerator = InventoryGenerator( # noqa: N806
+ location=locationInp,
+ fpSource=fpSource,
+ baselineInv=baselineInvInp,
+ attrmap=attrmapInp,
+ lengthUnit=lengthUnit,
+ )
# Run InventoryGenerator to generate an inventory for the entered location:
- invGenerator.generate(attributes=attrRequested,
- GoogleAPIKey=gKey,
- nbldgs=numBuildings,
- outputFile=outputFile,
- randomSelection=seed)
+ invGenerator.generate(
+ attributes=attrRequested,
+ GoogleAPIKey=gKey,
+ nbldgs=numBuildings,
+ outputFile=outputFile,
+ randomSelection=seed,
+ )
+
# Define a way to collect GUI input:
-def main(args):
+def main(args): # noqa: D103
parser = argparse.ArgumentParser()
parser.add_argument('--latMin', default=None, type=float)
parser.add_argument('--latMax', default=None, type=float)
parser.add_argument('--longMin', default=None, type=float)
parser.add_argument('--longMax', default=None, type=float)
parser.add_argument('--location', default=None, type=str)
- parser.add_argument('--lengthUnit', default="m", type=str)
+ parser.add_argument('--lengthUnit', default='m', type=str)
parser.add_argument('--fpSource', default=None, type=str)
parser.add_argument('--fpAttrMap', default=None, type=str)
parser.add_argument('--invInput', default=None, type=str)
parser.add_argument('--invAttributeMap', default=None, type=str)
- parser.add_argument('--attrRequested', default=None, type=str)
+ parser.add_argument('--attrRequested', default=None, type=str)
parser.add_argument('--outputFile', default=None, type=str)
parser.add_argument('--seed', default=None, type=int)
- parser.add_argument('--numBuildings', default=None, type=int)
- parser.add_argument('--getAllBuildings', default=None, type=int)
+ parser.add_argument('--numBuildings', default=None, type=int)
+ parser.add_argument('--getAllBuildings', default=None, type=int)
parser.add_argument('--googKey', default=None, type=str)
args = parser.parse_args(args)
# Create the folder for the user-defined output directory, if it does not exist:
- outdir = os.path.abspath(args.outputFile).replace(os.path.split(args.outputFile)[-1],'')
- os.makedirs(outdir, exist_ok=True)
+ outdir = os.path.abspath(args.outputFile).replace( # noqa: PTH100
+ os.path.split(args.outputFile)[-1], ''
+ )
+ os.makedirs(outdir, exist_ok=True) # noqa: PTH103
# Run BRAILS InventoryGenerator with the user-defined arguments:
runBrails(
- args.latMin, args.latMax, args.longMin, args.longMax, args.location,
- args.lengthUnit, args.fpSource, args.fpAttrMap, args.invInput, args.invAttributeMap,
- args.attrRequested, args.outputFile, args.seed, args.numBuildings,
- args.getAllBuildings, args.googKey)
+ args.latMin,
+ args.latMax,
+ args.longMin,
+ args.longMax,
+ args.location,
+ args.lengthUnit,
+ args.fpSource,
+ args.fpAttrMap,
+ args.invInput,
+ args.invAttributeMap,
+ args.attrRequested,
+ args.outputFile,
+ args.seed,
+ args.numBuildings,
+ args.getAllBuildings,
+ args.googKey,
+ )
log_msg('BRAILS successfully generated the requested building inventory')
-
+
+
# Run main:
if __name__ == '__main__':
- main(sys.argv[1:])
\ No newline at end of file
+ main(sys.argv[1:])
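An analogous invocation sketch (not part of the diff) for runBrails.py, again with placeholder values; note that --attrRequested is a comma-separated string unless it is 'all' or 'hazuseq', and --getAllBuildings makes the generator ignore --numBuildings. A valid Google API key is required for the image-based attributes, so the key below is a placeholder.

    from runBrails import main  # assumed import of the script above

    main([
        '--latMin', '37.86', '--latMax', '37.88',    # placeholder bounding box
        '--longMin', '-122.30', '--longMax', '-122.28',
        '--location', '""',
        '--lengthUnit', 'm',
        '--fpSource', 'osm',
        '--fpAttrMap', '',
        '--invInput', 'NSI',
        '--invAttributeMap', '',
        '--attrRequested', 'roofshape,numstories',   # placeholder comma-separated attributes
        '--outputFile', 'inventory.geojson',         # placeholder output path
        '--seed', '50',
        '--numBuildings', '100',
        '--getAllBuildings', '0',
        '--googKey', 'YOUR_GOOGLE_API_KEY',          # placeholder key
    ])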
diff --git a/modules/tools/BRAILS/runBrailsTransp.py b/modules/tools/BRAILS/runBrailsTransp.py
index d3996a85c..9c038b5a8 100644
--- a/modules/tools/BRAILS/runBrailsTransp.py
+++ b/modules/tools/BRAILS/runBrailsTransp.py
@@ -1,96 +1,125 @@
-# Import packages needed for setting up required packages:
-import sys
-import subprocess
+# Import packages needed for setting up required packages: # noqa: CPY001, D100, INP001
import importlib.metadata
+import subprocess # noqa: S404
+import sys
# If not installed, install BRAILS, argparse, and requests:
required = {'BRAILS', 'argparse', 'requests'}
installed = set()
for x in importlib.metadata.distributions():
- try:
+ try: # noqa: SIM105
installed.add(x.name)
- except:
+ except: # noqa: S110, PERF203, E722
pass
missing = required - installed
python = sys.executable
if missing:
- print('\nInstalling packages required for running this widget...')
- subprocess.check_call([python, '-m', 'pip', 'install', *missing],
- stdout=subprocess.DEVNULL)
- print('Successfully installed the required packages')
+ print('\nInstalling packages required for running this widget...') # noqa: T201
+ subprocess.check_call( # noqa: S603
+ [python, '-m', 'pip', 'install', *missing], stdout=subprocess.DEVNULL
+ )
+ print('Successfully installed the required packages') # noqa: T201
# If BRAILS was previously installed ensure it is the latest version:
-import requests
-latestBrailsVersion = requests.get('https://pypi.org/pypi/BRAILS/json').json()['info']['version']
-if importlib.metadata.version('BRAILS')!=latestBrailsVersion:
- print('\nAn older version of BRAILS was detected. Updating to the latest BRAILS version..')
- subprocess.check_call([python, '-m', 'pip', 'install', 'BRAILS','-U'],
- stdout=subprocess.DEVNULL)
- print('Successfully installed the latest version of BRAILS')
+import requests # noqa: E402
+
+latestBrailsVersion = requests.get('https://pypi.org/pypi/BRAILS/json').json()[ # noqa: S113, N816
+ 'info'
+]['version']
+if importlib.metadata.version('BRAILS') != latestBrailsVersion:
+ print( # noqa: T201
+ '\nAn older version of BRAILS was detected. Updating to the latest BRAILS version...'
+ )
+ subprocess.check_call( # noqa: S603
+ [python, '-m', 'pip', 'install', 'BRAILS', '-U'], stdout=subprocess.DEVNULL
+ )
+ print('Successfully installed the latest version of BRAILS') # noqa: T201
# Import packages required for running the latest version of BRAILS:
-import argparse
-import os
-from time import gmtime, strftime
-from brails.TranspInventoryGenerator import TranspInventoryGenerator
+import argparse # noqa: E402
+import os # noqa: E402
+from time import gmtime, strftime # noqa: E402
+
+from brails.TranspInventoryGenerator import TranspInventoryGenerator # noqa: E402
-def str2bool(v):
+
+def str2bool(v): # noqa: D103
# courtesy of Maxim @ stackoverflow
if isinstance(v, bool):
- return v
- if v.lower() in ('yes', 'true', 'True', 't', 'y', '1'):
+ return v
+ if v.lower() in ('yes', 'true', 'True', 't', 'y', '1'): # noqa: PLR6201
return True
- elif v.lower() in ('no', 'false', 'False', 'f', 'n', '0'):
+ elif v.lower() in ('no', 'false', 'False', 'f', 'n', '0'): # noqa: PLR6201, RET505
return False
else:
- raise argparse.ArgumentTypeError('Boolean value expected.')
-
+ raise argparse.ArgumentTypeError('Boolean value expected.') # noqa: EM101, TRY003
+
+
# Define a standard way of printing program outputs:
-def log_msg(msg):
+def log_msg(msg): # noqa: D103
formatted_msg = '{} {}'.format(strftime('%Y-%m-%dT%H:%M:%SZ', gmtime()), msg)
- print(formatted_msg)
+ print(formatted_msg) # noqa: T201
-# Define a way to call BRAILS TranspInventoryGenerator:
-def runBrails(latMin, latMax, longMin, longMax,
- minimumHAZUS, maxRoadLength, lengthUnit):
+# Define a way to call BRAILS TranspInventoryGenerator:
+def runBrails( # noqa: N802, D103
+ latMin, # noqa: N803
+ latMax, # noqa: N803
+ longMin, # noqa: N803
+ longMax, # noqa: N803
+ minimumHAZUS, # noqa: N803
+ maxRoadLength, # noqa: N803
+ lengthUnit, # noqa: N803
+):
# Initialize TranspInventoryGenerator:
- invGenerator = TranspInventoryGenerator(location=(longMin,latMin,longMax,latMax))
+ invGenerator = TranspInventoryGenerator( # noqa: N806
+ location=(longMin, latMin, longMax, latMax)
+ )
# Run TranspInventoryGenerator to generate an inventory for the entered location:
invGenerator.generate()
- #Combine and format the generated inventory to SimCenter transportation network inventory json format
- invGenerator.combineAndFormat_HWY(minimumHAZUS=minimumHAZUS,
- maxRoadLength=maxRoadLength,
- lengthUnit=lengthUnit)
+ # Combine and format the generated inventory into the SimCenter transportation network inventory JSON format
+ invGenerator.combineAndFormat_HWY(
+ minimumHAZUS=minimumHAZUS, maxRoadLength=maxRoadLength, lengthUnit=lengthUnit
+ )
-def main(args):
+
+def main(args): # noqa: D103
parser = argparse.ArgumentParser()
parser.add_argument('--latMin', default=None, type=float)
parser.add_argument('--latMax', default=None, type=float)
parser.add_argument('--longMin', default=None, type=float)
parser.add_argument('--longMax', default=None, type=float)
parser.add_argument('--outputFolder', default=None)
- parser.add_argument('--minimumHAZUS', default = True,
- type = str2bool, nargs='?', const=True)
- parser.add_argument('--maxRoadLength', default = 100, type=float)
- parser.add_argument('--lengthUnit', default="m", type=str)
-
+ parser.add_argument(
+ '--minimumHAZUS', default=True, type=str2bool, nargs='?', const=True
+ )
+ parser.add_argument('--maxRoadLength', default=100, type=float)
+ parser.add_argument('--lengthUnit', default='m', type=str)
+
args = parser.parse_args(args)
-
+
# Change the current directory to the user-defined output folder:
- os.makedirs(args.outputFolder, exist_ok=True)
+ os.makedirs(args.outputFolder, exist_ok=True) # noqa: PTH103
os.chdir(args.outputFolder)
-
+
# Run BRAILS TranspInventoryGenerator with the user-defined arguments:
- runBrails(args.latMin, args.latMax, args.longMin, args.longMax,
- args.minimumHAZUS, args.maxRoadLength, args.lengthUnit)
+ runBrails(
+ args.latMin,
+ args.latMax,
+ args.longMin,
+ args.longMax,
+ args.minimumHAZUS,
+ args.maxRoadLength,
+ args.lengthUnit,
+ )
log_msg('BRAILS successfully generated the requested transportation inventory')
+
# Run main:
if __name__ == '__main__':
- main(sys.argv[1:])
+ main(sys.argv[1:])
diff --git a/pyproject.toml b/pyproject.toml
new file mode 100644
index 000000000..131a9b964
--- /dev/null
+++ b/pyproject.toml
@@ -0,0 +1,26 @@
+[tool.ruff]
+line-length = 85
+
+[tool.ruff.lint]
+# Enable all known categories
+select = ["ALL"]
+ignore = ["ANN", "D211", "D212", "Q000", "Q003", "COM812", "D203", "ISC001", "E501", "ERA001", "PGH003", "FIX002", "TD003", "S101", "N801", "S311", "G004", "SIM102", "SIM108", "NPY002", "F401"]
+preview = true
+
+[tool.ruff.lint.pydocstyle]
+convention = "numpy"
+
+[tool.ruff.lint.pylint]
+max-args=15
+max-locals=50
+max-returns=11
+max-branches=50
+max-statements=150
+max-bool-expr=5
+
+[tool.ruff.format]
+quote-style = "single"
+
+[tool.codespell]
+ignore-words = ["ignore_words.txt"]
+skip = ["*.html", "NGAWest2.csv", "./applications/*"]
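A small sanity-check sketch (not part of the diff): loading the new pyproject.toml with the standard-library TOML parser (available in Python 3.11+) to confirm the ruff settings land where the tooling expects them; the printed values should match the configuration above.

    import tomllib  # standard library in Python 3.11+

    with open('pyproject.toml', 'rb') as f:
        cfg = tomllib.load(f)

    print(cfg['tool']['ruff']['line-length'])                       # 85
    print(cfg['tool']['ruff']['lint']['pydocstyle']['convention'])  # numpy
    print(cfg['tool']['ruff']['format']['quote-style'])             # single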